├── .gitignore
├── .idea
├── codeStyles
│ ├── Project.xml
│ └── codeStyleConfig.xml
├── gradle.xml
├── misc.xml
├── runConfigurations.xml
└── vcs.xml
├── README.md
├── app
├── .gitignore
├── build.gradle
├── proguard-rules.pro
└── src
│ ├── androidTest
│ └── java
│ │ └── org
│ │ └── unreal
│ │ └── face
│ │ └── recognition
│ │ └── ExampleInstrumentedTest.kt
│ ├── main
│ ├── AndroidManifest.xml
│ ├── assets
│ │ ├── 1.jpg
│ │ ├── 2.jpg
│ │ ├── 3.jpg
│ │ └── 4.jpg
│ ├── java
│ │ └── org
│ │ │ └── unreal
│ │ │ └── face
│ │ │ └── recognition
│ │ │ ├── activity
│ │ │ └── MainActivity.kt
│ │ │ └── camera
│ │ │ └── CameraUtils.kt
│ └── res
│ │ ├── drawable-v24
│ │ └── ic_launcher_foreground.xml
│ │ ├── drawable
│ │ └── ic_launcher_background.xml
│ │ ├── layout
│ │ └── activity_main.xml
│ │ ├── mipmap-anydpi-v26
│ │ ├── ic_launcher.xml
│ │ └── ic_launcher_round.xml
│ │ ├── mipmap-hdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_round.png
│ │ ├── mipmap-mdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_round.png
│ │ ├── mipmap-xhdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_round.png
│ │ ├── mipmap-xxhdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_round.png
│ │ ├── mipmap-xxxhdpi
│ │ ├── ic_launcher.png
│ │ └── ic_launcher_round.png
│ │ └── values
│ │ ├── colors.xml
│ │ ├── strings.xml
│ │ └── styles.xml
│ └── test
│ └── java
│ └── org
│ └── unreal
│ └── face
│ └── recognition
│ └── ExampleUnitTest.kt
├── build.gradle
├── facenet
├── .gitignore
├── build.gradle
├── proguard-rules.pro
└── src
│ ├── androidTest
│ └── java
│ │ └── org
│ │ └── unreal
│ │ └── face
│ │ └── facenet
│ │ └── ExampleInstrumentedTest.java
│ ├── main
│ ├── AndroidManifest.xml
│ ├── java
│ │ └── org
│ │ │ └── unreal
│ │ │ └── face
│ │ │ └── facenet
│ │ │ ├── BitmapUtils.kt
│ │ │ ├── FaceFeature.kt
│ │ │ └── FaceNet.kt
│ └── res
│ │ └── values
│ │ └── strings.xml
│ └── test
│ └── java
│ └── org
│ └── unreal
│ └── face
│ └── facenet
│ └── ExampleUnitTest.java
├── gradle.properties
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── mtcnn
├── .gitignore
├── build.gradle
├── proguard-rules.pro
└── src
│ ├── androidTest
│ └── java
│ │ └── org
│ │ └── unreal
│ │ └── face
│ │ └── mtcnn
│ │ └── ExampleInstrumentedTest.java
│ ├── main
│ ├── AndroidManifest.xml
│ ├── assets
│ │ └── mtcnn_freezed_model.pb
│ ├── java
│ │ └── org
│ │ │ └── unreal
│ │ │ └── face
│ │ │ └── mtcnn
│ │ │ ├── Box.kt
│ │ │ ├── MTCNN.kt
│ │ │ └── PicUtils.kt
│ └── res
│ │ └── values
│ │ └── strings.xml
│ └── test
│ └── java
│ └── org
│ └── unreal
│ └── face
│ └── mtcnn
│ └── ExampleUnitTest.java
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/caches
5 | /.idea/libraries
6 | /.idea/modules.xml
7 | /.idea/workspace.xml
8 | /.idea/navEditor.xml
9 | /.idea/assetWizardSettings.xml
10 | .DS_Store
11 | /build
12 | /captures
13 | .externalNativeBuild
14 |
--------------------------------------------------------------------------------
/.idea/codeStyles/Project.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/.idea/codeStyles/codeStyleConfig.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/.idea/gradle.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
19 |
20 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/.idea/runConfigurations.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # AndroidFaceNet
2 | ## 介绍
 3 | 主要是人脸识别的一个Demo:人脸检测使用的是MTCNN,人脸比对(识别)使用的是FaceNet。FaceNet使用的是2018年发布的模型,输入人脸尺寸为160*160。完成了一个一比一(1:1)人脸比对的Demo。
4 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
// Application module: the demo app wiring the :mtcnn and :facenet libraries
// together (see MainActivity).
apply plugin: 'com.android.application'

apply plugin: 'kotlin-android'

// NOTE(review): kotlin-android-extensions (synthetic view properties used by
// MainActivity) is deprecated in newer Kotlin releases; migrate to
// ViewBinding when upgrading the Kotlin plugin.
apply plugin: 'kotlin-android-extensions'

android {
    compileSdkVersion 28
    defaultConfig {
        applicationId "org.unreal.face.recognition"
        minSdkVersion 26
        targetSdkVersion 28
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
    }
    buildTypes {
        release {
            // Minification disabled; proguard config kept for later use.
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }
}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
    // NOTE(review): alpha versions of androidx/test artifacts below — pin to
    // stable releases when convenient.
    implementation 'androidx.appcompat:appcompat:1.1.0-alpha02'
    implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
    implementation project(":mtcnn")
    implementation project(":facenet")
    testImplementation 'junit:junit:4.12'
    androidTestImplementation 'androidx.test:runner:1.1.2-alpha01'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.2-alpha01'
}
36 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/app/src/androidTest/java/org/unreal/face/recognition/ExampleInstrumentedTest.kt:
--------------------------------------------------------------------------------
1 | package org.unreal.face.recognition
2 |
3 | import androidx.test.InstrumentationRegistry
4 | import androidx.test.runner.AndroidJUnit4
5 |
6 | import org.junit.Test
7 | import org.junit.runner.RunWith
8 |
9 | import org.junit.Assert.*
10 |
/**
 * Instrumented test, which will execute on an Android device.
 *
 * See [testing documentation](http://d.android.com/tools/testing).
 */
@RunWith(AndroidJUnit4::class)
class ExampleInstrumentedTest {
    /** Verifies the instrumentation targets the expected application id. */
    @Test
    fun useAppContext() =
        assertEquals(
            "org.unreal.face.recognition",
            InstrumentationRegistry.getTargetContext().packageName,
        )
}
25 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/app/src/main/assets/1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/app/src/main/assets/1.jpg
--------------------------------------------------------------------------------
/app/src/main/assets/2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/app/src/main/assets/2.jpg
--------------------------------------------------------------------------------
/app/src/main/assets/3.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/app/src/main/assets/3.jpg
--------------------------------------------------------------------------------
/app/src/main/assets/4.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/app/src/main/assets/4.jpg
--------------------------------------------------------------------------------
/app/src/main/java/org/unreal/face/recognition/activity/MainActivity.kt:
--------------------------------------------------------------------------------
1 | package org.unreal.face.recognition.activity
2 |
3 | import android.graphics.Bitmap
4 | import androidx.appcompat.app.AppCompatActivity
5 | import android.os.Bundle
6 | import android.util.Log
7 | import kotlinx.android.synthetic.main.activity_main.*
8 | import org.unreal.face.facenet.BitmapUtils
9 | import org.unreal.face.facenet.FaceNet
10 | import org.unreal.face.mtcnn.MTCNN
11 | import org.unreal.face.mtcnn.PicUtils
12 | import org.unreal.face.mtcnn.PicUtils.copyBitmap
13 | import org.unreal.face.mtcnn.PicUtils.drawPoints
14 | import org.unreal.face.mtcnn.PicUtils.drawRect
15 | import org.unreal.face.recognition.R
16 |
17 |
/**
 * Demo activity: loads two bundled images, detects and crops a face in each
 * with MTCNN, embeds both crops with FaceNet and compares the embeddings.
 */
class MainActivity : AppCompatActivity() {

    private lateinit var mtcnn: MTCNN
    private lateinit var faceNet: FaceNet

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        initFaceEngine()
        setContentView(R.layout.activity_main)

        // NOTE(review): model loading + inference run on the main thread here;
        // acceptable for a demo, but move off the UI thread in real code.
        val startTime = System.currentTimeMillis()
        val source = PicUtils.getBitmapFromAssets(assets, "1.jpg")
        val dist = PicUtils.getBitmapFromAssets(assets, "4.jpg")

        // Detect and crop the first face found in each image.
        // (The original code also showed the uncropped `dist` bitmap first and
        // immediately overwrote it below; that redundant call was removed.)
        val findFaceSource = mtcnn.cutFace(source, mtcnn.detectFaces(source, MIN_FACE_SIZE))
        imageView.setImageBitmap(findFaceSource[0])

        val findFaceDist = mtcnn.cutFace(dist, mtcnn.detectFaces(dist, MIN_FACE_SIZE))
        imageView2.setImageBitmap(findFaceDist[0])

        // Euclidean distance between the two embeddings; smaller = more similar.
        val compare = faceNet.recognizeImage(findFaceSource[0])
            .compare(faceNet.recognizeImage(findFaceDist[0]))
        val endTime = System.currentTimeMillis()
        Log.e(TAG, "compare is --->$compare")
        Log.e(TAG, "pass is --->${compare < SAME_PERSON_THRESHOLD}")
        Log.e(TAG, "run time is ${endTime - startTime} ms")
    }

    /** Loads the MTCNN detector and the FaceNet embedder from app assets. */
    private fun initFaceEngine() {
        mtcnn = MTCNN(assets)
        faceNet = FaceNet(assets)
    }

    companion object {
        private const val TAG = "MainActivity"

        // Minimum face size (px) handed to MTCNN detection; was hard-coded 40.
        private const val MIN_FACE_SIZE = 40

        // Distance threshold below which two embeddings count as the same
        // person; was hard-coded 0.8 — tune against your own data set.
        private const val SAME_PERSON_THRESHOLD = 0.8
    }
}
54 |
--------------------------------------------------------------------------------
/app/src/main/java/org/unreal/face/recognition/camera/CameraUtils.kt:
--------------------------------------------------------------------------------
1 | package org.unreal.face.recognition.camera
2 |
3 | import android.content.Context
4 | import android.view.TextureView
5 |
/**
 * Placeholder for camera preview plumbing.
 *
 * NOTE(review): unimplemented stub — [context] and [textureView] are accepted
 * but never used; nothing is wired up yet.
 */
class CameraUtils(context: Context, textureView: TextureView){

    init{
        // Intentionally empty: camera setup not implemented yet.
    }
}
--------------------------------------------------------------------------------
/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
1 |
7 |
12 |
13 |
19 |
22 |
25 |
26 |
27 |
28 |
34 |
35 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
10 |
12 |
14 |
16 |
18 |
20 |
22 |
24 |
26 |
28 |
30 |
32 |
34 |
36 |
38 |
40 |
42 |
44 |
46 |
48 |
50 |
52 |
54 |
56 |
58 |
60 |
62 |
64 |
66 |
68 |
70 |
72 |
74 |
75 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
17 |
25 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #008577
4 | #00574B
5 | #D81B60
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | recognition
3 |
4 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/app/src/test/java/org/unreal/face/recognition/ExampleUnitTest.kt:
--------------------------------------------------------------------------------
1 | package org.unreal.face.recognition
2 |
3 | import org.junit.Test
4 |
5 | import org.junit.Assert.*
6 |
/**
 * Example local unit test, which will execute on the development machine (host).
 *
 * See [testing documentation](http://d.android.com/tools/testing).
 */
class ExampleUnitTest {
    /** Sanity check that the unit-test harness runs at all. */
    @Test
    fun addition_isCorrect() = assertEquals(4, 2 + 2)
}
18 |
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {
    // Single source of truth for the Kotlin version used by all modules.
    ext.kotlin_version = '1.3.21'
    repositories {
        google()
        // NOTE(review): jcenter() has been sunset; switch to mavenCentral()
        // when upgrading the build.
        jcenter()

    }
    dependencies {
        classpath 'com.android.tools.build:gradle:3.3.2'
        classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

allprojects {
    repositories {
        google()
        jcenter()

    }
}

task clean(type: Delete) {
    delete rootProject.buildDir
}
29 |
--------------------------------------------------------------------------------
/facenet/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/facenet/build.gradle:
--------------------------------------------------------------------------------
// Library module: FaceNet embedding (TensorFlow-Android based).
apply plugin: 'com.android.library'
apply plugin: 'kotlin-android-extensions'
apply plugin: 'kotlin-android'

android {
    compileSdkVersion 28



    defaultConfig {
        minSdkVersion 15
        targetSdkVersion 28
        versionCode 1
        versionName "1.0"

        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"

    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }

}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])

    // NOTE(review): this module still uses the legacy android.support
    // libraries while the app module uses androidx; the build works because
    // gradle.properties enables android.enableJetifier. Consider migrating
    // this module to androidx to drop jetifier.
    implementation 'com.android.support:appcompat-v7:28.0.0'
    testImplementation 'junit:junit:4.12'
    // Provides TensorFlowInferenceInterface used by FaceNet.kt.
    implementation 'org.tensorflow:tensorflow-android:1.13.1'
    androidTestImplementation 'com.android.support.test:runner:1.0.2'
    androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
    implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
}
repositories {
    mavenCentral()
}
42 |
--------------------------------------------------------------------------------
/facenet/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/facenet/src/androidTest/java/org/unreal/face/facenet/ExampleInstrumentedTest.java:
--------------------------------------------------------------------------------
1 | package org.unreal.face.facenet;
2 |
3 | import android.content.Context;
4 | import android.support.test.InstrumentationRegistry;
5 | import android.support.test.runner.AndroidJUnit4;
6 |
7 | import org.junit.Test;
8 | import org.junit.runner.RunWith;
9 |
10 | import static org.junit.Assert.*;
11 |
/**
 * Instrumented test, which will execute on an Android device.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
    @Test
    public void useAppContext() {
        // Context of the app under test. For a library module the target of
        // instrumentation is the generated test APK itself, hence the ".test"
        // suffix on the expected package name below.
        Context appContext = InstrumentationRegistry.getTargetContext();

        assertEquals("org.unreal.face.facenet.test", appContext.getPackageName());
    }
}
27 |
--------------------------------------------------------------------------------
/facenet/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
3 |
--------------------------------------------------------------------------------
/facenet/src/main/java/org/unreal/face/facenet/BitmapUtils.kt:
--------------------------------------------------------------------------------
1 | package org.unreal.face.facenet
2 |
3 | import android.graphics.Bitmap
4 | import android.graphics.Matrix
5 | import android.graphics.Rect
6 | import android.util.Log
7 | import kotlin.math.max
8 | import kotlin.math.min
9 |
10 |
/** Bitmap helpers used by the FaceNet module. */
object BitmapUtils {

    /**
     * Scales [bitmap] to an [inputSize] x [inputSize] square, as expected by
     * the FaceNet input layer.
     *
     * Aspect ratio is NOT preserved: width and height are scaled
     * independently, so non-square inputs are distorted.
     */
    fun scaleImage(inputSize: Long, bitmap: Bitmap): Bitmap {
        // (Debug `println`s of the scale factors were removed.)
        val scaleWidth = inputSize.toFloat() / bitmap.width
        val scaleHeight = inputSize.toFloat() / bitmap.height
        val matrix = Matrix().apply { postScale(scaleWidth, scaleHeight) }
        return createNewBitmap(bitmap, matrix)
    }

    /** Applies [matrix] to the whole of [source], producing a new bitmap. */
    private fun createNewBitmap(source: Bitmap, matrix: Matrix): Bitmap =
        Bitmap.createBitmap(source, 0, 0, source.width, source.height, matrix, true)
}
--------------------------------------------------------------------------------
/facenet/src/main/java/org/unreal/face/facenet/FaceFeature.kt:
--------------------------------------------------------------------------------
1 | package org.unreal.face.facenet
2 |
/**
 * A FaceNet face embedding: a fixed-size vector of [DIMS] floats.
 *
 * A freshly constructed feature is all zeros until the model writes into
 * [feature] (see `FaceNet.recognizeImage`, which fetches directly into it).
 */
class FaceFeature internal constructor() {

    /** The raw embedding vector, filled in place by the model. */
    val feature = FloatArray(DIMS)

    /**
     * Euclidean (L2) distance between this feature and [ff].
     * Smaller values mean more similar faces; 0.0 means identical vectors.
     */
    fun compare(ff: FaceFeature): Double {
        var dist = 0.0
        for (i in 0 until DIMS) {
            val d = (feature[i] - ff.feature[i]).toDouble()
            dist += d * d
        }
        return kotlin.math.sqrt(dist)
    }

    companion object {
        /** Embedding dimensionality of the 2018 FaceNet model (512-D). */
        const val DIMS = 512
    }
}
--------------------------------------------------------------------------------
/facenet/src/main/java/org/unreal/face/facenet/FaceNet.kt:
--------------------------------------------------------------------------------
1 | package org.unreal.face.facenet
2 |
3 | import android.content.res.AssetManager
4 | import android.graphics.Bitmap
5 | import android.graphics.Matrix
6 | import android.util.Log
7 | import org.tensorflow.contrib.android.TensorFlowInferenceInterface
8 |
/**
 * Wraps the frozen FaceNet TensorFlow graph and turns a face-crop bitmap
 * into a 512-D embedding ([FaceFeature]).
 *
 * Not thread-safe: [floatValues] and [intValues] are shared scratch buffers
 * overwritten on every call to [recognizeImage].
 */
class FaceNet(assetManager: AssetManager){

    // Frozen FaceNet graph shipped in the module assets (2018-04-08 model).
    private val modelFile = "facenet-20180408-102900.pb"
    private val inputNode = "input:0"
    private val outputNode = "embeddings:0"
    // Placeholder switching the graph between training and inference mode.
    private val phaseName = "phase_train:0"

    private val TAG = "FaceNet"

    // NOTE(review): the whole model loads in this property initializer, i.e.
    // at construction time — expensive; construct FaceNet off the UI thread.
    private val tensorFlowInference = TensorFlowInferenceInterface(assetManager , modelFile)

    // Input side length in pixels expected by the graph (160x160).
    private val inputSize = 160L

    // Reused scratch buffers: 3 floats per pixel (RGB) and 1 packed int
    // per pixel — the reason this class is not thread-safe.
    private val floatValues = FloatArray((inputSize * inputSize * 3).toInt())
    private val intValues = IntArray((inputSize * inputSize).toInt())

    private val outputNames = arrayOf(outputNode)



    // Scales the bitmap to inputSize x inputSize and writes its pixels into
    // floatValues, normalized to roughly [-1, 1]: (channel - 127.5) / 128.
    private fun normalizeImage(bitmap: Bitmap){
        val scaleImage = BitmapUtils.scaleImage(inputSize , bitmap)
        val imageMean = 127.5f
        val imageStd = 128
        scaleImage?.getPixels(intValues ,
            0,
            scaleImage.width ,
            0,
            0,
            scaleImage.width ,
            scaleImage.height)
        for (i in 0 until intValues.size ){
            val intVar = intValues[i]
            // Unpack ARGB int: red, green, blue channels in that order.
            floatValues[i * 3 + 0] = ((intVar shr 16 and 0xFF) - imageMean) / imageStd
            floatValues[i * 3 + 1] = ((intVar shr 8 and 0xFF) - imageMean) / imageStd
            floatValues[i * 3 + 2] = ((intVar and 0xFF) - imageMean) / imageStd
        }
    }

    /**
     * Runs the FaceNet graph on [bitmap] (a face crop) and returns its
     * embedding.
     *
     * NOTE(review): every TensorFlow step below catches Exception broadly and
     * only logs it, so on failure this silently returns an all-zero
     * [FaceFeature]; consider propagating the error instead.
     */
    fun recognizeImage(bitmap: Bitmap): FaceFeature {
        // (0) pre-process: scale + normalize the image into floatValues
        normalizeImage(bitmap)
        // (1) feed the input tensor (1 x 160 x 160 x 3) and inference flag
        try {
            tensorFlowInference.feed(inputNode, floatValues, 1L, inputSize, inputSize, 3L)
            val phase = BooleanArray(1)
            phase[0] = false
            tensorFlowInference.feed(phaseName, phase)
        } catch (e: Exception) {
            Log.e(TAG, "[*] feed Error\n$e")
        }

        // (2) run inference up to the embeddings node
        try {
            tensorFlowInference.run(outputNames, false)
        } catch (e: Exception) {
            Log.e(TAG, "[*] run error\n$e")
        }

        // (3) fetch the 512-D embedding directly into the feature array
        val faceFeature = FaceFeature()
        val outputs = faceFeature.feature
        try {
            tensorFlowInference.fetch(outputNode, outputs)
        } catch (e: Exception) {
            Log.e(TAG, "[*] fetch error\n$e")
        }

        return faceFeature
    }
}
--------------------------------------------------------------------------------
/facenet/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | facenet
3 |
4 |
--------------------------------------------------------------------------------
/facenet/src/test/java/org/unreal/face/facenet/ExampleUnitTest.java:
--------------------------------------------------------------------------------
1 | package org.unreal.face.facenet;
2 |
3 | import org.junit.Test;
4 |
5 | import static org.junit.Assert.*;
6 |
/**
 * Example local unit test, which will execute on the development machine (host).
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleUnitTest {
    // Sanity check that the JVM unit-test harness runs at all.
    @Test
    public void addition_isCorrect() {
        assertEquals(4, 2 + 2);
    }
}
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx1536m
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | # Kotlin code style for this project: "official" or "obsolete":
15 | kotlin.code.style=official
16 | android.useAndroidX=true
17 | android.enableJetifier=true
18 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Mon Mar 04 17:49:26 CST 2019
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-4.10.1-all.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
#!/usr/bin/env sh

##############################################################################
##
## Gradle start up script for UN*X
##
## NOTE: generated by Gradle's 'wrapper' task; regenerate instead of
## editing by hand.
##
##############################################################################

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

warn () {
    echo "$*"
}

die () {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
  NONSTOP* )
    nonstop=true
    ;;
esac

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
    JAVACMD=`cygpath --unix "$JAVACMD"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option

        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Escape application args
save () {
    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
    echo " "
}
APP_ARGS=$(save "$@")

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
  cd "$(dirname "$0")"
fi

exec "$JAVACMD" "$@"
173 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 |
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS=
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 |
53 | :win9xME_args
54 | @rem Slurp the command line arguments.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 |
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 |
61 | set CMD_LINE_ARGS=%*
62 |
63 | :execute
64 | @rem Setup the command line
65 |
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 |
68 | @rem Execute Gradle
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 |
71 | :end
72 | @rem End local scope for the variables with windows NT shell
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 |
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 |
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 |
84 | :omega
85 |
--------------------------------------------------------------------------------
/mtcnn/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/mtcnn/build.gradle:
--------------------------------------------------------------------------------
// MTCNN face-detection library module.
apply plugin: 'com.android.library'
apply plugin: 'kotlin-android'
apply plugin: 'kotlin-android-extensions'

android {
    compileSdkVersion 28

    defaultConfig {
        minSdkVersion 15
        targetSdkVersion 28
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }
}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
    implementation 'com.android.support:appcompat-v7:28.0.0'
    // TensorFlow Mobile runtime used to run the frozen MTCNN graph.
    implementation 'org.tensorflow:tensorflow-android:1.13.1'
    testImplementation 'junit:junit:4.12'
    androidTestImplementation 'com.android.support.test:runner:1.0.2'
    androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
}
40 |
--------------------------------------------------------------------------------
/mtcnn/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/mtcnn/src/androidTest/java/org/unreal/face/mtcnn/ExampleInstrumentedTest.java:
--------------------------------------------------------------------------------
1 | package org.unreal.face.mtcnn;
2 |
3 | import android.content.Context;
4 | import android.support.test.InstrumentationRegistry;
5 | import android.support.test.runner.AndroidJUnit4;
6 |
7 | import org.junit.Test;
8 | import org.junit.runner.RunWith;
9 |
10 | import static org.junit.Assert.*;
11 |
12 | /**
13 | * Instrumented test, which will execute on an Android device.
14 | *
15 | * @see Testing documentation
16 | */
17 | @RunWith(AndroidJUnit4.class)
18 | public class ExampleInstrumentedTest {
19 | @Test
20 | public void useAppContext() {
21 | // Context of the app under test.
22 | Context appContext = InstrumentationRegistry.getTargetContext();
23 |
24 | assertEquals("org.unreal.face.mtcnn.test", appContext.getPackageName());
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/mtcnn/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="utf-8"?>
<manifest package="org.unreal.face.mtcnn" />
--------------------------------------------------------------------------------
/mtcnn/src/main/assets/mtcnn_freezed_model.pb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Lincoln-cn/AndroidFaceNet/f82725c8ffb9188fac22f754e3390058bd9b3fe7/mtcnn/src/main/assets/mtcnn_freezed_model.pb
--------------------------------------------------------------------------------
/mtcnn/src/main/java/org/unreal/face/mtcnn/Box.kt:
--------------------------------------------------------------------------------
1 | package org.unreal.face.mtcnn
2 |
3 | import android.graphics.Point
4 | import android.graphics.Rect
5 | import kotlin.math.max
6 |
7 |
/**
 * A candidate face bounding box produced by the MTCNN cascade, together with
 * its score, regression offsets, and (after O-Net) facial landmarks.
 */
class Box {
    // box[0]=left, box[1]=top, box[2]=right, box[3]=bottom
    var box: IntArray = intArrayOf(0, 0, 0, 0)
    // Face probability from the last network stage that scored this box.
    var score: Float = 0.toFloat()
    // Bounding-box regression offsets (dx1, dy1, dx2, dy2), applied by calibrate().
    var bbr: FloatArray = floatArrayOf(0f, 0f, 0f, 0f)
    // Marked true by NMS / thresholding; filtered out by PicUtils.updateBoxes().
    var deleted: Boolean = false
    // Five facial landmarks; only the O-Net stage fills these in.
    // (Generic parameter restored: the extracted source had lost `<Point?>`.)
    var landmark: Array<Point?> = arrayOfNulls(5)

    fun left(): Int = box[0]

    fun right(): Int = box[2]

    fun top(): Int = box[1]

    fun bottom(): Int = box[3]

    // Width/height are inclusive pixel spans, hence the +1.
    fun width(): Int = box[2] - box[0] + 1

    fun height(): Int = box[3] - box[1] + 1

    /** Converts this box to an [android.graphics.Rect]. */
    fun transform2Rect(): Rect {
        // Coordinates are already ints; the original float round-trip was a no-op.
        return Rect(box[0], box[1], box[2], box[3])
    }

    /** Box area in pixels. */
    fun area(): Int = width() * height()

    /** Applies the bounding-box regression offsets, then clears them. */
    fun calibrate() {
        val w = box[2] - box[0] + 1
        val h = box[3] - box[1] + 1
        box[0] = (box[0] + w * bbr[0]).toInt()
        box[1] = (box[1] + h * bbr[1]).toInt()
        box[2] = (box[2] + w * bbr[2]).toInt()
        box[3] = (box[3] + h * bbr[3]).toInt()
        for (i in 0..3) bbr[i] = 0.0f
    }

    /** Expands the shorter side so the box becomes square (centre preserved). */
    fun toSquareShape() {
        val w = width()
        val h = height()
        if (w > h) {
            box[1] -= (w - h) / 2
            box[3] += (w - h + 1) / 2
        } else {
            box[0] -= (h - w) / 2
            box[2] += (h - w + 1) / 2
        }
    }

    /**
     * Shifts the box back inside a w x h image while keeping its size.
     * NOTE(review): shifts both axes by the same amount even when only one side
     * overflows — preserved as-is from the original implementation.
     */
    fun limitSquare(w: Int, h: Int) {
        if (box[0] < 0 || box[1] < 0) {
            val len = max(-box[0], -box[1])
            box[0] += len
            box[1] += len
        }
        if (box[2] >= w || box[3] >= h) {
            val len = max(box[2] - w + 1, box[3] - h + 1)
            box[2] -= len
            box[3] -= len
        }
    }

    /** Alternative clamp: shrinks to fit, then shifts each axis independently. */
    fun limitSquare2(w: Int, h: Int) {
        if (width() > w) box[2] -= width() - w
        if (height() > h) box[3] -= height() - h
        if (box[0] < 0) {
            val sz = -box[0]
            box[0] += sz
            box[2] += sz
        }
        if (box[1] < 0) {
            val sz = -box[1]
            box[1] += sz
            box[3] += sz
        }
        if (box[2] >= w) {
            val sz = box[2] - w + 1
            box[2] -= sz
            box[0] -= sz
        }
        if (box[3] >= h) {
            val sz = box[3] - h + 1
            box[3] -= sz
            box[1] -= sz
        }
    }
}
--------------------------------------------------------------------------------
/mtcnn/src/main/java/org/unreal/face/mtcnn/MTCNN.kt:
--------------------------------------------------------------------------------
1 | package org.unreal.face.mtcnn
2 |
3 | import org.tensorflow.contrib.android.TensorFlowInferenceInterface
4 | import android.content.res.AssetManager
5 | import android.graphics.Bitmap
6 | import android.graphics.Matrix
7 | import android.graphics.Point
8 | import android.util.Log
9 | import java.util.*
10 | import kotlin.math.max
11 | import kotlin.math.min
12 |
13 |
/**
 * MTCNN face detector: a three-stage cascade (P-Net -> R-Net -> O-Net) run on a
 * frozen TensorFlow graph loaded from the module assets.
 *
 * All generic type parameters (`Vector<Box>`, `Array<FloatArray>`, …) are
 * restored here; the extracted source had lost every `<...>` span.
 */
class MTCNN(assetManager: AssetManager) {
    private val factor = 0.709f           // image-pyramid scale step between P-Net passes
    private val pNetThreshold = 0.6f      // min face probability kept after P-Net
    private val rNetThreshold = 0.7f      // min face probability kept after R-Net
    private val outputNetThreshold = 0.7f // min face probability kept after O-Net
    // Frozen TensorFlow graph shipped in this module's assets.
    private val modelFile = "mtcnn_freezed_model.pb"
    // Input/output tensor names for the three cascaded networks.
    private val pNetInName = "pnet/input:0"
    private val pNetOutName = arrayOf("pnet/prob1:0", "pnet/conv4-2/BiasAdd:0")
    private val rNetInName = "rnet/input:0"
    private val rNetOutName = arrayOf("rnet/prob1:0", "rnet/conv5-2/conv5-2:0")
    private val outputNetInName = "onet/input:0"
    private val outputNetOutName = arrayOf("onet/prob1:0", "onet/conv6-2/conv6-2:0", "onet/conv6-3/conv6-3:0")

    var lastProcessTime: Long = 0 // wall-clock ms spent on the last detectFaces() call

    private var inferenceInterface: TensorFlowInferenceInterface =
        TensorFlowInferenceInterface(assetManager, modelFile)

    private val TAG = "MTCNN"

    /**
     * Reads the bitmap's pixels and normalizes each channel with (v - 127.5) / 128,
     * returning a flat RGB float array (row major, 3 floats per pixel).
     */
    private fun normalizeImage(bitmap: Bitmap): FloatArray {
        val w = bitmap.width
        val h = bitmap.height
        val floatValues = FloatArray(w * h * 3)
        val intValues = IntArray(w * h)
        bitmap.getPixels(intValues, 0, bitmap.width, 0, 0, bitmap.width, bitmap.height)
        val imageMean = 127.5f
        val imageStd = 128f
        for (i in intValues.indices) {
            val pixel = intValues[i]
            floatValues[i * 3 + 0] = ((pixel shr 16 and 0xFF) - imageMean) / imageStd
            floatValues[i * 3 + 1] = ((pixel shr 8 and 0xFF) - imageMean) / imageStd
            floatValues[i * 3 + 2] = ((pixel and 0xFF) - imageMean) / imageStd
        }
        return floatValues
    }

    /** Returns a copy of [bm] uniformly scaled by [scale]. */
    private fun bitmapResize(bm: Bitmap, scale: Float): Bitmap {
        val width = bm.width
        val height = bm.height
        val matrix = Matrix()
        matrix.postScale(scale, scale)
        return Bitmap.createBitmap(bm, 0, 0, width, height, matrix, true)
    }

    /**
     * Runs P-Net on [bitmap]. The graph expects a transposed layout, so the
     * input is flipped along the diagonal before feeding and the outputs are
     * flipped back. Results go into [pNetOutProb] (face probability map) and
     * [pNetOutBias] (bounding-box regression map).
     */
    private fun proposalNetForward(
        bitmap: Bitmap,
        pNetOutProb: Array<FloatArray>,
        pNetOutBias: Array<Array<FloatArray>>
    ): Int {
        val w = bitmap.width
        val h = bitmap.height

        val pNetIn = normalizeImage(bitmap)
        PicUtils.flipDiag(pNetIn, h, w, 3) // flip along the diagonal
        inferenceInterface.feed(pNetInName, pNetIn, 1, w.toLong(), h.toLong(), 3)
        inferenceInterface.run(pNetOutName, false)
        // Output map size of the fully-convolutional P-Net (stride 2, window 12).
        val pNetOutSizeW = Math.ceil(w * 0.5 - 5).toInt()
        val pNetOutSizeH = Math.ceil(h * 0.5 - 5).toInt()
        val pNetOutP = FloatArray(pNetOutSizeW * pNetOutSizeH * 2)
        val pNetOutB = FloatArray(pNetOutSizeW * pNetOutSizeH * 4)
        inferenceInterface.fetch(pNetOutName[0], pNetOutP)
        inferenceInterface.fetch(pNetOutName[1], pNetOutB)
        // Flip the raw outputs back, then expand into 2-D / 3-D arrays.
        PicUtils.flipDiag(pNetOutP, pNetOutSizeW, pNetOutSizeH, 2)
        PicUtils.flipDiag(pNetOutB, pNetOutSizeW, pNetOutSizeH, 4)
        PicUtils.expand(pNetOutB, pNetOutBias)
        PicUtils.expandProb(pNetOutP, pNetOutProb)
        return 0
    }

    /**
     * Non-maximum suppression: for each overlapping pair, marks the
     * lower-scoring box as deleted. [method] selects the overlap measure:
     * "Union" (IoU) or "Min" (intersection / smaller area).
     */
    private fun nms(boxes: Vector<Box>, threshold: Float, method: String) {
        for (i in 0 until boxes.size) {
            val box = boxes[i]
            if (!box.deleted) {
                for (j in i + 1 until boxes.size) {
                    val box2 = boxes[j]
                    if (!box2.deleted) {
                        val x1 = max(box.box[0], box2.box[0])
                        val y1 = max(box.box[1], box2.box[1])
                        val x2 = min(box.box[2], box2.box[2])
                        val y2 = min(box.box[3], box2.box[3])
                        if (x2 < x1 || y2 < y1) continue // no overlap
                        val areaIoU = (x2 - x1 + 1) * (y2 - y1 + 1)
                        var iou = 0f
                        if (method == "Union")
                            iou = 1.0f * areaIoU / (box.area() + box2.area() - areaIoU)
                        else if (method == "Min") {
                            iou = 1.0f * areaIoU / min(box.area(), box2.area())
                            Log.i(TAG, "[*]iou=$iou")
                        }
                        if (iou >= threshold) {
                            // Delete the lower-probability box of the pair.
                            if (box.score > box2.score)
                                box2.deleted = true
                            else
                                box.deleted = true
                        }
                    }
                }
            }
        }
    }

    /**
     * Converts the P-Net probability/regression maps of one pyramid level into
     * candidate boxes in original-image coordinates (stride 2, window 12).
     */
    private fun generateBoxes(
        prob: Array<FloatArray>,
        bias: Array<Array<FloatArray>>,
        scale: Float,
        threshold: Float,
        boxes: Vector<Box>
    ): Int {
        val h = prob.size
        val w = prob[0].size
        for (y in 0 until h)
            for (x in 0 until w) {
                val score = prob[y][x]
                // Only accept cells whose probability exceeds the threshold.
                if (score > threshold) {
                    val box = Box()
                    box.score = score
                    box.box[0] = Math.round(x * 2 / scale)
                    box.box[1] = Math.round(y * 2 / scale)
                    box.box[2] = Math.round((x * 2 + 11) / scale)
                    box.box[3] = Math.round((y * 2 + 11) / scale)
                    for (i in 0..3)
                        box.bbr[i] = bias[y][x][i]
                    boxes.addElement(box)
                }
            }
        return 0
    }

    /** Applies bounding-box regression to every box. */
    private fun boundingBoxRegression(boxes: Vector<Box>) {
        for (i in 0 until boxes.size)
            boxes[i].calibrate()
    }

    /**
     * P-Net stage: runs an image pyramid through P-Net, applies per-scale NMS
     * (0.5), global NMS (0.7), then bounding-box regression.
     * [minSize] is the smallest face (in pixels) to look for.
     */
    private fun proposalNet(bitmap: Bitmap, minSize: Int): Vector<Box> {
        val whMin = min(bitmap.width, bitmap.height)
        // currentFaceSize = minSize / factor^k, k = 0,1,2... until it exceeds whMin
        var currentFaceSize = minSize.toFloat()
        val totalBoxes = Vector<Box>()
        // [1] Image pyramid, each level fed to P-Net.
        while (currentFaceSize <= whMin) {
            val scale = 12.0f / currentFaceSize
            // (1) resize
            val bm = bitmapResize(bitmap, scale)
            val w = bm.width
            val h = bm.height
            // (2) run CNN
            val pNetOutSizeW = (Math.ceil(w * 0.5 - 5) + 0.5).toInt()
            val pNetOutSizeH = (Math.ceil(h * 0.5 - 5) + 0.5).toInt()
            val pNetOutProb = Array(pNetOutSizeH) { FloatArray(pNetOutSizeW) }
            val pNetOutBias = Array(pNetOutSizeH) { Array(pNetOutSizeW) { FloatArray(4) } }
            proposalNetForward(bm, pNetOutProb, pNetOutBias)
            // (3) decode
            val curBoxes = Vector<Box>()
            generateBoxes(pNetOutProb, pNetOutBias, scale, pNetThreshold, curBoxes)
            // (4) per-scale NMS at 0.5
            nms(curBoxes, 0.5f, "Union")
            // (5) collect survivors
            for (i in 0 until curBoxes.size)
                if (!curBoxes[i].deleted)
                    totalBoxes.addElement(curBoxes[i])
            // next pyramid level
            currentFaceSize /= factor
        }
        // global NMS at 0.7
        nms(totalBoxes, 0.7f, "Union")
        // bounding-box regression
        boundingBoxRegression(totalBoxes)
        return PicUtils.updateBoxes(totalBoxes)
    }

    // Scratch bitmap kept from the original implementation.
    // NOTE(review): not used by any method in this file — verify before removing.
    var tmp_bm: Bitmap? = null

    /**
     * Crops [box] out of [bitmap], resizes it to size x size and writes the
     * normalized ((v - 127.5) / 128) RGB floats into [data].
     */
    private fun cropAndResize(bitmap: Bitmap, box: Box, size: Int, data: FloatArray) {
        val matrix = Matrix()
        val scale = 1.0f * size / box.width()
        matrix.postScale(scale, scale)
        val cropped = Bitmap.createBitmap(bitmap, box.left(), box.top(), box.width(), box.height(), matrix, true)
        val pixelsBuf = IntArray(size * size)
        cropped.getPixels(pixelsBuf, 0, cropped.width, 0, 0, cropped.width, cropped.height)
        val imageMean = 127.5f
        val imageStd = 128f
        for (i in pixelsBuf.indices) {
            val pixel = pixelsBuf[i]
            data[i * 3 + 0] = ((pixel shr 16 and 0xFF) - imageMean) / imageStd
            data[i * 3 + 1] = ((pixel shr 8 and 0xFF) - imageMean) / imageStd
            data[i * 3 + 2] = ((pixel and 0xFF) - imageMean) / imageStd
        }
    }

    /** Runs R-Net on the batched 24x24 crops and writes score/bbr back into [boxes]. */
    private fun refineNetForward(rNetIn: FloatArray, boxes: Vector<Box>) {
        val num = rNetIn.size / 24 / 24 / 3
        inferenceInterface.feed(rNetInName, rNetIn, num.toLong(), 24, 24, 3)
        inferenceInterface.run(rNetOutName, false)
        val rNetP = FloatArray(num * 2)
        val rNetB = FloatArray(num * 4)
        inferenceInterface.fetch(rNetOutName[0], rNetP)
        inferenceInterface.fetch(rNetOutName[1], rNetB)
        for (i in 0 until num) {
            boxes[i].score = rNetP[i * 2 + 1] // channel 1 = "is face" probability
            for (j in 0..3)
                boxes[i].bbr[j] = rNetB[i * 4 + j]
        }
    }

    /** R-Net stage: re-scores candidates at 24x24, thresholds, NMS 0.7, regression. */
    private fun refineNet(bitmap: Bitmap, boxes: Vector<Box>): Vector<Box> {
        val num = boxes.size
        val rNetIn = FloatArray(num * 24 * 24 * 3)
        val curCrop = FloatArray(24 * 24 * 3)
        var rNetInIdx = 0
        for (i in 0 until num) {
            cropAndResize(bitmap, boxes[i], 24, curCrop)
            PicUtils.flipDiag(curCrop, 24, 24, 3)
            for (j in curCrop.indices) rNetIn[rNetInIdx++] = curCrop[j]
        }
        refineNetForward(rNetIn, boxes)
        for (i in 0 until num)
            if (boxes[i].score < rNetThreshold)
                boxes[i].deleted = true
        nms(boxes, 0.7f, "Union")
        boundingBoxRegression(boxes)
        return PicUtils.updateBoxes(boxes)
    }

    /** Runs O-Net on the batched 48x48 crops; writes score, bbr and landmarks into [boxes]. */
    private fun outputNetForward(outputNetIn: FloatArray, boxes: Vector<Box>) {
        val num = outputNetIn.size / 48 / 48 / 3
        inferenceInterface.feed(outputNetInName, outputNetIn, num.toLong(), 48, 48, 3)
        inferenceInterface.run(outputNetOutName, false)
        val outputNetP = FloatArray(num * 2)  // probability
        val outputNetB = FloatArray(num * 4)  // bias
        val outputNetL = FloatArray(num * 10) // landmarks
        inferenceInterface.fetch(outputNetOutName[0], outputNetP)
        inferenceInterface.fetch(outputNetOutName[1], outputNetB)
        inferenceInterface.fetch(outputNetOutName[2], outputNetL)
        for (i in 0 until num) {
            boxes[i].score = outputNetP[i * 2 + 1]
            for (j in 0..3)
                boxes[i].bbr[j] = outputNetB[i * 4 + j]
            // Landmarks are stored as 5 x-ratios followed by 5 y-ratios,
            // relative to the box's top-left corner.
            for (j in 0..4) {
                val x = boxes[i].left() + (outputNetL[i * 10 + j] * boxes[i].width()).toInt()
                val y = boxes[i].top() + (outputNetL[i * 10 + j + 5] * boxes[i].height()).toInt()
                boxes[i].landmark[j] = Point(x, y)
            }
        }
    }

    /** O-Net stage: final scoring at 48x48, thresholding, regression, NMS ("Min"). */
    private fun outputNet(bitmap: Bitmap, boxes: Vector<Box>): Vector<Box> {
        val num = boxes.size
        val outputNetIn = FloatArray(num * 48 * 48 * 3)
        val curCrop = FloatArray(48 * 48 * 3)
        var outputNetInIdx = 0
        for (i in 0 until num) {
            cropAndResize(bitmap, boxes[i], 48, curCrop)
            PicUtils.flipDiag(curCrop, 48, 48, 3)
            for (j in curCrop.indices) outputNetIn[outputNetInIdx++] = curCrop[j]
        }
        outputNetForward(outputNetIn, boxes)
        for (i in 0 until num)
            if (boxes[i].score < outputNetThreshold)
                boxes[i].deleted = true
        boundingBoxRegression(boxes)
        nms(boxes, 0.7f, "Min")
        return PicUtils.updateBoxes(boxes)
    }

    /** Makes every box square and clamps it inside a w x h image. */
    private fun squareLimit(boxes: Vector<Box>, w: Int, h: Int) {
        for (i in 0 until boxes.size) {
            boxes[i].toSquareShape()
            boxes[i].limitSquare(w, h)
        }
    }

    /**
     * Detects faces in [bitmap].
     *
     * @param minFaceSize smallest face to detect, in pixels (larger = faster)
     * @return the surviving face boxes after the full P/R/O cascade
     */
    fun detectFaces(bitmap: Bitmap, minFaceSize: Int): Vector<Box> {
        val tStart = System.currentTimeMillis()
        // [1] P-Net generates candidate boxes
        var boxes = proposalNet(bitmap, minFaceSize)
        squareLimit(boxes, bitmap.width, bitmap.height)
        // [2] R-Net refines them
        boxes = refineNet(bitmap, boxes)
        squareLimit(boxes, bitmap.width, bitmap.height)
        // [3] O-Net produces the final boxes and landmarks
        boxes = outputNet(bitmap, boxes)
        lastProcessTime = System.currentTimeMillis() - tStart
        Log.i(TAG, "[*]Mtcnn Detection Time:$lastProcessTime")
        return boxes
    }

    /**
     * Cuts each detected face out of [bitmap] (with a 20 px margin) and scales
     * it to 160x160 (the input size FaceNet expects downstream).
     *
     * @throws IllegalArgumentException if [bitmap] is null
     */
    fun cutFace(bitmap: Bitmap?, boxes: Vector<Box>): List<Bitmap> {
        if (bitmap == null) {
            throw IllegalArgumentException("no images!")
        }
        val findFaceBitmap = PicUtils.copyBitmap(bitmap)
        val faces = mutableListOf<Bitmap>()
        boxes.forEach {
            PicUtils.drawRect(findFaceBitmap, it.transform2Rect())
            PicUtils.drawPoints(findFaceBitmap, it.landmark)
            // BUG FIX: the original extended a throw-away Rect and then always
            // cropped boxes[0], producing identical crops for every face.
            // Extend each box's own rect and crop with it.
            val faceRect = it.transform2Rect()
            PicUtils.rectExtend(findFaceBitmap, faceRect, 20)
            faces.add(Bitmap.createScaledBitmap(PicUtils.crop(findFaceBitmap, faceRect), 160, 160, true))
        }
        return faces
    }
}
398 |
399 |
--------------------------------------------------------------------------------
/mtcnn/src/main/java/org/unreal/face/mtcnn/PicUtils.kt:
--------------------------------------------------------------------------------
1 | package org.unreal.face.mtcnn
2 |
3 | import android.content.res.AssetManager
4 | import android.graphics.*
5 | import android.util.Log
6 | import java.util.*
7 | import android.graphics.BitmapFactory
8 | import kotlin.math.max
9 | import kotlin.math.min
10 |
11 |
/**
 * Bitmap / box helpers shared by the MTCNN pipeline.
 * Generic parameters (`Array<Point?>`, `Vector<Box>`, …) are restored here;
 * the extracted source had lost every `<...>` span.
 */
object PicUtils {
    /** Returns a mutable copy of [bitmap]. */
    fun copyBitmap(bitmap: Bitmap): Bitmap {
        return bitmap.copy(bitmap.config, true)
    }

    /** Draws [rect] onto [bitmap] as a red stroked rectangle (best effort). */
    fun drawRect(bitmap: Bitmap, rect: Rect) {
        try {
            val canvas = Canvas(bitmap)
            val paint = Paint()
            val r = 255
            val g = 0
            val b = 0
            paint.color = Color.rgb(r, g, b)
            // Stroke width scales with the image so it stays visible on large photos.
            paint.strokeWidth = (1 + bitmap.width / 500).toFloat()
            paint.style = Paint.Style.STROKE
            canvas.drawRect(rect, paint)
        } catch (e: Exception) {
            Log.i("Utils", "[*] error$e")
        }
    }

    /** Draws each landmark as a tiny rectangle; null landmarks fall back to (0,0). */
    fun drawPoints(bitmap: Bitmap, landmark: Array<Point?>) {
        for (i in landmark.indices) {
            val x = landmark[i]?.x ?: 0
            val y = landmark[i]?.y ?: 0
            drawRect(bitmap, Rect(x - 1, y - 1, x + 1, y + 1))
        }
    }

    /**
     * Flips [data] along the diagonal in place: input laid out as h*w*stride
     * becomes w*h*stride.
     */
    fun flipDiag(data: FloatArray, h: Int, w: Int, stride: Int) {
        val tmp = FloatArray(w * h * stride)
        for (i in 0 until w * h * stride) tmp[i] = data[i]
        for (y in 0 until h)
            for (x in 0 until w) {
                for (z in 0 until stride)
                    data[(x * h + y) * stride + z] = tmp[(y * w + x) * stride + z]
            }
    }

    /** Unpacks flat [src] into the 2-D array [dst] (row major). */
    fun expand(src: FloatArray, dst: Array<FloatArray>) {
        var idx = 0
        for (y in dst.indices)
            for (x in 0 until dst[0].size)
                dst[y][x] = src[idx++]
    }

    /** Unpacks flat [src] into the 3-D array [dst] (row major). */
    fun expand(src: FloatArray, dst: Array<Array<FloatArray>>) {
        var idx = 0
        for (y in dst.indices)
            for (x in 0 until dst[0].size)
                for (c in 0 until dst[0][0].size)
                    dst[y][x][c] = src[idx++]
    }

    /** dst = src[:, :, 1] — keeps only the "face" probability channel. */
    fun expandProb(src: FloatArray, dst: Array<FloatArray>) {
        var idx = 0
        for (y in dst.indices)
            for (x in 0 until dst[0].size)
                dst[y][x] = src[idx++ * 2 + 1]
    }

    /** Converts all non-deleted boxes into Rects. */
    fun boxes2rects(boxes: Vector<Box>): Array<Rect?> {
        var cnt = 0
        for (i in 0 until boxes.size) if (!boxes[i].deleted) cnt++
        val r = arrayOfNulls<Rect>(cnt)
        var idx = 0
        for (i in 0 until boxes.size)
            if (!boxes[i].deleted)
                r[idx++] = boxes[i].transform2Rect()
        return r
    }

    /** Returns a new vector containing only boxes not marked deleted. */
    fun updateBoxes(boxes: Vector<Box>): Vector<Box> {
        val b = Vector<Box>()
        for (i in 0 until boxes.size)
            if (!boxes[i].deleted)
                b.addElement(boxes[i])
        return b
    }

    /** Logs the RGB channels of a packed ARGB pixel value. */
    fun showPixel(v: Int) {
        Log.i("MainActivity", "[*]Pixel:R" + (v shr 16 and 0xff) + "G:" + (v shr 8 and 0xff) + " B:" + (v and 0xff))
    }

    /** Decodes [fileName] from the app assets into a Bitmap. */
    fun getBitmapFromAssets(assets: AssetManager?, fileName: String): Bitmap {
        val inputStream = assets?.open(fileName)
        val bitmap = BitmapFactory.decodeStream(inputStream)
        inputStream?.close()
        return bitmap
    }

    /** Crops [bitmap] down to [rect]. */
    fun crop(bitmap: Bitmap, rect: Rect): Bitmap {
        return Bitmap.createBitmap(bitmap, rect.left, rect.top, rect.right - rect.left, rect.bottom - rect.top)
    }

    /** Grows [rect] by [pixels] on every side, clamped to the bitmap bounds (mutates [rect]). */
    fun rectExtend(bitmap: Bitmap, rect: Rect, pixels: Int) {
        rect.left = max(0, rect.left - pixels)
        rect.right = min(bitmap.width - 1, rect.right + pixels)
        rect.top = max(0, rect.top - pixels)
        rect.bottom = min(bitmap.height - 1, rect.bottom + pixels)
    }
}
--------------------------------------------------------------------------------
/mtcnn/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
<resources>
    <string name="app_name">MTCNN</string>
</resources>
--------------------------------------------------------------------------------
/mtcnn/src/test/java/org/unreal/face/mtcnn/ExampleUnitTest.java:
--------------------------------------------------------------------------------
1 | package org.unreal.face.mtcnn;
2 |
3 | import org.junit.Test;
4 |
5 | import static org.junit.Assert.*;
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 | * @see Testing documentation
11 | */
12 | public class ExampleUnitTest {
13 | @Test
14 | public void addition_isCorrect() {
15 | assertEquals(4, 2 + 2);
16 | }
17 | }
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app', ':mtcnn', ':facenet'
2 |
--------------------------------------------------------------------------------