├── .github
│   ├── FUNDING.yml
│   └── workflows
│       └── release.yml
├── .gitignore
├── CHANGELOG.md
├── LICENSE
├── README.md
├── app
│   ├── .gitignore
│   ├── build.gradle.kts
│   ├── objectbox-models
│   │   ├── default.json
│   │   └── default.json.bak
│   ├── proguard-rules.pro
│   └── src
│       └── main
│           ├── AndroidManifest.xml
│           ├── assets
│           │   ├── blaze_face_short_range.tflite
│           │   ├── facenet.tflite
│           │   ├── facenet_512.tflite
│           │   ├── spoof_model_scale_2_7.tflite
│           │   └── spoof_model_scale_4_0.tflite
│           ├── java
│           │   └── com
│           │       └── ml
│           │           └── shubham0204
│           │               └── facenet_android
│           │                   ├── MainActivity.kt
│           │                   ├── MainApplication.kt
│           │                   ├── data
│           │                   │   ├── DataModels.kt
│           │                   │   ├── ImagesVectorDB.kt
│           │                   │   ├── ObjectBoxStore.kt
│           │                   │   └── PersonDB.kt
│           │                   ├── di
│           │                   │   └── AppModule.kt
│           │                   ├── domain
│           │                   │   ├── ErrorHandling.kt
│           │                   │   ├── ImageVectorUseCase.kt
│           │                   │   ├── PersonUseCase.kt
│           │                   │   ├── embeddings
│           │                   │   │   └── FaceNet.kt
│           │                   │   └── face_detection
│           │                   │       ├── FaceSpoofDetector.kt
│           │                   │       └── MediapipeFaceDetector.kt
│           │                   └── presentation
│           │                       ├── components
│           │                       │   ├── AppAlertDialog.kt
│           │                       │   ├── AppProgressDialog.kt
│           │                       │   ├── DelayedVisibility.kt
│           │                       │   └── FaceDetectionOverlay.kt
│           │                       ├── screens
│           │                       │   ├── add_face
│           │                       │   │   ├── AddFaceScreen.kt
│           │                       │   │   └── AddFaceScreenViewModel.kt
│           │                       │   ├── detect_screen
│           │                       │   │   ├── DetectScreen.kt
│           │                       │   │   └── DetectScreenViewModel.kt
│           │                       │   └── face_list
│           │                       │       ├── FaceListScreen.kt
│           │                       │       └── FaceListScreenViewModel.kt
│           │                       └── theme
│           │                           ├── Color.kt
│           │                           ├── Theme.kt
│           │                           └── Type.kt
│           └── res
│               ├── drawable
│               │   ├── ic_launcher_background.xml
│               │   └── ic_launcher_foreground.xml
│               ├── mipmap-anydpi
│               │   ├── ic_launcher.xml
│               │   └── ic_launcher_round.xml
│               ├── mipmap-hdpi
│               │   ├── ic_launcher.webp
│               │   └── ic_launcher_round.webp
│               ├── mipmap-mdpi
│               │   ├── ic_launcher.webp
│               │   └── ic_launcher_round.webp
│               ├── mipmap-xhdpi
│               │   ├── ic_launcher.webp
│               │   └── ic_launcher_round.webp
│               ├── mipmap-xxhdpi
│               │   ├── ic_launcher.webp
│               │   └── ic_launcher_round.webp
│               ├── mipmap-xxxhdpi
│               │   ├── ic_launcher.webp
│               │   └── ic_launcher_round.webp
│               ├── values-v23
│               │   └── font_certs.xml
│               ├── values
│               │   ├── colors.xml
│               │   ├── strings.xml
│               │   └── themes.xml
│               └── xml
│                   ├── backup_rules.xml
│                   └── data_extraction_rules.xml
├── build.gradle.kts
├── gradle.properties
├── gradle
│   ├── libs.versions.toml
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── resources
│   ├── Liveness_PT_Model_to_TF.ipynb
│   ├── banner_1.png
│   └── banner_2.png
└── settings.gradle.kts
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | github: shubham0204
2 | custom: ['https://www.paypal.me/ShubhamPanchal0204/']
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Build and Release Android APK
2 |
3 | on:
4 | push:
5 | tags:
6 | - 'v*'
7 |
8 | jobs:
9 | build_apk:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/checkout@v3
13 | with:
14 | submodules: 'true'
15 |
16 | - name: Set up JDK 17
17 | uses: actions/setup-java@v3
18 | with:
19 | java-version: '17'
20 | distribution: 'temurin'
21 | cache: gradle
22 |
23 | - name: Decode keystore
24 | env:
25 | ENCODED_STRING: ${{ secrets.KEYSTORE_BASE_64 }}
26 | RELEASE_KEYSTORE_PASSWORD: ${{ secrets.RELEASE_KEYSTORE_PASSWORD }}
27 | RELEASE_KEYSTORE_ALIAS: ${{ secrets.RELEASE_KEYSTORE_ALIAS }}
28 | RELEASE_KEY_PASSWORD: ${{ secrets.RELEASE_KEY_PASSWORD }}
29 | run: |
30 | echo $ENCODED_STRING > keystore-b64.txt
31 | base64 -d keystore-b64.txt > keystore.jks
32 |
33 | - name: Grant execute permission for gradlew
34 | run: chmod +x gradlew
35 |
36 | - name: Build with Gradle
37 | env:
38 | RELEASE_KEYSTORE_PASSWORD: ${{ secrets.RELEASE_KEYSTORE_PASSWORD }}
39 | RELEASE_KEYSTORE_ALIAS: ${{ secrets.RELEASE_KEYSTORE_ALIAS }}
40 | RELEASE_KEY_PASSWORD: ${{ secrets.RELEASE_KEY_PASSWORD }}
41 | run: ./gradlew build
42 |
43 | - name: Build APK
44 | env:
45 | RELEASE_KEYSTORE_PASSWORD: ${{ secrets.RELEASE_KEYSTORE_PASSWORD }}
46 | RELEASE_KEYSTORE_ALIAS: ${{ secrets.RELEASE_KEYSTORE_ALIAS }}
47 | RELEASE_KEY_PASSWORD: ${{ secrets.RELEASE_KEY_PASSWORD }}
48 | run: ./gradlew assembleRelease --stacktrace
49 |
50 | - name: Create a release
51 | uses: actions/create-release@v1
52 | id: create_release
53 | with:
54 | tag_name: ${{ github.ref }}
55 | release_name: ${{ github.ref }}
56 | draft: false
57 | prerelease: false
58 | body_path: CHANGELOG.md
59 | env:
60 | GITHUB_TOKEN: ${{ github.token }}
61 |
62 | - name: Upload APK to release
63 | uses: actions/upload-release-asset@v1
64 | env:
65 | GITHUB_TOKEN: ${{ github.token }}
66 | with:
67 | upload_url: ${{ steps.create_release.outputs.upload_url }}
68 | asset_path: app/build/outputs/apk/release/app-release.apk
69 | asset_name: FaceNet-Android_${{ github.ref_name }}.apk
70 | asset_content_type: application/vnd.android.package-archive
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea
5 | /.kotlin
6 | /app/release
7 | .DS_Store
8 | /build
9 | /captures
10 | .externalNativeBuild
11 | .cxx
12 | local.properties
13 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | - Initial release of `FaceNet-Android`
2 | - The app allows users to add new faces to the database and recognize them in real-time.
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2024 Shubham Panchal
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # On-Device Face Recognition In Android
2 |
3 | > A simple Android app that performs on-device face recognition by comparing FaceNet embeddings against a vector database of user-given faces
4 |
5 |
6 |
7 |
8 |
9 | > Download the APK from the [Releases](https://github.com/shubham0204/OnDevice-Face-Recognition-Android/releases)
10 |
11 | ## Updates
12 |
13 | * 2024-09: Add face-spoof detection which uses FASNet from [minivision-ai/Silent-Face-Anti-Spoofing](https://github.com/minivision-ai/Silent-Face-Anti-Spoofing)
14 | * 2024-07: Add latency metrics to the main screen, showing the time taken (in milliseconds) for face detection, face embedding, and vector search.
15 |
16 | ## Goals
17 |
18 | * Produce on-device face embeddings with FaceNet and use them to perform face recognition on a user-given set of images
19 | * Store face-embedding and other metadata on-device and use vector-search to determine nearest-neighbors
20 | * Use modern Android development practices and recommended architecture guidelines while maintaining code simplicity and modularity
21 |
22 | ## Setup
23 |
24 | > Download the APK from the [Releases](https://github.com/shubham0204/OnDevice-Face-Recognition-Android/releases)
25 |
26 | Clone the `main` branch,
27 |
28 | ```bash
29 | $> git clone --depth=1 https://github.com/shubham0204/OnDevice-Face-Recognition-Android
30 | ```
31 |
32 | Perform a Gradle sync, and run the application.
33 |
34 | ### Choosing the FaceNet model
35 |
36 | The app ships with two FaceNet models that differ in the size of the embedding they produce: `facenet.tflite` outputs a 128-dimensional embedding and `facenet_512.tflite` a 512-dimensional one. In [FaceNet.kt](https://github.com/shubham0204/OnDevice-Face-Recognition-Android/blob/main/app/src/main/java/com/ml/shubham0204/facenet_android/domain/embeddings/FaceNet.kt), you can switch models by changing the path of the TFLite model,
37 |
38 | ```kotlin
39 | // facenet
40 | interpreter =
41 | Interpreter(FileUtil.loadMappedFile(context, "facenet.tflite"), interpreterOptions)
42 |
43 | // facenet-512
44 | interpreter =
45 | Interpreter(FileUtil.loadMappedFile(context, "facenet_512.tflite"), interpreterOptions)
46 | ```
47 |
48 | Next, change `embeddingDim` in the same file,
49 |
50 | ```kotlin
51 | // facenet
52 | private val embeddingDim = 128
53 |
54 | // facenet-512
55 | private val embeddingDim = 512
56 | ```
57 |
58 | Then, in [DataModels.kt](https://github.com/shubham0204/OnDevice-Face-Recognition-Android/blob/main/app/src/main/java/com/ml/shubham0204/facenet_android/data/DataModels.kt), change the dimensions of the `faceEmbedding` attribute,
59 |
60 | ```kotlin
61 | @Entity
62 | data class FaceImageRecord(
63 | // primary-key of `FaceImageRecord`
64 | @Id var recordID: Long = 0,
65 |
66 | // personId is derived from `PersonRecord`
67 | @Index var personID: Long = 0,
68 |
69 | var personName: String = "",
70 |
71 | // the FaceNet-512 model provides a 512-dimensional embedding
72 | // the FaceNet model provides a 128-dimensional embedding
73 | @HnswIndex(dimensions = 512)
74 | var faceEmbedding: FloatArray = floatArrayOf()
75 | )
76 | ```
77 |
78 | ## Working
79 |
80 | 
81 |
82 |
83 | We use the [FaceNet](https://arxiv.org/abs/1503.03832) model which, given a 160 * 160 cropped face image, produces an embedding of 128 or 512 elements that captures facial features uniquely identifying the face. We represent the embedding model as a function $M$ that accepts a cropped face image and returns a vector (embedding) of floating-point numbers.
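
In code, $M$ is a single suspending call. A minimal sketch, assuming a `FaceNet` instance and a cropped face `Bitmap` are already available (see `FaceNet.kt` later in this repository):

```kotlin
import android.graphics.Bitmap

// Hypothetical helper showing the call shape: `faceNet` wraps the TFLite
// interpreter, `croppedFace` holds a single cropped face. The returned array
// has 128 elements with facenet.tflite and 512 with facenet_512.tflite.
suspend fun embed(faceNet: FaceNet, croppedFace: Bitmap): FloatArray =
    faceNet.getFaceEmbedding(croppedFace)
```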
84 |
85 | 1. When users select an image, the app uses the Mediapipe `FaceDetector` to crop faces from the image. Each image is labelled with the person's name. See [`MediapipeFaceDetector.kt`](https://github.com/shubham0204/OnDevice-Face-Recognition-Android/blob/main/app/src/main/java/com/ml/shubham0204/facenet_android/domain/face_detection/MediapipeFaceDetector.kt).
86 | 2. Each cropped face is transformed into a vector/embedding with FaceNet. See [`FaceNet.kt`](https://github.com/shubham0204/OnDevice-Face-Recognition-Android/blob/main/app/src/main/java/com/ml/shubham0204/facenet_android/domain/embeddings/FaceNet.kt).
87 | 3. We store these face embeddings in a vector database that enables fast nearest-neighbor search.
88 | 4. Now, in the camera preview, for each frame, we perform face detection with the Mediapipe `FaceDetector` as in (1) and produce a face embedding as in (2). We compare this face embedding (the query vector) with those present in the vector database and determine the name/label of the embedding (nearest neighbor) closest to the query vector using cosine similarity.
89 | 5. The vector database performs a lossy compression on the embeddings stored in it, so the distance returned with the nearest neighbor is only an estimate. We therefore re-compute the cosine similarity between the nearest-neighbor vector and the query vector, as shown in the sketch below. See [`ImageVectorUseCase.kt`](https://github.com/shubham0204/OnDevice-Face-Recognition-Android/blob/main/app/src/main/java/com/ml/shubham0204/facenet_android/domain/ImageVectorUseCase.kt)
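
Since step 5 decides whether a face is recognized, here is the similarity computation, mirroring `cosineDistance` in `ImageVectorUseCase.kt` (the full file appears later in this repository; it is renamed `cosineSimilarity` here since it returns a similarity, not a distance):

```kotlin
import kotlin.math.sqrt

// Cosine similarity between the query embedding and the nearest-neighbor
// embedding returned by the vector database; values near 1.0 indicate a match.
fun cosineSimilarity(x1: FloatArray, x2: FloatArray): Float {
    var mag1 = 0.0f
    var mag2 = 0.0f
    var product = 0.0f
    for (i in x1.indices) {
        mag1 += x1[i] * x1[i]
        mag2 += x2[i] * x2[i]
        product += x1[i] * x2[i]
    }
    return product / (sqrt(mag1) * sqrt(mag2))
}
```

The app accepts the nearest neighbor only when the similarity exceeds 0.4; otherwise the face is reported as "Not recognized".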
90 |
91 | ## Tools
92 |
93 | 1. [TensorFlow Lite](https://ai.google.dev/edge/lite) as a runtime to execute the FaceNet model
94 | 2. [Mediapipe Face Detection](https://ai.google.dev/edge/mediapipe/solutions/vision/face_detector/android) to crop faces from the image
95 | 3. [ObjectBox](https://objectbox.io) for the on-device vector store and NoSQL database (see the query sketch below)
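
As an illustration of (3), the nearest-neighbor lookup is a single query against the HNSW index on `faceEmbedding`. Below is a condensed sketch of the lookup in `ImagesVectorDB.kt` (the full file appears later in this repository); `getNearestEmbedding` is a hypothetical free-standing variant and assumes the ObjectBox-generated `FaceImageRecord_` class is on the classpath:

```kotlin
import io.objectbox.Box

// Ask the HNSW index for the 10 approximate nearest neighbors of `embedding`
// and keep the closest one; results are ordered by ascending distance score.
fun getNearestEmbedding(
    imagesBox: Box<FaceImageRecord>,
    embedding: FloatArray
): FaceImageRecord? =
    imagesBox
        .query(FaceImageRecord_.faceEmbedding.nearestNeighbors(embedding, 10))
        .build()
        .findWithScores()
        .map { it.get() }
        .firstOrNull()
```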
96 |
97 | ## Discussion
98 |
99 | ### Implementing face-liveness detection
100 |
101 | > See [issue #1](https://github.com/shubham0204/OnDevice-Face-Recognition-Android/issues/1)
102 |
103 | Face-liveness detection is the process of determining whether the face captured in the camera frame is real or a spoof (a photo, a 3D model etc.). There are many techniques for face-liveness detection, the simplest being smile or wink detection. These are effective against static spoofs (pictures or 3D models) but fail against video spoofs.
104 |
105 | While exploring the [deepface](https://github.com/serengil/deepface) library, I discovered that it implements an *anti-spoof* detection system using the PyTorch models from the [Silent-Face-Anti-Spoofing](https://github.com/minivision-ai/Silent-Face-Anti-Spoofing) repository. It uses a combination of two models that operate on two different scales (crops) of the same image. Each model is penalized with a classification loss (cross-entropy) and an auxiliary loss measuring the difference between the Fourier spectrum of the input and intermediate features from the CNN.
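
At inference time, the two models' outputs are fused by summing their softmax distributions and taking the arg-max. Below is a condensed sketch of the logic in `FaceSpoofDetector.kt` (the full file appears later in this repository); `fuse` is a hypothetical helper that isolates the fusion step:

```kotlin
import kotlin.math.exp

// Softmax over the three output logits of one MiniFASNet variant.
fun softMax(x: FloatArray): FloatArray {
    val e = x.map { exp(it) }
    val sum = e.sum()
    return e.map { it / sum }.toFloatArray()
}

// Sum the two softmax distributions and take the arg-max;
// label 1 corresponds to a real (live) face.
fun fuse(logitsScale27: FloatArray, logitsScale40: FloatArray): Pair<Boolean, Float> {
    val combined = softMax(logitsScale27).zip(softMax(logitsScale40)).map { it.first + it.second }
    val label = combined.indexOf(combined.max())
    val isSpoof = label != 1
    val score = combined[label] / 2f // average confidence of the chosen label
    return isSpoof to score
}
```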
106 |
107 | The models used by the `deepface` library (the same as in `Silent-Face-Anti-Spoofing`) are in the PyTorch format. This project already uses the TFLite runtime to execute the FaceNet model, and adding another DL runtime would unnecessarily bloat the application.
108 |
109 | I converted the PyTorch models to TFLite using this notebook: https://github.com/shubham0204/OnDevice-Face-Recognition-Android/blob/main/resources/Liveness_PT_Model_to_TF.ipynb
110 |
111 | ### How does this project differ from my earlier [`FaceRecognition_With_FaceNet_Android`](https://github.com/shubham0204/FaceRecognition_With_FaceNet_Android) project?
112 |
113 | [FaceRecognition_With_FaceNet_Android](https://github.com/shubham0204/FaceRecognition_With_FaceNet_Android) is a similar project that I initiated in 2020 and have iterated on several times since. Here are the key similarities and differences with this project:
114 |
115 | #### Similarities
116 |
117 | 1. Use FaceNet and FaceNet-512 models executed with TensorFlow Lite
118 | 2. Perform on-device face-recognition on a user-given dataset of images
119 |
120 | #### Differences
121 |
122 | 1. Uses ObjectBox to store face embeddings and perform nearest-neighbor search.
123 | 2. Does not read a directory from the file-system; instead, it allows the user to select a group of photos and *label* them with the name of a person
124 | 3. Considers only the nearest neighbor to infer the identity of a person in the live camera-feed
125 | 4. Uses the Mediapipe Face Detector instead of MLKit
126 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
--------------------------------------------------------------------------------
/app/build.gradle.kts:
--------------------------------------------------------------------------------
1 | plugins {
2 | alias(libs.plugins.android.application)
3 | alias(libs.plugins.jetbrains.kotlin.android)
4 | alias(libs.plugins.compose.compiler)
5 | id("com.google.devtools.ksp")
6 | }
7 |
8 | android {
9 | namespace = "com.ml.shubham0204.facenet_android"
10 | compileSdk = 34
11 |
12 | defaultConfig {
13 | applicationId = "com.ml.shubham0204.facenet_android"
14 | minSdk = 26
15 | targetSdk = 34
16 | versionCode = 1
17 | versionName = "0.0.1"
18 |
19 | testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner"
20 | vectorDrawables {
21 | useSupportLibrary = true
22 | }
23 | }
24 |
25 | signingConfigs {
26 | create("release") {
27 | storeFile = file("../keystore.jks")
28 | storePassword = System.getenv("RELEASE_KEYSTORE_PASSWORD")
29 | keyAlias = System.getenv("RELEASE_KEYSTORE_ALIAS")
30 | keyPassword = System.getenv("RELEASE_KEY_PASSWORD")
31 | }
32 | }
33 |
34 | buildTypes {
35 | release {
36 | isMinifyEnabled = false
37 | proguardFiles(
38 | getDefaultProguardFile("proguard-android-optimize.txt"),
39 | "proguard-rules.pro"
40 | )
41 | signingConfig = signingConfigs.getByName("release")
42 | }
43 | }
44 | compileOptions {
45 | sourceCompatibility = JavaVersion.VERSION_1_8
46 | targetCompatibility = JavaVersion.VERSION_1_8
47 | }
48 | kotlinOptions {
49 | jvmTarget = "1.8"
50 | }
51 | buildFeatures {
52 | compose = true
53 | }
54 | packaging {
55 | resources {
56 | excludes += "/META-INF/{AL2.0,LGPL2.1}"
57 | }
58 | }
59 | applicationVariants.configureEach {
60 | kotlin.sourceSets {
61 | getByName(name) {
62 | kotlin.srcDir("build/generated/ksp/$name/kotlin")
63 | }
64 | }
65 | }
66 | }
67 |
68 | ksp {
69 | arg("KOIN_CONFIG_CHECK","true")
70 | }
71 |
72 | dependencies {
73 | implementation(libs.androidx.core.ktx)
74 | implementation(libs.androidx.lifecycle.runtime.ktx)
75 | implementation(libs.androidx.activity.compose)
76 | implementation(platform(libs.androidx.compose.bom))
77 | implementation(libs.androidx.ui)
78 | implementation(libs.androidx.ui.graphics)
79 | implementation(libs.androidx.ui.tooling.preview)
80 | implementation(libs.androidx.material3)
81 | implementation(libs.compose.material3.icons.extended)
82 | implementation(libs.androidx.compose.navigation)
83 | implementation(libs.androidx.ui.text.google.fonts)
84 |
85 | // ObjectBox - vector database
86 | debugImplementation("io.objectbox:objectbox-android-objectbrowser:4.0.0")
87 | releaseImplementation("io.objectbox:objectbox-android:4.0.0")
88 |
89 | // dependency injection
90 | implementation(libs.koin.android)
91 | implementation(libs.koin.annotations)
92 | implementation(libs.koin.androidx.compose)
93 | ksp(libs.koin.ksp.compiler)
94 |
95 | // TensorFlow Lite dependencies
96 | implementation(libs.tensorflow.lite)
97 | implementation(libs.tensorflow.lite.gpu)
98 | implementation(libs.tensorflow.lite.gpu.api)
99 | implementation(libs.tensorflow.lite.support)
100 |
101 | // DocumentFile and ExitInterface
102 | implementation(libs.androidx.documentfile)
103 | implementation(libs.androidx.exifinterface)
104 |
105 | // Kotlin Coil
106 | implementation(libs.coil)
107 | implementation(libs.coil.compose)
108 |
109 | // CameraX
110 | implementation(libs.androidx.camera.camera2)
111 | implementation(libs.androidx.camera.lifecycle)
112 | implementation(libs.androidx.camera.view)
113 |
114 | // Mediapipe Face Detection
115 | implementation(libs.tasks.vision)
116 |
117 | debugImplementation(libs.androidx.ui.tooling)
118 | debugImplementation(libs.androidx.ui.test.manifest)
119 | }
120 |
121 | apply(plugin = "io.objectbox")
--------------------------------------------------------------------------------
/app/objectbox-models/default.json:
--------------------------------------------------------------------------------
1 | {
2 | "_note1": "KEEP THIS FILE! Check it into a version control system (VCS) like git.",
3 | "_note2": "ObjectBox manages crucial IDs for your object model. See docs for details.",
4 | "_note3": "If you have VCS merge conflicts, you must resolve them according to ObjectBox docs.",
5 | "entities": [
6 | {
7 | "id": "1:7886976431312498614",
8 | "lastPropertyId": "4:1044002615928688033",
9 | "name": "FaceImageRecord",
10 | "properties": [
11 | {
12 | "id": "1:6533718932659133037",
13 | "name": "recordID",
14 | "type": 6,
15 | "flags": 1
16 | },
17 | {
18 | "id": "2:1879665807004012985",
19 | "name": "personID",
20 | "indexId": "1:4574755327814671272",
21 | "type": 6,
22 | "flags": 8
23 | },
24 | {
25 | "id": "3:3883179078775366301",
26 | "name": "personName",
27 | "type": 9
28 | },
29 | {
30 | "id": "4:1044002615928688033",
31 | "name": "faceEmbedding",
32 | "indexId": "2:1210613089703466990",
33 | "type": 28,
34 | "flags": 8
35 | }
36 | ],
37 | "relations": []
38 | },
39 | {
40 | "id": "2:5724033975412775664",
41 | "lastPropertyId": "5:6214334937147888472",
42 | "name": "PersonRecord",
43 | "properties": [
44 | {
45 | "id": "1:7617310022622233298",
46 | "name": "personID",
47 | "type": 6,
48 | "flags": 1
49 | },
50 | {
51 | "id": "2:4766740777198822324",
52 | "name": "personName",
53 | "type": 9
54 | },
55 | {
56 | "id": "3:7309860671533523406",
57 | "name": "numImages",
58 | "type": 6
59 | },
60 | {
61 | "id": "5:6214334937147888472",
62 | "name": "addTime",
63 | "type": 6
64 | }
65 | ],
66 | "relations": []
67 | }
68 | ],
69 | "lastEntityId": "2:5724033975412775664",
70 | "lastIndexId": "2:1210613089703466990",
71 | "lastRelationId": "0:0",
72 | "lastSequenceId": "0:0",
73 | "modelVersion": 5,
74 | "modelVersionParserMinimum": 5,
75 | "retiredEntityUids": [],
76 | "retiredIndexUids": [],
77 | "retiredPropertyUids": [
78 | 1955098665124372966
79 | ],
80 | "retiredRelationUids": [],
81 | "version": 1
82 | }
--------------------------------------------------------------------------------
/app/objectbox-models/default.json.bak:
--------------------------------------------------------------------------------
1 | {
2 | "_note1": "KEEP THIS FILE! Check it into a version control system (VCS) like git.",
3 | "_note2": "ObjectBox manages crucial IDs for your object model. See docs for details.",
4 | "_note3": "If you have VCS merge conflicts, you must resolve them according to ObjectBox docs.",
5 | "entities": [
6 | {
7 | "id": "1:7886976431312498614",
8 | "lastPropertyId": "4:1044002615928688033",
9 | "name": "FaceImageRecord",
10 | "properties": [
11 | {
12 | "id": "1:6533718932659133037",
13 | "name": "recordID",
14 | "type": 6,
15 | "flags": 1
16 | },
17 | {
18 | "id": "2:1879665807004012985",
19 | "name": "personID",
20 | "indexId": "1:4574755327814671272",
21 | "type": 6,
22 | "flags": 8
23 | },
24 | {
25 | "id": "3:3883179078775366301",
26 | "name": "personName",
27 | "type": 9
28 | },
29 | {
30 | "id": "4:1044002615928688033",
31 | "name": "faceEmbedding",
32 | "indexId": "2:1210613089703466990",
33 | "type": 28,
34 | "flags": 8
35 | }
36 | ],
37 | "relations": []
38 | },
39 | {
40 | "id": "2:5724033975412775664",
41 | "lastPropertyId": "4:1955098665124372966",
42 | "name": "PersonRecord",
43 | "properties": [
44 | {
45 | "id": "1:7617310022622233298",
46 | "name": "personID",
47 | "type": 6,
48 | "flags": 1
49 | },
50 | {
51 | "id": "2:4766740777198822324",
52 | "name": "personName",
53 | "type": 9
54 | },
55 | {
56 | "id": "3:7309860671533523406",
57 | "name": "numImages",
58 | "type": 6
59 | },
60 | {
61 | "id": "4:1955098665124372966",
62 | "name": "personImage",
63 | "type": 23
64 | }
65 | ],
66 | "relations": []
67 | }
68 | ],
69 | "lastEntityId": "2:5724033975412775664",
70 | "lastIndexId": "2:1210613089703466990",
71 | "lastRelationId": "0:0",
72 | "lastSequenceId": "0:0",
73 | "modelVersion": 5,
74 | "modelVersionParserMinimum": 5,
75 | "retiredEntityUids": [],
76 | "retiredIndexUids": [],
77 | "retiredPropertyUids": [],
78 | "retiredRelationUids": [],
79 | "version": 1
80 | }
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
[XML markup not preserved in this export]
--------------------------------------------------------------------------------
/app/src/main/assets/blaze_face_short_range.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/app/src/main/assets/blaze_face_short_range.tflite
--------------------------------------------------------------------------------
/app/src/main/assets/facenet.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/app/src/main/assets/facenet.tflite
--------------------------------------------------------------------------------
/app/src/main/assets/facenet_512.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/app/src/main/assets/facenet_512.tflite
--------------------------------------------------------------------------------
/app/src/main/assets/spoof_model_scale_2_7.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/app/src/main/assets/spoof_model_scale_2_7.tflite
--------------------------------------------------------------------------------
/app/src/main/assets/spoof_model_scale_4_0.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/app/src/main/assets/spoof_model_scale_4_0.tflite
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/MainActivity.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android
2 |
3 | import android.os.Bundle
4 | import androidx.activity.ComponentActivity
5 | import androidx.activity.compose.setContent
6 | import androidx.activity.enableEdgeToEdge
7 | import androidx.compose.animation.fadeIn
8 | import androidx.compose.animation.fadeOut
9 | import androidx.navigation.compose.NavHost
10 | import androidx.navigation.compose.composable
11 | import androidx.navigation.compose.rememberNavController
12 | import com.ml.shubham0204.facenet_android.presentation.screens.add_face.AddFaceScreen
13 | import com.ml.shubham0204.facenet_android.presentation.screens.detect_screen.DetectScreen
14 | import com.ml.shubham0204.facenet_android.presentation.screens.face_list.FaceListScreen
15 |
16 | class MainActivity : ComponentActivity() {
17 |
18 | override fun onCreate(savedInstanceState: Bundle?) {
19 | super.onCreate(savedInstanceState)
20 | enableEdgeToEdge()
21 | setContent {
22 | val navHostController = rememberNavController()
23 | NavHost(
24 | navController = navHostController,
25 | startDestination = "detect",
26 | enterTransition = { fadeIn() },
27 | exitTransition = { fadeOut() }
28 | ) {
29 | composable("add-face") { AddFaceScreen { navHostController.navigateUp() } }
30 | composable("detect") { DetectScreen { navHostController.navigate("face-list") } }
31 | composable("face-list") {
32 | FaceListScreen(
33 | onNavigateBack = { navHostController.navigateUp() },
34 | onAddFaceClick = { navHostController.navigate("add-face") }
35 | )
36 | }
37 | }
38 | }
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/MainApplication.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android
2 |
3 | import android.app.Application
4 | import com.ml.shubham0204.facenet_android.data.ObjectBoxStore
5 | import com.ml.shubham0204.facenet_android.di.AppModule
6 | import org.koin.android.ext.koin.androidContext
7 | import org.koin.core.context.startKoin
8 | import org.koin.ksp.generated.module
9 |
10 | class MainApplication : Application() {
11 |
12 | override fun onCreate() {
13 | super.onCreate()
14 | startKoin {
15 | androidContext(this@MainApplication)
16 | modules(AppModule().module)
17 | }
18 | ObjectBoxStore.init(this)
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/data/DataModels.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.data
2 |
3 | import io.objectbox.annotation.Entity
4 | import io.objectbox.annotation.HnswIndex
5 | import io.objectbox.annotation.Id
6 | import io.objectbox.annotation.Index
7 |
8 | @Entity
9 | data class FaceImageRecord(
10 | // primary-key of `FaceImageRecord`
11 | @Id var recordID: Long = 0,
12 |
13 | // personId is derived from `PersonRecord`
14 | @Index var personID: Long = 0,
15 | var personName: String = "",
16 |
17 | // the FaceNet-512 model provides a 512-dimensional embedding
18 | // the FaceNet model provides a 128-dimensional embedding
19 | @HnswIndex(dimensions = 512) var faceEmbedding: FloatArray = floatArrayOf()
20 | )
21 |
22 | @Entity
23 | data class PersonRecord(
24 | // primary-key
25 | @Id var personID: Long = 0,
26 | var personName: String = "",
27 |
28 | // number of images selected by the user
29 | // under the name of the person
30 | var numImages: Long = 0,
31 |
32 | // time when the record was added
33 | var addTime: Long = 0
34 | )
35 |
36 | data class RecognitionMetrics(
37 | val timeFaceDetection: Long,
38 | val timeVectorSearch: Long,
39 | val timeFaceEmbedding: Long,
40 | val timeFaceSpoofDetection: Long
41 | )
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/data/ImagesVectorDB.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.data
2 |
3 | import org.koin.core.annotation.Single
4 |
5 | @Single
6 | class ImagesVectorDB {
7 |
8 | private val imagesBox = ObjectBoxStore.store.boxFor(FaceImageRecord::class.java)
9 |
10 | fun addFaceImageRecord(record: FaceImageRecord) {
11 | imagesBox.put(record)
12 | }
13 |
14 | fun getNearestEmbeddingPersonName(embedding: FloatArray): FaceImageRecord? {
15 | /*
16 | Use maxResultCount to set the maximum number of objects to return by the ANN condition.
17 | Hint: it can also be used as the "ef" HNSW parameter to increase the search quality in combination
18 | with a query limit. For example, use maxResultCount of 100 with a Query limit of 10 to have 10 results
19 | that are of potentially better quality than just passing in 10 for maxResultCount
20 | (quality/performance tradeoff).
21 | */
22 | return imagesBox
23 | .query(FaceImageRecord_.faceEmbedding.nearestNeighbors(embedding, 10))
24 | .build()
25 | .findWithScores()
26 | .map { it.get() }
27 | .firstOrNull()
28 | }
29 |
30 | fun removeFaceRecordsWithPersonID(personID: Long) {
31 | imagesBox.removeByIds(
32 | imagesBox.query(FaceImageRecord_.personID.equal(personID)).build().findIds().toList()
33 | )
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/data/ObjectBoxStore.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.data
2 |
3 | import android.content.Context
4 | import io.objectbox.BoxStore
5 |
6 | object ObjectBoxStore {
7 |
8 | lateinit var store: BoxStore
9 | private set
10 |
11 | fun init(context: Context) {
12 | store = MyObjectBox.builder().androidContext(context).build()
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/data/PersonDB.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.data
2 |
3 | import io.objectbox.kotlin.flow
4 | import kotlinx.coroutines.Dispatchers
5 | import kotlinx.coroutines.ExperimentalCoroutinesApi
6 | import kotlinx.coroutines.flow.Flow
7 | import kotlinx.coroutines.flow.flowOn
8 | import org.koin.core.annotation.Single
9 |
10 | @Single
11 | class PersonDB {
12 |
13 | private val personBox = ObjectBoxStore.store.boxFor(PersonRecord::class.java)
14 |
15 | fun addPerson(person: PersonRecord): Long {
16 | return personBox.put(person)
17 | }
18 |
19 | fun removePerson(personID: Long) {
20 | personBox.removeByIds(listOf(personID))
21 | }
22 |
23 | // Returns the number of records present in the collection
24 | fun getCount(): Long = personBox.count()
25 |
26 | @OptIn(ExperimentalCoroutinesApi::class)
27 |     fun getAll(): Flow<MutableList<PersonRecord>> =
28 | personBox.query(PersonRecord_.personID.notNull()).build().flow().flowOn(Dispatchers.IO)
29 | }
30 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/di/AppModule.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.di
2 |
3 | import org.koin.core.annotation.ComponentScan
4 | import org.koin.core.annotation.Module
5 |
6 | @Module
7 | @ComponentScan("com.ml.shubham0204.facenet_android")
8 | class AppModule
9 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/domain/ErrorHandling.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.domain
2 |
3 | enum class ErrorCode(val message: String) {
4 | MULTIPLE_FACES("Multiple faces found in the image"),
5 |     NO_FACE("No face was found in the image"),
6 | FACE_DETECTOR_FAILURE("Face detection failed")
7 | }
8 |
9 | class AppException(val errorCode: ErrorCode) : Exception()
10 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/domain/ImageVectorUseCase.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.domain
2 |
3 | import android.graphics.Bitmap
4 | import android.graphics.Rect
5 | import android.net.Uri
6 | import com.ml.shubham0204.facenet_android.data.FaceImageRecord
7 | import com.ml.shubham0204.facenet_android.data.ImagesVectorDB
8 | import com.ml.shubham0204.facenet_android.data.RecognitionMetrics
9 | import com.ml.shubham0204.facenet_android.domain.embeddings.FaceNet
10 | import com.ml.shubham0204.facenet_android.domain.face_detection.FaceSpoofDetector
11 | import com.ml.shubham0204.facenet_android.domain.face_detection.MediapipeFaceDetector
12 | import kotlin.math.pow
13 | import kotlin.math.sqrt
14 | import kotlin.time.DurationUnit
15 | import kotlin.time.measureTimedValue
16 | import org.koin.core.annotation.Single
17 |
18 | @Single
19 | class ImageVectorUseCase(
20 | private val mediapipeFaceDetector: MediapipeFaceDetector,
21 | private val faceSpoofDetector: FaceSpoofDetector,
22 | private val imagesVectorDB: ImagesVectorDB,
23 | private val faceNet: FaceNet
24 | ) {
25 |
26 | data class FaceRecognitionResult(
27 | val personName: String,
28 | val boundingBox: Rect,
29 | val spoofResult: FaceSpoofDetector.FaceSpoofResult? = null
30 | )
31 |
32 | // Add the person's image to the database
33 |     suspend fun addImage(personID: Long, personName: String, imageUri: Uri): Result<Boolean> {
34 | // Perform face-detection and get the cropped face as a Bitmap
35 | val faceDetectionResult = mediapipeFaceDetector.getCroppedFace(imageUri)
36 | if (faceDetectionResult.isSuccess) {
37 | // Get the embedding for the cropped face, and store it
38 | // in the database, along with `personId` and `personName`
39 | val embedding = faceNet.getFaceEmbedding(faceDetectionResult.getOrNull()!!)
40 | imagesVectorDB.addFaceImageRecord(
41 | FaceImageRecord(
42 | personID = personID,
43 | personName = personName,
44 | faceEmbedding = embedding
45 | )
46 | )
47 | return Result.success(true)
48 | } else {
49 | return Result.failure(faceDetectionResult.exceptionOrNull()!!)
50 | }
51 | }
52 |
53 | // From the given frame, return the name of the person by performing
54 | // face recognition
55 | suspend fun getNearestPersonName(
56 | frameBitmap: Bitmap
57 |     ): Pair<RecognitionMetrics?, List<FaceRecognitionResult>> {
58 | // Perform face-detection and get the cropped face as a Bitmap
59 | val (faceDetectionResult, t1) =
60 | measureTimedValue { mediapipeFaceDetector.getAllCroppedFaces(frameBitmap) }
61 |         val faceRecognitionResults = ArrayList<FaceRecognitionResult>()
62 | var avgT2 = 0L
63 | var avgT3 = 0L
64 | var avgT4 = 0L
65 |
66 | for (result in faceDetectionResult) {
67 | // Get the embedding for the cropped face (query embedding)
68 | val (croppedBitmap, boundingBox) = result
69 | val (embedding, t2) = measureTimedValue { faceNet.getFaceEmbedding(croppedBitmap) }
70 | avgT2 += t2.toLong(DurationUnit.MILLISECONDS)
71 | // Perform nearest-neighbor search
72 | val (recognitionResult, t3) =
73 | measureTimedValue { imagesVectorDB.getNearestEmbeddingPersonName(embedding) }
74 | avgT3 += t3.toLong(DurationUnit.MILLISECONDS)
75 | if (recognitionResult == null) {
76 | faceRecognitionResults.add(FaceRecognitionResult("Not recognized", boundingBox))
77 | continue
78 | }
79 |
80 | val spoofResult = faceSpoofDetector.detectSpoof(frameBitmap, boundingBox)
81 | avgT4 += spoofResult.timeMillis
82 |
83 | // Calculate cosine similarity between the nearest-neighbor
84 | // and the query embedding
85 | val distance = cosineDistance(embedding, recognitionResult.faceEmbedding)
86 | // If the distance > 0.4, we recognize the person
87 | // else we conclude that the face does not match enough
88 | if (distance > 0.4) {
89 | faceRecognitionResults.add(
90 | FaceRecognitionResult(recognitionResult.personName, boundingBox, spoofResult)
91 | )
92 | } else {
93 | faceRecognitionResults.add(
94 | FaceRecognitionResult("Not recognized", boundingBox, spoofResult)
95 | )
96 | }
97 | }
98 | val metrics =
99 | if (faceDetectionResult.isNotEmpty()) {
100 | RecognitionMetrics(
101 | timeFaceDetection = t1.toLong(DurationUnit.MILLISECONDS),
102 | timeFaceEmbedding = avgT2 / faceDetectionResult.size,
103 | timeVectorSearch = avgT3 / faceDetectionResult.size,
104 | timeFaceSpoofDetection = avgT4 / faceDetectionResult.size
105 | )
106 | } else {
107 | null
108 | }
109 |
110 | return Pair(metrics, faceRecognitionResults)
111 | }
112 |
113 | private fun cosineDistance(x1: FloatArray, x2: FloatArray): Float {
114 | var mag1 = 0.0f
115 | var mag2 = 0.0f
116 | var product = 0.0f
117 | for (i in x1.indices) {
118 | mag1 += x1[i].pow(2)
119 | mag2 += x2[i].pow(2)
120 | product += x1[i] * x2[i]
121 | }
122 | mag1 = sqrt(mag1)
123 | mag2 = sqrt(mag2)
124 | return product / (mag1 * mag2)
125 | }
126 |
127 | fun removeImages(personID: Long) {
128 | imagesVectorDB.removeFaceRecordsWithPersonID(personID)
129 | }
130 | }
131 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/domain/PersonUseCase.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.domain
2 |
3 | import com.ml.shubham0204.facenet_android.data.PersonDB
4 | import com.ml.shubham0204.facenet_android.data.PersonRecord
5 | import kotlinx.coroutines.flow.Flow
6 | import org.koin.core.annotation.Single
7 |
8 | @Single
9 | class PersonUseCase(private val personDB: PersonDB) {
10 |
11 | fun addPerson(name: String, numImages: Long): Long {
12 | return personDB.addPerson(
13 | PersonRecord(
14 | personName = name,
15 | numImages = numImages,
16 | addTime = System.currentTimeMillis()
17 | )
18 | )
19 | }
20 |
21 | fun removePerson(id: Long) {
22 | personDB.removePerson(id)
23 | }
24 |
25 |     fun getAll(): Flow<MutableList<PersonRecord>> = personDB.getAll()
26 |
27 | fun getCount(): Long = personDB.getCount()
28 | }
29 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/domain/embeddings/FaceNet.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.domain.embeddings
2 |
3 | import android.content.Context
4 | import android.graphics.Bitmap
5 | import java.nio.ByteBuffer
6 | import kotlin.math.max
7 | import kotlin.math.pow
8 | import kotlin.math.sqrt
9 | import kotlinx.coroutines.Dispatchers
10 | import kotlinx.coroutines.withContext
11 | import org.koin.core.annotation.Single
12 | import org.tensorflow.lite.DataType
13 | import org.tensorflow.lite.Interpreter
14 | import org.tensorflow.lite.gpu.CompatibilityList
15 | import org.tensorflow.lite.gpu.GpuDelegate
16 | import org.tensorflow.lite.support.common.FileUtil
17 | import org.tensorflow.lite.support.common.TensorOperator
18 | import org.tensorflow.lite.support.image.ImageProcessor
19 | import org.tensorflow.lite.support.image.TensorImage
20 | import org.tensorflow.lite.support.image.ops.ResizeOp
21 | import org.tensorflow.lite.support.tensorbuffer.TensorBuffer
22 | import org.tensorflow.lite.support.tensorbuffer.TensorBufferFloat
23 |
24 | // Derived from the original project:
25 | // https://github.com/shubham0204/FaceRecognition_With_FaceNet_Android/blob/master/app/src/main/java/com/ml/quaterion/facenetdetection/model/FaceNetModel.kt
26 | // Utility class for FaceNet model
27 | @Single
28 | class FaceNet(context: Context, useGpu: Boolean = true, useXNNPack: Boolean = true) {
29 |
30 | // Input image size for FaceNet model.
31 | private val imgSize = 160
32 |
33 | // Output embedding size
34 | private val embeddingDim = 512
35 |
36 | private var interpreter: Interpreter
37 | private val imageTensorProcessor =
38 | ImageProcessor.Builder()
39 | .add(ResizeOp(imgSize, imgSize, ResizeOp.ResizeMethod.BILINEAR))
40 | .add(StandardizeOp())
41 | .build()
42 |
43 | init {
44 | // Initialize TFLiteInterpreter
45 | val interpreterOptions =
46 | Interpreter.Options().apply {
47 | // Add the GPU Delegate if supported.
48 | // See -> https://www.tensorflow.org/lite/performance/gpu#android
49 | if (useGpu) {
50 | if (CompatibilityList().isDelegateSupportedOnThisDevice) {
51 | addDelegate(GpuDelegate(CompatibilityList().bestOptionsForThisDevice))
52 | }
53 | } else {
54 | // Number of threads for computation
55 | numThreads = 4
56 | }
57 | useXNNPACK = useXNNPack
58 | useNNAPI = true
59 | }
60 | interpreter =
61 | Interpreter(FileUtil.loadMappedFile(context, "facenet_512.tflite"), interpreterOptions)
62 | }
63 |
64 |     // Gets a face embedding using FaceNet
65 | suspend fun getFaceEmbedding(image: Bitmap) =
66 | withContext(Dispatchers.Default) {
67 | return@withContext runFaceNet(convertBitmapToBuffer(image))[0]
68 | }
69 |
70 | // Run the FaceNet model
71 |     private fun runFaceNet(inputs: Any): Array<FloatArray> {
72 | val faceNetModelOutputs = Array(1) { FloatArray(embeddingDim) }
73 | interpreter.run(inputs, faceNetModelOutputs)
74 | return faceNetModelOutputs
75 | }
76 |
77 | // Resize the given bitmap and convert it to a ByteBuffer
78 | private fun convertBitmapToBuffer(image: Bitmap): ByteBuffer {
79 | return imageTensorProcessor.process(TensorImage.fromBitmap(image)).buffer
80 | }
81 |
82 | // Op to perform standardization
83 | // x' = ( x - mean ) / std_dev
84 | class StandardizeOp : TensorOperator {
85 |
86 | override fun apply(p0: TensorBuffer?): TensorBuffer {
87 | val pixels = p0!!.floatArray
88 | val mean = pixels.average().toFloat()
89 | var std = sqrt(pixels.map { pi -> (pi - mean).pow(2) }.sum() / pixels.size.toFloat())
90 | std = max(std, 1f / sqrt(pixels.size.toFloat()))
91 | for (i in pixels.indices) {
92 | pixels[i] = (pixels[i] - mean) / std
93 | }
94 | val output = TensorBufferFloat.createFixedSize(p0.shape, DataType.FLOAT32)
95 | output.loadArray(pixels)
96 | return output
97 | }
98 | }
99 | }
100 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/domain/face_detection/FaceSpoofDetector.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.domain.face_detection
2 |
3 | import android.content.Context
4 | import android.graphics.Bitmap
5 | import android.graphics.Color
6 | import android.graphics.Rect
7 | import androidx.core.graphics.get
8 | import androidx.core.graphics.set
9 | import kotlinx.coroutines.Dispatchers
10 | import kotlinx.coroutines.withContext
11 | import org.koin.core.annotation.Single
12 | import org.tensorflow.lite.DataType
13 | import org.tensorflow.lite.Interpreter
14 | import org.tensorflow.lite.gpu.CompatibilityList
15 | import org.tensorflow.lite.gpu.GpuDelegate
16 | import org.tensorflow.lite.support.common.FileUtil
17 | import org.tensorflow.lite.support.common.ops.CastOp
18 | import org.tensorflow.lite.support.image.ImageProcessor
19 | import org.tensorflow.lite.support.image.TensorImage
20 | import kotlin.math.exp
21 | import kotlin.time.DurationUnit
22 | import kotlin.time.measureTime
23 |
24 | /*
25 |
26 | Utility class for interacting with FaceSpoofDetector
27 |
28 | - It uses the MiniFASNet model from https://github.com/minivision-ai/Silent-Face-Anti-Spoofing
29 | - The preprocessing methods are derived from
30 | https://github.com/serengil/deepface/blob/master/deepface/models/spoofing/FasNet.py
31 | - The model weights are in the PyTorch format. To convert them to the TFLite format,
32 | check the notebook linked in the README of the project
33 | - An instance of this class is injected in ImageVectorUseCase.kt
34 |
35 | */
36 | @Single
37 | class FaceSpoofDetector(context: Context, useGpu: Boolean = false, useXNNPack: Boolean = false, useNNAPI: Boolean = false) {
38 |
39 | data class FaceSpoofResult(val isSpoof: Boolean, val score: Float, val timeMillis: Long)
40 |
41 | private val scale1 = 2.7f
42 | private val scale2 = 4.0f
43 | private val inputImageDim = 80
44 | private val outputDim = 3
45 |
46 | private var firstModelInterpreter: Interpreter
47 | private var secondModelInterpreter: Interpreter
48 | private val imageTensorProcessor = ImageProcessor.Builder()
49 | .add(CastOp(DataType.FLOAT32))
50 | .build()
51 |
52 | init {
53 | // Initialize TFLiteInterpreter
54 | val interpreterOptions =
55 | Interpreter.Options().apply {
56 | // Add the GPU Delegate if supported.
57 | // See -> https://www.tensorflow.org/lite/performance/gpu#android
58 | if (useGpu) {
59 | if (CompatibilityList().isDelegateSupportedOnThisDevice) {
60 | addDelegate(GpuDelegate(CompatibilityList().bestOptionsForThisDevice))
61 | }
62 | } else {
63 | // Number of threads for computation
64 | numThreads = 4
65 | }
66 | useXNNPACK = useXNNPack
67 | this.useNNAPI = useNNAPI
68 | }
69 | firstModelInterpreter =
70 | Interpreter(FileUtil.loadMappedFile(context, "spoof_model_scale_2_7.tflite"), interpreterOptions)
71 | secondModelInterpreter =
72 | Interpreter(FileUtil.loadMappedFile(context, "spoof_model_scale_4_0.tflite"), interpreterOptions)
73 | }
74 |
75 | suspend fun detectSpoof(frameImage: Bitmap, faceRect: Rect): FaceSpoofResult =
76 | withContext(Dispatchers.Default) {
77 | // Crop the face region at the two bounding-box scales,
78 | // resize each crop to the model's input size,
79 | // and convert the pixels from RGB -> BGR
80 | val croppedImage1 =
81 | crop(
82 | origImage = frameImage,
83 | bbox = faceRect,
84 | bboxScale = scale1,
85 | targetWidth = inputImageDim,
86 | targetHeight = inputImageDim
87 | )
88 | for (i in 0 until croppedImage1.width) {
89 | for (j in 0 until croppedImage1.height) {
90 | croppedImage1[i, j] = Color.rgb(
91 | Color.blue(croppedImage1[i, j]),
92 | Color.green(croppedImage1[i, j]),
93 | Color.red(croppedImage1[i, j])
94 | )
95 | }
96 | }
97 | val croppedImage2 =
98 | crop(
99 | origImage = frameImage,
100 | bbox = faceRect,
101 | bboxScale = scale2,
102 | targetWidth = inputImageDim,
103 | targetHeight = inputImageDim
104 | )
105 | for (i in 0 until croppedImage2.width) {
106 | for (j in 0 until croppedImage2.height) {
107 | croppedImage2[i, j] = Color.rgb(
108 | Color.blue(croppedImage2[i, j]),
109 | Color.green(croppedImage2[i, j]),
110 | Color.red(croppedImage2[i, j])
111 | )
112 | }
113 | }
114 | val input1 = imageTensorProcessor.process(TensorImage.fromBitmap(croppedImage1)).buffer
115 | val input2 = imageTensorProcessor.process(TensorImage.fromBitmap(croppedImage2)).buffer
116 | val output1 = arrayOf(FloatArray(outputDim))
117 | val output2 = arrayOf(FloatArray(outputDim))
118 |
119 | val time = measureTime {
120 | firstModelInterpreter.run(input1, output1)
121 | secondModelInterpreter.run(input2, output2)
122 | }.toLong(DurationUnit.MILLISECONDS)
123 |
124 | val output = softMax(output1[0]).zip(softMax(output2[0])).map {
125 | (it.first + it.second)
126 | }
127 | val label = output.indexOf(output.max())
128 | val isSpoof = label != 1
129 | val score = output[label] / 2f
130 | 
131 | return@withContext FaceSpoofResult(isSpoof = isSpoof, score = score, timeMillis = time)
132 | }
133 |
134 | private fun softMax(x: FloatArray): FloatArray {
135 | val exp = x.map { exp(it) }
136 | val expSum = exp.sum()
137 | return exp.map { it / expSum }.toFloatArray()
138 | }
139 |
140 | private fun crop(
141 | origImage: Bitmap,
142 | bbox: Rect,
143 | bboxScale: Float,
144 | targetWidth: Int,
145 | targetHeight: Int
146 | ): Bitmap {
147 | val srcWidth = origImage.width
148 | val srcHeight = origImage.height
149 | val scaledBox = getScaledBox(srcWidth, srcHeight, bbox, bboxScale)
150 | val croppedBitmap =
151 | Bitmap.createBitmap(
152 | origImage,
153 | scaledBox.left,
154 | scaledBox.top,
155 | scaledBox.width(),
156 | scaledBox.height()
157 | )
158 | return Bitmap.createScaledBitmap(croppedBitmap, targetWidth, targetHeight, true)
159 | }
160 |
161 | private fun getScaledBox(srcWidth: Int, srcHeight: Int, box: Rect, bboxScale: Float): Rect {
162 | val x = box.left
163 | val y = box.top
164 | val w = box.width()
165 | val h = box.height()
166 | val scale = floatArrayOf((srcHeight - 1f) / h, (srcWidth - 1f) / w, bboxScale).min()
167 | val newWidth = w * scale
168 | val newHeight = h * scale
169 | val centerX = w / 2 + x
170 | val centerY = h / 2 + y
171 | var topLeftX = centerX - newWidth / 2
172 | var topLeftY = centerY - newHeight / 2
173 | var bottomRightX = centerX + newWidth / 2
174 | var bottomRightY = centerY + newHeight / 2
175 | if (topLeftX < 0) {
176 | bottomRightX -= topLeftX
177 | topLeftX = 0f
178 | }
179 | if (topLeftY < 0) {
180 | bottomRightY -= topLeftY
181 | topLeftY = 0f
182 | }
183 | if (bottomRightX > srcWidth - 1) {
184 | topLeftX -= (bottomRightX - (srcWidth - 1))
185 | bottomRightX = (srcWidth - 1).toFloat()
186 | }
187 | if (bottomRightY > srcHeight - 1) {
188 | topLeftY -= (bottomRightY - (srcHeight - 1))
189 | bottomRightY = (srcHeight - 1).toFloat()
190 | }
191 | return Rect(topLeftX.toInt(), topLeftY.toInt(), bottomRightX.toInt(), bottomRightY.toInt())
192 | }
193 | }
194 |
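The score-fusion step in detectSpoof can be checked in isolation: softmax each model's three-class logits, sum them element-wise, and take the argmax, where index 1 is the "real" class. A minimal sketch with hypothetical logits:

import kotlin.math.exp

// Standalone sketch of the two-model fusion: softmax each model's 3-class
// logits, sum element-wise, argmax; any label other than 1 counts as a spoof
fun softmax(x: FloatArray): FloatArray {
    val e = x.map { exp(it) }
    val sum = e.sum()
    return e.map { it / sum }.toFloatArray()
}

fun main() {
    val logitsScale27 = floatArrayOf(0.2f, 2.1f, -0.5f) // hypothetical output, scale 2.7
    val logitsScale40 = floatArrayOf(-0.1f, 1.8f, 0.3f) // hypothetical output, scale 4.0
    val fused = softmax(logitsScale27).zip(softmax(logitsScale40)).map { it.first + it.second }
    val label = fused.indexOf(fused.max())
    println("isSpoof=${label != 1}, score=${fused[label] / 2f}")
}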
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/domain/face_detection/MediapipeFaceDetector.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.domain.face_detection
2 |
3 | import android.content.Context
4 | import android.graphics.Bitmap
5 | import android.graphics.BitmapFactory
6 | import android.graphics.Matrix
7 | import android.graphics.Rect
8 | import android.net.Uri
9 | import androidx.core.graphics.toRect
10 | import androidx.exifinterface.media.ExifInterface
11 | import com.google.mediapipe.framework.image.BitmapImageBuilder
12 | import com.google.mediapipe.tasks.core.BaseOptions
13 | import com.google.mediapipe.tasks.vision.core.RunningMode
14 | import com.google.mediapipe.tasks.vision.facedetector.FaceDetector
15 | import com.ml.shubham0204.facenet_android.domain.AppException
16 | import com.ml.shubham0204.facenet_android.domain.ErrorCode
17 | import java.io.File
18 | import java.io.FileOutputStream
19 | import kotlinx.coroutines.Dispatchers
20 | import kotlinx.coroutines.withContext
21 | import org.koin.core.annotation.Single
22 |
23 | // Utility class for interacting with Mediapipe's Face Detector
24 | // See https://ai.google.dev/edge/mediapipe/solutions/vision/face_detector/android
25 | @Single
26 | class MediapipeFaceDetector(private val context: Context) {
27 |
28 | // The model is stored in the assets folder
29 | private val modelName = "blaze_face_short_range.tflite"
30 | private val baseOptions = BaseOptions.builder().setModelAssetPath(modelName).build()
31 | private val faceDetectorOptions =
32 | FaceDetector.FaceDetectorOptions.builder()
33 | .setBaseOptions(baseOptions)
34 | .setRunningMode(RunningMode.IMAGE)
35 | .build()
36 | private val faceDetector = FaceDetector.createFromOptions(context, faceDetectorOptions)
37 |
38 | suspend fun getCroppedFace(imageUri: Uri): Result<Bitmap> =
39 | withContext(Dispatchers.IO) {
40 | var imageInputStream =
41 | context.contentResolver.openInputStream(imageUri)
42 | ?: return@withContext Result.failure(
43 | AppException(ErrorCode.FACE_DETECTOR_FAILURE)
44 | )
45 | var imageBitmap = BitmapFactory.decodeStream(imageInputStream)
46 | imageInputStream.close()
47 |
48 | // Re-create the input stream to reset its position:
49 | // the stream returns false for markSupported(), so it cannot be
50 | // rewound after decoding the Bitmap.
51 | // Without a fresh stream, no EXIF data is read
52 | imageInputStream =
53 | context.contentResolver.openInputStream(imageUri)
54 | ?: return@withContext Result.failure(
55 | AppException(ErrorCode.FACE_DETECTOR_FAILURE)
56 | )
57 | val exifInterface = ExifInterface(imageInputStream)
58 | imageBitmap =
59 | when (
60 | exifInterface.getAttributeInt(
61 | ExifInterface.TAG_ORIENTATION,
62 | ExifInterface.ORIENTATION_UNDEFINED
63 | )
64 | ) {
65 | ExifInterface.ORIENTATION_ROTATE_90 -> rotateBitmap(imageBitmap, 90f)
66 | ExifInterface.ORIENTATION_ROTATE_180 -> rotateBitmap(imageBitmap, 180f)
67 | ExifInterface.ORIENTATION_ROTATE_270 -> rotateBitmap(imageBitmap, 270f)
68 | else -> imageBitmap
69 | }
70 | imageInputStream.close()
71 |
72 | // We need exactly one face in the image; in all other
73 | // cases, return the appropriate error
74 | val faces = faceDetector.detect(BitmapImageBuilder(imageBitmap).build()).detections()
75 | if (faces.size > 1) {
76 | return@withContext Result.failure(AppException(ErrorCode.MULTIPLE_FACES))
77 | } else if (faces.size == 0) {
78 | return@withContext Result.failure(AppException(ErrorCode.NO_FACE))
79 | } else {
80 | // Validate the bounding box and
81 | // return the cropped face
82 | val rect = faces[0].boundingBox().toRect()
83 | if (validateRect(imageBitmap, rect)) {
84 | val croppedBitmap =
85 | Bitmap.createBitmap(
86 | imageBitmap,
87 | rect.left,
88 | rect.top,
89 | rect.width(),
90 | rect.height()
91 | )
92 | return@withContext Result.success(croppedBitmap)
93 | } else {
94 | return@withContext Result.failure(
95 | AppException(ErrorCode.FACE_DETECTOR_FAILURE)
96 | )
97 | }
98 | }
99 | }
100 |
101 | // Detects multiple faces from the `frameBitmap`
102 | // and returns pairs of (croppedFace, boundingBoxRect)
103 | // Used by ImageVectorUseCase.kt
104 | suspend fun getAllCroppedFaces(frameBitmap: Bitmap): List<Pair<Bitmap, Rect>> =
105 | withContext(Dispatchers.IO) {
106 | return@withContext faceDetector
107 | .detect(BitmapImageBuilder(frameBitmap).build())
108 | .detections()
109 | .filter { validateRect(frameBitmap, it.boundingBox().toRect()) }
110 | .map { detection -> detection.boundingBox().toRect() }
111 | .map { rect ->
112 | val croppedBitmap =
113 | Bitmap.createBitmap(
114 | frameBitmap,
115 | rect.left,
116 | rect.top,
117 | rect.width(),
118 | rect.height()
119 | )
120 | Pair(croppedBitmap, rect)
121 | }
122 | }
123 |
124 | // DEBUG: For testing purposes, saves the Bitmap to the app's private storage
125 | fun saveBitmap(context: Context, image: Bitmap, name: String) {
126 | // `use` closes the stream even if compress() throws
127 | FileOutputStream(File(context.filesDir.absolutePath + "/$name.png")).use { image.compress(Bitmap.CompressFormat.PNG, 100, it) }
128 | }
129 |
130 | private fun rotateBitmap(source: Bitmap, degrees: Float): Bitmap {
131 | val matrix = Matrix()
132 | matrix.postRotate(degrees)
133 | return Bitmap.createBitmap(source, 0, 0, source.width, source.height, matrix, false)
134 | }
135 |
136 | // Check if the bounds of `boundingBox` fit within the
137 | // limits of `cameraFrameBitmap`
138 | private fun validateRect(cameraFrameBitmap: Bitmap, boundingBox: Rect): Boolean {
139 | return boundingBox.left >= 0 &&
140 | boundingBox.top >= 0 &&
141 | (boundingBox.left + boundingBox.width()) < cameraFrameBitmap.width &&
142 | (boundingBox.top + boundingBox.height()) < cameraFrameBitmap.height
143 | }
144 | }
145 |
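A minimal caller sketch (the scope and URI below are hypothetical) showing how the Result-based API above is consumed; the failure branch receives the AppException raised for MULTIPLE_FACES, NO_FACE, or FACE_DETECTOR_FAILURE:

import android.graphics.Bitmap
import android.net.Uri
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch

// Hypothetical caller: crop a face from a gallery URI and hand it downstream
fun processPickedImage(detector: MediapipeFaceDetector, imageUri: Uri) {
    CoroutineScope(Dispatchers.Default).launch {
        detector.getCroppedFace(imageUri)
            .onSuccess { face: Bitmap ->
                // e.g. feed `face` to FaceNet.getFaceEmbedding
            }
            .onFailure { error ->
                // `error` is an AppException carrying an ErrorCode
            }
    }
}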
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/presentation/components/AppAlertDialog.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.presentation.components
2 |
3 | import androidx.compose.material3.AlertDialog
4 | import androidx.compose.material3.Text
5 | import androidx.compose.material3.TextButton
6 | import androidx.compose.runtime.Composable
7 | import androidx.compose.runtime.getValue
8 | import androidx.compose.runtime.mutableStateOf
9 | import androidx.compose.runtime.remember
10 |
11 | private var title = ""
12 | private var text = ""
13 | private var positiveButtonText = ""
14 | private var negativeButtonText = ""
15 | private var positiveButtonOnClick: (() -> Unit) = {}
16 | private var negativeButtonOnClick: (() -> Unit) = {}
17 | private val alertDialogShowStatus = mutableStateOf(false)
18 |
19 | @Composable
20 | fun AppAlertDialog() {
21 | val visible by remember { alertDialogShowStatus }
22 | if (visible) {
23 | AlertDialog(
24 | title = { Text(text = title) },
25 | text = { Text(text = text) },
26 | onDismissRequest = { /* All alert dialogs are non-cancellable */ },
27 | confirmButton = {
28 | TextButton(
29 | onClick = {
30 | alertDialogShowStatus.value = false
31 | positiveButtonOnClick()
32 | }
33 | ) {
34 | Text(text = positiveButtonText)
35 | }
36 | },
37 | dismissButton = {
38 | TextButton(
39 | onClick = {
40 | alertDialogShowStatus.value = false
41 | negativeButtonOnClick()
42 | }
43 | ) {
44 | Text(text = negativeButtonText)
45 | }
46 | }
47 | )
48 | }
49 | }
50 |
51 | fun createAlertDialog(
52 | dialogTitle: String,
53 | dialogText: String,
54 | dialogPositiveButtonText: String,
55 | dialogNegativeButtonText: String?,
56 | onPositiveButtonClick: (() -> Unit),
57 | onNegativeButtonClick: (() -> Unit)?
58 | ) {
59 | title = dialogTitle
60 | text = dialogText
61 | positiveButtonOnClick = onPositiveButtonClick
62 | onNegativeButtonClick?.let { negativeButtonOnClick = it }
63 | positiveButtonText = dialogPositiveButtonText
64 | dialogNegativeButtonText?.let { negativeButtonText = it }
65 | alertDialogShowStatus.value = true
66 | }
67 |
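A minimal usage sketch (the screen below is hypothetical): AppAlertDialog() must already be placed in the composition; a later call to createAlertDialog() flips the shared visibility state and shows it:

import androidx.compose.material3.Button
import androidx.compose.material3.Text
import androidx.compose.runtime.Composable

@Composable
fun RemoveItemScreen(onRemove: () -> Unit) {
    // The dialog host must be in the tree before createAlertDialog() is called
    AppAlertDialog()
    Button(onClick = {
        createAlertDialog(
            dialogTitle = "Remove item",
            dialogText = "This action cannot be undone.",
            dialogPositiveButtonText = "Remove",
            dialogNegativeButtonText = "Cancel",
            onPositiveButtonClick = onRemove,
            onNegativeButtonClick = {}
        )
    }) {
        Text(text = "Remove")
    }
}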
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/presentation/components/AppProgressDialog.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.presentation.components
2 |
3 | import androidx.compose.foundation.background
4 | import androidx.compose.foundation.layout.Box
5 | import androidx.compose.foundation.layout.Column
6 | import androidx.compose.foundation.layout.Spacer
7 | import androidx.compose.foundation.layout.fillMaxWidth
8 | import androidx.compose.foundation.layout.padding
9 | import androidx.compose.foundation.shape.RoundedCornerShape
10 | import androidx.compose.material3.LinearProgressIndicator
11 | import androidx.compose.material3.Text
12 | import androidx.compose.runtime.Composable
13 | import androidx.compose.runtime.getValue
14 | import androidx.compose.runtime.mutableStateOf
15 | import androidx.compose.runtime.remember
16 | import androidx.compose.ui.Alignment
17 | import androidx.compose.ui.Modifier
18 | import androidx.compose.ui.graphics.Color
19 | import androidx.compose.ui.text.style.TextAlign
20 | import androidx.compose.ui.unit.dp
21 | import androidx.compose.ui.window.Dialog
22 |
23 | private val progressDialogVisibleState = mutableStateOf(false)
24 | private val progressDialogText = mutableStateOf("")
25 |
26 | @Composable
27 | fun AppProgressDialog() {
28 | val isVisible by remember { progressDialogVisibleState }
29 | if (isVisible) {
30 | Dialog(onDismissRequest = { /* Progress dialogs are non-cancellable */ }) {
31 | Box(
32 | contentAlignment = Alignment.Center,
33 | modifier =
34 | Modifier.fillMaxWidth()
35 | .background(Color.White, shape = RoundedCornerShape(8.dp))
36 | ) {
37 | Column(
38 | horizontalAlignment = Alignment.CenterHorizontally,
39 | modifier = Modifier.padding(vertical = 24.dp)
40 | ) {
41 | LinearProgressIndicator(modifier = Modifier.fillMaxWidth())
42 | Spacer(modifier = Modifier.padding(4.dp))
43 | Text(
44 | text = progressDialogText.value,
45 | textAlign = TextAlign.Center,
46 | modifier = Modifier.fillMaxWidth().padding(horizontal = 16.dp)
47 | )
48 | }
49 | }
50 | }
51 | }
52 | }
53 |
54 | fun setProgressDialogText(message: String) {
55 | progressDialogText.value = message
56 | }
57 |
58 | fun showProgressDialog() {
59 | progressDialogVisibleState.value = true
60 | progressDialogText.value = ""
61 | }
62 |
63 | fun hideProgressDialog() {
64 | progressDialogVisibleState.value = false
65 | }
66 |
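A minimal usage sketch (the task below is hypothetical): with AppProgressDialog() placed somewhere in the composition, any coroutine can drive it through the three helpers:

import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.delay
import kotlinx.coroutines.launch

// Hypothetical long-running task reporting progress through the shared dialog state
fun runWithProgress(scope: CoroutineScope) {
    scope.launch(Dispatchers.Default) {
        showProgressDialog()
        repeat(10) { i ->
            setProgressDialogText("Processed ${i + 1} of 10 items")
            delay(100) // stand-in for real work
        }
        hideProgressDialog()
    }
}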
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/presentation/components/DelayedVisibility.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.presentation.components
2 |
3 | import androidx.compose.animation.AnimatedVisibility
4 | import androidx.compose.animation.core.tween
5 | import androidx.compose.animation.fadeIn
6 | import androidx.compose.animation.fadeOut
7 | import androidx.compose.runtime.Composable
8 |
9 | @Composable
10 | fun DelayedVisibility(visible: Boolean, content: @Composable (() -> Unit)) {
11 | AnimatedVisibility(
12 | visible = visible,
13 | enter = fadeIn(animationSpec = tween(1000)),
14 | exit = fadeOut(animationSpec = tween(1000))
15 | ) {
16 | content()
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/presentation/components/FaceDetectionOverlay.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.presentation.components
2 |
3 | import android.annotation.SuppressLint
4 | import android.content.Context
5 | import android.graphics.Bitmap
6 | import android.graphics.Canvas
7 | import android.graphics.Color
8 | import android.graphics.Matrix
9 | import android.graphics.Paint
10 | import android.graphics.RectF
11 | import android.view.SurfaceHolder
12 | import android.view.SurfaceView
13 | import android.widget.FrameLayout
14 | import androidx.camera.core.AspectRatio
15 | import androidx.camera.core.CameraSelector
16 | import androidx.camera.core.ExperimentalGetImage
17 | import androidx.camera.core.ImageAnalysis
18 | import androidx.camera.core.Preview
19 | import androidx.camera.lifecycle.ProcessCameraProvider
20 | import androidx.camera.view.PreviewView
21 | import androidx.core.content.ContextCompat
22 | import androidx.core.graphics.toRectF
23 | import androidx.core.view.doOnLayout
24 | import androidx.lifecycle.LifecycleOwner
25 | import com.ml.shubham0204.facenet_android.presentation.screens.detect_screen.DetectScreenViewModel
26 | import java.util.concurrent.Executors
27 | import kotlinx.coroutines.CoroutineScope
28 | import kotlinx.coroutines.Dispatchers
29 | import kotlinx.coroutines.launch
30 | import kotlinx.coroutines.withContext
31 |
32 | @SuppressLint("ViewConstructor")
33 | @ExperimentalGetImage
34 | class FaceDetectionOverlay(
35 | private val lifecycleOwner: LifecycleOwner,
36 | private val context: Context,
37 | private val viewModel: DetectScreenViewModel
38 | ) : FrameLayout(context) {
39 |
40 | private var overlayWidth: Int = 0
41 | private var overlayHeight: Int = 0
42 |
43 | private var imageTransform: Matrix = Matrix()
44 | private var boundingBoxTransform: Matrix = Matrix()
45 | private var isImageTransformedInitialized = false
46 | private var isBoundingBoxTransformedInitialized = false
47 |
48 | private lateinit var frameBitmap: Bitmap
49 | private var isProcessing = false
50 | private var cameraFacing: Int = CameraSelector.LENS_FACING_BACK
51 | private lateinit var boundingBoxOverlay: BoundingBoxOverlay
52 | private lateinit var previewView: PreviewView
53 |
54 | var predictions: Array<Prediction> = arrayOf()
55 |
56 | init {
57 | initializeCamera(cameraFacing)
58 | doOnLayout {
59 | overlayHeight = it.measuredHeight
60 | overlayWidth = it.measuredWidth
61 | }
62 | }
63 |
64 | fun initializeCamera(cameraFacing: Int) {
65 | this.cameraFacing = cameraFacing
66 | this.isImageTransformedInitialized = false
67 | this.isBoundingBoxTransformedInitialized = false
68 | val cameraProviderFuture = ProcessCameraProvider.getInstance(context)
69 | val previewView = PreviewView(context)
70 | val executor = ContextCompat.getMainExecutor(context)
71 | cameraProviderFuture.addListener(
72 | {
73 | val cameraProvider = cameraProviderFuture.get()
74 | val preview =
75 | Preview.Builder().build().also {
76 | it.setSurfaceProvider(previewView.surfaceProvider)
77 | }
78 | val cameraSelector =
79 | CameraSelector.Builder().requireLensFacing(cameraFacing).build()
80 | val frameAnalyzer =
81 | ImageAnalysis.Builder()
82 | .setTargetAspectRatio(AspectRatio.RATIO_16_9)
83 | .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
84 | .setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_RGBA_8888)
85 | .build()
86 | frameAnalyzer.setAnalyzer(Executors.newSingleThreadExecutor(), analyzer)
87 | cameraProvider.unbindAll()
88 | cameraProvider.bindToLifecycle(
89 | lifecycleOwner,
90 | cameraSelector,
91 | preview,
92 | frameAnalyzer
93 | )
94 | },
95 | executor
96 | )
97 | if (childCount == 2) {
98 | removeView(this.previewView)
99 | removeView(this.boundingBoxOverlay)
100 | }
101 | this.previewView = previewView
102 | addView(this.previewView)
103 |
104 | val boundingBoxOverlayParams =
105 | LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)
106 | this.boundingBoxOverlay = BoundingBoxOverlay(context)
107 | this.boundingBoxOverlay.setWillNotDraw(false)
108 | this.boundingBoxOverlay.setZOrderOnTop(true)
109 | addView(this.boundingBoxOverlay, boundingBoxOverlayParams)
110 | }
111 |
112 | private val analyzer =
113 | ImageAnalysis.Analyzer { image ->
114 | if (isProcessing) {
115 | image.close()
116 | return@Analyzer
117 | }
118 | isProcessing = true
119 |
120 | // Convert the android.media.Image to a Bitmap
121 | frameBitmap =
122 | Bitmap.createBitmap(
123 | image.image!!.width,
124 | image.image!!.height,
125 | Bitmap.Config.ARGB_8888
126 | )
127 | frameBitmap.copyPixelsFromBuffer(image.planes[0].buffer)
128 |
129 | // Configure frameHeight and frameWidth for output2overlay transformation matrix
130 | // and apply it to `frameBitmap`
131 | if (!isImageTransformedInitialized) {
132 | imageTransform = Matrix()
133 | imageTransform.apply { postRotate(image.imageInfo.rotationDegrees.toFloat()) }
134 | isImageTransformedInitialized = true
135 | }
136 | frameBitmap =
137 | Bitmap.createBitmap(
138 | frameBitmap,
139 | 0,
140 | 0,
141 | frameBitmap.width,
142 | frameBitmap.height,
143 | imageTransform,
144 | false
145 | )
146 |
147 | if (!isBoundingBoxTransformedInitialized) {
148 | boundingBoxTransform = Matrix()
149 | boundingBoxTransform.apply {
150 | setScale(
151 | overlayWidth / frameBitmap.width.toFloat(),
152 | overlayHeight / frameBitmap.height.toFloat()
153 | )
154 | if (cameraFacing == CameraSelector.LENS_FACING_FRONT) {
155 | // Mirror the bounding box coordinates
156 | // for front-facing camera
157 | postScale(
158 | -1f,
159 | 1f,
160 | overlayWidth.toFloat() / 2.0f,
161 | overlayHeight.toFloat() / 2.0f
162 | )
163 | }
164 | }
165 | isBoundingBoxTransformedInitialized = true
166 | }
167 | CoroutineScope(Dispatchers.Default).launch {
168 | val predictions = ArrayList<Prediction>()
169 | val (metrics, results) = viewModel.imageVectorUseCase.getNearestPersonName(frameBitmap)
170 | results.forEach {
171 | (name, boundingBox, spoofResult) ->
172 | val box = boundingBox.toRectF()
173 | var personName = name
174 | if (viewModel.getNumPeople().toInt() == 0) {
175 | personName = ""
176 | }
177 | if (spoofResult != null && spoofResult.isSpoof) {
178 | personName = "$personName (Spoof: ${spoofResult.score})"
179 | }
180 | boundingBoxTransform.mapRect(box)
181 | predictions.add(Prediction(box, personName))
182 | }
183 | withContext(Dispatchers.Main) {
184 | viewModel.faceDetectionMetricsState.value = metrics
185 | this@FaceDetectionOverlay.predictions = predictions.toTypedArray()
186 | boundingBoxOverlay.invalidate()
187 | isProcessing = false
188 | }
189 | }
190 | image.close()
191 | }
192 |
193 | data class Prediction(var bbox: RectF, var label: String)
194 |
195 | inner class BoundingBoxOverlay(context: Context) :
196 | SurfaceView(context), SurfaceHolder.Callback {
197 |
198 | private val boxPaint =
199 | Paint().apply {
200 | color = Color.parseColor("#4D90caf9")
201 | style = Paint.Style.FILL
202 | }
203 | private val textPaint =
204 | Paint().apply {
205 | strokeWidth = 2.0f
206 | textSize = 36f
207 | color = Color.WHITE
208 | }
209 |
210 | override fun surfaceCreated(holder: SurfaceHolder) {}
211 |
212 | override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {}
213 |
214 | override fun surfaceDestroyed(holder: SurfaceHolder) {}
215 |
216 | override fun onDraw(canvas: Canvas) {
217 | predictions.forEach {
218 | canvas.drawRoundRect(it.bbox, 16f, 16f, boxPaint)
219 | canvas.drawText(it.label, it.bbox.centerX(), it.bbox.centerY(), textPaint)
220 | }
221 | }
222 | }
223 | }
224 |
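The bounding-box mapping above reduces to a scale from analyzer-frame coordinates to overlay coordinates, followed by a horizontal mirror about the overlay's centre for the front camera. A small sketch with hypothetical dimensions:

import android.graphics.Matrix
import android.graphics.RectF

// Builds the same frame-to-overlay transform used by FaceDetectionOverlay:
// a scale to the overlay size, plus a horizontal mirror for the front camera
fun buildOverlayTransform(
    frameWidth: Float,
    frameHeight: Float,
    overlayWidth: Float,
    overlayHeight: Float,
    mirror: Boolean
): Matrix =
    Matrix().apply {
        setScale(overlayWidth / frameWidth, overlayHeight / frameHeight)
        if (mirror) {
            postScale(-1f, 1f, overlayWidth / 2f, overlayHeight / 2f)
        }
    }

// Hypothetical usage: map a detection from a 480x640 frame onto a 1080x1440 overlay
fun exampleMapping(): RectF {
    val box = RectF(100f, 200f, 200f, 300f)
    buildOverlayTransform(480f, 640f, 1080f, 1440f, mirror = true).mapRect(box)
    return box
}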
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/presentation/screens/add_face/AddFaceScreen.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.presentation.screens.add_face
2 |
3 | import android.widget.Toast
4 | import androidx.activity.compose.rememberLauncherForActivityResult
5 | import androidx.activity.result.PickVisualMediaRequest
6 | import androidx.activity.result.contract.ActivityResultContracts
7 | import androidx.compose.foundation.layout.Arrangement
8 | import androidx.compose.foundation.layout.Column
9 | import androidx.compose.foundation.layout.Row
10 | import androidx.compose.foundation.layout.Spacer
11 | import androidx.compose.foundation.layout.fillMaxSize
12 | import androidx.compose.foundation.layout.fillMaxWidth
13 | import androidx.compose.foundation.layout.height
14 | import androidx.compose.foundation.layout.padding
15 | import androidx.compose.foundation.lazy.grid.GridCells
16 | import androidx.compose.foundation.lazy.grid.LazyVerticalGrid
17 | import androidx.compose.foundation.lazy.grid.items
18 | import androidx.compose.material.icons.Icons
19 | import androidx.compose.material.icons.automirrored.filled.ArrowBack
20 | import androidx.compose.material.icons.filled.Photo
21 | import androidx.compose.material3.Button
22 | import androidx.compose.material3.ExperimentalMaterial3Api
23 | import androidx.compose.material3.Icon
24 | import androidx.compose.material3.IconButton
25 | import androidx.compose.material3.MaterialTheme
26 | import androidx.compose.material3.Scaffold
27 | import androidx.compose.material3.Text
28 | import androidx.compose.material3.TextField
29 | import androidx.compose.material3.TopAppBar
30 | import androidx.compose.runtime.Composable
31 | import androidx.compose.runtime.getValue
32 | import androidx.compose.runtime.remember
33 | import androidx.compose.runtime.setValue
34 | import androidx.compose.ui.Modifier
35 | import androidx.compose.ui.platform.LocalContext
36 | import androidx.compose.ui.unit.dp
37 | import coil.compose.AsyncImage
38 | import com.ml.shubham0204.facenet_android.presentation.components.AppProgressDialog
39 | import com.ml.shubham0204.facenet_android.presentation.components.DelayedVisibility
40 | import com.ml.shubham0204.facenet_android.presentation.components.hideProgressDialog
41 | import com.ml.shubham0204.facenet_android.presentation.components.showProgressDialog
42 | import com.ml.shubham0204.facenet_android.presentation.theme.FaceNetAndroidTheme
43 | import org.koin.androidx.compose.koinViewModel
44 |
45 | @OptIn(ExperimentalMaterial3Api::class)
46 | @Composable
47 | fun AddFaceScreen(onNavigateBack: (() -> Unit)) {
48 | FaceNetAndroidTheme {
49 | Scaffold(
50 | modifier = Modifier.fillMaxSize(),
51 | topBar = {
52 | TopAppBar(
53 | title = {
54 | Text(text = "Add Faces", style = MaterialTheme.typography.headlineSmall)
55 | },
56 | navigationIcon = {
57 | IconButton(onClick = onNavigateBack) {
58 | Icon(
59 | imageVector = Icons.AutoMirrored.Default.ArrowBack,
60 | contentDescription = "Navigate Back"
61 | )
62 | }
63 | }
64 | )
65 | }
66 | ) { innerPadding ->
67 | Column(modifier = Modifier.padding(innerPadding)) {
68 | val viewModel: AddFaceScreenViewModel = koinViewModel()
69 | ScreenUI(viewModel)
70 | ImageReadProgressDialog(viewModel, onNavigateBack)
71 | }
72 | }
73 | }
74 | }
75 |
76 | @Composable
77 | private fun ScreenUI(viewModel: AddFaceScreenViewModel) {
78 | val pickVisualMediaLauncher =
79 | rememberLauncherForActivityResult(
80 | contract = ActivityResultContracts.PickMultipleVisualMedia()
81 | ) {
82 | viewModel.selectedImageURIs.value = it
83 | }
84 | var personName by remember { viewModel.personNameState }
85 | Column(modifier = Modifier.fillMaxWidth().padding(horizontal = 24.dp)) {
86 | TextField(
87 | modifier = Modifier.fillMaxWidth(),
88 | value = personName,
89 | onValueChange = { personName = it },
90 | label = { Text(text = "Enter the person's name") },
91 | singleLine = true
92 | )
93 | Spacer(modifier = Modifier.height(16.dp))
94 | Row(
95 | modifier = Modifier.fillMaxWidth(),
96 | horizontalArrangement = Arrangement.SpaceEvenly,
97 | ) {
98 | Button(
99 | enabled = viewModel.personNameState.value.isNotEmpty(),
100 | onClick = {
101 | pickVisualMediaLauncher.launch(
102 | PickVisualMediaRequest(ActivityResultContracts.PickVisualMedia.ImageOnly)
103 | )
104 | }
105 | ) {
106 | Icon(imageVector = Icons.Default.Photo, contentDescription = "Choose photos")
107 | Text(text = "Choose photos")
108 | }
109 | DelayedVisibility(viewModel.selectedImageURIs.value.isNotEmpty()) {
110 | Button(onClick = { viewModel.addImages() }) { Text(text = "Add to database") }
111 | }
112 | }
113 | DelayedVisibility(viewModel.selectedImageURIs.value.isNotEmpty()) {
114 | Text(
115 | text = "${viewModel.selectedImageURIs.value.size} image(s) selected",
116 | style = MaterialTheme.typography.labelSmall
117 | )
118 | }
119 | ImagesGrid(viewModel)
120 | }
121 | }
122 |
123 | @Composable
124 | private fun ImagesGrid(viewModel: AddFaceScreenViewModel) {
125 | val uris by remember { viewModel.selectedImageURIs }
126 | LazyVerticalGrid(columns = GridCells.Fixed(2)) {
127 | items(uris) { AsyncImage(model = it, contentDescription = null) }
128 | }
129 | }
130 |
131 | @Composable
132 | private fun ImageReadProgressDialog(viewModel: AddFaceScreenViewModel, onNavigateBack: () -> Unit) {
133 | val isProcessing by remember { viewModel.isProcessingImages }
134 | val numImagesProcessed by remember { viewModel.numImagesProcessed }
135 | val context = LocalContext.current
136 | AppProgressDialog()
137 | if (isProcessing) {
138 | showProgressDialog()
139 | } else {
140 | if (numImagesProcessed > 0) {
141 | onNavigateBack()
142 | Toast.makeText(context, "Added to database", Toast.LENGTH_SHORT).show()
143 | }
144 | hideProgressDialog()
145 | }
146 | }
147 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/presentation/screens/add_face/AddFaceScreenViewModel.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.presentation.screens.add_face
2 |
3 | import android.net.Uri
4 | import androidx.compose.runtime.MutableState
5 | import androidx.compose.runtime.mutableIntStateOf
6 | import androidx.compose.runtime.mutableStateOf
7 | import androidx.lifecycle.ViewModel
8 | import com.ml.shubham0204.facenet_android.domain.AppException
9 | import com.ml.shubham0204.facenet_android.domain.ImageVectorUseCase
10 | import com.ml.shubham0204.facenet_android.domain.PersonUseCase
11 | import com.ml.shubham0204.facenet_android.presentation.components.setProgressDialogText
12 | import kotlinx.coroutines.CoroutineScope
13 | import kotlinx.coroutines.Dispatchers
14 | import kotlinx.coroutines.launch
15 | import org.koin.android.annotation.KoinViewModel
16 |
17 | @KoinViewModel
18 | class AddFaceScreenViewModel(
19 | private val personUseCase: PersonUseCase,
20 | private val imageVectorUseCase: ImageVectorUseCase
21 | ) : ViewModel() {
22 |
23 | val personNameState: MutableState<String> = mutableStateOf("")
24 | val selectedImageURIs: MutableState<List<Uri>> = mutableStateOf(emptyList())
25 | 
26 | val isProcessingImages: MutableState<Boolean> = mutableStateOf(false)
27 | val numImagesProcessed: MutableState<Int> = mutableIntStateOf(0)
28 |
29 | fun addImages() {
30 | isProcessingImages.value = true
31 | CoroutineScope(Dispatchers.Default).launch {
32 | val id =
33 | personUseCase.addPerson(
34 | personNameState.value,
35 | selectedImageURIs.value.size.toLong()
36 | )
37 | selectedImageURIs.value.forEach {
38 | imageVectorUseCase
39 | .addImage(id, personNameState.value, it)
40 | .onFailure {
41 | val errorMessage = (it as AppException).errorCode.message
42 | setProgressDialogText(errorMessage)
43 | }
44 | .onSuccess {
45 | numImagesProcessed.value += 1
46 | setProgressDialogText("Processed ${numImagesProcessed.value} image(s)")
47 | }
48 | }
49 | isProcessingImages.value = false
50 | }
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/presentation/screens/detect_screen/DetectScreen.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.presentation.screens.detect_screen
2 |
3 | import android.Manifest
4 | import android.content.pm.PackageManager
5 | import androidx.activity.compose.ManagedActivityResultLauncher
6 | import androidx.activity.compose.rememberLauncherForActivityResult
7 | import androidx.activity.result.contract.ActivityResultContracts
8 | import androidx.annotation.OptIn
9 | import androidx.camera.core.CameraSelector
10 |
11 | import androidx.camera.core.ExperimentalGetImage
12 | import androidx.compose.foundation.background
13 | import androidx.compose.foundation.layout.Arrangement
14 | import androidx.compose.foundation.layout.Box
15 | import androidx.compose.foundation.layout.Column
16 | import androidx.compose.foundation.layout.Spacer
17 | import androidx.compose.foundation.layout.fillMaxSize
18 | import androidx.compose.foundation.layout.fillMaxWidth
19 | import androidx.compose.foundation.layout.padding
20 | import androidx.compose.foundation.shape.RoundedCornerShape
21 | import androidx.compose.material.icons.Icons
22 | import androidx.compose.material.icons.filled.Cameraswitch
23 | import androidx.compose.material.icons.filled.Face
24 | import androidx.compose.material3.Button
25 | import androidx.compose.material3.ExperimentalMaterial3Api
26 | import androidx.compose.material3.Icon
27 | import androidx.compose.material3.IconButton
28 | import androidx.compose.material3.MaterialTheme
29 | import androidx.compose.material3.Scaffold
30 | import androidx.compose.material3.Text
31 | import androidx.compose.material3.TopAppBar
32 | import androidx.compose.material3.TopAppBarDefaults
33 | import androidx.compose.runtime.Composable
34 | import androidx.compose.runtime.getValue
35 | import androidx.compose.runtime.mutableIntStateOf
36 | import androidx.compose.runtime.mutableStateOf
37 | import androidx.compose.runtime.remember
38 | import androidx.compose.ui.Alignment
39 | import androidx.compose.ui.Modifier
40 | import androidx.compose.ui.graphics.Color
41 | import androidx.compose.ui.platform.LocalContext
42 | import androidx.compose.ui.platform.LocalLifecycleOwner
43 | import androidx.compose.ui.res.stringResource
44 | import androidx.compose.ui.text.style.TextAlign
45 | import androidx.compose.ui.unit.dp
46 | import androidx.compose.ui.viewinterop.AndroidView
47 | import androidx.core.app.ActivityCompat
48 | import com.ml.shubham0204.facenet_android.R
49 | import com.ml.shubham0204.facenet_android.presentation.components.AppAlertDialog
50 | import com.ml.shubham0204.facenet_android.presentation.components.DelayedVisibility
51 | import com.ml.shubham0204.facenet_android.presentation.components.FaceDetectionOverlay
52 | import com.ml.shubham0204.facenet_android.presentation.components.createAlertDialog
53 | import com.ml.shubham0204.facenet_android.presentation.theme.FaceNetAndroidTheme
54 | import org.koin.androidx.compose.koinViewModel
55 |
56 | private val cameraPermissionStatus = mutableStateOf(false)
57 | private val cameraFacing = mutableIntStateOf(CameraSelector.LENS_FACING_BACK)
58 | private lateinit var cameraPermissionLauncher: ManagedActivityResultLauncher<String, Boolean>
59 |
60 | @kotlin.OptIn(ExperimentalMaterial3Api::class)
61 | @Composable
62 | fun DetectScreen(onOpenFaceListClick: (() -> Unit)) {
63 | FaceNetAndroidTheme {
64 | Scaffold(
65 | modifier = Modifier.fillMaxSize(),
66 | topBar = {
67 | TopAppBar(
68 | colors = TopAppBarDefaults.topAppBarColors(),
69 | title = {
70 | Text(
71 | text = stringResource(id = R.string.app_name),
72 | style = MaterialTheme.typography.headlineSmall
73 | )
74 | },
75 | actions = {
76 | IconButton(onClick = onOpenFaceListClick) {
77 | Icon(
78 | imageVector = Icons.Default.Face,
79 | contentDescription = "Open Face List"
80 | )
81 | }
82 | IconButton(
83 | onClick = {
84 | if (cameraFacing.intValue == CameraSelector.LENS_FACING_BACK) {
85 | cameraFacing.intValue = CameraSelector.LENS_FACING_FRONT
86 | } else {
87 | cameraFacing.intValue = CameraSelector.LENS_FACING_BACK
88 | }
89 | }
90 | ) {
91 | Icon(
92 | imageVector = Icons.Default.Cameraswitch,
93 | contentDescription = "Switch Camera"
94 | )
95 | }
96 | }
97 | )
98 | }
99 | ) { innerPadding ->
100 | Column(modifier = Modifier.padding(innerPadding)) { ScreenUI() }
101 | }
102 | }
103 | }
104 |
105 | @Composable
106 | private fun ScreenUI() {
107 | val viewModel: DetectScreenViewModel = koinViewModel()
108 | Box {
109 | Camera(viewModel)
110 | DelayedVisibility(viewModel.getNumPeople() > 0) {
111 | val metrics by remember { viewModel.faceDetectionMetricsState }
112 | Column {
113 | Text(
114 | text = "Recognition on ${viewModel.getNumPeople()} face(s)",
115 | color = Color.White,
116 | modifier = Modifier.fillMaxWidth(),
117 | textAlign = TextAlign.Center
118 | )
119 | Spacer(modifier = Modifier.weight(1f))
120 | metrics?.let {
121 | Text(
122 | text = "face detection: ${it.timeFaceDetection} ms" +
123 | "\nface embedding: ${it.timeFaceEmbedding} ms" +
124 | "\nvector search: ${it.timeVectorSearch} ms\n" +
125 | "spoof detection: ${it.timeFaceSpoofDetection} ms",
126 | color = Color.White,
127 | modifier = Modifier
128 | .fillMaxWidth()
129 | .padding(bottom = 24.dp),
130 | textAlign = TextAlign.Center
131 | )
132 | }
133 | }
134 | }
135 | DelayedVisibility(viewModel.getNumPeople() == 0L) {
136 | Text(
137 | text = "No images in database",
138 | color = Color.White,
139 | modifier =
140 | Modifier
141 | .fillMaxWidth()
142 | .padding(horizontal = 16.dp, vertical = 8.dp)
143 | .background(Color.Blue, RoundedCornerShape(16.dp))
144 | .padding(8.dp),
145 | textAlign = TextAlign.Center
146 | )
147 | }
148 | AppAlertDialog()
149 | }
150 | }
151 |
152 | @OptIn(ExperimentalGetImage::class)
153 | @Composable
154 | private fun Camera(viewModel: DetectScreenViewModel) {
155 | val context = LocalContext.current
156 | cameraPermissionStatus.value =
157 | ActivityCompat.checkSelfPermission(context, Manifest.permission.CAMERA) ==
158 | PackageManager.PERMISSION_GRANTED
159 | val cameraFacing by remember { cameraFacing }
160 | val lifecycleOwner = LocalLifecycleOwner.current
161 |
162 | cameraPermissionLauncher =
163 | rememberLauncherForActivityResult(ActivityResultContracts.RequestPermission()) {
164 | if (it) {
165 | cameraPermissionStatus.value = true
166 | } else {
167 | cameraPermissionDialog()
168 | }
169 | }
170 |
171 | DelayedVisibility(cameraPermissionStatus.value) {
172 | AndroidView(
173 | modifier = Modifier.fillMaxSize(),
174 | factory = { FaceDetectionOverlay(lifecycleOwner, context, viewModel) },
175 | update = { it.initializeCamera(cameraFacing) }
176 | )
177 | }
178 | DelayedVisibility(!cameraPermissionStatus.value) {
179 | Column(
180 | modifier = Modifier.fillMaxSize(),
181 | verticalArrangement = Arrangement.Center,
182 | horizontalAlignment = Alignment.CenterHorizontally
183 | ) {
184 | Text(
185 | "Allow Camera Permissions\nThe app cannot work without the camera permission.",
186 | textAlign = TextAlign.Center
187 | )
188 | Button(
189 | onClick = { cameraPermissionLauncher.launch(Manifest.permission.CAMERA) },
190 | modifier = Modifier.align(Alignment.CenterHorizontally)
191 | ) {
192 | Text(text = "Allow")
193 | }
194 | }
195 | }
196 | }
197 |
198 | private fun cameraPermissionDialog() {
199 | createAlertDialog(
200 | "Camera Permission",
201 | "The app cannot function without the camera permission.",
202 | "ALLOW",
203 | "CLOSE",
204 | onPositiveButtonClick = { cameraPermissionLauncher.launch(Manifest.permission.CAMERA) },
205 | onNegativeButtonClick = {
206 | // TODO: Handle deny camera permission action
207 | // close the app
208 | }
209 | )
210 | }
211 |
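A minimal sketch of the permission flow used above (the composable below is hypothetical): request the CAMERA permission with the RequestPermission contract and react to the grant result:

import android.Manifest
import androidx.activity.compose.rememberLauncherForActivityResult
import androidx.activity.result.contract.ActivityResultContracts
import androidx.compose.material3.Button
import androidx.compose.material3.Text
import androidx.compose.runtime.Composable

// Hypothetical minimal permission flow mirroring the screen above
@Composable
fun CameraPermissionButton(onGranted: () -> Unit) {
    val launcher = rememberLauncherForActivityResult(
        ActivityResultContracts.RequestPermission()
    ) { granted -> if (granted) onGranted() }
    Button(onClick = { launcher.launch(Manifest.permission.CAMERA) }) {
        Text(text = "Allow camera")
    }
}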
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/presentation/screens/detect_screen/DetectScreenViewModel.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.presentation.screens.detect_screen
2 |
3 | import androidx.compose.runtime.mutableStateOf
4 | import androidx.lifecycle.ViewModel
5 | import com.ml.shubham0204.facenet_android.data.RecognitionMetrics
6 | import com.ml.shubham0204.facenet_android.domain.ImageVectorUseCase
7 | import com.ml.shubham0204.facenet_android.domain.PersonUseCase
8 | import org.koin.android.annotation.KoinViewModel
9 |
10 | @KoinViewModel
11 | class DetectScreenViewModel(
12 | val personUseCase: PersonUseCase,
13 | val imageVectorUseCase: ImageVectorUseCase
14 | ) : ViewModel() {
15 |
16 | val faceDetectionMetricsState = mutableStateOf<RecognitionMetrics?>(null)
17 |
18 | fun getNumPeople(): Long = personUseCase.getCount()
19 | }
20 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/presentation/screens/face_list/FaceListScreen.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.presentation.screens.face_list
2 |
3 | import android.text.format.DateUtils
4 | import androidx.compose.foundation.background
5 | import androidx.compose.foundation.clickable
6 | import androidx.compose.foundation.layout.Column
7 | import androidx.compose.foundation.layout.Row
8 | import androidx.compose.foundation.layout.Spacer
9 | import androidx.compose.foundation.layout.fillMaxSize
10 | import androidx.compose.foundation.layout.fillMaxWidth
11 | import androidx.compose.foundation.layout.height
12 | import androidx.compose.foundation.layout.padding
13 | import androidx.compose.foundation.layout.width
14 | import androidx.compose.foundation.lazy.LazyColumn
15 | import androidx.compose.foundation.lazy.items
16 | import androidx.compose.material.icons.Icons
17 | import androidx.compose.material.icons.automirrored.filled.ArrowBack
18 | import androidx.compose.material.icons.filled.Add
19 | import androidx.compose.material.icons.filled.Clear
20 | import androidx.compose.material3.ExperimentalMaterial3Api
21 | import androidx.compose.material3.FloatingActionButton
22 | import androidx.compose.material3.Icon
23 | import androidx.compose.material3.IconButton
24 | import androidx.compose.material3.MaterialTheme
25 | import androidx.compose.material3.Scaffold
26 | import androidx.compose.material3.Text
27 | import androidx.compose.material3.TopAppBar
28 | import androidx.compose.runtime.Composable
29 | import androidx.compose.runtime.collectAsState
30 | import androidx.compose.runtime.getValue
31 | import androidx.compose.ui.Alignment
32 | import androidx.compose.ui.Modifier
33 | import androidx.compose.ui.graphics.Color
34 | import androidx.compose.ui.text.font.FontWeight
35 | import androidx.compose.ui.unit.dp
36 | import com.ml.shubham0204.facenet_android.data.PersonRecord
37 | import com.ml.shubham0204.facenet_android.presentation.components.AppAlertDialog
38 | import com.ml.shubham0204.facenet_android.presentation.components.createAlertDialog
39 | import com.ml.shubham0204.facenet_android.presentation.theme.FaceNetAndroidTheme
40 | import org.koin.androidx.compose.koinViewModel
41 |
42 | @OptIn(ExperimentalMaterial3Api::class)
43 | @Composable
44 | fun FaceListScreen(onNavigateBack: (() -> Unit), onAddFaceClick: (() -> Unit)) {
45 | FaceNetAndroidTheme {
46 | Scaffold(
47 | modifier = Modifier.fillMaxSize(),
48 | topBar = {
49 | TopAppBar(
50 | title = {
51 | Text(text = "Face List", style = MaterialTheme.typography.headlineSmall)
52 | },
53 | navigationIcon = {
54 | IconButton(onClick = onNavigateBack) {
55 | Icon(
56 | imageVector = Icons.AutoMirrored.Default.ArrowBack,
57 | contentDescription = "Navigate Back"
58 | )
59 | }
60 | },
61 | )
62 | },
63 | floatingActionButton = {
64 | FloatingActionButton(onClick = onAddFaceClick) {
65 | Icon(imageVector = Icons.Default.Add, contentDescription = "Add a new face")
66 | }
67 | }
68 | ) { innerPadding ->
69 | val viewModel: FaceListScreenViewModel = koinViewModel()
70 | Column(modifier = Modifier.padding(innerPadding)) {
71 | ScreenUI(viewModel)
72 | AppAlertDialog()
73 | }
74 | }
75 | }
76 | }
77 |
78 | @Composable
79 | private fun ScreenUI(viewModel: FaceListScreenViewModel) {
80 | val faces by viewModel.personFlow.collectAsState(emptyList())
81 | LazyColumn { items(faces) { person -> FaceListItem(person) { viewModel.removeFace(person.personID) } } }
82 | }
83 |
84 | @Composable
85 | private fun FaceListItem(personRecord: PersonRecord, onRemoveFaceClick: (() -> Unit)) {
86 | Row(
87 | modifier = Modifier.fillMaxWidth().background(Color.White).padding(12.dp),
88 | verticalAlignment = Alignment.CenterVertically
89 | ) {
90 | Column(modifier = Modifier.fillMaxWidth().weight(1f)) {
91 | Text(
92 | text = personRecord.personName,
93 | style = MaterialTheme.typography.bodyLarge,
94 | fontWeight = FontWeight.Bold
95 | )
96 | Spacer(modifier = Modifier.height(4.dp))
97 | Text(
98 | text = DateUtils.getRelativeTimeSpanString(personRecord.addTime).toString(),
99 | style = MaterialTheme.typography.labelSmall,
100 | color = Color.DarkGray
101 | )
102 | }
103 | Icon(
104 | modifier =
105 | Modifier.clickable {
106 | createAlertDialog(
107 | dialogTitle = "Remove person",
108 | dialogText =
109 | "Are you sure to remove this person from the database. The face for this person will not " +
110 | "be detected in realtime",
111 | dialogPositiveButtonText = "Remove",
112 | onPositiveButtonClick = onRemoveFaceClick,
113 | dialogNegativeButtonText = "Cancel",
114 | onNegativeButtonClick = {}
115 | )
116 | },
117 | imageVector = Icons.Default.Clear,
118 | contentDescription = "Remove face"
119 | )
120 | Spacer(modifier = Modifier.width(2.dp))
121 | }
122 | }
123 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/presentation/screens/face_list/FaceListScreenViewModel.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.presentation.screens.face_list
2 |
3 | import androidx.lifecycle.ViewModel
4 | import com.ml.shubham0204.facenet_android.domain.ImageVectorUseCase
5 | import com.ml.shubham0204.facenet_android.domain.PersonUseCase
6 | import org.koin.android.annotation.KoinViewModel
7 |
8 | @KoinViewModel
9 | class FaceListScreenViewModel(
10 | val imageVectorUseCase: ImageVectorUseCase,
11 | val personUseCase: PersonUseCase
12 | ) : ViewModel() {
13 |
14 | val personFlow = personUseCase.getAll()
15 |
16 | // Remove the person from `PersonRecord`
17 | // and all associated face embeddings from `FaceImageRecord`
18 | fun removeFace(id: Long) {
19 | personUseCase.removePerson(id)
20 | imageVectorUseCase.removeImages(id)
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/presentation/theme/Color.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.presentation.theme
2 |
3 | import androidx.compose.ui.graphics.Color
4 |
5 | val primaryLight = Color(0xFF004DA6)
6 | val onPrimaryLight = Color(0xFFFFFFFF)
7 | val primaryContainerLight = Color(0xFF2771DF)
8 | val onPrimaryContainerLight = Color(0xFFFFFFFF)
9 | val secondaryLight = Color(0xFF4B5E88)
10 | val onSecondaryLight = Color(0xFFFFFFFF)
11 | val secondaryContainerLight = Color(0xFFC4D4FF)
12 | val onSecondaryContainerLight = Color(0xFF2C3F67)
13 | val tertiaryLight = Color(0xFF430093)
14 | val onTertiaryLight = Color(0xFFFFFFFF)
15 | val tertiaryContainerLight = Color(0xFF6B25D2)
16 | val onTertiaryContainerLight = Color(0xFFFEF7FF)
17 | val errorLight = Color(0xFFBA1A1A)
18 | val onErrorLight = Color(0xFFFFFFFF)
19 | val errorContainerLight = Color(0xFFFFDAD6)
20 | val onErrorContainerLight = Color(0xFF410002)
21 | val backgroundLight = Color(0xFFF9F9FF)
22 | val onBackgroundLight = Color(0xFF191B22)
23 | val surfaceLight = Color(0xFFFCF8F8)
24 | val onSurfaceLight = Color(0xFF1C1B1B)
25 | val surfaceVariantLight = Color(0xFFE0E3E3)
26 | val onSurfaceVariantLight = Color(0xFF444748)
27 | val outlineLight = Color(0xFF747878)
28 | val outlineVariantLight = Color(0xFFC4C7C7)
29 | val scrimLight = Color(0xFF000000)
30 | val inverseSurfaceLight = Color(0xFF313030)
31 | val inverseOnSurfaceLight = Color(0xFFF4F0EF)
32 | val inversePrimaryLight = Color(0xFFADC6FF)
33 | val surfaceDimLight = Color(0xFFDDD9D9)
34 | val surfaceBrightLight = Color(0xFFFCF8F8)
35 | val surfaceContainerLowestLight = Color(0xFFFFFFFF)
36 | val surfaceContainerLowLight = Color(0xFFF6F3F2)
37 | val surfaceContainerLight = Color(0xFFF1EDEC)
38 | val surfaceContainerHighLight = Color(0xFFEBE7E7)
39 | val surfaceContainerHighestLight = Color(0xFFE5E2E1)
40 |
41 | val primaryLightMediumContrast = Color(0xFF00408C)
42 | val onPrimaryLightMediumContrast = Color(0xFFFFFFFF)
43 | val primaryContainerLightMediumContrast = Color(0xFF2771DF)
44 | val onPrimaryContainerLightMediumContrast = Color(0xFFFFFFFF)
45 | val secondaryLightMediumContrast = Color(0xFF2F426B)
46 | val onSecondaryLightMediumContrast = Color(0xFFFFFFFF)
47 | val secondaryContainerLightMediumContrast = Color(0xFF6174A0)
48 | val onSecondaryContainerLightMediumContrast = Color(0xFFFFFFFF)
49 | val tertiaryLightMediumContrast = Color(0xFF430093)
50 | val onTertiaryLightMediumContrast = Color(0xFFFFFFFF)
51 | val tertiaryContainerLightMediumContrast = Color(0xFF6B25D2)
52 | val onTertiaryContainerLightMediumContrast = Color(0xFFFFFFFF)
53 | val errorLightMediumContrast = Color(0xFF8C0009)
54 | val onErrorLightMediumContrast = Color(0xFFFFFFFF)
55 | val errorContainerLightMediumContrast = Color(0xFFDA342E)
56 | val onErrorContainerLightMediumContrast = Color(0xFFFFFFFF)
57 | val backgroundLightMediumContrast = Color(0xFFF9F9FF)
58 | val onBackgroundLightMediumContrast = Color(0xFF191B22)
59 | val surfaceLightMediumContrast = Color(0xFFFCF8F8)
60 | val onSurfaceLightMediumContrast = Color(0xFF1C1B1B)
61 | val surfaceVariantLightMediumContrast = Color(0xFFE0E3E3)
62 | val onSurfaceVariantLightMediumContrast = Color(0xFF404344)
63 | val outlineLightMediumContrast = Color(0xFF5C6060)
64 | val outlineVariantLightMediumContrast = Color(0xFF787B7C)
65 | val scrimLightMediumContrast = Color(0xFF000000)
66 | val inverseSurfaceLightMediumContrast = Color(0xFF313030)
67 | val inverseOnSurfaceLightMediumContrast = Color(0xFFF4F0EF)
68 | val inversePrimaryLightMediumContrast = Color(0xFFADC6FF)
69 | val surfaceDimLightMediumContrast = Color(0xFFDDD9D9)
70 | val surfaceBrightLightMediumContrast = Color(0xFFFCF8F8)
71 | val surfaceContainerLowestLightMediumContrast = Color(0xFFFFFFFF)
72 | val surfaceContainerLowLightMediumContrast = Color(0xFFF6F3F2)
73 | val surfaceContainerLightMediumContrast = Color(0xFFF1EDEC)
74 | val surfaceContainerHighLightMediumContrast = Color(0xFFEBE7E7)
75 | val surfaceContainerHighestLightMediumContrast = Color(0xFFE5E2E1)
76 |
77 | val primaryLightHighContrast = Color(0xFF00204E)
78 | val onPrimaryLightHighContrast = Color(0xFFFFFFFF)
79 | val primaryContainerLightHighContrast = Color(0xFF00408C)
80 | val onPrimaryContainerLightHighContrast = Color(0xFFFFFFFF)
81 | val secondaryLightHighContrast = Color(0xFF0A2148)
82 | val onSecondaryLightHighContrast = Color(0xFFFFFFFF)
83 | val secondaryContainerLightHighContrast = Color(0xFF2F426B)
84 | val onSecondaryContainerLightHighContrast = Color(0xFFFFFFFF)
85 | val tertiaryLightHighContrast = Color(0xFF2E0069)
86 | val onTertiaryLightHighContrast = Color(0xFFFFFFFF)
87 | val tertiaryContainerLightHighContrast = Color(0xFF5700BA)
88 | val onTertiaryContainerLightHighContrast = Color(0xFFFFFFFF)
89 | val errorLightHighContrast = Color(0xFF4E0002)
90 | val onErrorLightHighContrast = Color(0xFFFFFFFF)
91 | val errorContainerLightHighContrast = Color(0xFF8C0009)
92 | val onErrorContainerLightHighContrast = Color(0xFFFFFFFF)
93 | val backgroundLightHighContrast = Color(0xFFF9F9FF)
94 | val onBackgroundLightHighContrast = Color(0xFF191B22)
95 | val surfaceLightHighContrast = Color(0xFFFCF8F8)
96 | val onSurfaceLightHighContrast = Color(0xFF000000)
97 | val surfaceVariantLightHighContrast = Color(0xFFE0E3E3)
98 | val onSurfaceVariantLightHighContrast = Color(0xFF212525)
99 | val outlineLightHighContrast = Color(0xFF404344)
100 | val outlineVariantLightHighContrast = Color(0xFF404344)
101 | val scrimLightHighContrast = Color(0xFF000000)
102 | val inverseSurfaceLightHighContrast = Color(0xFF313030)
103 | val inverseOnSurfaceLightHighContrast = Color(0xFFFFFFFF)
104 | val inversePrimaryLightHighContrast = Color(0xFFE6ECFF)
105 | val surfaceDimLightHighContrast = Color(0xFFDDD9D9)
106 | val surfaceBrightLightHighContrast = Color(0xFFFCF8F8)
107 | val surfaceContainerLowestLightHighContrast = Color(0xFFFFFFFF)
108 | val surfaceContainerLowLightHighContrast = Color(0xFFF6F3F2)
109 | val surfaceContainerLightHighContrast = Color(0xFFF1EDEC)
110 | val surfaceContainerHighLightHighContrast = Color(0xFFEBE7E7)
111 | val surfaceContainerHighestLightHighContrast = Color(0xFFE5E2E1)
112 |
113 | val primaryDark = Color(0xFFADC6FF)
114 | val onPrimaryDark = Color(0xFF002E69)
115 | val primaryContainerDark = Color(0xFF2771DF)
116 | val onPrimaryContainerDark = Color(0xFFFFFFFF)
117 | val secondaryDark = Color(0xFFF3F4FF)
118 | val onSecondaryDark = Color(0xFF1B2F57)
119 | val secondaryContainerDark = Color(0xFFB7CAFB)
120 | val onSecondaryContainerDark = Color(0xFF243860)
121 | val tertiaryDark = Color(0xFFD4BBFF)
122 | val onTertiaryDark = Color(0xFF40008C)
123 | val tertiaryContainerDark = Color(0xFF5200AE)
124 | val onTertiaryContainerDark = Color(0xFFDEC9FF)
125 | val errorDark = Color(0xFFFFB4AB)
126 | val onErrorDark = Color(0xFF690005)
127 | val errorContainerDark = Color(0xFF93000A)
128 | val onErrorContainerDark = Color(0xFFFFDAD6)
129 | val backgroundDark = Color(0xFF11131A)
130 | val onBackgroundDark = Color(0xFFE1E2EB)
131 | val surfaceDark = Color(0xFF141313)
132 | val onSurfaceDark = Color(0xFFE5E2E1)
133 | val surfaceVariantDark = Color(0xFF444748)
134 | val onSurfaceVariantDark = Color(0xFFC4C7C7)
135 | val outlineDark = Color(0xFF8E9192)
136 | val outlineVariantDark = Color(0xFF444748)
137 | val scrimDark = Color(0xFF000000)
138 | val inverseSurfaceDark = Color(0xFFE5E2E1)
139 | val inverseOnSurfaceDark = Color(0xFF313030)
140 | val inversePrimaryDark = Color(0xFF005AC1)
141 | val surfaceDimDark = Color(0xFF141313)
142 | val surfaceBrightDark = Color(0xFF3A3939)
143 | val surfaceContainerLowestDark = Color(0xFF0E0E0E)
144 | val surfaceContainerLowDark = Color(0xFF1C1B1B)
145 | val surfaceContainerDark = Color(0xFF201F1F)
146 | val surfaceContainerHighDark = Color(0xFF2A2A2A)
147 | val surfaceContainerHighestDark = Color(0xFF353434)
148 |
149 | val primaryDarkMediumContrast = Color(0xFFB4CBFF)
150 | val onPrimaryDarkMediumContrast = Color(0xFF001537)
151 | val primaryContainerDarkMediumContrast = Color(0xFF4D8EFE)
152 | val onPrimaryContainerDarkMediumContrast = Color(0xFF000000)
153 | val secondaryDarkMediumContrast = Color(0xFFF3F4FF)
154 | val onSecondaryDarkMediumContrast = Color(0xFF1B2F57)
155 | val secondaryContainerDarkMediumContrast = Color(0xFFB7CAFB)
156 | val onSecondaryContainerDarkMediumContrast = Color(0xFF001438)
157 | val tertiaryDarkMediumContrast = Color(0xFFD7C0FF)
158 | val onTertiaryDarkMediumContrast = Color(0xFF1F004B)
159 | val tertiaryContainerDarkMediumContrast = Color(0xFFA675FF)
160 | val onTertiaryContainerDarkMediumContrast = Color(0xFF000000)
161 | val errorDarkMediumContrast = Color(0xFFFFBAB1)
162 | val onErrorDarkMediumContrast = Color(0xFF370001)
163 | val errorContainerDarkMediumContrast = Color(0xFFFF5449)
164 | val onErrorContainerDarkMediumContrast = Color(0xFF000000)
165 | val backgroundDarkMediumContrast = Color(0xFF11131A)
166 | val onBackgroundDarkMediumContrast = Color(0xFFE1E2EB)
167 | val surfaceDarkMediumContrast = Color(0xFF141313)
168 | val onSurfaceDarkMediumContrast = Color(0xFFFEFAF9)
169 | val surfaceVariantDarkMediumContrast = Color(0xFF444748)
170 | val onSurfaceVariantDarkMediumContrast = Color(0xFFC8CBCC)
171 | val outlineDarkMediumContrast = Color(0xFFA0A3A4)
172 | val outlineVariantDarkMediumContrast = Color(0xFF808484)
173 | val scrimDarkMediumContrast = Color(0xFF000000)
174 | val inverseSurfaceDarkMediumContrast = Color(0xFFE5E2E1)
175 | val inverseOnSurfaceDarkMediumContrast = Color(0xFF2A2A2A)
176 | val inversePrimaryDarkMediumContrast = Color(0xFF004596)
177 | val surfaceDimDarkMediumContrast = Color(0xFF141313)
178 | val surfaceBrightDarkMediumContrast = Color(0xFF3A3939)
179 | val surfaceContainerLowestDarkMediumContrast = Color(0xFF0E0E0E)
180 | val surfaceContainerLowDarkMediumContrast = Color(0xFF1C1B1B)
181 | val surfaceContainerDarkMediumContrast = Color(0xFF201F1F)
182 | val surfaceContainerHighDarkMediumContrast = Color(0xFF2A2A2A)
183 | val surfaceContainerHighestDarkMediumContrast = Color(0xFF353434)
184 |
185 | val primaryDarkHighContrast = Color(0xFFFBFAFF)
186 | val onPrimaryDarkHighContrast = Color(0xFF000000)
187 | val primaryContainerDarkHighContrast = Color(0xFFB4CBFF)
188 | val onPrimaryContainerDarkHighContrast = Color(0xFF000000)
189 | val secondaryDarkHighContrast = Color(0xFFFCFAFF)
190 | val onSecondaryDarkHighContrast = Color(0xFF000000)
191 | val secondaryContainerDarkHighContrast = Color(0xFFB7CAFB)
192 | val onSecondaryContainerDarkHighContrast = Color(0xFF000000)
193 | val tertiaryDarkHighContrast = Color(0xFFFFF9FE)
194 | val onTertiaryDarkHighContrast = Color(0xFF000000)
195 | val tertiaryContainerDarkHighContrast = Color(0xFFD7C0FF)
196 | val onTertiaryContainerDarkHighContrast = Color(0xFF000000)
197 | val errorDarkHighContrast = Color(0xFFFFF9F9)
198 | val onErrorDarkHighContrast = Color(0xFF000000)
199 | val errorContainerDarkHighContrast = Color(0xFFFFBAB1)
200 | val onErrorContainerDarkHighContrast = Color(0xFF000000)
201 | val backgroundDarkHighContrast = Color(0xFF11131A)
202 | val onBackgroundDarkHighContrast = Color(0xFFE1E2EB)
203 | val surfaceDarkHighContrast = Color(0xFF141313)
204 | val onSurfaceDarkHighContrast = Color(0xFFFFFFFF)
205 | val surfaceVariantDarkHighContrast = Color(0xFF444748)
206 | val onSurfaceVariantDarkHighContrast = Color(0xFFF8FBFC)
207 | val outlineDarkHighContrast = Color(0xFFC8CBCC)
208 | val outlineVariantDarkHighContrast = Color(0xFFC8CBCC)
209 | val scrimDarkHighContrast = Color(0xFF000000)
210 | val inverseSurfaceDarkHighContrast = Color(0xFFE5E2E1)
211 | val inverseOnSurfaceDarkHighContrast = Color(0xFF000000)
212 | val inversePrimaryDarkHighContrast = Color(0xFF00285D)
213 | val surfaceDimDarkHighContrast = Color(0xFF141313)
214 | val surfaceBrightDarkHighContrast = Color(0xFF3A3939)
215 | val surfaceContainerLowestDarkHighContrast = Color(0xFF0E0E0E)
216 | val surfaceContainerLowDarkHighContrast = Color(0xFF1C1B1B)
217 | val surfaceContainerDarkHighContrast = Color(0xFF201F1F)
218 | val surfaceContainerHighDarkHighContrast = Color(0xFF2A2A2A)
219 | val surfaceContainerHighestDarkHighContrast = Color(0xFF353434)
220 |
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/presentation/theme/Theme.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.presentation.theme
2 |
3 | import android.app.Activity
4 | import android.os.Build
5 | import androidx.compose.foundation.isSystemInDarkTheme
6 | import androidx.compose.material3.MaterialTheme
7 | import androidx.compose.material3.darkColorScheme
8 | import androidx.compose.material3.dynamicDarkColorScheme
9 | import androidx.compose.material3.dynamicLightColorScheme
10 | import androidx.compose.material3.lightColorScheme
11 | import androidx.compose.runtime.Composable
12 | import androidx.compose.runtime.Immutable
13 | import androidx.compose.runtime.SideEffect
14 | import androidx.compose.ui.graphics.Color
15 | import androidx.compose.ui.graphics.toArgb
16 | import androidx.compose.ui.platform.LocalContext
17 | import androidx.compose.ui.platform.LocalView
18 | import androidx.core.view.WindowCompat
19 |
20 | private val lightScheme =
21 | lightColorScheme(
22 | primary = primaryLight,
23 | onPrimary = onPrimaryLight,
24 | primaryContainer = primaryContainerLight,
25 | onPrimaryContainer = onPrimaryContainerLight,
26 | secondary = secondaryLight,
27 | onSecondary = onSecondaryLight,
28 | secondaryContainer = secondaryContainerLight,
29 | onSecondaryContainer = onSecondaryContainerLight,
30 | tertiary = tertiaryLight,
31 | onTertiary = onTertiaryLight,
32 | tertiaryContainer = tertiaryContainerLight,
33 | onTertiaryContainer = onTertiaryContainerLight,
34 | error = errorLight,
35 | onError = onErrorLight,
36 | errorContainer = errorContainerLight,
37 | onErrorContainer = onErrorContainerLight,
38 | background = backgroundLight,
39 | onBackground = onBackgroundLight,
40 | surface = surfaceLight,
41 | onSurface = onSurfaceLight,
42 | surfaceVariant = surfaceVariantLight,
43 | onSurfaceVariant = onSurfaceVariantLight,
44 | outline = outlineLight,
45 | outlineVariant = outlineVariantLight,
46 | scrim = scrimLight,
47 | inverseSurface = inverseSurfaceLight,
48 | inverseOnSurface = inverseOnSurfaceLight,
49 | inversePrimary = inversePrimaryLight,
50 | surfaceDim = surfaceDimLight,
51 | surfaceBright = surfaceBrightLight,
52 | surfaceContainerLowest = surfaceContainerLowestLight,
53 | surfaceContainerLow = surfaceContainerLowLight,
54 | surfaceContainer = surfaceContainerLight,
55 | surfaceContainerHigh = surfaceContainerHighLight,
56 | surfaceContainerHighest = surfaceContainerHighestLight,
57 | )
58 |
59 | private val darkScheme =
60 | darkColorScheme(
61 | primary = primaryDark,
62 | onPrimary = onPrimaryDark,
63 | primaryContainer = primaryContainerDark,
64 | onPrimaryContainer = onPrimaryContainerDark,
65 | secondary = secondaryDark,
66 | onSecondary = onSecondaryDark,
67 | secondaryContainer = secondaryContainerDark,
68 | onSecondaryContainer = onSecondaryContainerDark,
69 | tertiary = tertiaryDark,
70 | onTertiary = onTertiaryDark,
71 | tertiaryContainer = tertiaryContainerDark,
72 | onTertiaryContainer = onTertiaryContainerDark,
73 | error = errorDark,
74 | onError = onErrorDark,
75 | errorContainer = errorContainerDark,
76 | onErrorContainer = onErrorContainerDark,
77 | background = backgroundDark,
78 | onBackground = onBackgroundDark,
79 | surface = surfaceDark,
80 | onSurface = onSurfaceDark,
81 | surfaceVariant = surfaceVariantDark,
82 | onSurfaceVariant = onSurfaceVariantDark,
83 | outline = outlineDark,
84 | outlineVariant = outlineVariantDark,
85 | scrim = scrimDark,
86 | inverseSurface = inverseSurfaceDark,
87 | inverseOnSurface = inverseOnSurfaceDark,
88 | inversePrimary = inversePrimaryDark,
89 | surfaceDim = surfaceDimDark,
90 | surfaceBright = surfaceBrightDark,
91 | surfaceContainerLowest = surfaceContainerLowestDark,
92 | surfaceContainerLow = surfaceContainerLowDark,
93 | surfaceContainer = surfaceContainerDark,
94 | surfaceContainerHigh = surfaceContainerHighDark,
95 | surfaceContainerHighest = surfaceContainerHighestDark,
96 | )
97 |
98 | private val mediumContrastLightColorScheme =
99 | lightColorScheme(
100 | primary = primaryLightMediumContrast,
101 | onPrimary = onPrimaryLightMediumContrast,
102 | primaryContainer = primaryContainerLightMediumContrast,
103 | onPrimaryContainer = onPrimaryContainerLightMediumContrast,
104 | secondary = secondaryLightMediumContrast,
105 | onSecondary = onSecondaryLightMediumContrast,
106 | secondaryContainer = secondaryContainerLightMediumContrast,
107 | onSecondaryContainer = onSecondaryContainerLightMediumContrast,
108 | tertiary = tertiaryLightMediumContrast,
109 | onTertiary = onTertiaryLightMediumContrast,
110 | tertiaryContainer = tertiaryContainerLightMediumContrast,
111 | onTertiaryContainer = onTertiaryContainerLightMediumContrast,
112 | error = errorLightMediumContrast,
113 | onError = onErrorLightMediumContrast,
114 | errorContainer = errorContainerLightMediumContrast,
115 | onErrorContainer = onErrorContainerLightMediumContrast,
116 | background = backgroundLightMediumContrast,
117 | onBackground = onBackgroundLightMediumContrast,
118 | surface = surfaceLightMediumContrast,
119 | onSurface = onSurfaceLightMediumContrast,
120 | surfaceVariant = surfaceVariantLightMediumContrast,
121 | onSurfaceVariant = onSurfaceVariantLightMediumContrast,
122 | outline = outlineLightMediumContrast,
123 | outlineVariant = outlineVariantLightMediumContrast,
124 | scrim = scrimLightMediumContrast,
125 | inverseSurface = inverseSurfaceLightMediumContrast,
126 | inverseOnSurface = inverseOnSurfaceLightMediumContrast,
127 | inversePrimary = inversePrimaryLightMediumContrast,
128 | surfaceDim = surfaceDimLightMediumContrast,
129 | surfaceBright = surfaceBrightLightMediumContrast,
130 | surfaceContainerLowest = surfaceContainerLowestLightMediumContrast,
131 | surfaceContainerLow = surfaceContainerLowLightMediumContrast,
132 | surfaceContainer = surfaceContainerLightMediumContrast,
133 | surfaceContainerHigh = surfaceContainerHighLightMediumContrast,
134 | surfaceContainerHighest = surfaceContainerHighestLightMediumContrast,
135 | )
136 |
137 | private val highContrastLightColorScheme =
138 | lightColorScheme(
139 | primary = primaryLightHighContrast,
140 | onPrimary = onPrimaryLightHighContrast,
141 | primaryContainer = primaryContainerLightHighContrast,
142 | onPrimaryContainer = onPrimaryContainerLightHighContrast,
143 | secondary = secondaryLightHighContrast,
144 | onSecondary = onSecondaryLightHighContrast,
145 | secondaryContainer = secondaryContainerLightHighContrast,
146 | onSecondaryContainer = onSecondaryContainerLightHighContrast,
147 | tertiary = tertiaryLightHighContrast,
148 | onTertiary = onTertiaryLightHighContrast,
149 | tertiaryContainer = tertiaryContainerLightHighContrast,
150 | onTertiaryContainer = onTertiaryContainerLightHighContrast,
151 | error = errorLightHighContrast,
152 | onError = onErrorLightHighContrast,
153 | errorContainer = errorContainerLightHighContrast,
154 | onErrorContainer = onErrorContainerLightHighContrast,
155 | background = backgroundLightHighContrast,
156 | onBackground = onBackgroundLightHighContrast,
157 | surface = surfaceLightHighContrast,
158 | onSurface = onSurfaceLightHighContrast,
159 | surfaceVariant = surfaceVariantLightHighContrast,
160 | onSurfaceVariant = onSurfaceVariantLightHighContrast,
161 | outline = outlineLightHighContrast,
162 | outlineVariant = outlineVariantLightHighContrast,
163 | scrim = scrimLightHighContrast,
164 | inverseSurface = inverseSurfaceLightHighContrast,
165 | inverseOnSurface = inverseOnSurfaceLightHighContrast,
166 | inversePrimary = inversePrimaryLightHighContrast,
167 | surfaceDim = surfaceDimLightHighContrast,
168 | surfaceBright = surfaceBrightLightHighContrast,
169 | surfaceContainerLowest = surfaceContainerLowestLightHighContrast,
170 | surfaceContainerLow = surfaceContainerLowLightHighContrast,
171 | surfaceContainer = surfaceContainerLightHighContrast,
172 | surfaceContainerHigh = surfaceContainerHighLightHighContrast,
173 | surfaceContainerHighest = surfaceContainerHighestLightHighContrast,
174 | )
175 |
176 | private val mediumContrastDarkColorScheme =
177 | darkColorScheme(
178 | primary = primaryDarkMediumContrast,
179 | onPrimary = onPrimaryDarkMediumContrast,
180 | primaryContainer = primaryContainerDarkMediumContrast,
181 | onPrimaryContainer = onPrimaryContainerDarkMediumContrast,
182 | secondary = secondaryDarkMediumContrast,
183 | onSecondary = onSecondaryDarkMediumContrast,
184 | secondaryContainer = secondaryContainerDarkMediumContrast,
185 | onSecondaryContainer = onSecondaryContainerDarkMediumContrast,
186 | tertiary = tertiaryDarkMediumContrast,
187 | onTertiary = onTertiaryDarkMediumContrast,
188 | tertiaryContainer = tertiaryContainerDarkMediumContrast,
189 | onTertiaryContainer = onTertiaryContainerDarkMediumContrast,
190 | error = errorDarkMediumContrast,
191 | onError = onErrorDarkMediumContrast,
192 | errorContainer = errorContainerDarkMediumContrast,
193 | onErrorContainer = onErrorContainerDarkMediumContrast,
194 | background = backgroundDarkMediumContrast,
195 | onBackground = onBackgroundDarkMediumContrast,
196 | surface = surfaceDarkMediumContrast,
197 | onSurface = onSurfaceDarkMediumContrast,
198 | surfaceVariant = surfaceVariantDarkMediumContrast,
199 | onSurfaceVariant = onSurfaceVariantDarkMediumContrast,
200 | outline = outlineDarkMediumContrast,
201 | outlineVariant = outlineVariantDarkMediumContrast,
202 | scrim = scrimDarkMediumContrast,
203 | inverseSurface = inverseSurfaceDarkMediumContrast,
204 | inverseOnSurface = inverseOnSurfaceDarkMediumContrast,
205 | inversePrimary = inversePrimaryDarkMediumContrast,
206 | surfaceDim = surfaceDimDarkMediumContrast,
207 | surfaceBright = surfaceBrightDarkMediumContrast,
208 | surfaceContainerLowest = surfaceContainerLowestDarkMediumContrast,
209 | surfaceContainerLow = surfaceContainerLowDarkMediumContrast,
210 | surfaceContainer = surfaceContainerDarkMediumContrast,
211 | surfaceContainerHigh = surfaceContainerHighDarkMediumContrast,
212 | surfaceContainerHighest = surfaceContainerHighestDarkMediumContrast,
213 | )
214 |
215 | private val highContrastDarkColorScheme =
216 | darkColorScheme(
217 | primary = primaryDarkHighContrast,
218 | onPrimary = onPrimaryDarkHighContrast,
219 | primaryContainer = primaryContainerDarkHighContrast,
220 | onPrimaryContainer = onPrimaryContainerDarkHighContrast,
221 | secondary = secondaryDarkHighContrast,
222 | onSecondary = onSecondaryDarkHighContrast,
223 | secondaryContainer = secondaryContainerDarkHighContrast,
224 | onSecondaryContainer = onSecondaryContainerDarkHighContrast,
225 | tertiary = tertiaryDarkHighContrast,
226 | onTertiary = onTertiaryDarkHighContrast,
227 | tertiaryContainer = tertiaryContainerDarkHighContrast,
228 | onTertiaryContainer = onTertiaryContainerDarkHighContrast,
229 | error = errorDarkHighContrast,
230 | onError = onErrorDarkHighContrast,
231 | errorContainer = errorContainerDarkHighContrast,
232 | onErrorContainer = onErrorContainerDarkHighContrast,
233 | background = backgroundDarkHighContrast,
234 | onBackground = onBackgroundDarkHighContrast,
235 | surface = surfaceDarkHighContrast,
236 | onSurface = onSurfaceDarkHighContrast,
237 | surfaceVariant = surfaceVariantDarkHighContrast,
238 | onSurfaceVariant = onSurfaceVariantDarkHighContrast,
239 | outline = outlineDarkHighContrast,
240 | outlineVariant = outlineVariantDarkHighContrast,
241 | scrim = scrimDarkHighContrast,
242 | inverseSurface = inverseSurfaceDarkHighContrast,
243 | inverseOnSurface = inverseOnSurfaceDarkHighContrast,
244 | inversePrimary = inversePrimaryDarkHighContrast,
245 | surfaceDim = surfaceDimDarkHighContrast,
246 | surfaceBright = surfaceBrightDarkHighContrast,
247 | surfaceContainerLowest = surfaceContainerLowestDarkHighContrast,
248 | surfaceContainerLow = surfaceContainerLowDarkHighContrast,
249 | surfaceContainer = surfaceContainerDarkHighContrast,
250 | surfaceContainerHigh = surfaceContainerHighDarkHighContrast,
251 | surfaceContainerHighest = surfaceContainerHighestDarkHighContrast,
252 | )
253 |
254 | @Immutable
255 | data class ColorFamily(
256 | val color: Color,
257 | val onColor: Color,
258 | val colorContainer: Color,
259 | val onColorContainer: Color
260 | )
261 |
262 | val unspecified_scheme =
263 | ColorFamily(Color.Unspecified, Color.Unspecified, Color.Unspecified, Color.Unspecified)
264 |
265 | @Composable
266 | fun FaceNetAndroidTheme(
267 | darkTheme: Boolean = isSystemInDarkTheme(),
268 | // Dynamic color is available on Android 12+
269 | dynamicColor: Boolean = true,
270 | content: @Composable() () -> Unit
271 | ) {
272 | val colorScheme =
273 | when {
274 | dynamicColor && Build.VERSION.SDK_INT >= Build.VERSION_CODES.S -> {
275 | val context = LocalContext.current
276 | if (darkTheme) dynamicDarkColorScheme(context) else dynamicLightColorScheme(context)
277 | }
278 | darkTheme -> darkScheme
279 | else -> lightScheme
280 | }
281 | val view = LocalView.current
282 | if (!view.isInEditMode) {
283 | SideEffect {
284 | val window = (view.context as Activity).window
285 | window.statusBarColor = colorScheme.primary.toArgb()
286 | WindowCompat.getInsetsController(window, view).isAppearanceLightStatusBars = darkTheme
287 | }
288 | }
289 | MaterialTheme(colorScheme = colorScheme, typography = AppTypography, content = content)
290 | }
291 |
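Note: FaceNetAndroidTheme above prefers dynamic (Material You) color on Android 12+ and otherwise falls back to the static lightScheme/darkScheme; the medium- and high-contrast schemes are defined but never selected by this when-block. A minimal usage sketch, assuming a host Activity in the same package (the app's real entry point is MainActivity.kt, which may wire this differently):

    import android.os.Bundle
    import androidx.activity.ComponentActivity
    import androidx.activity.compose.setContent
    import androidx.compose.material3.MaterialTheme
    import androidx.compose.material3.Surface
    import androidx.compose.material3.Text

    class ThemedActivity : ComponentActivity() {
        override fun onCreate(savedInstanceState: Bundle?) {
            super.onCreate(savedInstanceState)
            setContent {
                // Wrapping content in the theme makes colorScheme and
                // AppTypography available through MaterialTheme.*
                FaceNetAndroidTheme {
                    Surface(color = MaterialTheme.colorScheme.background) {
                        Text("Hello") // placeholder content
                    }
                }
            }
        }
    }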
--------------------------------------------------------------------------------
/app/src/main/java/com/ml/shubham0204/facenet_android/presentation/theme/Type.kt:
--------------------------------------------------------------------------------
1 | package com.ml.shubham0204.facenet_android.presentation.theme
2 |
3 | import androidx.compose.material3.Typography
4 | import androidx.compose.ui.text.font.FontFamily
5 | import androidx.compose.ui.text.googlefonts.Font
6 | import androidx.compose.ui.text.googlefonts.GoogleFont
7 | import com.ml.shubham0204.facenet_android.R
8 |
9 | val provider =
10 | GoogleFont.Provider(
11 | providerAuthority = "com.google.android.gms.fonts",
12 | providerPackage = "com.google.android.gms",
13 | certificates = R.array.com_google_android_gms_fonts_certs
14 | )
15 |
16 | val bodyFontFamily =
17 | FontFamily(
18 | Font(
19 | googleFont = GoogleFont("Roboto"),
20 | fontProvider = provider,
21 | )
22 | )
23 |
24 | val displayFontFamily =
25 | FontFamily(
26 | Font(
27 | googleFont = GoogleFont("Work Sans"),
28 | fontProvider = provider,
29 | )
30 | )
31 |
32 | // Default Material 3 typography values
33 | val baseline = Typography()
34 |
35 | val AppTypography =
36 | Typography(
37 | displayLarge = baseline.displayLarge.copy(fontFamily = displayFontFamily),
38 | displayMedium = baseline.displayMedium.copy(fontFamily = displayFontFamily),
39 | displaySmall = baseline.displaySmall.copy(fontFamily = displayFontFamily),
40 | headlineLarge = baseline.headlineLarge.copy(fontFamily = displayFontFamily),
41 | headlineMedium = baseline.headlineMedium.copy(fontFamily = displayFontFamily),
42 | headlineSmall = baseline.headlineSmall.copy(fontFamily = displayFontFamily),
43 | titleLarge = baseline.titleLarge.copy(fontFamily = displayFontFamily),
44 | titleMedium = baseline.titleMedium.copy(fontFamily = displayFontFamily),
45 | titleSmall = baseline.titleSmall.copy(fontFamily = displayFontFamily),
46 | bodyLarge = baseline.bodyLarge.copy(fontFamily = bodyFontFamily),
47 | bodyMedium = baseline.bodyMedium.copy(fontFamily = bodyFontFamily),
48 | bodySmall = baseline.bodySmall.copy(fontFamily = bodyFontFamily),
49 | labelLarge = baseline.labelLarge.copy(fontFamily = bodyFontFamily),
50 | labelMedium = baseline.labelMedium.copy(fontFamily = bodyFontFamily),
51 | labelSmall = baseline.labelSmall.copy(fontFamily = bodyFontFamily),
52 | )
53 |
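The two font families above are downloadable fonts: Roboto and Work Sans are fetched at runtime through the Google Play Services provider declared in provider, verified against the certificates in values-v23/font_certs.xml. Composables pick the styles up via MaterialTheme, since the theme passes typography = AppTypography. A small sketch with a hypothetical composable:

    import androidx.compose.material3.MaterialTheme
    import androidx.compose.material3.Text
    import androidx.compose.runtime.Composable

    @Composable
    fun SectionTitle(text: String) {
        // titleLarge resolves to Work Sans (displayFontFamily);
        // body/label styles resolve to Roboto (bodyFontFamily).
        Text(text = text, style = MaterialTheme.typography.titleLarge)
    }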
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
[vector drawable: all XML markup was stripped during extraction (171 numbered lines survive with no content); the <vector>/<path> data is not recoverable]
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
[vector drawable: all XML markup was stripped during extraction (30 numbered lines survive with no content); the <vector>/<path> data is not recoverable]
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi/ic_launcher.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
3 |     <background android:drawable="@drawable/ic_launcher_background" />
4 |     <foreground android:drawable="@drawable/ic_launcher_foreground" />
5 |     <monochrome android:drawable="@drawable/ic_launcher_foreground" />
6 | </adaptive-icon>
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi/ic_launcher_round.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
3 |     <background android:drawable="@drawable/ic_launcher_background" />
4 |     <foreground android:drawable="@drawable/ic_launcher_foreground" />
5 |     <monochrome android:drawable="@drawable/ic_launcher_foreground" />
6 | </adaptive-icon>
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/app/src/main/res/mipmap-hdpi/ic_launcher.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/app/src/main/res/mipmap-mdpi/ic_launcher.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher_round.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/app/src/main/res/mipmap-mdpi/ic_launcher_round.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/app/src/main/res/mipmap-xhdpi/ic_launcher.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp
--------------------------------------------------------------------------------
/app/src/main/res/values-v23/font_certs.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <!--
3 |    Copyright (C) 2017 The Android Open Source Project
4 |
5 |    Licensed under the Apache License, Version 2.0 (the "License");
6 |    you may not use this file except in compliance with the License.
7 |    You may obtain a copy of the License at
8 |
9 |         http://www.apache.org/licenses/LICENSE-2.0
10 |
11 |    Unless required by applicable law or agreed to in writing, software
12 |    distributed under the License is distributed on an "AS IS" BASIS,
13 |    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 |    See the License for the specific language governing permissions and
15 |    limitations under the License.
16 | -->
17 | <resources>
18 |     <array name="com_google_android_gms_fonts_certs">
19 |         <item>@array/com_google_android_gms_fonts_certs_dev</item>
20 |         <item>@array/com_google_android_gms_fonts_certs_prod</item>
21 |     </array>
22 |     <string-array name="com_google_android_gms_fonts_certs_dev">
23 |         <item>
24 |             MIIEqDCCA5CgAwIBAgIJANWFuGx90071MA0GCSqGSIb3DQEBBAUAMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAeFw0wODA0MTUyMzM2NTZaFw0zNTA5MDEyMzM2NTZaMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASAwDQYJKoZIhvcNAQEBBQADggENADCCAQgCggEBANbOLggKv+IxTdGNs8/TGFy0PTP6DHThvbbR24kT9ixcOd9W+EaBPWW+wPPKQmsHxajtWjmQwWfna8mZuSeJS48LIgAZlKkpFeVyxW0qMBujb8X8ETrWy550NaFtI6t9+u7hZeTfHwqNvacKhp1RbE6dBRGWynwMVX8XW8N1+UjFaq6GCJukT4qmpN2afb8sCjUigq0GuMwYXrFVee74bQgLHWGJwPmvmLHC69EH6kWr22ijx4OKXlSIx2xT1AsSHee70w5iDBiK4aph27yH3TxkXy9V89TDdexAcKk/cVHYNnDBapcavl7y0RiQ4biu8ymM8Ga/nmzhRKya6G0cGw8CAQOjgfwwgfkwHQYDVR0OBBYEFI0cxb6VTEM8YYY6FbBMvAPyT+CyMIHJBgNVHSMEgcEwgb6AFI0cxb6VTEM8YYY6FbBMvAPyT+CyoYGapIGXMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbYIJANWFuGx90071MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEEBQADggEBABnTDPEF+3iSP0wNfdIjIz1AlnrPzgAIHVvXxunW7SBrDhEglQZBbKJEk5kT0mtKoOD1JMrSu1xuTKEBahWRbqHsXclaXjoBADb0kkjVEJu/Lh5hgYZnOjvlba8Ld7HCKePCVePoTJBdI4fvugnL8TsgK05aIskyY0hKI9L8KfqfGTl1lzOv2KoWD0KWwtAWPoGChZxmQ+nBli+gwYMzM1vAkP+aayLe0a1EQimlOalO762r0GXO0ks+UeXde2Z4e+8S/pf7pITEI/tP+MxJTALw9QUWEv9lKTk+jkbqxbsh8nfBUapfKqYn0eidpwq2AzVp3juYl7//fKnaPhJD9gs=
25 |         </item>
26 |     </string-array>
27 |     <string-array name="com_google_android_gms_fonts_certs_prod">
28 |         <item>
29 |             MIIEQzCCAyugAwIBAgIJAMLgh0ZkSjCNMA0GCSqGSIb3DQEBBAUAMHQxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MRQwEgYDVQQKEwtHb29nbGUgSW5jLjEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDAeFw0wODA4MjEyMzEzMzRaFw0zNjAxMDcyMzEzMzRaMHQxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MRQwEgYDVQQKEwtHb29nbGUgSW5jLjEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDCCASAwDQYJKoZIhvcNAQEBBQADggENADCCAQgCggEBAKtWLgDYO6IIrgqWbxJOKdoR8qtW0I9Y4sypEwPpt1TTcvZApxsdyxMJZ2JORland2qSGT2y5b+3JKkedxiLDmpHpDsz2WCbdxgxRczfey5YZnTJ4VZbH0xqWVW/8lGmPav5xVwnIiJS6HXk+BVKZF+JcWjAsb/GEuq/eFdpuzSqeYTcfi6idkyugwfYwXFU1+5fZKUaRKYCwkkFQVfcAs1fXA5V+++FGfvjJ/CxURaSxaBvGdGDhfXE28LWuT9ozCl5xw4Yq5OGazvV24mZVSoOO0yZ31j7kYvtwYK6NeADwbSxDdJEqO4k//0zOHKrUiGYXtqw/A0LFFtqoZKFjnkCAQOjgdkwgdYwHQYDVR0OBBYEFMd9jMIhF1Ylmn/Tgt9r45jk14alMIGmBgNVHSMEgZ4wgZuAFMd9jMIhF1Ylmn/Tgt9r45jk14aloXikdjB0MQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEUMBIGA1UEChMLR29vZ2xlIEluYy4xEDAOBgNVBAsTB0FuZHJvaWQxEDAOBgNVBAMTB0FuZHJvaWSCCQDC4IdGZEowjTAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBAUAA4IBAQBt0lLO74UwLDYKqs6Tm8/yzKkEu116FmH4rkaymUIE0P9KaMftGlMexFlaYjzmB2OxZyl6euNXEsQH8gjwyxCUKRJNexBiGcCEyj6z+a1fuHHvkiaai+KL8W1EyNmgjmyy8AW7P+LLlkR+ho5zEHatRbM/YAnqGcFh5iZBqpknHf1SKMXFh4dd239FJ1jWYfbMDMy3NS5CTMQ2XFI1MvcyUTdZPErjQfTbQe3aDQsQcafEQPD+nqActifKZ0Np0IS9L9kR/wbNvyz6ENwPiTrjV2KRkEjH78ZMcUQXg0L3BYHJ3lc69Vs5Ddf9uUGGMYldX3WfMBEmh/9iFBDAaTCK
30 |         </item>
31 |     </string-array>
32 | </resources>
33 |
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 |     <color name="purple_200">#FFBB86FC</color>
4 |     <color name="purple_500">#FF6200EE</color>
5 |     <color name="purple_700">#FF3700B3</color>
6 |     <color name="teal_200">#FF03DAC5</color>
7 |     <color name="teal_700">#FF018786</color>
8 |     <color name="black">#FF000000</color>
9 |     <color name="white">#FFFFFFFF</color>
10 | </resources>
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |     <string name="app_name">FaceNet-Android</string>
3 | </resources>
--------------------------------------------------------------------------------
/app/src/main/res/values/themes.xml:
--------------------------------------------------------------------------------
[resource XML stripped during extraction: a 5-line <resources> file, presumably the stock single <style> theme declaration; the theme name and parent are not recoverable]
--------------------------------------------------------------------------------
/app/src/main/res/xml/backup_rules.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?><!--
2 |    Sample backup rules file; uncomment and customize as necessary.
3 |    See https://developer.android.com/guide/topics/data/autobackup
4 |    for details.
5 |    Note: This file is ignored for devices older than API 31
6 |    See https://developer.android.com/about/versions/12/backup-restore
7 | -->
8 | <full-backup-content>
9 |    <!--
10 |    <include domain="sharedpref" path="."/>
11 |    <exclude domain="sharedpref" path="device.xml"/>
12 | -->
13 | </full-backup-content>
--------------------------------------------------------------------------------
/app/src/main/res/xml/data_extraction_rules.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?><!--
2 |    Sample data extraction rules file; uncomment and customize as necessary.
3 |    See https://developer.android.com/about/versions/12/backup-restore#xml-changes
4 |    for details.
5 | -->
6 | <data-extraction-rules>
7 |     <cloud-backup>
8 |         <!-- TODO: Use <include> and <exclude> to control what is backed up.
9 |         <include .../>
10 |         <exclude .../>
11 |         -->
12 |     </cloud-backup>
13 |     <!--
14 |     <device-transfer>
15 |         <include .../>
16 |         <exclude .../>
17 |     </device-transfer>
18 |     -->
19 | </data-extraction-rules>
--------------------------------------------------------------------------------
/build.gradle.kts:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 | buildscript {
3 | dependencies {
4 | classpath(libs.objectbox)
5 | }
6 | }
7 |
8 | plugins {
9 | alias(libs.plugins.android.application) apply false
10 | alias(libs.plugins.jetbrains.kotlin.android) apply false
11 | alias(libs.plugins.compose.compiler) apply false
12 | id("com.google.devtools.ksp") version "2.0.0-1.0.24" apply false
13 | }
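The objectbox classpath entry above only makes the ObjectBox Gradle plugin available to the build; it still has to be applied in the module that defines the entities. A sketch of the relevant lines, assuming the usual ObjectBox setup (the actual app/build.gradle.kts appears elsewhere in this dump and may differ):

    // app/build.gradle.kts (sketch)
    plugins {
        alias(libs.plugins.android.application)
        alias(libs.plugins.jetbrains.kotlin.android)
        alias(libs.plugins.compose.compiler)
        id("com.google.devtools.ksp")
    }

    // Applied by id because the plugin comes from the buildscript
    // classpath above, not from the plugins {} resolution mechanism.
    apply(plugin = "io.objectbox")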
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. For more details, visit
12 | # https://developer.android.com/r/tools/gradle-multi-project-decoupled-projects
13 | # org.gradle.parallel=true
14 | # AndroidX package structure to make it clearer which packages are bundled with the
15 | # Android operating system, and which are packaged with your app's APK
16 | # https://developer.android.com/topic/libraries/support-library/androidx-rn
17 | android.useAndroidX=true
18 | # Kotlin code style for this project: "official" or "obsolete":
19 | kotlin.code.style=official
20 | # Enables namespacing of each library's R class so that its R class includes only the
21 | # resources declared in the library itself and none from the library's dependencies,
22 | # thereby reducing the size of the R class for that library
23 | android.nonTransitiveRClass=true
--------------------------------------------------------------------------------
/gradle/libs.versions.toml:
--------------------------------------------------------------------------------
1 | [versions]
2 | agp = "8.4.1"
3 | coil = "2.6.0"
4 | koin = "3.5.6"
5 | koinAnnotations = "1.3.1"
6 | kotlin = "2.0.0"
7 | coreKtx = "1.13.1"
8 | lifecycleRuntimeKtx = "2.8.1"
9 | activityCompose = "1.9.0"
10 | composeBom = "2024.05.00"
11 | navigationComposeVersion = "2.7.7"
12 | objectboxGradlePlugin = "4.0.0"
13 | documentfile = "1.0.1"
14 | exifinterface = "1.3.7"
15 | tasksVision = "0.10.14"
16 | tensorflow-lite = "1.1.2"
17 | tensorflow-lite-support = "1.1.2"
18 | camerax = "1.3.3"
19 | uiTextGoogleFonts = "1.6.7"
20 |
21 |
22 |
23 | [libraries]
24 | androidx-core-ktx = { group = "androidx.core", name = "core-ktx", version.ref = "coreKtx" }
25 | coil = { module = "io.coil-kt:coil", version.ref = "coil" }
26 | coil-compose = { module = "io.coil-kt:coil-compose", version.ref = "coil" }
27 | androidx-lifecycle-runtime-ktx = { group = "androidx.lifecycle", name = "lifecycle-runtime-ktx", version.ref = "lifecycleRuntimeKtx" }
28 | androidx-activity-compose = { group = "androidx.activity", name = "activity-compose", version.ref = "activityCompose" }
29 | androidx-compose-bom = { group = "androidx.compose", name = "compose-bom", version.ref = "composeBom" }
30 | androidx-compose-navigation = { module = "androidx.navigation:navigation-compose", version.ref = "navigationComposeVersion" }
31 | androidx-ui = { group = "androidx.compose.ui", name = "ui" }
32 | androidx-ui-graphics = { group = "androidx.compose.ui", name = "ui-graphics" }
33 | androidx-ui-tooling = { group = "androidx.compose.ui", name = "ui-tooling" }
34 | androidx-ui-tooling-preview = { group = "androidx.compose.ui", name = "ui-tooling-preview" }
35 | androidx-ui-test-manifest = { group = "androidx.compose.ui", name = "ui-test-manifest" }
36 | androidx-material3 = { group = "androidx.compose.material3", name = "material3" }
37 | compose-material3-icons-extended = { module = "androidx.compose.material:material-icons-extended" }
38 |
39 | # Vector DB (NoSQL database)
40 | objectbox = { module = "io.objectbox:objectbox-gradle-plugin", version.ref = "objectboxGradlePlugin" }
41 |
42 | # Koin - dependency injection
43 | koin-android = { module = "io.insert-koin:koin-android", version.ref = "koin" }
44 | koin-androidx-compose = { module = "io.insert-koin:koin-androidx-compose", version.ref = "koin" }
45 | koin-annotations = { module = "io.insert-koin:koin-annotations", version.ref = "koinAnnotations" }
46 | koin-ksp-compiler = { module = "io.insert-koin:koin-ksp-compiler", version.ref = "koinAnnotations" }
47 |
48 | # TensorFlow Lite
49 | tasks-vision = { module = "com.google.mediapipe:tasks-vision", version.ref = "tasksVision" }
50 | tensorflow-lite = { module = "com.google.ai.edge.litert:litert", version.ref = "tensorflow-lite" }
51 | tensorflow-lite-gpu = { module = "com.google.ai.edge.litert:litert-gpu", version.ref = "tensorflow-lite" }
52 | tensorflow-lite-gpu-api = { module = "com.google.ai.edge.litert:litert-gpu-api", version.ref = "tensorflow-lite" }
53 | tensorflow-lite-support = { module = "com.google.ai.edge.litert:litert-support", version.ref = "tensorflow-lite-support" }
54 |
55 | # DocumentFile and ExifInterface
56 | androidx-documentfile = { module = "androidx.documentfile:documentfile", version.ref = "documentfile" }
57 | androidx-exifinterface = { module = "androidx.exifinterface:exifinterface", version.ref = "exifinterface" }
58 |
59 | # CameraX
60 | androidx-camera-camera2 = { module = "androidx.camera:camera-camera2", version.ref = "camerax" }
61 | androidx-camera-lifecycle = { module = "androidx.camera:camera-lifecycle", version.ref = "camerax" }
62 | androidx-camera-view = { module = "androidx.camera:camera-view", version.ref = "camerax" }
63 | androidx-ui-text-google-fonts = { group = "androidx.compose.ui", name = "ui-text-google-fonts", version.ref = "uiTextGoogleFonts" }
64 |
65 |
66 |
67 | [plugins]
68 | android-application = { id = "com.android.application", version.ref = "agp" }
69 | jetbrains-kotlin-android = { id = "org.jetbrains.kotlin.android", version.ref = "kotlin" }
70 | compose-compiler = { id = "org.jetbrains.kotlin.plugin.compose", version.ref = "kotlin" }
71 |
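Aliases declared in this catalog are consumed from module build scripts through the generated, type-safe libs accessor; dashes in alias names become dots (koin-android -> libs.koin.android). A short sketch of how the app module might reference them (illustrative only; the real dependency block lives in app/build.gradle.kts):

    // app/build.gradle.kts (sketch)
    dependencies {
        implementation(platform(libs.androidx.compose.bom)) // BOM pins Compose artifact versions
        implementation(libs.androidx.material3)
        implementation(libs.koin.android)
        implementation(libs.tensorflow.lite)  // maps to com.google.ai.edge.litert:litert
        ksp(libs.koin.ksp.compiler)           // Koin annotation processing via KSP
    }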
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Wed Jun 05 07:28:20 IST 2024
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-bin.zip
5 | zipStoreBase=GRADLE_USER_HOME
6 | zipStorePath=wrapper/dists
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | #
4 | # Copyright 2015 the original author or authors.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # https://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 |
19 | ##############################################################################
20 | ##
21 | ## Gradle start up script for UN*X
22 | ##
23 | ##############################################################################
24 |
25 | # Attempt to set APP_HOME
26 | # Resolve links: $0 may be a link
27 | PRG="$0"
28 | # Need this for relative symlinks.
29 | while [ -h "$PRG" ] ; do
30 | ls=`ls -ld "$PRG"`
31 | link=`expr "$ls" : '.*-> \(.*\)$'`
32 | if expr "$link" : '/.*' > /dev/null; then
33 | PRG="$link"
34 | else
35 | PRG=`dirname "$PRG"`"/$link"
36 | fi
37 | done
38 | SAVED="`pwd`"
39 | cd "`dirname \"$PRG\"`/" >/dev/null
40 | APP_HOME="`pwd -P`"
41 | cd "$SAVED" >/dev/null
42 |
43 | APP_NAME="Gradle"
44 | APP_BASE_NAME=`basename "$0"`
45 |
46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
48 |
49 | # Use the maximum available, or set MAX_FD != -1 to use that value.
50 | MAX_FD="maximum"
51 |
52 | warn () {
53 | echo "$*"
54 | }
55 |
56 | die () {
57 | echo
58 | echo "$*"
59 | echo
60 | exit 1
61 | }
62 |
63 | # OS specific support (must be 'true' or 'false').
64 | cygwin=false
65 | msys=false
66 | darwin=false
67 | nonstop=false
68 | case "`uname`" in
69 | CYGWIN* )
70 | cygwin=true
71 | ;;
72 | Darwin* )
73 | darwin=true
74 | ;;
75 | MINGW* )
76 | msys=true
77 | ;;
78 | NONSTOP* )
79 | nonstop=true
80 | ;;
81 | esac
82 |
83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
84 |
85 |
86 | # Determine the Java command to use to start the JVM.
87 | if [ -n "$JAVA_HOME" ] ; then
88 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
89 | # IBM's JDK on AIX uses strange locations for the executables
90 | JAVACMD="$JAVA_HOME/jre/sh/java"
91 | else
92 | JAVACMD="$JAVA_HOME/bin/java"
93 | fi
94 | if [ ! -x "$JAVACMD" ] ; then
95 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
96 |
97 | Please set the JAVA_HOME variable in your environment to match the
98 | location of your Java installation."
99 | fi
100 | else
101 | JAVACMD="java"
102 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
103 |
104 | Please set the JAVA_HOME variable in your environment to match the
105 | location of your Java installation."
106 | fi
107 |
108 | # Increase the maximum file descriptors if we can.
109 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
110 | MAX_FD_LIMIT=`ulimit -H -n`
111 | if [ $? -eq 0 ] ; then
112 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
113 | MAX_FD="$MAX_FD_LIMIT"
114 | fi
115 | ulimit -n $MAX_FD
116 | if [ $? -ne 0 ] ; then
117 | warn "Could not set maximum file descriptor limit: $MAX_FD"
118 | fi
119 | else
120 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
121 | fi
122 | fi
123 |
124 | # For Darwin, add options to specify how the application appears in the dock
125 | if $darwin; then
126 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
127 | fi
128 |
129 | # For Cygwin or MSYS, switch paths to Windows format before running java
130 | if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
131 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
132 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
133 |
134 | JAVACMD=`cygpath --unix "$JAVACMD"`
135 |
136 | # We build the pattern for arguments to be converted via cygpath
137 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
138 | SEP=""
139 | for dir in $ROOTDIRSRAW ; do
140 | ROOTDIRS="$ROOTDIRS$SEP$dir"
141 | SEP="|"
142 | done
143 | OURCYGPATTERN="(^($ROOTDIRS))"
144 | # Add a user-defined pattern to the cygpath arguments
145 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
146 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
147 | fi
148 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
149 | i=0
150 | for arg in "$@" ; do
151 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
152 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
153 |
154 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
155 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
156 | else
157 | eval `echo args$i`="\"$arg\""
158 | fi
159 | i=`expr $i + 1`
160 | done
161 | case $i in
162 | 0) set -- ;;
163 | 1) set -- "$args0" ;;
164 | 2) set -- "$args0" "$args1" ;;
165 | 3) set -- "$args0" "$args1" "$args2" ;;
166 | 4) set -- "$args0" "$args1" "$args2" "$args3" ;;
167 | 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
168 | 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
169 | 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
170 | 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
171 | 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
172 | esac
173 | fi
174 |
175 | # Escape application args
176 | save () {
177 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
178 | echo " "
179 | }
180 | APP_ARGS=`save "$@"`
181 |
182 | # Collect all arguments for the java command, following the shell quoting and substitution rules
183 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
184 |
185 | exec "$JAVACMD" "$@"
186 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @rem
2 | @rem Copyright 2015 the original author or authors.
3 | @rem
4 | @rem Licensed under the Apache License, Version 2.0 (the "License");
5 | @rem you may not use this file except in compliance with the License.
6 | @rem You may obtain a copy of the License at
7 | @rem
8 | @rem https://www.apache.org/licenses/LICENSE-2.0
9 | @rem
10 | @rem Unless required by applicable law or agreed to in writing, software
11 | @rem distributed under the License is distributed on an "AS IS" BASIS,
12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | @rem See the License for the specific language governing permissions and
14 | @rem limitations under the License.
15 | @rem
16 |
17 | @if "%DEBUG%" == "" @echo off
18 | @rem ##########################################################################
19 | @rem
20 | @rem Gradle startup script for Windows
21 | @rem
22 | @rem ##########################################################################
23 |
24 | @rem Set local scope for the variables with windows NT shell
25 | if "%OS%"=="Windows_NT" setlocal
26 |
27 | set DIRNAME=%~dp0
28 | if "%DIRNAME%" == "" set DIRNAME=.
29 | set APP_BASE_NAME=%~n0
30 | set APP_HOME=%DIRNAME%
31 |
32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter.
33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
34 |
35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
37 |
38 | @rem Find java.exe
39 | if defined JAVA_HOME goto findJavaFromJavaHome
40 |
41 | set JAVA_EXE=java.exe
42 | %JAVA_EXE% -version >NUL 2>&1
43 | if "%ERRORLEVEL%" == "0" goto execute
44 |
45 | echo.
46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
47 | echo.
48 | echo Please set the JAVA_HOME variable in your environment to match the
49 | echo location of your Java installation.
50 |
51 | goto fail
52 |
53 | :findJavaFromJavaHome
54 | set JAVA_HOME=%JAVA_HOME:"=%
55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
56 |
57 | if exist "%JAVA_EXE%" goto execute
58 |
59 | echo.
60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
61 | echo.
62 | echo Please set the JAVA_HOME variable in your environment to match the
63 | echo location of your Java installation.
64 |
65 | goto fail
66 |
67 | :execute
68 | @rem Setup the command line
69 |
70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
71 |
72 |
73 | @rem Execute Gradle
74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
75 |
76 | :end
77 | @rem End local scope for the variables with windows NT shell
78 | if "%ERRORLEVEL%"=="0" goto mainEnd
79 |
80 | :fail
81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
82 | rem the _cmd.exe /c_ return code!
83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
84 | exit /b 1
85 |
86 | :mainEnd
87 | if "%OS%"=="Windows_NT" endlocal
88 |
89 | :omega
90 |
--------------------------------------------------------------------------------
/resources/Liveness_PT_Model_to_TF.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "provenance": []
7 | },
8 | "kernelspec": {
9 | "name": "python3",
10 | "display_name": "Python 3"
11 | },
12 | "language_info": {
13 | "name": "python"
14 | }
15 | },
16 | "cells": [
17 | {
18 | "cell_type": "markdown",
19 | "source": [
20 | "# Converting PT models of `Silent-Face-Anti-Spoofing` to TensorFlow Lite\n",
21 | "\n",
22 | "We use the [deepface](https://github.com/serengil/deepface) package to access the PyTorch models and then use the [ai-edge-torch](https://github.com/google-ai-edge/ai-edge-torch/blob/main/docs/pytorch_converter/README.md) package to convert them to the TFLite format."
23 | ],
24 | "metadata": {
25 | "id": "1-00aYt-ZqDq"
26 | }
27 | },
28 | {
29 | "cell_type": "code",
30 | "execution_count": null,
31 | "metadata": {
32 | "id": "2nLUl5rQWAn-"
33 | },
34 | "outputs": [],
35 | "source": [
36 | "!pip install deepface\n",
37 | "!pip install ai-edge-torch"
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "source": [
43 | "!mkdir -p /root/.deepface/weights"
44 | ],
45 | "metadata": {
46 | "id": "7rWvHjJtXZzz"
47 | },
48 | "execution_count": null,
49 | "outputs": []
50 | },
51 | {
52 | "cell_type": "code",
53 | "source": [
54 | "from deepface.models.spoofing.FasNet import Fasnet\n",
55 | "\n",
56 | "fasnet = Fasnet()\n",
57 | "print(type(fasnet.first_model))\n",
58 | "print(type(fasnet.second_model))"
59 | ],
60 | "metadata": {
61 | "id": "v8_B5of3WMHr"
62 | },
63 | "execution_count": null,
64 | "outputs": []
65 | },
66 | {
67 | "cell_type": "code",
68 | "source": [
69 | "import torch\n",
70 | "import ai_edge_torch\n",
71 | "\n",
72 | "sample_inputs = ( torch.randn(1, 80, 80, 3) , )\n",
73 | "model = ai_edge_torch.to_channel_last_io(fasnet.first_model.eval(), args=[0])\n",
74 | "edge_model = ai_edge_torch.convert(model, sample_inputs)\n",
75 | "edge_model.export(\"first_model.tflite\")"
76 | ],
77 | "metadata": {
78 | "id": "SyAbc8HUWbym"
79 | },
80 | "execution_count": null,
81 | "outputs": []
82 | },
83 | {
84 | "cell_type": "code",
85 | "source": [
86 | "sample_inputs = ( torch.randn(1, 80, 80, 3) , )\n",
87 | "model = ai_edge_torch.to_channel_last_io(fasnet.second_model.eval(), args=[0])\n",
88 | "edge_model = ai_edge_torch.convert(model, sample_inputs)\n",
89 | "edge_model.export(\"second_model.tflite\")"
90 | ],
91 | "metadata": {
92 | "id": "W8T-LwMqYFWx"
93 | },
94 | "execution_count": null,
95 | "outputs": []
96 | },
97 | {
98 | "cell_type": "code",
99 | "source": [
100 | "import tensorflow as tf\n",
101 | "import pprint\n",
102 | "\n",
103 | "interpreter = tf.lite.Interpreter(\"first_model.tflite\")\n",
104 | "interpreter.allocate_tensors()\n",
105 | "pprint.pprint(interpreter.get_input_details())\n",
106 | "pprint.pprint(interpreter.get_output_details())"
107 | ],
108 | "metadata": {
109 | "id": "saP14XAtYYLP"
110 | },
111 | "execution_count": null,
112 | "outputs": []
113 | },
114 | {
115 | "cell_type": "code",
116 | "source": [
117 | "import tensorflow as tf\n",
118 | "import pprint\n",
119 | "\n",
120 | "interpreter = tf.lite.Interpreter(\"second_model.tflite\")\n",
121 | "interpreter.allocate_tensors()\n",
122 | "pprint.pprint(interpreter.get_input_details())\n",
123 | "pprint.pprint(interpreter.get_output_details())"
124 | ],
125 | "metadata": {
126 | "id": "sM4H2eCOYo0y"
127 | },
128 | "execution_count": null,
129 | "outputs": []
130 | },
131 | {
132 | "cell_type": "code",
133 | "source": [
134 | "from google.colab import files\n",
135 | "\n",
136 | "files.download('first_model.tflite')\n",
137 | "files.download('second_model.tflite')"
138 | ],
139 | "metadata": {
140 | "id": "p8VzWsZYY95R"
141 | },
142 | "execution_count": null,
143 | "outputs": []
144 | }
145 | ]
146 | }
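The notebook exports first_model.tflite and second_model.tflite with NHWC (1, 80, 80, 3) inputs, which presumably ship as the spoof_model_scale_2_7.tflite and spoof_model_scale_4_0.tflite assets consumed by FaceSpoofDetector.kt. A minimal Kotlin sketch of sanity-checking such a converted model on-device with the LiteRT Interpreter; the asset name and the 3-class output shape are assumptions based on Silent-Face-Anti-Spoofing, and the app's real inference path may differ:

    import android.content.Context
    import org.tensorflow.lite.Interpreter
    import org.tensorflow.lite.support.common.FileUtil

    fun checkConvertedSpoofModel(context: Context) {
        // Load the bundled model from assets (assumed file name).
        val modelBuffer = FileUtil.loadMappedFile(context, "spoof_model_scale_2_7.tflite")
        Interpreter(modelBuffer).use { interpreter ->
            // Zero-filled NHWC input matching the notebook's sample shape.
            val input = Array(1) { Array(80) { Array(80) { FloatArray(3) } } }
            // MiniFASNet heads emit 3 class scores (index 1 = "real").
            val output = Array(1) { FloatArray(3) }
            interpreter.run(input, output)
            println("Spoof head output: ${output[0].joinToString()}")
        }
    }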
--------------------------------------------------------------------------------
/resources/banner_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/resources/banner_1.png
--------------------------------------------------------------------------------
/resources/banner_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shubham0204/OnDevice-Face-Recognition-Android/fcce2d277764d26fbbd049755acbef582db3e445/resources/banner_2.png
--------------------------------------------------------------------------------
/settings.gradle.kts:
--------------------------------------------------------------------------------
1 | pluginManagement {
2 | repositories {
3 | google {
4 | content {
5 | includeGroupByRegex("com\\.android.*")
6 | includeGroupByRegex("com\\.google.*")
7 | includeGroupByRegex("androidx.*")
8 | }
9 | }
10 | mavenCentral()
11 | gradlePluginPortal()
12 | }
13 | }
14 | dependencyResolutionManagement {
15 | repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
16 | repositories {
17 | google()
18 | mavenCentral()
19 | }
20 | }
21 |
22 | rootProject.name = "FaceNet-Android"
23 | include(":app")
24 |
--------------------------------------------------------------------------------