├── .gitignore
├── LICENSE
├── README.md
├── app
├── .gitignore
├── build.gradle
├── proguard-rules.pro
└── src
│ └── main
│ ├── AndroidManifest.xml
│ ├── java
│ └── org
│ │ └── jetbrains
│ │ └── kotlinx
│ │ └── dl
│ │ └── example
│ │ └── app
│ │ ├── DetectorView.kt
│ │ ├── ImageAnalyzer.kt
│ │ ├── MainActivity.kt
│ │ ├── PipelineSelectorAdapter.kt
│ │ ├── Softmax.kt
│ │ └── pipelines.kt
│ └── res
│ ├── drawable-v24
│ └── ic_launcher_foreground.xml
│ ├── drawable
│ ├── arrow_ccw.xml
│ ├── arrow_cw.xml
│ ├── camera_switch.xml
│ ├── ic_launcher_background.xml
│ └── shape_rectangle.xml
│ ├── layout
│ ├── activity_main.xml
│ └── pipelines_selector.xml
│ ├── mipmap-anydpi-v26
│ ├── ic_launcher.xml
│ └── ic_launcher_round.xml
│ ├── mipmap-hdpi
│ ├── ic_launcher.png
│ └── ic_launcher_round.png
│ ├── mipmap-mdpi
│ ├── ic_launcher.png
│ └── ic_launcher_round.png
│ ├── mipmap-xhdpi
│ ├── ic_launcher.png
│ └── ic_launcher_round.png
│ ├── mipmap-xxhdpi
│ ├── ic_launcher.png
│ └── ic_launcher_round.png
│ ├── mipmap-xxxhdpi
│ ├── ic_launcher.png
│ └── ic_launcher_round.png
│ ├── raw
│ └── shufflenet.ort
│ ├── values-night
│ └── themes.xml
│ └── values
│ ├── colors.xml
│ ├── dimensions.xml
│ ├── strings.xml
│ └── themes.xml
├── build.gradle
├── gradle.properties
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── readme_materials
├── face.jpg
├── pose.jpg
└── sheeps.png
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | # Built application files
2 | *.apk
3 | *.aar
4 | *.ap_
5 | *.aab
6 |
7 | # Files for the ART/Dalvik VM
8 | *.dex
9 |
10 | # Java class files
11 | *.class
12 |
13 | # Generated files
14 | bin/
15 | gen/
16 | out/
17 | # Uncomment the following line if you need it and you don't have the release build type files in your app
18 | # release/
19 |
20 | # Gradle files
21 | .gradle/
22 | build/
23 |
24 | # Local configuration file (sdk path, etc)
25 | local.properties
26 |
27 | # Proguard folder generated by Eclipse
28 | proguard/
29 |
30 | # Log Files
31 | *.log
32 |
33 | # Android Studio Navigation editor temp files
34 | .navigation/
35 |
36 | # Android Studio captures folder
37 | captures/
38 |
39 | # IntelliJ
40 | *.iml
41 | .idea/workspace.xml
42 | .idea/tasks.xml
43 | .idea/gradle.xml
44 | .idea/assetWizardSettings.xml
45 | .idea/dictionaries
46 | .idea/libraries
47 | # Android Studio 3 in .gitignore file.
48 | .idea/caches
49 | .idea/modules.xml
50 | # Comment next line if keeping position of elements in Navigation Editor is relevant for you
51 | .idea/navEditor.xml
52 |
53 | # Keystore files
54 | # Uncomment the following lines if you do not want to check your keystore files in.
55 | #*.jks
56 | #*.keystore
57 |
58 | # External native build folder generated in Android Studio 2.2 and later
59 | .externalNativeBuild
60 | .cxx/
61 |
62 | # Google Services (e.g. APIs or Firebase)
63 | # google-services.json
64 |
65 | # Freeline
66 | freeline.py
67 | freeline/
68 | freeline_project_description.json
69 |
70 | # fastlane
71 | fastlane/report.xml
72 | fastlane/Preview.html
73 | fastlane/screenshots
74 | fastlane/test_output
75 | fastlane/readme.md
76 |
77 | # Version control
78 | vcs.xml
79 |
80 | # lint
81 | lint/intermediates/
82 | lint/generated/
83 | lint/outputs/
84 | lint/tmp/
85 | # lint/reports/
86 |
87 | .idea/
88 | .DS_Store
89 | *.onnx
90 | *.ort
91 | !/app/src/main/res/raw/shufflenet.ort
92 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Guoyu Wang
4 | Copyright (c) 2022 JetBrains s.r.o. and Kotlin Programming Language contributors
5 |
6 | Permission is hereby granted, free of charge, to any person obtaining a copy
7 | of this software and associated documentation files (the "Software"), to deal
8 | in the Software without restriction, including without limitation the rights
9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 | copies of the Software, and to permit persons to whom the Software is
11 | furnished to do so, subject to the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be included in all
14 | copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 | SOFTWARE.
23 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## KotlinDL Android inference demo application [](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub)
2 |
3 | [](https://kotlinlang.slack.com/messages/kotlindl/)
4 |
5 | ||||
6 | | ---------- | ----------- | ----------- |
7 | |
|
|
|
8 |
9 |
10 | This repo demonstrates how to use KotlinDL for neural network inference on Android devices.
11 | It contains a simple Android app that uses KotlinDL to demonstrate the inference of a bunch of pre-trained models for different computer vision tasks.
12 |
13 | The list of demonstrated models includes:
14 | * MobileNetV1 and EfficientNet4Lite for image classification
15 | * SSDMobileNetV1 and EfficientDetLite0 for object detection
16 | * MoveNet for human pose estimation
17 | * UltraFace320 for face detection
18 | * Fan2d106Face for face alignment
19 |
20 | This application is based on CameraX Android API and uses the latest KotlinDL version.
21 | The actual model inference is performed by the [Onnx Runtime](https://github.com/microsoft/onnxruntime).
22 |
23 | This example is based on the [ort_image_classification example](https://github.com/guoyu-wang/ort_image_classification_android).
24 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | plugins {
2 | id 'com.android.application'
3 | id 'kotlin-android'
4 | id 'kotlin-android-extensions'
5 | id 'org.jetbrains.kotlinx.kotlin-deeplearning-gradle-plugin' version '0.5.1'
6 | }
7 |
8 | android {
9 | compileSdkVersion 33
10 | buildToolsVersion "30.0.3"
11 |
12 | defaultConfig {
13 | minSdkVersion 26
14 | targetSdkVersion 33
15 | versionCode 1
16 | versionName "1.0"
17 |
18 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
19 | }
20 |
21 | buildFeatures {
22 | viewBinding true
23 | }
24 |
25 | buildTypes {
26 | release {
27 | minifyEnabled false
28 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
29 | }
30 | }
31 |
32 | compileOptions {
33 | sourceCompatibility JavaVersion.VERSION_1_8
34 | targetCompatibility JavaVersion.VERSION_1_8
35 | }
36 |
37 | kotlinOptions {
38 | jvmTarget = '1.8'
39 | }
40 | ndkVersion ndkVersion
41 | }
42 |
43 | def camerax_version = "1.1.0"
44 | def kotlindl_version = "0.5.1"
45 |
46 | dependencies {
47 | // CameraX core library using the camera2 implementation
48 | // The following line is optional, as the core library is included indirectly by camera-camera2
49 | implementation "androidx.camera:camera-camera2:${camerax_version}"
50 | // If you want to additionally use the CameraX Lifecycle library
51 | implementation "androidx.camera:camera-lifecycle:${camerax_version}"
52 | // If you want to additionally use the CameraX View class
53 | implementation "androidx.camera:camera-view:1.1.0"
54 |
55 | implementation "org.jetbrains.kotlinx:kotlin-deeplearning-visualization:$kotlindl_version"
56 | implementation "org.jetbrains.kotlinx:kotlin-deeplearning-onnx:$kotlindl_version"
57 | implementation 'androidx.core:core-ktx:1.9.0'
58 | implementation 'androidx.appcompat:appcompat:1.5.1'
59 | implementation 'com.google.android.material:material:1.6.1'
60 | implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
61 | implementation 'io.github.microutils:kotlin-logging-jvm:2.1.21'
62 | }
63 |
64 | downloadKotlinDLModels {
65 | models = ["SSDMobileNetV1", "EfficientNet4Lite", "MobilenetV1", "EfficientDetLite0", "MoveNetSinglePoseLighting", "UltraFace320", "Fan2d106"]
66 | overwrite = false
67 | }
68 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
8 |
15 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/app/src/main/java/org/jetbrains/kotlinx/dl/example/app/DetectorView.kt:
--------------------------------------------------------------------------------
1 | package org.jetbrains.kotlinx.dl.example.app
2 |
3 | import android.content.Context
4 | import android.graphics.Canvas
5 | import android.graphics.Paint
6 | import android.text.TextPaint
7 | import android.util.AttributeSet
8 | import androidx.camera.view.PreviewView.ScaleType
9 | import androidx.core.content.ContextCompat
10 | import org.jetbrains.kotlinx.dl.api.inference.FlatShape
11 | import org.jetbrains.kotlinx.dl.api.inference.facealignment.Landmark
12 | import org.jetbrains.kotlinx.dl.api.inference.objectdetection.DetectedObject
13 | import org.jetbrains.kotlinx.dl.api.inference.posedetection.DetectedPose
14 | import org.jetbrains.kotlinx.dl.visualization.*
15 |
16 | class DetectorView(context: Context, attrs: AttributeSet) :
17 | DetectorViewBase(context, attrs) {
18 | private val objectPaint = Paint().apply {
19 | color = ContextCompat.getColor(context, R.color.white)
20 | style = Paint.Style.STROKE
21 | strokeWidth = resources.getDimensionPixelSize(R.dimen.object_stroke_width).toFloat()
22 | }
23 | private val textPaint = TextPaint().apply {
24 | textSize = resources.getDimensionPixelSize(R.dimen.label_font_size).toFloat()
25 | color = ContextCompat.getColor(context, R.color.white)
26 | }
27 | private val landmarkPaint = Paint().apply {
28 | color = ContextCompat.getColor(context, R.color.white)
29 | style = Paint.Style.FILL
30 | strokeWidth = resources.getDimensionPixelSize(R.dimen.object_stroke_width).toFloat()
31 | }
32 | private val radius = resources.getDimensionPixelSize(R.dimen.object_stroke_width).toFloat()
33 | private var bounds: PreviewImageBounds? = null
34 |
35 | var scaleType: ScaleType = ScaleType.FILL_CENTER
36 |
37 | override fun onDetectionSet(detection: AnalysisResult.WithPrediction?) {
38 | bounds = detection?.let {
39 | getPreviewImageBounds(it.metadata.width, it.metadata.height, width, height, scaleType)
40 | }
41 | }
42 |
43 | override fun Canvas.drawDetection(detection: AnalysisResult.WithPrediction) {
44 | val currentBounds = bounds ?: bounds()
45 | for (s in detection.prediction.shapes) {
46 | when (val shape = if (detection.metadata.isImageFlipped) s.flip() else s) {
47 | is DetectedObject -> drawObject(
48 | shape,
49 | objectPaint, textPaint,
50 | currentBounds
51 | )
52 |
53 | is DetectedPose -> drawPose(
54 | shape,
55 | landmarkPaint, objectPaint, radius,
56 | currentBounds
57 | )
58 |
59 | is Landmark -> {
60 | drawLandmarks(
61 | listOf(shape),
62 | landmarkPaint,
63 | radius,
64 | currentBounds
65 | )
66 | }
67 | }
68 | }
69 | }
70 | }
71 |
72 | private fun FlatShape<*>.flip(): FlatShape<*> {
73 | return map { x, y -> 1 - x to y }
74 | }
--------------------------------------------------------------------------------
/app/src/main/java/org/jetbrains/kotlinx/dl/example/app/ImageAnalyzer.kt:
--------------------------------------------------------------------------------
1 | package org.jetbrains.kotlinx.dl.example.app
2 |
3 | import android.content.Context
4 | import android.content.res.Resources
5 | import android.os.SystemClock
6 | import androidx.camera.core.ImageProxy
7 | import org.jetbrains.kotlinx.dl.api.inference.FlatShape
8 | import org.jetbrains.kotlinx.dl.onnx.inference.ONNXModelHub
9 |
10 | internal class ImageAnalyzer(
11 | context: Context,
12 | private val resources: Resources,
13 | private val uiUpdateCallBack: (AnalysisResult?) -> Unit,
14 | initialPipelineIndex: Int = 0
15 | ) {
16 | private val hub = ONNXModelHub(context)
17 |
18 | val pipelinesList = Pipelines.values().sortedWith(Comparator { o1, o2 ->
19 | if (o1.task != o2.task) return@Comparator o1.task.ordinal - o2.task.ordinal
20 | o1.ordinal - o2.ordinal
21 | })
22 | private val pipelines = pipelinesList.map { it.createPipeline(hub, resources) }
23 |
24 | @Volatile
25 | var currentPipelineIndex: Int = initialPipelineIndex
26 | private set
27 | private val currentPipeline: InferencePipeline? get() = pipelines.getOrNull(currentPipelineIndex)
28 |
29 | fun analyze(image: ImageProxy, isImageFlipped: Boolean) {
30 | val pipeline = currentPipeline
31 | if (pipeline == null) {
32 | uiUpdateCallBack(null)
33 | return
34 | }
35 |
36 | val start = SystemClock.uptimeMillis()
37 | val result = pipeline.analyze(image, confidenceThreshold)
38 | val end = SystemClock.uptimeMillis()
39 |
40 | val rotationDegrees = image.imageInfo.rotationDegrees
41 | image.close()
42 |
43 | if (result == null || result.confidence < confidenceThreshold) {
44 | uiUpdateCallBack(AnalysisResult.Empty(end - start))
45 | } else {
46 | uiUpdateCallBack(
47 | AnalysisResult.WithPrediction(
48 | result, end - start,
49 | ImageMetadata(image.width, image.height, isImageFlipped, rotationDegrees)
50 | )
51 | )
52 | }
53 | }
54 |
55 | fun setPipeline(index: Int) {
56 | currentPipelineIndex = index
57 | }
58 |
59 | fun clear() {
60 | currentPipelineIndex = -1
61 | }
62 |
63 | fun close() {
64 | clear()
65 | pipelines.forEach(InferencePipeline::close)
66 | }
67 |
68 | companion object {
69 | private const val confidenceThreshold = 0.5f
70 | }
71 | }
72 |
73 | sealed class AnalysisResult(val processTimeMs: Long) {
74 | class Empty(processTimeMs: Long) : AnalysisResult(processTimeMs)
75 | class WithPrediction(
76 | val prediction: Prediction,
77 | processTimeMs: Long,
78 | val metadata: ImageMetadata
79 | ) : AnalysisResult(processTimeMs)
80 | }
81 |
82 | interface Prediction {
83 | val shapes: List>
84 | val confidence: Float
85 | fun getText(context: Context): String
86 | }
87 |
88 | data class ImageMetadata(
89 | val width: Int,
90 | val height: Int,
91 | val isImageFlipped: Boolean
92 | ) {
93 |
94 | constructor(width: Int, height: Int, isImageFlipped: Boolean, rotationDegrees: Int)
95 | : this(
96 | if (areDimensionSwitched(rotationDegrees)) height else width,
97 | if (areDimensionSwitched(rotationDegrees)) width else height,
98 | isImageFlipped
99 | )
100 |
101 | companion object {
102 | private fun areDimensionSwitched(rotationDegrees: Int): Boolean {
103 | return rotationDegrees == 90 || rotationDegrees == 270
104 | }
105 | }
106 | }
--------------------------------------------------------------------------------
/app/src/main/java/org/jetbrains/kotlinx/dl/example/app/MainActivity.kt:
--------------------------------------------------------------------------------
1 | package org.jetbrains.kotlinx.dl.example.app
2 |
3 | import android.Manifest
4 | import android.content.pm.PackageManager
5 | import android.os.Bundle
6 | import android.util.Log
7 | import android.view.View
8 | import android.widget.AdapterView
9 | import android.widget.AdapterView.OnItemSelectedListener
10 | import android.widget.Toast
11 | import androidx.appcompat.app.AppCompatActivity
12 | import androidx.camera.core.*
13 | import androidx.camera.core.CameraSelector.DEFAULT_BACK_CAMERA
14 | import androidx.camera.core.CameraSelector.DEFAULT_FRONT_CAMERA
15 | import androidx.camera.lifecycle.ProcessCameraProvider
16 | import androidx.core.app.ActivityCompat
17 | import androidx.core.content.ContextCompat
18 | import androidx.lifecycle.LifecycleOwner
19 | import kotlinx.android.synthetic.main.activity_main.*
20 | import java.util.concurrent.ExecutorService
21 | import java.util.concurrent.Executors
22 | import java.util.concurrent.TimeUnit
23 | import java.util.concurrent.TimeoutException
24 |
25 |
26 | class MainActivity : AppCompatActivity() {
27 | private val backgroundExecutor: ExecutorService by lazy { Executors.newSingleThreadExecutor() }
28 |
29 | @Volatile
30 | private lateinit var cameraProcessor: CameraProcessor
31 | private var currentPipeline: Int = 0
32 | private var isBackCamera: Boolean = true
33 |
34 | override fun onCreate(savedInstanceState: Bundle?) {
35 | super.onCreate(savedInstanceState)
36 | setContentView(R.layout.activity_main)
37 |
38 | savedInstanceState?.apply {
39 | currentPipeline = getInt(CURRENT_PIPELINE, 0)
40 | isBackCamera = getBoolean(IS_BACK_CAMERA, true)
41 | }
42 |
43 | if (allPermissionsGranted()) {
44 | startCamera(currentPipeline, isBackCamera)
45 | } else {
46 | ActivityCompat.requestPermissions(
47 | this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS
48 | )
49 | }
50 |
51 | detector_view.scaleType = viewFinder.scaleType
52 | }
53 |
54 | private fun startCamera(currentPipelineIndex: Int, isBackCamera: Boolean) {
55 | val cameraProviderFuture = ProcessCameraProvider.getInstance(this)
56 | cameraProviderFuture.addListener({
57 | val imageAnalyzer = ImageAnalyzer(
58 | applicationContext, resources, ::updateUI,
59 | currentPipelineIndex
60 | )
61 | runOnUiThread {
62 | cameraProcessor = CameraProcessor(
63 | imageAnalyzer,
64 | cameraProviderFuture.get(),
65 | viewFinder.surfaceProvider,
66 | backgroundExecutor,
67 | isBackCamera
68 | )
69 | if (!cameraProcessor.bindCameraUseCases(this)) {
70 | showError("Could not initialize camera.")
71 | }
72 |
73 | val modelsSpinnerAdapter = PipelineSelectorAdapter(
74 | this,
75 | R.layout.pipelines_selector,
76 | imageAnalyzer.pipelinesList
77 | )
78 | models.adapter = modelsSpinnerAdapter
79 | models.onItemSelectedListener = ModelItemSelectedListener()
80 | models.setSelection(imageAnalyzer.currentPipelineIndex, false)
81 |
82 | backCameraSwitch.isChecked = cameraProcessor.isBackCamera
83 | backCameraSwitch.setOnCheckedChangeListener { _, isChecked ->
84 | if (!cameraProcessor.setBackCamera(isChecked, this)) {
85 | showError("Could not switch to the lens facing ${if (cameraProcessor.isBackCamera) "back" else "front"}.")
86 | }
87 | }
88 | }
89 | }, backgroundExecutor)
90 | }
91 |
92 | private fun allPermissionsGranted() = REQUIRED_PERMISSIONS.all {
93 | ContextCompat.checkSelfPermission(baseContext, it) == PackageManager.PERMISSION_GRANTED
94 | }
95 |
96 | override fun onRequestPermissionsResult(
97 | requestCode: Int,
98 | permissions: Array,
99 | grantResults: IntArray
100 | ) {
101 | super.onRequestPermissionsResult(requestCode, permissions, grantResults)
102 | if (requestCode == REQUEST_CODE_PERMISSIONS) {
103 | if (allPermissionsGranted()) {
104 | startCamera(currentPipeline, isBackCamera)
105 | } else {
106 | showError("Permissions not granted by the user.")
107 | }
108 | }
109 | }
110 |
111 | override fun onSaveInstanceState(outState: Bundle) {
112 | super.onSaveInstanceState(outState)
113 | if (::cameraProcessor.isInitialized) {
114 | outState.putInt(CURRENT_PIPELINE, cameraProcessor.imageAnalyzer.currentPipelineIndex)
115 | outState.putBoolean(IS_BACK_CAMERA, cameraProcessor.isBackCamera)
116 | }
117 | }
118 |
119 | private fun showError(text: String) {
120 | Toast.makeText(this, text, Toast.LENGTH_SHORT).show()
121 | finish()
122 | }
123 |
124 | private fun updateUI(result: AnalysisResult?) {
125 | runOnUiThread {
126 | clearUi()
127 | if (result == null) {
128 | detector_view.setDetection(null)
129 | return@runOnUiThread
130 | }
131 |
132 | if (result is AnalysisResult.WithPrediction) {
133 | detector_view.setDetection(result)
134 | detected_item_text.text = result.prediction.getText(this)
135 | val confidencePercent = result.prediction.confidence * 100
136 | percentMeter.progress = confidencePercent.toInt()
137 | detected_item_confidence.text = "%.2f%%".format(confidencePercent)
138 | } else {
139 | detector_view.setDetection(null)
140 | }
141 | inference_time_value.text = getString(R.string.inference_time_placeholder, result.processTimeMs)
142 | }
143 | }
144 |
145 | private fun clearUi() {
146 | detected_item_text.text = ""
147 | detected_item_confidence.text = ""
148 | inference_time_value.text = ""
149 | percentMeter.progress = 0
150 | }
151 |
152 | override fun onDestroy() {
153 | super.onDestroy()
154 | if (::cameraProcessor.isInitialized) cameraProcessor.close()
155 | backgroundExecutor.shutdown()
156 | }
157 |
158 | companion object {
159 | const val TAG = "KotlinDL demo app"
160 | private const val REQUEST_CODE_PERMISSIONS = 10
161 | private val REQUIRED_PERMISSIONS = arrayOf(Manifest.permission.CAMERA)
162 | private const val CURRENT_PIPELINE = "current_pipeline"
163 | private const val IS_BACK_CAMERA = "is_back_camera"
164 | }
165 |
166 | private inner class ModelItemSelectedListener : OnItemSelectedListener {
167 | override fun onItemSelected(parent: AdapterView<*>?, view: View?, position: Int, id: Long) {
168 | if (::cameraProcessor.isInitialized) cameraProcessor.imageAnalyzer.setPipeline(position)
169 | }
170 |
171 | override fun onNothingSelected(p0: AdapterView<*>?) {
172 | if (::cameraProcessor.isInitialized) cameraProcessor.imageAnalyzer.clear()
173 | }
174 | }
175 | }
176 |
177 | private class CameraProcessor(
178 | val imageAnalyzer: ImageAnalyzer,
179 | private val cameraProvider: ProcessCameraProvider,
180 | private val surfaceProvider: Preview.SurfaceProvider,
181 | private val executor: ExecutorService,
182 | isInitialBackCamera: Boolean
183 | ) {
184 | @Volatile
185 | var isBackCamera: Boolean = isInitialBackCamera
186 | private set
187 | private val cameraSelector get() = if (isBackCamera) DEFAULT_BACK_CAMERA else DEFAULT_FRONT_CAMERA
188 |
189 | fun bindCameraUseCases(lifecycleOwner: LifecycleOwner): Boolean {
190 | try {
191 | cameraProvider.unbindAll()
192 |
193 | val imagePreview = Preview.Builder()
194 | .setTargetAspectRatio(AspectRatio.RATIO_4_3)
195 | .build()
196 | .also {
197 | it.setSurfaceProvider(surfaceProvider)
198 | }
199 | val imageAnalysis = ImageAnalysis.Builder()
200 | .setTargetAspectRatio(AspectRatio.RATIO_4_3)
201 | .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
202 | .build()
203 | .also {
204 | it.setAnalyzer(executor, ImageAnalyzerProxy(imageAnalyzer, isBackCamera))
205 | }
206 |
207 | if (cameraProvider.hasCamera(cameraSelector)) {
208 | cameraProvider.bindToLifecycle(
209 | lifecycleOwner,
210 | cameraSelector,
211 | imagePreview,
212 | imageAnalysis
213 | )
214 | return true
215 | }
216 | } catch (exc: RuntimeException) {
217 | Log.e(MainActivity.TAG, "Use case binding failed", exc)
218 | }
219 | return false
220 | }
221 |
222 | fun setBackCamera(backCamera: Boolean, lifecycleOwner: LifecycleOwner): Boolean {
223 | if (backCamera == isBackCamera) return true
224 |
225 | isBackCamera = backCamera
226 | return bindCameraUseCases(lifecycleOwner)
227 | }
228 |
229 | fun close() {
230 | cameraProvider.unbindAll()
231 | try {
232 | executor.submit { imageAnalyzer.close() }.get(500, TimeUnit.MILLISECONDS)
233 | } catch (_: InterruptedException) {
234 | } catch (_: TimeoutException) {
235 | }
236 | }
237 | }
238 |
239 | private class ImageAnalyzerProxy(private val delegate: ImageAnalyzer, private val isBackCamera: Boolean): ImageAnalysis.Analyzer {
240 | override fun analyze(image: ImageProxy) {
241 | delegate.analyze(image, !isBackCamera)
242 | }
243 | }
--------------------------------------------------------------------------------
/app/src/main/java/org/jetbrains/kotlinx/dl/example/app/PipelineSelectorAdapter.kt:
--------------------------------------------------------------------------------
1 | package org.jetbrains.kotlinx.dl.example.app
2 |
3 | import android.content.Context
4 | import android.util.Log
5 | import android.view.LayoutInflater
6 | import android.view.View
7 | import android.view.ViewGroup
8 | import android.widget.ArrayAdapter
9 | import kotlinx.android.synthetic.main.pipelines_selector.view.text1
10 | import kotlinx.android.synthetic.main.pipelines_selector.view.text2
11 |
12 | class PipelineSelectorAdapter(context: Context, private val resource: Int, items: List) :
13 | ArrayAdapter(context, resource, items) {
14 | override fun getView(position: Int, convertView: View?, parent: ViewGroup): View {
15 | return createView(position, convertView, parent)
16 | }
17 |
18 | override fun getDropDownView(position: Int, convertView: View?, parent: ViewGroup): View {
19 | return createView(position, convertView, parent)
20 | }
21 |
22 | private fun createView(position: Int, recycledView: View?, parent: ViewGroup): View {
23 | val view = recycledView ?: LayoutInflater.from(context).inflate(
24 | resource,
25 | parent,
26 | false
27 | )
28 |
29 | val pipeline = getItem(position)
30 | view.text1.text = pipeline?.descriptionId?.let { context.getString(it) } ?: ""
31 | view.text2.text = pipeline?.task?.descriptionId?.let { context.getString(it) } ?: ""
32 |
33 | return view
34 | }
35 | }
--------------------------------------------------------------------------------
/app/src/main/java/org/jetbrains/kotlinx/dl/example/app/Softmax.kt:
--------------------------------------------------------------------------------
1 | package org.jetbrains.kotlinx.dl.example.app
2 |
3 | import org.jetbrains.kotlinx.dl.api.core.shape.TensorShape
4 | import org.jetbrains.kotlinx.dl.impl.preprocessing.FloatArrayOperation
5 | import org.jetbrains.kotlinx.dl.impl.util.argmax
6 | import kotlin.math.exp
7 |
8 | class Softmax : FloatArrayOperation() {
9 | override fun applyImpl(data: FloatArray, shape: TensorShape): FloatArray {
10 | val logits = data.copyOf()
11 | val max = logits[logits.argmax()]
12 | var sum = 0.0f
13 |
14 | for (i in logits.indices) {
15 | logits[i] = exp(logits[i] - max)
16 | sum += logits[i]
17 | }
18 |
19 | if (sum != 0.0f) {
20 | for (i in logits.indices) {
21 | logits[i] /= sum
22 | }
23 | }
24 |
25 | return logits
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/app/src/main/java/org/jetbrains/kotlinx/dl/example/app/pipelines.kt:
--------------------------------------------------------------------------------
1 | package org.jetbrains.kotlinx.dl.example.app
2 |
3 | import android.content.Context
4 | import android.content.res.Resources
5 | import android.graphics.Bitmap
6 | import android.graphics.Rect
7 | import androidx.camera.core.ImageProxy
8 | import org.jetbrains.kotlinx.dl.api.inference.FlatShape
9 | import org.jetbrains.kotlinx.dl.api.inference.facealignment.Landmark
10 | import org.jetbrains.kotlinx.dl.api.inference.objectdetection.DetectedObject
11 | import org.jetbrains.kotlinx.dl.api.inference.posedetection.DetectedPose
12 | import org.jetbrains.kotlinx.dl.api.preprocessing.Operation
13 | import org.jetbrains.kotlinx.dl.api.preprocessing.pipeline
14 | import org.jetbrains.kotlinx.dl.impl.dataset.Imagenet
15 | import org.jetbrains.kotlinx.dl.impl.inference.imagerecognition.InputType
16 | import org.jetbrains.kotlinx.dl.impl.preprocessing.*
17 | import org.jetbrains.kotlinx.dl.impl.preprocessing.camerax.toBitmap
18 | import org.jetbrains.kotlinx.dl.impl.util.argmax
19 | import org.jetbrains.kotlinx.dl.onnx.inference.ONNXModelHub
20 | import org.jetbrains.kotlinx.dl.onnx.inference.ONNXModels
21 | import org.jetbrains.kotlinx.dl.onnx.inference.OnnxHighLevelModel
22 | import org.jetbrains.kotlinx.dl.onnx.inference.OnnxInferenceModel
23 | import org.jetbrains.kotlinx.dl.onnx.inference.classification.ImageRecognitionModel
24 | import org.jetbrains.kotlinx.dl.onnx.inference.classification.predictTopKObjects
25 | import org.jetbrains.kotlinx.dl.onnx.inference.executionproviders.ExecutionProvider.CPU
26 | import org.jetbrains.kotlinx.dl.onnx.inference.executionproviders.ExecutionProvider.NNAPI
27 | import org.jetbrains.kotlinx.dl.onnx.inference.facealignment.FaceDetectionModel
28 | import org.jetbrains.kotlinx.dl.onnx.inference.facealignment.Fan2D106FaceAlignmentModel
29 | import org.jetbrains.kotlinx.dl.onnx.inference.inferUsing
30 | import org.jetbrains.kotlinx.dl.onnx.inference.objectdetection.SSDLikeModel
31 | import org.jetbrains.kotlinx.dl.onnx.inference.objectdetection.detectObjects
32 | import org.jetbrains.kotlinx.dl.onnx.inference.posedetection.SinglePoseDetectionModel
33 | import org.jetbrains.kotlinx.dl.onnx.inference.posedetection.detectPose
34 |
35 |
36 | interface InferencePipeline {
37 | fun analyze(image: ImageProxy, confidenceThreshold: Float): Prediction?
38 | fun close()
39 | }
40 |
41 | enum class Tasks(val descriptionId: Int) {
42 | Classification(R.string.model_type_classification),
43 | ObjectDetection(R.string.model_type_object_detection),
44 | PoseDetection(R.string.model_type_pose_detection),
45 | FaceAlignment(R.string.model_type_face_alignment)
46 | }
47 |
/**
 * Registry of the inference pipelines the app can run.
 *
 * Each entry pairs a [Tasks] category with a factory that builds the concrete
 * [InferencePipeline]. Most models are downloaded through the KotlinDL
 * [ONNXModelHub]; [Shufflenet] instead loads a model bundled in `res/raw`.
 *
 * @property task          the task category this pipeline belongs to
 * @property descriptionId string resource id of the pipeline's display name
 */
enum class Pipelines(val task: Tasks, val descriptionId: Int) {
    // SSD MobileNet V1 object detector from the ONNX model hub.
    SSDMobilenetV1(Tasks.ObjectDetection, R.string.pipeline_ssd_mobilenet_v1) {
        override fun createPipeline(hub: ONNXModelHub, resources: Resources): InferencePipeline {
            return DetectionPipeline(ONNXModels.ObjectDetection.SSDMobileNetV1.pretrainedModel(hub))
        }
    },
    // EfficientNet-Lite4 image classifier.
    EfficientNetLite4(Tasks.Classification, R.string.pipeline_efficient_net_lite_4) {
        override fun createPipeline(hub: ONNXModelHub, resources: Resources): InferencePipeline {
            return ClassificationPipeline(ONNXModels.CV.EfficientNet4Lite.pretrainedModel(hub))
        }
    },
    // MobileNet V1 image classifier.
    MobilenetV1(Tasks.Classification, R.string.pipeline_mobilenet_v1) {
        override fun createPipeline(hub: ONNXModelHub, resources: Resources): InferencePipeline {
            return ClassificationPipeline(ONNXModels.CV.MobilenetV1.pretrainedModel(hub))
        }
    },
    // ShuffleNet classifier loaded from the app's bundled res/raw/shufflenet.ort
    // (the only pipeline that does not go through the model hub).
    Shufflenet(Tasks.Classification, R.string.pipeline_shufflenet) {
        override fun createPipeline(hub: ONNXModelHub, resources: Resources): InferencePipeline {
            return ShufflenetPipeline(
                OnnxInferenceModel {
                    // Lazily read the model bytes from the raw resource.
                    resources.openRawResource(R.raw.shufflenet).use { it.readBytes() }
                }
            )
        }
    },
    // EfficientDet-Lite0 object detector.
    EfficientDetLite0(Tasks.ObjectDetection, R.string.pipeline_efficient_det_lite_0) {
        override fun createPipeline(hub: ONNXModelHub, resources: Resources): InferencePipeline {
            return DetectionPipeline(ONNXModels.ObjectDetection.EfficientDetLite0.pretrainedModel(hub))
        }
    },
    // MoveNet single-pose estimator.
    MoveNetSinglePoseLighting(Tasks.PoseDetection, R.string.pipeline_move_net_single_pose_lighting) {
        override fun createPipeline(hub: ONNXModelHub, resources: Resources): InferencePipeline {
            return PoseDetectionPipeline(ONNXModels.PoseDetection.MoveNetSinglePoseLighting.pretrainedModel(hub))
        }
    },
    // Two-stage face alignment: UltraFace320 detects the face,
    // Fan2d106 predicts the landmarks on the detected crop.
    FaceAlignment(Tasks.FaceAlignment, R.string.pipeline_face_alignment) {
        override fun createPipeline(hub: ONNXModelHub, resources: Resources): InferencePipeline {
            val detectionModel = ONNXModels.FaceDetection.UltraFace320.pretrainedModel(hub)
            val alignmentModel = ONNXModels.FaceAlignment.Fan2d106.pretrainedModel(hub)
            return FaceAlignmentPipeline(detectionModel, alignmentModel)
        }
    };

    /** Builds the concrete pipeline for this entry. Caller owns the result and must close() it. */
    abstract fun createPipeline(hub: ONNXModelHub, resources: Resources): InferencePipeline
}
93 |
/**
 * Object-detection pipeline backed by an SSD-like ONNX model.
 *
 * NOTE(review): generic type arguments appear to have been stripped from this
 * dump (e.g. `List` / `List>` below were presumably `List<DetectedObject>` and
 * a parameterized shapes list) — restore them against the original source.
 */
internal class DetectionPipeline(private val model: SSDLikeModel) : InferencePipeline {
    override fun analyze(image: ImageProxy, confidenceThreshold: Float): Prediction? {
        // Run detection on the CPU execution provider.
        // The second argument -1 presumably means "no limit on the number of
        // detections" — TODO confirm against the KotlinDL detectObjects API.
        val detections = model.inferUsing(CPU()) {
            it.detectObjects(image, -1)
        }.filter { it.probability >= confidenceThreshold }
        // No prediction when nothing passes the confidence threshold.
        if (detections.isEmpty()) return null

        return PredictedObject(detections)
    }

    // Releases the underlying ONNX session.
    override fun close() {
        model.close()
    }

    // Wraps a non-empty detection list (analyze() guarantees at least one
    // element, so first() below cannot throw).
    class PredictedObject(private val detections: List) : Prediction {
        override val shapes: List> get() = detections
        override val confidence: Float get() = detections.first().probability
        override fun getText(context: Context): String {
            // Exactly one detection: show its label; otherwise show a count.
            val singleObject = detections.singleOrNull()
            if (singleObject != null) return singleObject.label ?: ""
            return context.getString(R.string.label_objects, detections.size)
        }
    }
}
118 |
/**
 * Image-classification pipeline producing a single top-1 prediction.
 *
 * NOTE(review): generic type arguments were stripped from this dump
 * (`List>` below); restore against the original source.
 */
internal class ClassificationPipeline(private val model: ImageRecognitionModel) :
    InferencePipeline {

    override fun analyze(image: ImageProxy, confidenceThreshold: Float): Prediction? {
        // Run on the Android NNAPI execution provider; request only the top-1 class.
        val predictions = model.inferUsing(NNAPI()) {
            it.predictTopKObjects(image, 1)
        }
        if (predictions.isEmpty()) return null
        // Top-1 request means at most one entry, so single() is safe here.
        val (label, confidence) = predictions.single()
        return PredictedClass(label, confidence)
    }

    // Releases the underlying ONNX session.
    override fun close() {
        model.close()
    }

    // Classification result: a label, a confidence, and no overlay shapes.
    class PredictedClass(private val label: String, override val confidence: Float) : Prediction {
        override val shapes: List> get() = emptyList()
        override fun getText(context: Context): String = label
    }
}
140 |
/**
 * Classification pipeline for the bundled ShuffleNet model, driven through the
 * low-level [OnnxInferenceModel] API with manual preprocessing.
 *
 * NOTE(review): unlike the other pipelines, `confidenceThreshold` is not
 * applied here — analyze() always returns a prediction. Confirm this is
 * intended.
 */
internal class ShufflenetPipeline(
    private val model: OnnxInferenceModel
) : InferencePipeline {
    // ImageNet-1k class-id -> label lookup.
    private val labels = Imagenet.V1k.labels()

    override fun analyze(image: ImageProxy, confidenceThreshold: Float): ClassificationPipeline.PredictedClass {
        val bitmap = image.toBitmap()
        val rotation = image.imageInfo.rotationDegrees.toFloat()

        // Resize to the model's 224x224 input, undo the camera rotation,
        // convert to an NCHW float tensor, and apply Torch-style input
        // normalization (channels-first).
        val preprocessing = pipeline()
            .resize {
                outputHeight = 224
                outputWidth = 224
            }
            .rotate { degrees = rotation }
            .toFloatArray { layout = TensorLayout.NCHW }
            .call(InputType.TORCH.preprocessing(channelsLast = false))

        val (label, confidence) = model.inferUsing(CPU()) {
            val (tensor, shape) = preprocessing.apply(bitmap)
            // The model outputs raw logits; Softmax turns them into probabilities.
            val logits = model.predictSoftly(tensor)
            val (confidence, _) = Softmax().apply(logits to shape)
            val labelId = confidence.argmax()
            // Non-null assertion: all ImageNet-1k class ids are present in `labels`.
            labels[labelId]!! to confidence[labelId]
        }

        return ClassificationPipeline.PredictedClass(label, confidence)
    }

    // Releases the underlying ONNX session.
    override fun close() {
        model.close()
    }
}
174 |
/**
 * Single-person pose-detection pipeline.
 *
 * NOTE(review): `confidenceThreshold` is not used here — only the presence of
 * landmarks gates the result. Generic arguments (`List>`) were stripped from
 * this dump; restore against the original source.
 */
class PoseDetectionPipeline(private val model: SinglePoseDetectionModel) : InferencePipeline {
    override fun analyze(image: ImageProxy, confidenceThreshold: Float): Prediction? {
        // Run pose detection on the CPU execution provider.
        val detectedPose = model.inferUsing(CPU()) {
            it.detectPose(image)
        }

        // No landmarks means no usable pose.
        if (detectedPose.landmarks.isEmpty()) return null

        return PredictedPose(detectedPose)
    }

    // Releases the underlying ONNX session.
    override fun close() = model.close()

    // Prediction wrapping a pose with at least one landmark (guaranteed by
    // analyze(), so maxOf below cannot throw).
    class PredictedPose(private val pose: DetectedPose) : Prediction {
        override val shapes: List> get() = listOf(pose)
        override val confidence: Float get() = pose.landmarks.maxOf { it.probability }
        override fun getText(context: Context): String = context.getString(R.string.label_pose)
    }
}
194 |
/**
 * Two-stage face-alignment pipeline: detect the most likely face, then predict
 * landmarks on an expanded crop around it.
 *
 * NOTE(review): generic arguments (`List` on `landmarks`, `List>` on `shapes`)
 * were stripped from this dump; restore against the original source.
 */
class FaceAlignmentPipeline(
    private val detectionModel: FaceDetectionModel,
    private val alignmentModel: Fan2D106FaceAlignmentModel
) : InferencePipeline {
    override fun analyze(image: ImageProxy, confidenceThreshold: Float): Prediction? {
        // Rotation is applied here so the detector sees an upright image.
        val bitmap = image.toBitmap(applyRotation = true)

        // Ask for at most one face (the most confident one).
        val detectedObjects = detectionModel.detectFaces(bitmap, 1)
        if (detectedObjects.isEmpty()) {
            return null
        }

        val face = detectedObjects.first()
        if (face.probability < confidenceThreshold) return null

        // Expand the detected box before cropping, clamped to the bitmap.
        // NOTE(review): multiplying the *coordinates* by 0.9/1.1 grows the box
        // by an amount proportional to its position, not its size — faces near
        // the top-left edge get almost no margin. Possibly intended as a cheap
        // margin heuristic; confirm.
        val faceRect = Rect(
            (face.xMin * 0.9f * bitmap.width).toInt().coerceAtLeast(0),
            (face.yMin * 0.9f * bitmap.height).toInt().coerceAtLeast(0),
            (face.xMax * 1.1f * bitmap.width).toInt().coerceAtMost(bitmap.width),
            (face.yMax * 1.1f * bitmap.height).toInt().coerceAtMost(bitmap.height)
        )

        // Landmarks come back in full-bitmap normalized coordinates (see predictOnCrop).
        val landmarks = alignmentModel.predictOnCrop(bitmap, faceRect)
        return FaceAlignmentPrediction(face, landmarks)
    }

    // Both stages own native ONNX sessions; release them together.
    override fun close() {
        detectionModel.close()
        alignmentModel.close()
    }

    // Combined result: the detected face box plus its predicted landmarks.
    data class FaceAlignmentPrediction(val face: DetectedObject, val landmarks: List): Prediction {
        override val shapes: List> get() = landmarks + face
        override val confidence: Float get() = face.probability
        override fun getText(context: Context): String = context.getString(R.string.label_face)
    }
}
232 |
// Appends a crop step covering exactly `rect` to a preprocessing Operation.
// NOTE(review): the receiver/return generic parameters (presumably
// Operation<Bitmap, Bitmap>) were stripped in this dump; restore them.
private fun Operation.cropRect(rect: Rect): Operation {
    return crop {
        x = rect.left
        y = rect.top
        width = rect.width()
        height = rect.height()
    }
}
241 |
// Runs the model on the `crop` region of `bitmap`, then maps each predicted
// shape's coordinates from crop-relative space back to coordinates normalized
// against the full bitmap (divide by bitmap.width / bitmap.height).
// NOTE(review): the generic signature was garbled by this dump — presumably
// something like
//   fun <T : FlatShape<T>> OnnxHighLevelModel<Bitmap, List<T>>.predictOnCrop(...): List<T>
// Restore it against the original source before compiling.
private fun > OnnxHighLevelModel>.predictOnCrop(
    bitmap: Bitmap,
    crop: Rect
): List {
    val cropBitmap = pipeline().cropRect(crop).apply(bitmap)
    return predict(cropBitmap).map { shape ->
        shape.map { x, y ->
            // Crop-relative (x, y) are fractions of the crop; translate to
            // absolute pixels, then renormalize by the full bitmap size.
            (crop.left + x * crop.width()) / bitmap.width to
                    (crop.top + y * crop.height()) / bitmap.height
        }
    }
}
254 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
1 |
7 |
8 |
9 |
15 |
18 |
21 |
22 |
23 |
24 |
30 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/arrow_ccw.xml:
--------------------------------------------------------------------------------
1 |
6 |
9 |
10 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/arrow_cw.xml:
--------------------------------------------------------------------------------
1 |
3 |
4 |
5 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/camera_switch.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | -
4 |
5 |
-
6 |
7 |
8 |
9 |
10 |
11 | -
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
10 |
15 |
20 |
25 |
30 |
35 |
40 |
45 |
50 |
55 |
60 |
65 |
70 |
75 |
80 |
85 |
90 |
95 |
100 |
105 |
110 |
115 |
120 |
125 |
130 |
135 |
140 |
145 |
150 |
155 |
160 |
165 |
170 |
171 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable/shape_rectangle.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
19 |
24 |
27 |
28 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
14 |
15 |
19 |
20 |
27 |
28 |
38 |
39 |
45 |
46 |
53 |
54 |
61 |
62 |
63 |
69 |
70 |
76 |
77 |
84 |
85 |
86 |
93 |
94 |
103 |
104 |
107 |
108 |
118 |
119 |
120 |
121 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/pipelines_selector.xml:
--------------------------------------------------------------------------------
1 |
5 |
6 |
17 |
18 |
29 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kotlin/kotlindl-app-sample/e82c40e91f1996798639fd0b561c1d2f00800c8a/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kotlin/kotlindl-app-sample/e82c40e91f1996798639fd0b561c1d2f00800c8a/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kotlin/kotlindl-app-sample/e82c40e91f1996798639fd0b561c1d2f00800c8a/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kotlin/kotlindl-app-sample/e82c40e91f1996798639fd0b561c1d2f00800c8a/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kotlin/kotlindl-app-sample/e82c40e91f1996798639fd0b561c1d2f00800c8a/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kotlin/kotlindl-app-sample/e82c40e91f1996798639fd0b561c1d2f00800c8a/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kotlin/kotlindl-app-sample/e82c40e91f1996798639fd0b561c1d2f00800c8a/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kotlin/kotlindl-app-sample/e82c40e91f1996798639fd0b561c1d2f00800c8a/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kotlin/kotlindl-app-sample/e82c40e91f1996798639fd0b561c1d2f00800c8a/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kotlin/kotlindl-app-sample/e82c40e91f1996798639fd0b561c1d2f00800c8a/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/raw/shufflenet.ort:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kotlin/kotlindl-app-sample/e82c40e91f1996798639fd0b561c1d2f00800c8a/app/src/main/res/raw/shufflenet.ort
--------------------------------------------------------------------------------
/app/src/main/res/values-night/themes.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
18 |
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #FFBB86FC
4 | #FF6200EE
5 | #FF3700B3
6 | #FF03DAC5
7 | #FF018786
8 | #FF000000
9 | #FFFFFFFF
10 |
11 | #66000000
12 | #77FFFFFF
13 | #FF777777
14 |
15 |
--------------------------------------------------------------------------------
/app/src/main/res/values/dimensions.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 3dp
4 | 24sp
5 | 5dp
6 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | KotlinDL demo app
3 | Select model
4 | Inference Time
5 | KotlinDL Demo
6 | %d ms
7 | Classification
8 | Object Detection
9 | Pose Det.
10 | Face Alignment
11 | SSDMobilenetV1
12 | EfficientNet4Lite
13 | MobilenetV1
14 | Shufflenet
15 | EfficientDetLite0
16 | MoveNetSinglePose
17 | UltraFace320 + Fan2d106
18 | pose
19 | face
20 | %d objects
21 |
--------------------------------------------------------------------------------
/app/src/main/res/values/themes.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
18 |
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
    // Kotlin version shared with the kotlin-gradle-plugin classpath entry below.
    ext.kotlin_version = "1.6.21"
    repositories {
        google()
        mavenCentral()
    }
    dependencies {
        classpath "com.android.tools.build:gradle:7.2.1"
        classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
    }
}

allprojects {
    repositories {
        google()
        mavenCentral()
        // mavenLocal allows locally-published library snapshots (e.g. KotlinDL)
        // to override the released artifacts during development.
        mavenLocal()
    }
}

// `./gradlew clean` removes the root project's build directory.
task clean(type: Delete) {
    delete rootProject.buildDir
}
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | # AndroidX package structure to make it clearer which packages are bundled with the
15 | # Android operating system, and which are packaged with your app's APK
16 | # https://developer.android.com/topic/libraries/support-library/androidx-rn
17 | android.useAndroidX=true
18 | # Automatically convert third-party libraries to use AndroidX
19 | android.enableJetifier=true
20 | # Kotlin code style for this project: "official" or "obsolete":
21 | kotlin.code.style=official
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kotlin/kotlindl-app-sample/e82c40e91f1996798639fd0b561c1d2f00800c8a/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Tue Mar 09 23:19:29 PST 2021
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-bin.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Attempt to set APP_HOME
10 | # Resolve links: $0 may be a link
11 | PRG="$0"
12 | # Need this for relative symlinks.
13 | while [ -h "$PRG" ] ; do
14 | ls=`ls -ld "$PRG"`
15 | link=`expr "$ls" : '.*-> \(.*\)$'`
16 | if expr "$link" : '/.*' > /dev/null; then
17 | PRG="$link"
18 | else
19 | PRG=`dirname "$PRG"`"/$link"
20 | fi
21 | done
22 | SAVED="`pwd`"
23 | cd "`dirname \"$PRG\"`/" >/dev/null
24 | APP_HOME="`pwd -P`"
25 | cd "$SAVED" >/dev/null
26 |
27 | APP_NAME="Gradle"
28 | APP_BASE_NAME=`basename "$0"`
29 |
30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
31 | DEFAULT_JVM_OPTS=""
32 |
33 | # Use the maximum available, or set MAX_FD != -1 to use that value.
34 | MAX_FD="maximum"
35 |
36 | warn () {
37 | echo "$*"
38 | }
39 |
40 | die () {
41 | echo
42 | echo "$*"
43 | echo
44 | exit 1
45 | }
46 |
47 | # OS specific support (must be 'true' or 'false').
48 | cygwin=false
49 | msys=false
50 | darwin=false
51 | nonstop=false
52 | case "`uname`" in
53 | CYGWIN* )
54 | cygwin=true
55 | ;;
56 | Darwin* )
57 | darwin=true
58 | ;;
59 | MINGW* )
60 | msys=true
61 | ;;
62 | NONSTOP* )
63 | nonstop=true
64 | ;;
65 | esac
66 |
67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
68 |
69 | # Determine the Java command to use to start the JVM.
70 | if [ -n "$JAVA_HOME" ] ; then
71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
72 | # IBM's JDK on AIX uses strange locations for the executables
73 | JAVACMD="$JAVA_HOME/jre/sh/java"
74 | else
75 | JAVACMD="$JAVA_HOME/bin/java"
76 | fi
77 | if [ ! -x "$JAVACMD" ] ; then
78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
79 |
80 | Please set the JAVA_HOME variable in your environment to match the
81 | location of your Java installation."
82 | fi
83 | else
84 | JAVACMD="java"
85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
86 |
87 | Please set the JAVA_HOME variable in your environment to match the
88 | location of your Java installation."
89 | fi
90 |
91 | # Increase the maximum file descriptors if we can.
92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
93 | MAX_FD_LIMIT=`ulimit -H -n`
94 | if [ $? -eq 0 ] ; then
95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
96 | MAX_FD="$MAX_FD_LIMIT"
97 | fi
98 | ulimit -n $MAX_FD
99 | if [ $? -ne 0 ] ; then
100 | warn "Could not set maximum file descriptor limit: $MAX_FD"
101 | fi
102 | else
103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
104 | fi
105 | fi
106 |
107 | # For Darwin, add options to specify how the application appears in the dock
108 | if $darwin; then
109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
110 | fi
111 |
112 | # For Cygwin, switch paths to Windows format before running java
113 | if $cygwin ; then
114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
116 | JAVACMD=`cygpath --unix "$JAVACMD"`
117 |
118 | # We build the pattern for arguments to be converted via cygpath
119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 | SEP=""
121 | for dir in $ROOTDIRSRAW ; do
122 | ROOTDIRS="$ROOTDIRS$SEP$dir"
123 | SEP="|"
124 | done
125 | OURCYGPATTERN="(^($ROOTDIRS))"
126 | # Add a user-defined pattern to the cygpath arguments
127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 | fi
130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 | i=0
132 | for arg in "$@" ; do
133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 |
136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 | else
139 | eval `echo args$i`="\"$arg\""
140 | fi
141 | i=$((i+1))
142 | done
143 | case $i in
144 | (0) set -- ;;
145 | (1) set -- "$args0" ;;
146 | (2) set -- "$args0" "$args1" ;;
147 | (3) set -- "$args0" "$args1" "$args2" ;;
148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 | esac
155 | fi
156 |
157 | # Escape application args
158 | save () {
159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
160 | echo " "
161 | }
162 | APP_ARGS=$(save "$@")
163 |
164 | # Collect all arguments for the java command, following the shell quoting and substitution rules
165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
166 |
167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
169 | cd "$(dirname "$0")"
170 | fi
171 |
172 | exec "$JAVACMD" "$@"
173 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 |
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS=
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 |
53 | :win9xME_args
54 | @rem Slurp the command line arguments.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 |
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 |
61 | set CMD_LINE_ARGS=%*
62 |
63 | :execute
64 | @rem Setup the command line
65 |
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 |
68 | @rem Execute Gradle
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 |
71 | :end
72 | @rem End local scope for the variables with windows NT shell
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 |
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 |
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 |
84 | :omega
85 |
--------------------------------------------------------------------------------
/readme_materials/face.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kotlin/kotlindl-app-sample/e82c40e91f1996798639fd0b561c1d2f00800c8a/readme_materials/face.jpg
--------------------------------------------------------------------------------
/readme_materials/pose.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kotlin/kotlindl-app-sample/e82c40e91f1996798639fd0b561c1d2f00800c8a/readme_materials/pose.jpg
--------------------------------------------------------------------------------
/readme_materials/sheeps.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kotlin/kotlindl-app-sample/e82c40e91f1996798639fd0b561c1d2f00800c8a/readme_materials/sheeps.png
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
// Where Gradle resolves build plugins (Android Gradle Plugin lives on google()).
pluginManagement {
    repositories {
        google()
        gradlePluginPortal()
    }
}

// Single application module.
include ':app'
rootProject.name = "ort_image_classifier"
--------------------------------------------------------------------------------