├── .gitattributes
├── .gitignore
├── LICENSE
├── README.md
└── android
    ├── .gitignore
    ├── .idea
    │   ├── .name
    │   ├── codeStyles
    │   │   ├── Project.xml
    │   │   └── codeStyleConfig.xml
    │   ├── compiler.xml
    │   ├── gradle.xml
    │   ├── jarRepositories.xml
    │   ├── misc.xml
    │   └── runConfigurations.xml
    ├── app
    │   ├── .gitignore
    │   ├── build.gradle
    │   ├── proguard-rules.pro
    │   └── src
    │       ├── androidTest
    │       │   └── java
    │       │       └── jp
    │       │           └── araobp
    │       │               └── camera
    │       │                   └── ExampleInstrumentedTest.kt
    │       ├── main
    │       │   ├── AndroidManifest.xml
    │       │   ├── assets
    │       │   │   ├── detect.tflite
    │       │   │   └── labelmap.txt
    │       │   ├── ic_launcher-playstore.png
    │       │   ├── java
    │       │   │   └── jp
    │       │   │       └── araobp
    │       │   │           └── camera
    │       │   │               ├── MainActivity.kt
    │       │   │               ├── Properties.kt
    │       │   │               ├── aicamera
    │       │   │               │   ├── DrawPaint.kt
    │       │   │               │   └── ObjectDetector.kt
    │       │   │               ├── net
    │       │   │               │   ├── IMqttReceiver.kt
    │       │   │               │   └── MqttClient.kt
    │       │   │               ├── opecv
    │       │   │               │   ├── ColorFilter.kt
    │       │   │               │   ├── DifferenceExtractor.kt
    │       │   │               │   ├── DrawColor.kt
    │       │   │               │   ├── OpticalFlowFarneback.kt
    │       │   │               │   └── Utils.kt
    │       │   │               ├── tflite
    │       │   │               │   ├── Classifier.kt
    │       │   │               │   ├── SsdMobilenetV2.kt
    │       │   │               │   └── TFLiteObjectDetectionAPIModel.java
    │       │   │               └── util
    │       │   │                   ├── Fps.kt
    │       │   │                   ├── MediaStorage.kt
    │       │   │                   └── Utils.kt
    │       │   └── res
    │       │       ├── drawable-v24
    │       │       │   └── ic_launcher_foreground.xml
    │       │       ├── drawable
    │       │       │   ├── ic_baseline_camera_alt_24.xml
    │       │       │   ├── ic_launcher_background.xml
    │       │       │   └── ic_launcher_foreground.xml
    │       │       ├── layout
    │       │       │   ├── activity_main.xml
    │       │       │   └── settings.xml
    │       │       ├── mipmap-anydpi-v26
    │       │       │   ├── ic_launcher.xml
    │       │       │   └── ic_launcher_round.xml
    │       │       ├── mipmap-hdpi
    │       │       │   ├── ic_launcher.png
    │       │       │   └── ic_launcher_round.png
    │       │       ├── mipmap-mdpi
    │       │       │   ├── ic_launcher.png
    │       │       │   └── ic_launcher_round.png
    │       │       ├── mipmap-xhdpi
    │       │       │   ├── ic_launcher.png
    │       │       │   └── ic_launcher_round.png
    │       │       ├── mipmap-xxhdpi
    │       │       │   ├── ic_launcher.png
    │       │       │   └── ic_launcher_round.png
    │       │       ├── mipmap-xxxhdpi
    │       │       │   ├── ic_launcher.png
    │       │       │   └── ic_launcher_round.png
    │       │       └── values
    │       │           ├── colors.xml
    │       │           ├── dimens.xml
    │       │           ├── ic_launcher_background.xml
    │       │           ├── strings.xml
    │       │           └── styles.xml
    │       └── test
    │           └── java
    │               └── jp
    │                   └── araobp
    │                       └── camera
    │                           └── ExampleUnitTest.kt
    ├── build.gradle
    ├── gradle.properties
    ├── gradle
    │   └── wrapper
    │       ├── gradle-wrapper.jar
    │       └── gradle-wrapper.properties
    ├── gradlew
    ├── gradlew.bat
    └── settings.gradle
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Built application files
2 | *.apk
3 | *.ap_
4 | *.aab
5 |
6 | # Files for the ART/Dalvik VM
7 | *.dex
8 |
9 | # Java class files
10 | *.class
11 |
12 | # Generated files
13 | bin/
14 | gen/
15 | out/
16 | # Uncomment the following line in case you need and you don't have the release build type files in your app
17 | # release/
18 |
19 | # Gradle files
20 | .gradle/
21 | build/
22 |
23 | # Local configuration file (sdk path, etc)
24 | local.properties
25 |
26 | # Proguard folder generated by Eclipse
27 | proguard/
28 |
29 | # Log Files
30 | *.log
31 |
32 | # Android Studio Navigation editor temp files
33 | .navigation/
34 |
35 | # Android Studio captures folder
36 | captures/
37 |
38 | # IntelliJ
39 | *.iml
40 | .idea/workspace.xml
41 | .idea/tasks.xml
42 | .idea/gradle.xml
43 | .idea/assetWizardSettings.xml
44 | .idea/dictionaries
45 | .idea/libraries
46 | # Android Studio 3 in .gitignore file.
47 | .idea/caches
48 | .idea/modules.xml
49 | # Comment next line if keeping position of elements in Navigation Editor is relevant for you
50 | .idea/navEditor.xml
51 |
52 | # Keystore files
53 | # Uncomment the following lines if you do not want to check your keystore files in.
54 | #*.jks
55 | #*.keystore
56 |
57 | # External native build folder generated in Android Studio 2.2 and later
58 | .externalNativeBuild
59 |
60 | # Google Services (e.g. APIs or Firebase)
61 | # google-services.json
62 |
63 | # Freeline
64 | freeline.py
65 | freeline/
66 | freeline_project_description.json
67 |
68 | # fastlane
69 | fastlane/report.xml
70 | fastlane/Preview.html
71 | fastlane/screenshots
72 | fastlane/test_output
73 | fastlane/readme.md
74 |
75 | # Version control
76 | vcs.xml
77 |
78 | # lint
79 | lint/intermediates/
80 | lint/generated/
81 | lint/outputs/
82 | lint/tmp/
83 | # lint/reports/
84 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Android camera
2 |
3 | Android CameraX image analysis demo with OpenCV4 and TensorFlow Lite
4 |
5 | (Work in progress) Developing the virtual camera mode.
6 |
7 | ```
8 | Real camera mode
9 |
10 | [Image sensor]-->[CameraX]-->[OpenCV4]--+-----------------------+
11 |                      |                  |                       |
12 |                      +------------------+--->[TensorFlow Lite]--+---> Final output
13 |
14 |
15 | Virtual camera mode (for Unity)
16 |
17 | [Unity camera]-->[RenderTexture]-- MQTT --->[OpenCV4]--+-----------------------+
18 |                                     |                  |                       |
19 |                                     +------------------+--->[TensorFlow Lite]--+---> Final output
20 | ```
21 |
22 | ## Background and motivation
23 |
24 | I think 4G/5G smartphones (or 4G/5G-connected Android-based cameras) with 4K/8K image sensors will replace traditional PTZ monitoring cameras.
25 |
26 | ```
27 | Traditional PTZ monitoring camera
28 |
29 |                            Edge AI
30 | [Camera]---ONVIF/PoE---[Edge computer]---Ethernet---[Video recorder]---[Video management system]
31 |
32 |
33 | Android-based monitoring cameras are cheaper and more flexible than the traditional ones.
34 |
35 |    Edge AI
36 | [Smartphone]---4G/5G network---[Cloud storage]---[Video management app on cloud]
37 |      |
38 |      Direct communication over 4G/5G network
39 |      |
40 | [Smartphone]
41 | ```
42 |
43 | This project is just a skeleton of such an AI camera.
44 |
45 | ## Requirements
46 |
47 | The OpenCV4 Android SDK has to be installed into this project manually -- it is not included in this repo.
48 |
49 | The other dependencies are fetched by Gradle automatically.
50 |
51 | ## Image processing filters with OpenCV4
52 |
53 | - Color filter
54 | - Optical flow
55 | - Difference extraction
56 |
57 | ## TensorFlow Lite
58 |
59 | - Object detection
60 |
61 | ## CameraX beta getting started
62 |
63 | - https://codelabs.developers.google.com/codelabs/camerax-getting-started/
64 |
65 | ## opencv-4.4.0-android-sdk.zip
66 |
67 | - https://sourceforge.net/projects/opencvlibrary/files/4.4.0/
68 |
--------------------------------------------------------------------------------
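The pipeline in the README above corresponds to processImage() in MainActivity.kt further down in this dump. Below is a minimal Kotlin sketch of the real-camera path, assuming the project's colorFilter() and ObjectDetector are available; the wrapper name processFrame and the choice of the color filter as the OpenCV stage are illustrative, and the threshold 40 mirrors the call in MainActivity.

```
// Sketch only: condensed from MainActivity.processImage().
import android.graphics.Bitmap
import jp.araobp.camera.aicamera.ObjectDetector
import jp.araobp.camera.opecv.colorFilter
import org.opencv.android.Utils
import org.opencv.core.Mat

fun processFrame(frame: Mat, detector: ObjectDetector): Bitmap? {
    // OpenCV stage: keep only yellow and red regions (any of the other filters could go here)
    val filtered = colorFilter(frame, "yellow", "red")

    // Convert the filtered and the original Mat to Bitmaps
    val filteredBitmap = Bitmap.createBitmap(filtered.cols(), filtered.rows(), Bitmap.Config.ARGB_8888)
    Utils.matToBitmap(filtered, filteredBitmap)
    val originalBitmap = Bitmap.createBitmap(frame.cols(), frame.rows(), Bitmap.Config.ARGB_8888)
    Utils.matToBitmap(frame, originalBitmap)

    // TensorFlow Lite stage: SSD MobileNet runs on the original frame and draws boxes over the filtered one
    return detector.detect(filteredBitmap, originalBitmap, confidenceThres = 40)
}
```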
/android/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/caches
5 | /.idea/libraries
6 | /.idea/modules.xml
7 | /.idea/workspace.xml
8 | /.idea/navEditor.xml
9 | /.idea/assetWizardSettings.xml
10 | .DS_Store
11 | /build
12 | /captures
13 | .externalNativeBuild
14 | .cxx
15 |
16 | /sdk
17 |
--------------------------------------------------------------------------------
/android/.idea/.name:
--------------------------------------------------------------------------------
1 | My Application
--------------------------------------------------------------------------------
/android/.idea/codeStyles/Project.xml:
--------------------------------------------------------------------------------
(XML content not preserved in this dump; only fragments of the Android Studio XML attribute arrangement rules remain.)
--------------------------------------------------------------------------------
/android/.idea/codeStyles/codeStyleConfig.xml:
--------------------------------------------------------------------------------
(XML content not preserved in this dump.)
--------------------------------------------------------------------------------
/android/.idea/compiler.xml:
--------------------------------------------------------------------------------
(XML content not preserved in this dump.)
--------------------------------------------------------------------------------
/android/.idea/gradle.xml:
--------------------------------------------------------------------------------
(XML content not preserved in this dump.)
--------------------------------------------------------------------------------
/android/.idea/jarRepositories.xml:
--------------------------------------------------------------------------------
(XML content not preserved in this dump.)
--------------------------------------------------------------------------------
/android/.idea/misc.xml:
--------------------------------------------------------------------------------
(XML content not preserved in this dump.)
--------------------------------------------------------------------------------
/android/.idea/runConfigurations.xml:
--------------------------------------------------------------------------------
(XML content not preserved in this dump.)
--------------------------------------------------------------------------------
/android/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
--------------------------------------------------------------------------------
/android/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 | apply plugin: 'kotlin-android'
3 | apply plugin: 'kotlin-android-extensions'
4 |
5 | android {
6 | compileSdkVersion 29
7 | buildToolsVersion "29.0.3"
8 |
9 | defaultConfig {
10 | applicationId "jp.araobp.myapplication"
11 | minSdkVersion 28
12 | targetSdkVersion 29
13 | versionCode 1
14 | versionName "1.0"
15 |
16 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
17 | }
18 |
19 | aaptOptions {
20 | noCompress "tflite"
21 | }
22 |
23 | buildTypes {
24 | release {
25 | minifyEnabled false
26 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
27 | }
28 | }
29 |
30 | compileOptions {
31 | sourceCompatibility JavaVersion.VERSION_1_8
32 | targetCompatibility JavaVersion.VERSION_1_8
33 | }
34 | }
35 |
36 | dependencies {
37 | implementation fileTree(dir: "libs", include: ["*.jar"])
38 | implementation "org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version"
39 | implementation 'androidx.core:core-ktx:1.3.1'
40 | implementation 'androidx.appcompat:appcompat:1.2.0'
41 | implementation 'androidx.constraintlayout:constraintlayout:2.0.1'
42 | implementation project(path: ':sdk')
43 | testImplementation 'junit:junit:4.12'
44 | androidTestImplementation 'androidx.test.ext:junit:1.1.2'
45 | androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
46 |
47 | // CameraX core library using the camera2 implementation
48 | def camerax_version = "1.0.0-beta11"
49 | // The following line is optional, as the core library is included indirectly by camera-camera2
50 | implementation "androidx.camera:camera-core:${camerax_version}"
51 | implementation "androidx.camera:camera-camera2:${camerax_version}"
52 | // If you want to additionally use the CameraX Lifecycle library
53 | implementation "androidx.camera:camera-lifecycle:${camerax_version}"
54 | // If you want to additionally use the CameraX View class
55 | implementation "androidx.camera:camera-view:1.0.0-alpha18"
56 | // If you want to additionally use the CameraX Extensions library
57 | implementation "androidx.camera:camera-extensions:1.0.0-alpha18"
58 |
59 | // TensorFlow Lite
60 | implementation 'org.tensorflow:tensorflow-lite:0.0.0-nightly'
61 | implementation 'org.tensorflow:tensorflow-lite-gpu:0.0.0-nightly'
62 |
63 | // MQTT
64 | implementation 'org.eclipse.paho:org.eclipse.paho.client.mqttv3:1.1.0'
65 | implementation 'org.eclipse.paho:org.eclipse.paho.android.service:1.1.1'
66 | implementation 'androidx.localbroadcastmanager:localbroadcastmanager:1.0.0'
67 | }
--------------------------------------------------------------------------------
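A note on `implementation project(path: ':sdk')` above: this is where the OpenCV4 Android SDK comes in, and the README points out that it is not bundled with the repo. The unzipped SDK's `sdk` directory has to be added as a Gradle module named `:sdk` (the `/sdk` entry in android/.gitignore suggests it lives under `android/sdk`). A sketch of the include, written in Gradle Kotlin DSL form because the repo's own Groovy settings.gradle is not reproduced in this dump:

```
// settings.gradle.kts (sketch only; the project actually uses a Groovy settings.gradle)
include(":app")
include(":sdk")   // the "sdk" folder from opencv-4.4.0-android-sdk.zip, copied into the android/ project root
```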
/android/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
--------------------------------------------------------------------------------
/android/app/src/androidTest/java/jp/araobp/camera/ExampleInstrumentedTest.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera
2 |
3 | import androidx.test.platform.app.InstrumentationRegistry
4 | import androidx.test.ext.junit.runners.AndroidJUnit4
5 |
6 | import org.junit.Test
7 | import org.junit.runner.RunWith
8 |
9 | import org.junit.Assert.*
10 |
11 | /**
12 | * Instrumented test, which will execute on an Android device.
13 | *
14 | * See [testing documentation](http://d.android.com/tools/testing).
15 | */
16 | @RunWith(AndroidJUnit4::class)
17 | class ExampleInstrumentedTest {
18 | @Test
19 | fun useAppContext() {
20 | // Context of the app under test.
21 | val appContext = InstrumentationRegistry.getInstrumentation().targetContext
22 | assertEquals("jp.araobp.myapplication", appContext.packageName)
23 | }
24 | }
--------------------------------------------------------------------------------
/android/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
(XML content not preserved in this dump.)
--------------------------------------------------------------------------------
/android/app/src/main/assets/detect.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/araobp/android-camera/efc0a66ebe5060b0604d4df8e50a380f95bd34bc/android/app/src/main/assets/detect.tflite
--------------------------------------------------------------------------------
/android/app/src/main/assets/labelmap.txt:
--------------------------------------------------------------------------------
1 | ???
2 | person
3 | bicycle
4 | car
5 | motorcycle
6 | airplane
7 | bus
8 | train
9 | truck
10 | boat
11 | traffic light
12 | fire hydrant
13 | ???
14 | stop sign
15 | parking meter
16 | bench
17 | bird
18 | cat
19 | dog
20 | horse
21 | sheep
22 | cow
23 | elephant
24 | bear
25 | zebra
26 | giraffe
27 | ???
28 | backpack
29 | umbrella
30 | ???
31 | ???
32 | handbag
33 | tie
34 | suitcase
35 | frisbee
36 | skis
37 | snowboard
38 | sports ball
39 | kite
40 | baseball bat
41 | baseball glove
42 | skateboard
43 | surfboard
44 | tennis racket
45 | bottle
46 | ???
47 | wine glass
48 | cup
49 | fork
50 | knife
51 | spoon
52 | bowl
53 | banana
54 | apple
55 | sandwich
56 | orange
57 | broccoli
58 | carrot
59 | hot dog
60 | pizza
61 | donut
62 | cake
63 | chair
64 | couch
65 | potted plant
66 | bed
67 | ???
68 | dining table
69 | ???
70 | ???
71 | toilet
72 | ???
73 | tv
74 | laptop
75 | mouse
76 | remote
77 | keyboard
78 | cell phone
79 | microwave
80 | oven
81 | toaster
82 | sink
83 | refrigerator
84 | ???
85 | book
86 | clock
87 | vase
88 | scissors
89 | teddy bear
90 | hair drier
91 | toothbrush
--------------------------------------------------------------------------------
/android/app/src/main/ic_launcher-playstore.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/araobp/android-camera/efc0a66ebe5060b0604d4df8e50a380f95bd34bc/android/app/src/main/ic_launcher-playstore.png
--------------------------------------------------------------------------------
/android/app/src/main/java/jp/araobp/camera/MainActivity.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera
2 |
3 | import android.Manifest
4 | import android.annotation.SuppressLint
5 | import android.app.Dialog
6 | import android.content.pm.ActivityInfo
7 | import android.content.pm.PackageManager
8 | import android.graphics.Bitmap
9 | import android.graphics.BitmapFactory
10 | import android.graphics.PorterDuff
11 | import android.graphics.Rect
12 | import android.os.Bundle
13 | import android.text.Editable
14 | import android.text.TextWatcher
15 | import android.util.Log
16 | import android.view.KeyEvent
17 | import android.view.View
18 | import android.view.WindowManager
19 | import android.widget.CheckBox
20 | import android.widget.EditText
21 | import android.widget.Toast
22 | import androidx.appcompat.app.AppCompatActivity
23 | import androidx.camera.core.*
24 | import androidx.camera.lifecycle.ProcessCameraProvider
25 | import androidx.core.app.ActivityCompat
26 | import androidx.core.content.ContextCompat
27 | import androidx.lifecycle.LifecycleOwner
28 | import jp.araobp.camera.Properties.Companion.IMAGE_ASPECT_RATIO
29 | import jp.araobp.camera.aicamera.ObjectDetector
30 | import jp.araobp.camera.net.IMqttReceiver
31 | import jp.araobp.camera.net.MqttClient
32 | import jp.araobp.camera.opecv.*
33 | import jp.araobp.camera.util.Fps
34 | import jp.araobp.camera.util.saveImage
35 | import kotlinx.android.synthetic.main.activity_main.*
36 | import org.eclipse.paho.client.mqttv3.MqttMessage
37 | import org.opencv.android.OpenCVLoader
38 | import org.opencv.android.Utils
39 | import org.opencv.core.Mat
40 | import java.util.concurrent.ExecutorService
41 | import java.util.concurrent.Executors
42 | import java.util.concurrent.atomic.AtomicBoolean
43 | import kotlin.concurrent.thread
44 | import kotlin.math.roundToInt
45 | import kotlin.system.exitProcess
46 |
47 | class MainActivity : AppCompatActivity() {
48 |
49 | companion object {
50 | private const val TAG = "camera"
51 | private const val REQUEST_CODE_PERMISSIONS = 10
52 | private val REQUIRED_PERMISSIONS = arrayOf(Manifest.permission.CAMERA)
53 | }
54 |
55 | init {
56 | // OpenCV initialization
57 | OpenCVLoader.initDebug()
58 | }
59 |
60 | private lateinit var mProps: Properties
61 |
62 | private lateinit var mCameraExecutor: ExecutorService
63 |
64 | private var mRectRight = 0
65 | private var mRectBottom = 0
66 |
67 | private val mOpticalFlowFarneback = OpticalFlowFarneback()
68 |
69 | private lateinit var mObjectDetector: ObjectDetector
70 | private val mDifference = DifferenceExtractor()
71 |
72 | private var mShutterPressed = false
73 |
74 | private lateinit var mMqttClient: MqttClient
75 | private lateinit var mLock: AtomicBoolean
76 |
77 | private val mFps = Fps()
78 |
79 | val mqttReceiver = object : IMqttReceiver {
80 | override fun messageArrived(topic: String?, message: MqttMessage?) {
81 | message?.let {
82 | if (mProps.remoteCamera) {
83 | if (topic == Properties.MQTT_TOPIC_IMAGE) {
84 | if (mLock.compareAndSet(false, true)) {
85 | Log.d(TAG, "mqtt message received on ${Properties.MQTT_TOPIC_IMAGE}")
86 | thread {
87 | val jpegByteArray = it.payload
88 | val bitmap =
89 | BitmapFactory.decodeByteArray(
90 | jpegByteArray,
91 | 0,
92 | jpegByteArray.size
93 | )
94 | val mat = Mat()
95 | Utils.bitmapToMat(bitmap, mat)
96 | val filteredBitmap = processImage(mat)
97 | drawImage(filteredBitmap)
98 | mLock.set(false)
99 | }
100 | }
101 | }
102 | }
103 | }
104 | }
105 | }
106 |
107 | override fun onCreate(savedInstanceState: Bundle?) {
108 |
109 | requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE
110 |
111 | super.onCreate(savedInstanceState)
112 |
113 | setContentView(R.layout.activity_main)
114 |
115 | // Request camera permissions
116 | if (allPermissionsGranted()) {
117 | startCamera()
118 | } else {
119 | ActivityCompat.requestPermissions(
120 | this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS
121 | )
122 | }
123 |
124 | // Prevent the sleep mode programmatically
125 | window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)
126 |
127 | // Hide the navigation bar
128 | makeFullscreen()
129 |
130 | surfaceView.addOnLayoutChangeListener { _, _, _, _, _, _, _, _, _ ->
131 | mRectRight = (surfaceView.height * IMAGE_ASPECT_RATIO).roundToInt() - 1
132 | mRectBottom = surfaceView.height - 1
133 | }
134 |
135 | mObjectDetector = ObjectDetector(this)
136 |
137 | mCameraExecutor = Executors.newSingleThreadExecutor()
138 |
139 | mProps = Properties(this)
140 |
141 | // Settings dialog
142 | buttonSettings.setOnClickListener {
143 |
144 | mProps.load()
145 |
146 | val dialog = Dialog(this)
147 | dialog.setContentView(R.layout.settings)
148 |
149 | val editTextMqttServer = dialog.findViewById<EditText>(R.id.editTextMqttServer)
150 | editTextMqttServer.setText(mProps.mqttServer)
151 |
152 | val editTextMqttUsername = dialog.findViewById<EditText>(R.id.editTextMqttUsername)
153 | editTextMqttUsername.setText(mProps.mqttUsername)
154 |
155 | val editTextMqttPassword = dialog.findViewById<EditText>(R.id.editTextMqttPassword)
156 | editTextMqttPassword.setText(mProps.mqttPassword)
157 |
158 | val checkBoxRemoteCamera = dialog.findViewById<CheckBox>(R.id.checkBoxRemoteCamera)
159 | checkBoxRemoteCamera.isChecked = mProps.remoteCamera
160 |
161 | val checkBoxFps = dialog.findViewById<CheckBox>(R.id.checkBoxFps)
162 | checkBoxFps.isChecked = mProps.showFps
163 |
164 | editTextMqttServer.addTextChangedListener(object : TextWatcher {
165 | override fun beforeTextChanged(p0: CharSequence?, p1: Int, p2: Int, p3: Int) =
166 | Unit
167 |
168 | override fun onTextChanged(p0: CharSequence?, p1: Int, p2: Int, p3: Int) = Unit
169 | override fun afterTextChanged(p0: Editable?) {
170 | mProps.mqttServer = editTextMqttServer.text.toString()
171 | }
172 | })
173 |
174 | editTextMqttUsername.addTextChangedListener(object : TextWatcher {
175 | override fun beforeTextChanged(p0: CharSequence?, p1: Int, p2: Int, p3: Int) =
176 | Unit
177 |
178 | override fun onTextChanged(p0: CharSequence?, p1: Int, p2: Int, p3: Int) = Unit
179 | override fun afterTextChanged(p0: Editable?) {
180 | mProps.mqttUsername = editTextMqttUsername.text.toString()
181 | }
182 | })
183 |
184 | editTextMqttPassword.addTextChangedListener(object : TextWatcher {
185 | override fun beforeTextChanged(p0: CharSequence?, p1: Int, p2: Int, p3: Int) =
186 | Unit
187 |
188 | override fun onTextChanged(p0: CharSequence?, p1: Int, p2: Int, p3: Int) = Unit
189 | override fun afterTextChanged(p0: Editable?) {
190 | mProps.mqttPassword = editTextMqttPassword.text.toString()
191 | }
192 | })
193 |
194 | checkBoxRemoteCamera.setOnCheckedChangeListener { _, isChecked ->
195 | mProps.remoteCamera = isChecked
196 | }
197 |
198 | checkBoxFps.setOnCheckedChangeListener { _, isChecked ->
199 | mProps.showFps = isChecked
200 | }
201 |
202 | dialog.setOnDismissListener {
203 | mProps.save()
204 | }
205 |
206 | dialog.show()
207 | }
208 |
209 | buttonQuit.setOnClickListener {
210 | this@MainActivity.finish()
211 | exitProcess(0)
212 | }
213 |
214 | mLock = AtomicBoolean(false)
215 | }
216 |
217 | override fun onResume() {
218 | super.onResume()
219 | mMqttClient = MqttClient(
220 | context = this,
221 | mqttServer = mProps.mqttServer,
222 | mqttUsername = mProps.mqttUsername,
223 | mqttPassword = mProps.mqttPassword,
224 | clientId = TAG,
225 | receiver = mqttReceiver
226 | )
227 | mMqttClient.connect(listOf(Properties.MQTT_TOPIC_IMAGE))
228 | }
229 |
230 | override fun onPause() {
231 | super.onPause()
232 | mMqttClient.destroy()
233 | }
234 |
235 | override fun onDestroy() {
236 | super.onDestroy()
237 | mCameraExecutor.shutdown()
238 | }
239 |
240 | private fun startCamera() {
241 | val cameraProviderFuture = ProcessCameraProvider.getInstance(this)
242 |
243 | cameraProviderFuture.addListener(Runnable {
244 | // Used to bind the lifecycle of cameras to the lifecycle owner
245 | val cameraProvider: ProcessCameraProvider = cameraProviderFuture.get()
246 |
247 | // Image Analyzer
248 | val imageAnalyzer = ImageAnalysis.Builder()
249 | //.setTargetAspectRatio(AspectRatio.RATIO_16_9)
250 | //.setTargetResolution(Properties.TARGET_RESOLUTION)
251 | .build()
252 | .also {
253 | it.setAnalyzer(mCameraExecutor, ImageAnalyzer())
254 | }
255 |
256 | // Select back camera as a default
257 | val cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA
258 |
259 | try {
260 | // Unbind use cases before rebinding
261 | cameraProvider.unbindAll()
262 |
263 | // Bind use cases to camera
264 | cameraProvider.bindToLifecycle(
265 | this as LifecycleOwner, cameraSelector, imageAnalyzer
266 | )
267 |
268 | } catch (exc: Exception) {
269 | Log.e(TAG, "Use case binding failed", exc)
270 | }
271 |
272 | }, ContextCompat.getMainExecutor(this))
273 |
274 | }
275 |
276 | private fun allPermissionsGranted() = REQUIRED_PERMISSIONS.all {
277 | ContextCompat.checkSelfPermission(
278 | baseContext, it
279 | ) == PackageManager.PERMISSION_GRANTED
280 | }
281 |
282 | override fun onRequestPermissionsResult(
283 | requestCode: Int, permissions: Array, grantResults:
284 | IntArray
285 | ) {
286 | if (requestCode == REQUEST_CODE_PERMISSIONS) {
287 | if (allPermissionsGranted()) {
288 | startCamera()
289 | } else {
290 | Toast.makeText(
291 | this,
292 | "Permissions not granted by the user.",
293 | Toast.LENGTH_SHORT
294 | ).show()
295 | finish()
296 | }
297 | }
298 | }
299 |
300 | private fun drawImage(bitmap: Bitmap) {
301 | val src = Rect(0, 0, bitmap.width - 1, bitmap.height - 1)
302 | val dest = Rect(Properties.SHIFT_IMAGE, 0, mRectRight + Properties.SHIFT_IMAGE, mRectBottom)
303 |
304 | val canvas = surfaceView.holder.lockCanvas()
305 | canvas.drawColor(0, PorterDuff.Mode.CLEAR)
306 | canvas.drawBitmap(bitmap, src, dest, null)
307 | surfaceView.holder.unlockCanvasAndPost(canvas)
308 |
309 | if (mProps.showFps) {
310 | textViewFps.post {
311 | textViewFps.text = "${mFps.update()} FPS"
312 | }
313 | }
314 |
315 | if (mShutterPressed) {
316 | saveImage(bitmap, this@MainActivity, "android-camera")
317 | mShutterPressed = false
318 | }
319 | }
320 |
321 | // Image processing pipeline with OpenCV and TensorFlow Lite
322 | private fun processImage(mat: Mat): Bitmap {
323 |
324 | var filtered = mat.clone()
325 |
326 | //--- Digital signal processing with OpenCV START---//
327 |
328 | if (toggleButtonColorFilter.isChecked) {
329 | filtered = colorFilter(filtered, "yellow", "red")
330 | }
331 |
332 | if (toggleButtonOpticalFlow.isChecked) {
333 | filtered = mOpticalFlowFarneback.update(filtered)
334 | }
335 |
336 | if (toggleButtonMotionDetection.isChecked) {
337 | filtered = mDifference.update(filtered, contour = false)
338 | }
339 |
340 | if (toggleButtonContourExtraction.isChecked) {
341 | filtered = mDifference.update(filtered, contour = true)
342 | }
343 |
344 | //--- Digital signal processing with OpenCV END ---//
345 |
346 | val width = filtered.cols()
347 | val height = filtered.rows()
348 |
349 | var bitmapFiltered =
350 | Bitmap.createBitmap(
351 | width, height,
352 | Bitmap.Config.ARGB_8888
353 | )
354 |
355 | Utils.matToBitmap(filtered, bitmapFiltered)
356 |
357 | // Object detection with TensorFlow Lite START
358 |
359 | if (toggleButtonObjectDetection.isChecked) {
360 | val bitmapOriginal = Bitmap.createBitmap(
361 | width, height,
362 | Bitmap.Config.ARGB_8888
363 | )
364 | Utils.matToBitmap(mat, bitmapOriginal)
365 | bitmapFiltered = mObjectDetector.detect(bitmapFiltered, bitmapOriginal, 40) ?: bitmapFiltered  // keep the filtered frame if detection returns null
366 | }
367 |
368 | // Object detection with TensorFlow Lite END
369 |
370 | return bitmapFiltered
371 | }
372 |
373 | private inner class ImageAnalyzer() : ImageAnalysis.Analyzer {
374 | @SuppressLint("UnsafeExperimentalUsageError")
375 | override fun analyze(imageProxy: ImageProxy) {
376 |
377 | if (!mProps.remoteCamera) {
378 | Log.d(TAG, "width: ${imageProxy.width}, height: ${imageProxy.height}")
379 | val mat = imageProxy.image?.yuvToRgba()
380 | imageProxy.close()
381 | mat?.let {
382 | val bitmap = processImage(it)
383 | drawImage(bitmap)
384 | }
385 | }
386 | }
387 | }
388 |
389 | private fun makeFullscreen() {
390 | window.decorView.systemUiVisibility =
391 | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION or View.SYSTEM_UI_FLAG_FULLSCREEN or View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
392 | }
393 |
394 | /**
395 | * Press volume key (volume up) or press a button on the bluetooth remote shutter
396 | * to take a picture of the current Mat object
397 | */
398 | override fun dispatchKeyEvent(event: KeyEvent): Boolean {
399 | val action: Int = event.action
400 | return when (event.keyCode) {
401 | KeyEvent.KEYCODE_VOLUME_UP -> {
402 | if (action == KeyEvent.ACTION_DOWN) {
403 | mShutterPressed = true
404 | }
405 | true
406 | }
407 | else -> super.dispatchKeyEvent(event)
408 | }
409 | }
410 | }
--------------------------------------------------------------------------------
/android/app/src/main/java/jp/araobp/camera/Properties.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera
2 |
3 | import android.content.Context
4 |
5 | class Properties(val context: Context) {
6 |
7 | companion object {
8 | const val PREFS_NAME = "camera"
9 |
10 | const val IMAGE_ASPECT_RATIO = 4F/3F
11 | const val SHIFT_IMAGE = 100
12 |
13 | const val MQTT_TOPIC_IMAGE = "image"
14 | }
15 |
16 | var mqttServer = "localhost"
17 | var mqttUsername = "simulator"
18 | var mqttPassword = "simulator"
19 | var remoteCamera = false
20 | var showFps = false
21 |
22 | init {
23 | load()
24 | }
25 |
26 | fun load() {
27 | val prefs = context.getSharedPreferences(PREFS_NAME, Context.MODE_PRIVATE)
28 | mqttServer = prefs.getString("mqttServer", "localhost").toString()
29 | mqttUsername = prefs.getString("mqttUsername", "anonymous").toString()
30 | mqttPassword = prefs.getString("mqttPassword", "password").toString()
31 | remoteCamera = prefs.getBoolean("remoteCamera", false)
32 | showFps = prefs.getBoolean("fps", false)
33 | }
34 |
35 | fun save() {
36 | val editor = context.getSharedPreferences(PREFS_NAME, Context.MODE_PRIVATE).edit()
37 | editor.putString("mqttServer", mqttServer)
38 | editor.putString("mqttUsername", mqttUsername)
39 | editor.putString("mqttPassword", mqttPassword)
40 | editor.putBoolean("remoteCamera", remoteCamera)
41 | editor.putBoolean("fps", showFps)
42 | editor.apply()
43 | }
44 | }
--------------------------------------------------------------------------------
/android/app/src/main/java/jp/araobp/camera/aicamera/DrawPaint.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera.aicamera
2 |
3 | import android.graphics.Color
4 | import android.graphics.Paint
5 |
6 | /*** Color definition ***/
7 | val BLACK = Color.rgb(0, 0, 0)
8 | val WHITE = Color.rgb(255, 255, 255)
9 | val RED = Color.rgb(255, 0, 0)
10 | val BLUE = Color.rgb(0, 0, 255)
11 | val YELLOW = Color.rgb(255, 255, 0)
12 | val CYAN = Color.rgb(0, 255, 255)
13 | val GRAY = Color.rgb(128, 128, 128)
14 | val LIGHT_GRAY = Color.rgb(211, 211, 211)
15 | val MAROON = Color.rgb(128, 0, 0)
16 | val OLIVE = Color.rgb(128, 128, 0)
17 | val GREEN = Color.rgb(0, 255, 0)
18 | val PURPLE = Color.rgb(128, 0, 128)
19 | val ORANGE = Color.rgb(255, 165, 0)
20 | val CORAL = Color.rgb(255, 127, 80)
21 | val LIGHT_STEEL_BLUE = Color.rgb(176, 196, 222)
22 | val SANDY_BROWN = Color.rgb(244, 164, 96)
23 | val TEAL = Color.rgb(0, 128, 128)
24 | val PINK = Color.rgb(255, 192, 203)
25 | val LAVENDER = Color.rgb(230, 230, 250)
26 |
27 | val paint = Paint()
28 |
29 | val boundingBoxColors = mapOf(
30 | "person" to RED,
31 | "train" to GREEN)
32 |
33 | fun paintBoundingBox(title: String): Paint {
34 | paint.apply {
35 | style = Paint.Style.STROKE
36 | strokeWidth = 2F
37 | if (title in boundingBoxColors.keys) {
38 | color = boundingBoxColors.getValue(title)
39 | } else {
40 | color = ORANGE
41 | }
42 | }
43 | return paint
44 | }
45 |
46 | fun paintTitleBox(paintColor: Int): Paint {
47 | paint.apply {
48 | style = Paint.Style.FILL
49 | color = paintColor
50 | textSize = 16F
51 | }
52 | return paint
53 | }
--------------------------------------------------------------------------------
/android/app/src/main/java/jp/araobp/camera/aicamera/ObjectDetector.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera.aicamera
2 |
3 | import android.content.Context
4 | import android.graphics.*
5 | import jp.araobp.camera.tflite.Classifier
6 | import jp.araobp.camera.tflite.SsdMobileNetV2
7 | import jp.araobp.camera.util.roundToTheNth
8 | import kotlin.math.abs
9 |
10 | class ObjectDetector(context: Context) {
11 |
12 | companion object {
13 | val TAG: String = this::class.java.simpleName
14 |
15 | const val MAX_NUM_RECOGNITIONS = 5
16 | const val CONFIDENCE_THRES = 40
17 |
18 | const val OBJECT_OFFSET_LEFT = 30 // 30 pixels
19 | const val OBJECT_OFFSET_RIGHT = SsdMobileNetV2.INPUT_SIZE - 30 // 30 pixels
20 | const val OBJECT_OFFSET_TOP = 30 // 30 pixels
21 | const val OBJECT_OFFSET_BOTTOM = SsdMobileNetV2.INPUT_SIZE - 30 // 30 pixels
22 | const val MAX_OBJECT_AREA = SsdMobileNetV2.INPUT_SIZE * SsdMobileNetV2.INPUT_SIZE * 3 / 4
23 | const val MIN_OBJECT_AREA = SsdMobileNetV2.INPUT_SIZE * SsdMobileNetV2.INPUT_SIZE * 1 / 500
24 | }
25 |
26 | private var results = ArrayList<Classifier.Recognition>()
27 |
28 | // Object detector: SSD Mobilenet
29 | private val ssdMobileNetV2 = SsdMobileNetV2(
30 | context, numThreads = 2
31 | )
32 |
33 | fun detect(bitmapFiltered: Bitmap,
34 | bitmapOriginal: Bitmap,
35 | confidenceThres: Int = CONFIDENCE_THRES,
36 | personOnly: Boolean = false): Bitmap? {
37 | // Scale down to 300x300 tensor as input for SSD MobileNetv2
38 | val inputBitmap = Bitmap.createScaledBitmap(
39 | bitmapOriginal,
40 | SsdMobileNetV2.INPUT_SIZE,
41 | SsdMobileNetV2.INPUT_SIZE,
42 | false
43 | )
44 |
45 | // Execute object detection by SSD MobileNetv2
46 | val result = ssdMobileNetV2.recognizeImage(inputBitmap)
47 | //Log.d(TAG, "Result: $result")
48 |
49 | bitmapOriginal.recycle()
50 |
51 | var newBitmap: Bitmap? = null
52 |
53 | try {
54 | result?.let {
55 | // Immutable bitmap to mutable bitmap
56 | newBitmap = bitmapFiltered.copy(Bitmap.Config.ARGB_8888, true)
57 | bitmapFiltered.recycle()
58 | val canvas = Canvas(newBitmap!!)
59 | results.clear()
60 | for (i in 0 until minOf(result.size, MAX_NUM_RECOGNITIONS)) {
61 |
62 | val r = result[i]
63 | val location = r!!.getLocation()
64 | val confidence = r.confidence
65 | val title = r.title
66 |
67 | if (!(personOnly && title != "person")) {
68 | if (confidence!! > confidenceThres / 100F) {
69 |
70 | // Rectangle location on 300x300 input tensor
71 | val w = location.right - location.left
72 | val h = location.bottom - location.top
73 | val s = abs(w * h)
74 |
75 | // Check if the recognized object fits in frame of each input bitmap
76 | if (location.left > OBJECT_OFFSET_LEFT && location.right < OBJECT_OFFSET_RIGHT
77 | && location.top > OBJECT_OFFSET_TOP && location.bottom < OBJECT_OFFSET_BOTTOM &&
78 | s < MAX_OBJECT_AREA && s > MIN_OBJECT_AREA
79 | ) {
80 |
81 | val xRatio = canvas.width.toFloat() / SsdMobileNetV2.INPUT_SIZE
82 | val yRatio = canvas.height.toFloat() / SsdMobileNetV2.INPUT_SIZE
83 | val rectF = RectF(
84 | location.left * xRatio,
85 | location.top * yRatio,
86 | location.right * xRatio,
87 | location.bottom * yRatio
88 | )
89 |
90 | // Draw text
91 | val paint = paintBoundingBox(title!!)
92 | canvas.drawRoundRect(rectF, 8F, 8F, paint)
93 |
94 | val confidenceInPercent = (confidence * 100F).roundToTheNth(1)
95 | val text = "$title ${confidenceInPercent}%"
96 | canvas.drawText(
97 | text,
98 | rectF.left,
99 | rectF.top - 10F,
100 | paintTitleBox(paint.color)
101 | )
102 |
103 | results.add(r)
104 | }
105 | }
106 | }
107 | }
108 | }
109 | } catch (e: Exception) {
110 | e.printStackTrace()
111 | }
112 |
113 | return newBitmap
114 | }
115 | }
--------------------------------------------------------------------------------
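A minimal usage sketch for ObjectDetector, mirroring the call in MainActivity.processImage(). detect() takes the filtered bitmap to draw on plus the unfiltered original as the detector input, and returns null when no annotated bitmap could be produced, so the caller should keep a fallback; `context`, `filteredBitmap`, `originalBitmap` and the helper name annotate are stand-ins here.

```
// Sketch only: both bitmaps are ARGB_8888 frames of the same size.
import android.content.Context
import android.graphics.Bitmap
import jp.araobp.camera.aicamera.ObjectDetector

fun annotate(context: Context, filteredBitmap: Bitmap, originalBitmap: Bitmap): Bitmap {
    val detector = ObjectDetector(context)   // MainActivity creates this once in onCreate()
    val annotated = detector.detect(
        bitmapFiltered = filteredBitmap,     // frame after the OpenCV stage; boxes are drawn on a copy of it
        bitmapOriginal = originalBitmap,     // untouched frame, scaled to the SSD MobileNet input size internally
        confidenceThres = 40,                // percent; detections below this are dropped
        personOnly = false                   // true keeps only "person" detections
    )
    return annotated ?: filteredBitmap       // detect() may return null; keep the filtered frame then
}
```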
/android/app/src/main/java/jp/araobp/camera/net/IMqttReceiver.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera.net
2 |
3 | import org.eclipse.paho.client.mqttv3.MqttMessage
4 |
5 | interface IMqttReceiver {
6 | fun messageArrived(topic: String?, message: MqttMessage?)
7 | }
--------------------------------------------------------------------------------
/android/app/src/main/java/jp/araobp/camera/net/MqttClient.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera.net
2 |
3 | import android.content.Context
4 | import android.util.Log
5 | import org.eclipse.paho.android.service.MqttAndroidClient
6 | import org.eclipse.paho.client.mqttv3.*
7 | import org.json.JSONArray
8 | import org.json.JSONObject
9 |
10 | class MqttClient(
11 | val context: Context,
12 | val mqttServer: String,
13 | val mqttUsername: String,
14 | val mqttPassword: String,
15 | val clientId: String,
16 | val receiver: IMqttReceiver
17 | ) : MqttCallback {
18 |
19 | private var mTopicList = ArrayList<String>()
20 |
21 | override fun messageArrived(topic: String?, message: MqttMessage?) {
22 | receiver.messageArrived(topic, message)
23 | }
24 |
25 | override fun connectionLost(cause: Throwable?) {
26 | //TODO("Not yet implemented")
27 | }
28 |
29 | override fun deliveryComplete(token: IMqttDeliveryToken?) {
30 | //TODO("Not yet implemented")
31 | }
32 |
33 | companion object {
34 | val TAG: String = this::class.java.simpleName
35 | val MQTT_PORT = "1883"
36 | }
37 |
38 | private val mMqttClient = MqttAndroidClient(
39 | context, "tcp://$mqttServer:$MQTT_PORT", clientId
40 | )
41 |
42 | fun publish(topic: String, jsonObject: JSONObject) {
43 | try {
44 | if (mMqttClient.isConnected) {
45 | val mqttMessage =
46 | MqttMessage(jsonObject.toString().toByteArray(charset = Charsets.UTF_8))
47 | mMqttClient.publish("${topic}Rx", mqttMessage)
48 | }
49 | } catch (e: IllegalArgumentException) {
50 | Log.d(TAG, "This mqtt client has already been removed")
51 | }
52 | }
53 |
54 | fun subscribe(topic: String) {
55 | if (mMqttClient.isConnected) {
56 | mMqttClient.subscribe(topic, 0)
57 | mTopicList.add(topic)
58 | }
59 | }
60 |
61 | fun subscribe(topicList: List<String>) {
62 | if (mMqttClient.isConnected) {
63 | topicList.forEach {
64 | mMqttClient.subscribe(it, 0)
65 | mTopicList.add(it)
66 | }
67 | }
68 | }
69 |
70 | fun unsubscribeAllAndSubscribe(topic: String) {
71 | if (mMqttClient.isConnected && mTopicList.size > 0) {
72 | mTopicList.forEach {
73 | mMqttClient.unsubscribe("${it}Tx")
74 | }
75 | mMqttClient.subscribe("${topic}Tx", 0)
76 | mTopicList.add(topic)
77 | }
78 | }
79 |
80 | fun unsubscribeAllAndSubscribe(topicList: List<String>) {
81 | if (mMqttClient.isConnected && mTopicList.size > 0) {
82 | mTopicList.forEach {
83 | mMqttClient.unsubscribe("${it}Tx")
84 | }
85 | topicList.forEach{
86 | mMqttClient.subscribe("${it}Tx", 0)
87 | mTopicList.add(it)
88 | }
89 | }
90 | }
91 |
92 | fun connect(topic: String) {
93 | try {
94 | mMqttClient.setCallback(this)
95 | val options = MqttConnectOptions()
96 | options.userName = mqttUsername
97 | options.password = mqttPassword.toCharArray()
98 | mMqttClient.connect(options, null, object : IMqttActionListener {
99 | override fun onSuccess(iMqttToken: IMqttToken) {
100 | try {
101 | Log.d(TAG, "onSuccess")
102 | mMqttClient.subscribe("${topic}Tx", 0)
103 | mTopicList.add(topic)
104 | } catch (e: MqttException) {
105 | Log.d(TAG, e.toString())
106 | }
107 | }
108 |
109 | override fun onFailure(
110 | iMqttToken: IMqttToken,
111 | throwable: Throwable
112 | ) {
113 | Log.d(TAG, "onFailure")
114 | }
115 | })
116 | } catch (e: MqttException) {
117 | Log.d(TAG, e.toString())
118 | }
119 | }
120 |
121 | fun connect(topicList: List<String>) {
122 | try {
123 | mMqttClient.setCallback(this)
124 | val options = MqttConnectOptions()
125 | options.userName = mqttUsername
126 | options.password = mqttPassword.toCharArray()
127 | mMqttClient.connect(options, null, object : IMqttActionListener {
128 | override fun onSuccess(iMqttToken: IMqttToken) {
129 | Log.d(TAG, "onSuccess")
130 | try {
131 | topicList.forEach {
132 | mMqttClient.subscribe(it, 0)
133 | mTopicList.add(it)
134 | }
135 | } catch (e: MqttException) {
136 | Log.d(TAG, e.toString())
137 | }
138 | }
139 |
140 | override fun onFailure(
141 | iMqttToken: IMqttToken,
142 | throwable: Throwable
143 | ) {
144 | Log.d(TAG, "onFailure")
145 | }
146 | })
147 | } catch (e: MqttException) {
148 | Log.d(TAG, e.toString())
149 | }
150 | }
151 |
152 | fun destroy() {
153 | mTopicList.forEach {
154 | mMqttClient.unsubscribe("${it}Tx")
155 | }
156 | mMqttClient.unregisterResources()
157 | if (mMqttClient.isConnected) {
158 | mMqttClient.disconnect()
159 | }
160 | }
161 |
162 | }
--------------------------------------------------------------------------------
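A minimal sketch of how MainActivity drives this client for the virtual-camera (remote) mode: build it with the saved Properties, connect with the list of topics to subscribe to, and call destroy() in onPause(). Note that publish() appends an "Rx" suffix to the topic and destroy() unsubscribes with a "Tx" suffix, while connect(topicList) subscribes to the names as given; `activity` and `props` are stand-ins.

```
// Sketch only: mirrors MainActivity.onResume()/onPause().
import android.app.Activity
import jp.araobp.camera.Properties
import jp.araobp.camera.net.IMqttReceiver
import jp.araobp.camera.net.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage

fun startRemoteCamera(activity: Activity, props: Properties): MqttClient {
    val client = MqttClient(
        context = activity,
        mqttServer = props.mqttServer,       // host name only; port 1883 is appended by the client
        mqttUsername = props.mqttUsername,
        mqttPassword = props.mqttPassword,
        clientId = "camera",
        receiver = object : IMqttReceiver {
            override fun messageArrived(topic: String?, message: MqttMessage?) {
                // message?.payload carries a JPEG frame on the "image" topic in this project
            }
        }
    )
    client.connect(listOf(Properties.MQTT_TOPIC_IMAGE))   // subscriptions happen once the connection succeeds
    return client   // call destroy() on it in onPause(), as MainActivity does
}
```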
/android/app/src/main/java/jp/araobp/camera/opecv/ColorFilter.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera.opecv
2 |
3 | import android.graphics.Bitmap
4 | import org.opencv.android.Utils
5 | import org.opencv.core.*
6 | import org.opencv.imgproc.Imgproc
7 |
8 | // Reference: https://stackoverflow.com/questions/51229126/how-to-find-the-red-color-regions-using-opencv
9 | val COLOR_RANGES = mapOf(
10 | "red" to listOf(Scalar(0.0, 180.0, 120.0), Scalar(15.0, 255.0, 255.0)),
11 | "pink" to listOf(Scalar(145.0, 120.0, 120.0), Scalar(180.0, 255.0, 255.0)),
12 | "yellow" to listOf(Scalar(22.0, 180.0, 120.0), Scalar(33.0, 255.0, 255.0)),
13 | "orange" to listOf(Scalar(15.0, 120.0, 120.0), Scalar(22.0, 255.0, 255.0)),
14 | "green" to listOf(Scalar(33.0, 80.0, 60.0), Scalar(70.0, 255.0, 255.0)),
15 | "blue" to listOf(Scalar(90.0, 80.0, 60.0), Scalar(130.0, 255.0, 255.0)),
16 | "black" to listOf(Scalar(0.0, 0.0, 0.0), Scalar(360.0, 255.0, 20.0)),
17 | "white" to listOf(Scalar(0.0, 0.0, 80.0), Scalar(360.0, 20.0, 255.0))
18 | )
19 |
20 | /**
21 | * Color filter
22 | *
23 | * @param src source image as an OpenCV Mat (RGB/RGBA)
24 | * @param colorRangeId one or more keys of COLOR_RANGES, e.g. "yellow" and "red"
25 | */
26 | fun colorFilter(src: Mat, vararg colorRangeId: String): Mat {
27 | for (id in colorRangeId) {
28 | check(COLOR_RANGES.containsKey(id))
29 | }
30 |
31 | val dst = Mat() // RGB
32 | val srcHsv = Mat() // HSV
33 | var mask: Mat? = null
34 |
35 | Imgproc.cvtColor(src, srcHsv, Imgproc.COLOR_RGB2HSV)
36 |
37 | for (id in colorRangeId) {
38 | val tempMask = Mat()
39 | Core.inRange(
40 | srcHsv,
41 | COLOR_RANGES.getValue(id)[0],
42 | COLOR_RANGES.getValue(id)[1],
43 | tempMask
44 | )
45 | if (mask == null) {
46 | mask = tempMask
47 | } else {
48 | Core.add(mask, tempMask, mask)
49 | }
50 | }
51 |
52 | src.copyTo(dst, mask)
53 | return dst
54 | }
55 |
--------------------------------------------------------------------------------
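A minimal standalone sketch of colorFilter(): the input Mat is expected in RGB/RGBA order (as produced by Utils.bitmapToMat or the yuvToRgba extension), and every key must be one of the COLOR_RANGES entries above; unknown keys fail the check() precondition. The helper name keepGreenAndBlue is illustrative.

```
// Sketch only: round-trips a Bitmap through OpenCV and keeps only green and blue regions.
import android.graphics.Bitmap
import jp.araobp.camera.opecv.colorFilter
import org.opencv.android.Utils
import org.opencv.core.Mat

fun keepGreenAndBlue(bitmap: Bitmap): Bitmap {
    val src = Mat()
    Utils.bitmapToMat(bitmap, src)                      // RGBA Mat
    val filtered = colorFilter(src, "green", "blue")    // the masks are combined; everything outside the ranges stays black
    val out = Bitmap.createBitmap(filtered.cols(), filtered.rows(), Bitmap.Config.ARGB_8888)
    Utils.matToBitmap(filtered, out)
    return out
}
```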
/android/app/src/main/java/jp/araobp/camera/opecv/DifferenceExtractor.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera.opecv
2 |
3 | import org.opencv.core.*
4 | import org.opencv.imgproc.Imgproc
5 | import org.opencv.imgproc.Imgproc.LINE_8
6 | import org.opencv.imgproc.Imgproc.LINE_AA
7 | import org.opencv.video.Video
8 |
9 |
10 | class DifferenceExtractor {
11 |
12 | private var mBackgroundSubtractor = Video.createBackgroundSubtractorMOG2()
13 |
14 | fun update(mat: Mat, contour: Boolean = false): Mat {
15 | val maskImg = Mat()
16 | val contours: List<MatOfPoint> = ArrayList()
17 | val hierarchy = Mat()
18 |
19 | mBackgroundSubtractor.apply(mat, maskImg)
20 |
21 | Imgproc.medianBlur(maskImg, maskImg, 11)
22 |
23 | Imgproc.threshold(
24 | maskImg, maskImg, 32.0, 255.0, Imgproc.THRESH_BINARY
25 | )
26 |
27 | Imgproc.findContours(
28 | maskImg, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE
29 | )
30 |
31 | lateinit var img: Mat
32 |
33 | if (contour) {
34 | img = mat.clone()
35 | for (i in contours.indices) {
36 | Imgproc.drawContours(img, contours, i, COLOR_RED, 2, LINE_AA, hierarchy, 1, Point())
37 | }
38 | } else {
39 | img = maskImg
40 | }
41 |
42 | return img
43 | }
44 | }
45 |
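A minimal per-frame sketch (not part of this repository) showing how DifferenceExtractor might be driven from a frame callback; the function name is illustrative, and a single extractor instance is reused so the background model accumulates across frames:

// Hypothetical per-frame hook, not in the repo.
package jp.araobp.camera.opecv

import org.opencv.core.Mat

private val differenceExtractor = DifferenceExtractor()

fun highlightMotion(frame: Mat): Mat {
    // contour = true  -> original frame with red contours drawn around moving regions
    // contour = false -> binary foreground mask after median blur and thresholding
    return differenceExtractor.update(frame, contour = true)
}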
--------------------------------------------------------------------------------
/android/app/src/main/java/jp/araobp/camera/opecv/DrawColor.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera.opecv
2 |
3 | import org.opencv.core.Scalar
4 |
5 | val COLOR_RED = Scalar(0.0, 0.0, 255.0)
6 |
--------------------------------------------------------------------------------
/android/app/src/main/java/jp/araobp/camera/opecv/OpticalFlowFarneback.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera.opecv
2 |
3 | import org.opencv.core.*
4 | import org.opencv.imgproc.Imgproc.*
5 | import org.opencv.video.Video
6 |
7 | // [Reference] http://me10.sblo.jp/article/88289624.html
8 | class OpticalFlowFarneback {
9 |
10 | private var mPrevMat: Mat? = null
11 | private lateinit var mCurrentMat: Mat
12 |
13 | fun update(src: Mat): Mat {
14 |
15 | val pt1 = Point()
16 | val pt2 = Point()
17 |
18 | if (mPrevMat == null) {
19 | mCurrentMat = src.clone()
20 | cvtColor(mCurrentMat, mCurrentMat, COLOR_RGBA2GRAY)
21 | mPrevMat = mCurrentMat.clone()
22 | } else {
23 | mPrevMat = mCurrentMat
24 | mCurrentMat = src.clone()
25 | cvtColor(mCurrentMat, mCurrentMat, COLOR_RGBA2GRAY)
26 | }
27 |
28 | val flow = Mat(mCurrentMat.size(), CvType.CV_32FC2)
29 | Video.calcOpticalFlowFarneback(
30 | mPrevMat, mCurrentMat,
31 | flow, 0.5, 3, 15, 3, 5, 1.5, 0
32 | )
33 |
34 | val dst = src.clone()
35 |
36 | var i = 0
37 | while (i < mCurrentMat.size().height) {
38 | var j = 0
39 | while (j < mCurrentMat.size().width) {
40 | pt1.x = j.toDouble()
41 | pt1.y = i.toDouble()
42 | pt2.x = j + flow[i, j][0]
43 | pt2.y = i + flow[i, j][1]
44 | val color = Scalar(255.0, 0.0, 0.0, 255.0)
45 | arrowedLine(
46 | dst,
47 | pt1,
48 | pt2,
49 | color, 2, LINE_8, 0, 0.6
50 | )
51 | j += 20
52 | }
53 | i += 20
54 | }
55 |
56 | return dst
57 | }
58 | }
59 |
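A minimal sketch (not part of this repository) of how OpticalFlowFarneback might be used; because the class stores the previous grayscale frame internally, one instance must be reused across consecutive frames:

// Hypothetical per-frame hook, not in the repo.
package jp.araobp.camera.opecv

import org.opencv.core.Mat

private val opticalFlow = OpticalFlowFarneback()

fun drawFlowArrows(rgbaFrame: Mat): Mat {
    // Returns a copy of the frame with flow arrows drawn on a 20-pixel grid.
    return opticalFlow.update(rgbaFrame)
}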
--------------------------------------------------------------------------------
/android/app/src/main/java/jp/araobp/camera/opecv/Utils.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera.opecv
2 |
3 | import android.graphics.ImageFormat
4 | import android.media.Image
5 | import org.opencv.core.CvType
6 | import org.opencv.core.Mat
7 | import org.opencv.imgproc.Imgproc
8 |
9 | // [Reference] https://stackoverflow.com/questions/58102717/android-camerax-analyzer-image-with-format-yuv-420-888-to-opencv-mat
10 | // Ported from opencv private class JavaCamera2Frame
11 | fun Image.yuvToRgba(): Mat {
12 | val rgbaMat = Mat()
13 |
14 | if (format == ImageFormat.YUV_420_888
15 | && planes.size == 3) {
16 |
17 | val chromaPixelStride = planes[1].pixelStride
18 |
19 | if (chromaPixelStride == 2) { // Chroma channels are interleaved
20 | assert(planes[0].pixelStride == 1)
21 | assert(planes[2].pixelStride == 2)
22 | val yPlane = planes[0].buffer
23 | val uvPlane1 = planes[1].buffer
24 | val uvPlane2 = planes[2].buffer
25 | val yMat = Mat(height, width, CvType.CV_8UC1, yPlane)
26 | val uvMat1 = Mat(height / 2, width / 2, CvType.CV_8UC2, uvPlane1)
27 | val uvMat2 = Mat(height / 2, width / 2, CvType.CV_8UC2, uvPlane2)
28 | val addrDiff = uvMat2.dataAddr() - uvMat1.dataAddr()
29 | if (addrDiff > 0) {
30 | assert(addrDiff == 1L)
31 | Imgproc.cvtColorTwoPlane(yMat, uvMat1, rgbaMat, Imgproc.COLOR_YUV2RGBA_NV12)
32 | } else {
33 | assert(addrDiff == -1L)
34 | Imgproc.cvtColorTwoPlane(yMat, uvMat2, rgbaMat, Imgproc.COLOR_YUV2RGBA_NV21)
35 | }
36 | } else { // Chroma channels are not interleaved
37 | val yuvBytes = ByteArray(width * (height + height / 2))
38 | val yPlane = planes[0].buffer
39 | val uPlane = planes[1].buffer
40 | val vPlane = planes[2].buffer
41 |
42 | yPlane.get(yuvBytes, 0, width * height)
43 |
44 | val chromaRowStride = planes[1].rowStride
45 | val chromaRowPadding = chromaRowStride - width / 2
46 |
47 | var offset = width * height
48 | if (chromaRowPadding == 0) {
49 | // When the row stride of the chroma channels equals their width, we can copy
50 | // the entire channels in one go
51 | uPlane.get(yuvBytes, offset, width * height / 4)
52 | offset += width * height / 4
53 | vPlane.get(yuvBytes, offset, width * height / 4)
54 | } else {
55 | // When not equal, we need to copy the channels row by row
56 | for (i in 0 until height / 2) {
57 | uPlane.get(yuvBytes, offset, width / 2)
58 | offset += width / 2
59 | if (i < height / 2 - 1) {
60 | uPlane.position(uPlane.position() + chromaRowPadding)
61 | }
62 | }
63 | for (i in 0 until height / 2) {
64 | vPlane.get(yuvBytes, offset, width / 2)
65 | offset += width / 2
66 | if (i < height / 2 - 1) {
67 | vPlane.position(vPlane.position() + chromaRowPadding)
68 | }
69 | }
70 | }
71 |
72 | val yuvMat = Mat(height + height / 2, width, CvType.CV_8UC1)
73 | yuvMat.put(0, 0, yuvBytes)
74 | Imgproc.cvtColor(yuvMat, rgbaMat, Imgproc.COLOR_YUV2RGBA_I420, 4)
75 | }
76 | }
77 |
78 | return rgbaMat
79 | }
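A minimal CameraX analyzer sketch (not part of this repository) showing where Image.yuvToRgba() would typically be called; the analyzer class name is illustrative, and ImageProxy.getImage() is experimental in some CameraX releases, so an opt-in annotation may be required instead of the lint suppression shown here:

// Hypothetical CameraX ImageAnalysis.Analyzer, not in the repo.
package jp.araobp.camera.opecv

import android.annotation.SuppressLint
import androidx.camera.core.ImageAnalysis
import androidx.camera.core.ImageProxy
import org.opencv.core.Mat

class OpenCvFrameAnalyzer(private val onFrame: (Mat) -> Unit) : ImageAnalysis.Analyzer {

    @SuppressLint("UnsafeOptInUsageError")
    override fun analyze(imageProxy: ImageProxy) {
        imageProxy.image?.let { image ->
            val rgba: Mat = image.yuvToRgba()   // YUV_420_888 -> RGBA Mat
            onFrame(rgba)
        }
        imageProxy.close()                      // must be closed to receive the next frame
    }
}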
--------------------------------------------------------------------------------
/android/app/src/main/java/jp/araobp/camera/tflite/Classifier.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera.tflite
2 |
3 | import android.graphics.Bitmap
4 | import android.graphics.RectF
5 |
6 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
7 |
8 | Licensed under the Apache License, Version 2.0 (the "License");
9 | you may not use this file except in compliance with the License.
10 | You may obtain a copy of the License at
11 |
12 | http://www.apache.org/licenses/LICENSE-2.0
13 |
14 | Unless required by applicable law or agreed to in writing, software
15 | distributed under the License is distributed on an "AS IS" BASIS,
16 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | See the License for the specific language governing permissions and
18 | limitations under the License.
19 | ==============================================================================*/
20 |
21 | /** Generic interface for interacting with different recognition engines. */
22 | interface Classifier {
23 | fun recognizeImage(bitmap: Bitmap?): List<Recognition>?
24 | fun enableStatLogging(debug: Boolean)
25 | val statString: String?
26 |
27 | fun close()
28 |
29 | /** An immutable result returned by a Classifier describing what was recognized. */
30 | class Recognition(
31 | /**
32 | * A unique identifier for what has been recognized. Specific to the class, not the instance of
33 | * the object.
34 | */
35 | val id: String?,
36 | /** Display name for the recognition. */
37 | val title: String?,
38 | /**
39 | * A sortable score for how good the recognition is relative to others. Higher should be better.
40 | */
41 | val confidence: Float?,
42 | /** Optional location within the source image for the location of the recognized object. */
43 | private var location: RectF?
44 | ) {
45 |
46 | fun getLocation(): RectF {
47 | return RectF(location)
48 | }
49 |
50 | fun setLocation(location: RectF?) {
51 | this.location = location
52 | }
53 |
54 | override fun toString(): String {
55 | var resultString = ""
56 | if (id != null) {
57 | resultString += "[$id] "
58 | }
59 | if (title != null) {
60 | resultString += "$title "
61 | }
62 | if (confidence != null) {
63 | resultString += String.format("(%.1f%%) ", confidence * 100.0f)
64 | }
65 | if (location != null) {
66 | resultString += location.toString() + " "
67 | }
68 | return resultString.trim { it <= ' ' }
69 | }
70 |
71 | }
72 | }
--------------------------------------------------------------------------------
/android/app/src/main/java/jp/araobp/camera/tflite/SsdMobilenetV2.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera.tflite
2 |
3 | import android.content.Context
4 | import android.graphics.Bitmap
5 | import android.graphics.Color
6 | import android.graphics.Paint
7 |
8 | class SsdMobileNetV2(context: Context, numThreads: Int = 2) {
9 |
10 | companion object {
11 | const val INPUT_SIZE = 300
12 | private const val TF_OD_API_IS_QUANTIZED = true
13 | private val TF_OD_API_MODEL_FILE = arrayOf("detect.tflite", "labelmap.txt")
14 | }
15 |
16 | private val detector: Classifier
17 |
18 | private val paint = Paint()
19 |
20 | init {
21 | paint.apply {
22 | style = Paint.Style.FILL_AND_STROKE
23 | color = Color.BLUE
24 | textSize = 20F
25 | }
26 | detector = TFLiteObjectDetectionAPIModel.create(
27 | context.assets,
28 | TF_OD_API_MODEL_FILE[0],
29 | TF_OD_API_MODEL_FILE[1],
30 | INPUT_SIZE,
31 | TF_OD_API_IS_QUANTIZED,
32 | numThreads
33 | )
34 | }
35 |
36 | fun recognizeImage(src: Bitmap): List<Classifier.Recognition>? {
37 | check(src.width == INPUT_SIZE && src.height == INPUT_SIZE)
38 | return detector.recognizeImage( src )
39 | }
40 | }
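A minimal caller sketch (not part of this repository): the frame is scaled to the 300x300 input the SSD model expects before recognizeImage() is invoked; the 0.5 confidence threshold is an assumption, and a single SsdMobileNetV2 instance should be reused because construction loads the model from assets:

// Hypothetical caller, not in the repo.
package jp.araobp.camera.tflite

import android.graphics.Bitmap

fun detect(detector: SsdMobileNetV2, frame: Bitmap): List<Classifier.Recognition> {
    val input = Bitmap.createScaledBitmap(
        frame, SsdMobileNetV2.INPUT_SIZE, SsdMobileNetV2.INPUT_SIZE, true
    )
    return detector.recognizeImage(input)
        ?.filter { (it.confidence ?: 0f) >= 0.5f }   // 0.5 cut-off is an assumption
        ?: emptyList()
}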
--------------------------------------------------------------------------------
/android/app/src/main/java/jp/araobp/camera/tflite/TFLiteObjectDetectionAPIModel.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package jp.araobp.camera.tflite;
17 |
18 | import android.content.res.AssetFileDescriptor;
19 | import android.content.res.AssetManager;
20 | import android.graphics.Bitmap;
21 | import android.graphics.RectF;
22 | import android.os.Trace;
23 | import java.io.BufferedReader;
24 | import java.io.FileInputStream;
25 | import java.io.IOException;
26 | import java.io.InputStream;
27 | import java.io.InputStreamReader;
28 | import java.nio.ByteBuffer;
29 | import java.nio.ByteOrder;
30 | import java.nio.MappedByteBuffer;
31 | import java.nio.channels.FileChannel;
32 | import java.util.ArrayList;
33 | import java.util.HashMap;
34 | import java.util.List;
35 | import java.util.Map;
36 | import java.util.Vector;
37 |
38 | import org.tensorflow.lite.Interpreter;
39 |
40 | /**
41 | * Wrapper for frozen detection models trained using the Tensorflow Object Detection API:
42 | * github.com/tensorflow/models/tree/master/research/object_detection
43 | */
44 | public class TFLiteObjectDetectionAPIModel implements Classifier {
45 | //private static final Logger LOGGER = new Logger();
46 |
47 | // Only return this many results.
48 | private static final int NUM_DETECTIONS = 10;
49 | // Float model
50 | private static final float IMAGE_MEAN = 128.0f;
51 | private static final float IMAGE_STD = 128.0f;
52 | private boolean isModelQuantized;
53 | // Config values.
54 | private int inputSize;
55 | // Pre-allocated buffers.
56 | private Vector<String> labels = new Vector<String>();
57 | private int[] intValues;
58 | // outputLocations: array of shape [Batchsize, NUM_DETECTIONS,4]
59 | // contains the location of detected boxes
60 | private float[][][] outputLocations;
61 | // outputClasses: array of shape [Batchsize, NUM_DETECTIONS]
62 | // contains the classes of detected boxes
63 | private float[][] outputClasses;
64 | // outputScores: array of shape [Batchsize, NUM_DETECTIONS]
65 | // contains the scores of detected boxes
66 | private float[][] outputScores;
67 | // numDetections: array of shape [Batchsize]
68 | // contains the number of detected boxes
69 | private float[] numDetections;
70 |
71 | private ByteBuffer imgData;
72 |
73 | private Interpreter tfLite;
74 |
75 | private TFLiteObjectDetectionAPIModel() {}
76 |
77 | /** Memory-map the model file in Assets. */
78 | private static MappedByteBuffer loadModelFile(AssetManager assets, String modelFilename)
79 | throws IOException {
80 | AssetFileDescriptor fileDescriptor = assets.openFd(modelFilename);
81 | FileInputStream inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
82 | FileChannel fileChannel = inputStream.getChannel();
83 | long startOffset = fileDescriptor.getStartOffset();
84 | long declaredLength = fileDescriptor.getDeclaredLength();
85 | return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
86 | }
87 |
88 | /**
89 | * Initializes a native TensorFlow session for classifying images.
90 | *
91 | * @param assetManager The asset manager to be used to load assets.
92 | * @param modelFilename The filepath of the model GraphDef protocol buffer.
93 | * @param labelFilename The filepath of label file for classes.
94 | * @param inputSize The size of image input
95 | * @param isQuantized Boolean representing model is quantized or not
96 | */
97 | public static Classifier create(
98 | final AssetManager assetManager,
99 | final String modelFilename,
100 | final String labelFilename,
101 | final int inputSize,
102 | final boolean isQuantized,
103 | final int numThreads)
104 | throws IOException {
105 | final TFLiteObjectDetectionAPIModel d = new TFLiteObjectDetectionAPIModel();
106 |
107 | InputStream labelsInput = null;
108 | // String actualFilename = labelFilename.split("file:///android_asset/")[1];
109 | // labelsInput = assetManager.open(actualFilename);
110 | labelsInput = assetManager.open(labelFilename);
111 | BufferedReader br = null;
112 | br = new BufferedReader(new InputStreamReader(labelsInput));
113 | String line;
114 | while ((line = br.readLine()) != null) {
115 | //LOGGER.w(line);
116 | d.labels.add(line);
117 | }
118 | br.close();
119 |
120 | d.inputSize = inputSize;
121 |
122 | Interpreter.Options options = new Interpreter.Options()
123 | .setNumThreads(numThreads);
124 |
125 | try {
126 | d.tfLite = new Interpreter(loadModelFile(assetManager, modelFilename), options);
127 | } catch (Exception e) {
128 | throw new RuntimeException(e);
129 | }
130 |
131 | d.isModelQuantized = isQuantized;
132 | // Pre-allocate buffers.
133 | int numBytesPerChannel;
134 | if (isQuantized) {
135 | numBytesPerChannel = 1; // Quantized
136 | } else {
137 | numBytesPerChannel = 4; // Floating point
138 | }
139 | d.imgData = ByteBuffer.allocateDirect(1 * d.inputSize * d.inputSize * 3 * numBytesPerChannel);
140 | d.imgData.order(ByteOrder.nativeOrder());
141 | d.intValues = new int[d.inputSize * d.inputSize];
142 |
143 | d.outputLocations = new float[1][NUM_DETECTIONS][4];
144 | d.outputClasses = new float[1][NUM_DETECTIONS];
145 | d.outputScores = new float[1][NUM_DETECTIONS];
146 | d.numDetections = new float[1];
147 | return d;
148 | }
149 |
150 | @Override
151 | public List<Recognition> recognizeImage(final Bitmap bitmap) {
152 | // Log this method so that it can be analyzed with systrace.
153 | Trace.beginSection("recognizeImage");
154 |
155 | Trace.beginSection("preprocessBitmap");
156 | // Preprocess the image data from 0-255 int to normalized float based
157 | // on the provided parameters.
158 | bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
159 |
160 | imgData.rewind();
161 | for (int i = 0; i < inputSize; ++i) {
162 | for (int j = 0; j < inputSize; ++j) {
163 | int pixelValue = intValues[i * inputSize + j];
164 | if (isModelQuantized) {
165 | // Quantized model
166 | imgData.put((byte) ((pixelValue >> 16) & 0xFF));
167 | imgData.put((byte) ((pixelValue >> 8) & 0xFF));
168 | imgData.put((byte) (pixelValue & 0xFF));
169 | } else { // Float model
170 | imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
171 | imgData.putFloat((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
172 | imgData.putFloat(((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
173 | }
174 | }
175 | }
176 | Trace.endSection(); // preprocessBitmap
177 |
178 | // Copy the input data into TensorFlow.
179 | Trace.beginSection("feed");
180 | outputLocations = new float[1][NUM_DETECTIONS][4];
181 | outputClasses = new float[1][NUM_DETECTIONS];
182 | outputScores = new float[1][NUM_DETECTIONS];
183 | numDetections = new float[1];
184 |
185 | Object[] inputArray = {imgData};
186 | Map<Integer, Object> outputMap = new HashMap<>();
187 | outputMap.put(0, outputLocations);
188 | outputMap.put(1, outputClasses);
189 | outputMap.put(2, outputScores);
190 | outputMap.put(3, numDetections);
191 | Trace.endSection();
192 |
193 | // Run the inference call.
194 | Trace.beginSection("run");
195 | tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
196 | Trace.endSection();
197 |
198 | // Show the best detections.
199 | // after scaling them back to the input size.
200 | final ArrayList<Recognition> recognitions = new ArrayList<>(NUM_DETECTIONS);
201 | for (int i = 0; i < NUM_DETECTIONS; ++i) {
202 | final RectF detection =
203 | new RectF(
204 | outputLocations[0][i][1] * inputSize,
205 | outputLocations[0][i][0] * inputSize,
206 | outputLocations[0][i][3] * inputSize,
207 | outputLocations[0][i][2] * inputSize);
208 | // SSD Mobilenet V1 Model assumes class 0 is background class
209 | // in label file and class labels start from 1 to number_of_classes+1,
210 | // while outputClasses correspond to class index from 0 to number_of_classes
211 | int labelOffset = 1;
212 | recognitions.add(
213 | new Recognition(
214 | "" + i,
215 | labels.get((int) outputClasses[0][i] + labelOffset),
216 | outputScores[0][i],
217 | detection));
218 | }
219 | Trace.endSection(); // "recognizeImage"
220 | return recognitions;
221 | }
222 |
223 | @Override
224 | public void enableStatLogging(final boolean logStats) {}
225 |
226 | @Override
227 | public String getStatString() {
228 | return "";
229 | }
230 |
231 | @Override
232 | public void close() {}
233 | }
--------------------------------------------------------------------------------
/android/app/src/main/java/jp/araobp/camera/util/Fps.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera.util
2 |
3 | class Fps {
4 |
5 | val lastFrameTime = longArrayOf(0L, 0L, 0L, 0L, 0L)
6 |
7 | fun update(): Double {
8 | lastFrameTime[4] = lastFrameTime[3]
9 | lastFrameTime[3] = lastFrameTime[2]
10 | lastFrameTime[2] = lastFrameTime[1]
11 | lastFrameTime[1] = lastFrameTime[0]
12 | lastFrameTime[0] = System.currentTimeMillis()
13 |
14 | val diff = longArrayOf(0L, 0L, 0L, 0L)
15 | diff[3] = lastFrameTime[3] - lastFrameTime[4]
16 | diff[2] = lastFrameTime[2] - lastFrameTime[3]
17 | diff[1] = lastFrameTime[1] - lastFrameTime[2]
18 | diff[0] = lastFrameTime[0] - lastFrameTime[1]
19 | val avg = diff.average()
20 | return (1000.0/avg).roundToTheNth(1)
21 | }
22 |
23 | }
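A minimal usage sketch (not part of this repository): update() is called once per processed frame, and since the value is averaged over the last five timestamps, the first few readings are not meaningful; the callback name is illustrative:

// Hypothetical per-frame hook, not in the repo.
package jp.araobp.camera.util

private val fps = Fps()

fun onFrameProcessed(showFps: (Double) -> Unit) {
    showFps(fps.update())   // moving-average frames per second
}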
--------------------------------------------------------------------------------
/android/app/src/main/java/jp/araobp/camera/util/MediaStorage.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera.util
2 |
3 | import android.content.ContentValues
4 | import android.content.Context
5 | import android.graphics.Bitmap
6 | import android.net.Uri
7 | import android.provider.MediaStore
8 | import java.io.OutputStream
9 |
10 | // [Reference] https://www.thetopsites.net/article/54787299.shtml
11 | fun saveImage(bitmap: Bitmap, context: Context, folderName: String) {
12 | val values = contentValues()
13 | values.put(MediaStore.Images.Media.RELATIVE_PATH, "Pictures/" + folderName)
14 | values.put(MediaStore.Images.Media.IS_PENDING, true)
15 | // RELATIVE_PATH and IS_PENDING are introduced in API 29.
16 |
17 | val uri: Uri? =
18 | context.contentResolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values)
19 | if (uri != null) {
20 | saveImageToStream(bitmap, context.contentResolver.openOutputStream(uri))
21 | values.put(MediaStore.Images.Media.IS_PENDING, false)
22 | context.contentResolver.update(uri, values, null, null)
23 | }
24 | }
25 |
26 | private fun contentValues(): ContentValues {
27 | val values = ContentValues()
28 | values.put(MediaStore.Images.Media.MIME_TYPE, "image/png")
29 | values.put(MediaStore.Images.Media.DATE_ADDED, System.currentTimeMillis() / 1000)
30 | values.put(MediaStore.Images.Media.DATE_TAKEN, System.currentTimeMillis())
31 | return values
32 | }
33 |
34 | private fun saveImageToStream(bitmap: Bitmap, outputStream: OutputStream?) {
35 | if (outputStream != null) {
36 | try {
37 | bitmap.compress(Bitmap.CompressFormat.PNG, 100, outputStream)
38 | outputStream.close()
39 | } catch (e: Exception) {
40 | e.printStackTrace()
41 | }
42 | }
43 | }
44 |
45 |
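A minimal capture sketch (not part of this repository): RELATIVE_PATH and IS_PENDING require API level 29, so this assumes the app runs on Android 10 or later; the function and folder names are illustrative:

// Hypothetical capture handler, not in the repo.
package jp.araobp.camera.util

import android.content.Context
import android.graphics.Bitmap

fun captureToGallery(context: Context, bitmap: Bitmap) {
    // Saves a PNG into Pictures/AiCamera via MediaStore ("AiCamera" is an assumed folder name)
    saveImage(bitmap, context, folderName = "AiCamera")
}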
--------------------------------------------------------------------------------
/android/app/src/main/java/jp/araobp/camera/util/Utils.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera.util
2 |
3 | import android.content.ContentValues
4 | import android.content.Context
5 | import android.graphics.Bitmap
6 | import android.net.Uri
7 | import android.os.Environment
8 | import android.provider.MediaStore
9 | import java.io.File
10 | import java.io.FileOutputStream
11 | import java.io.OutputStream
12 | import kotlin.math.pow
13 | import kotlin.math.roundToInt
14 |
15 | // Round a Float value to the n-th decimal place
16 | fun Float.roundToTheNth(n: Int): Float {
17 | val magnify = 10F.pow(n)
18 | return (this * magnify).roundToInt() / magnify
19 | }
20 |
21 | // Round a Double value to the n-th decimal place
22 | fun Double.roundToTheNth(n: Int): Double {
23 | val magnify = 10.0.pow(n)
24 | return (this * magnify).roundToInt() / magnify
25 | }
26 |
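A quick sanity-check sketch (not part of this repository) for the rounding extensions above:

// Hypothetical examples, not in the repo.
package jp.araobp.camera.util

fun roundingExamples() {
    println(3.14159.roundToTheNth(1))   // 3.1
    println(3.14159.roundToTheNth(3))   // 3.142
    println(2.71828f.roundToTheNth(2))  // 2.72
}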
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
(vector drawable markup not preserved in this dump)
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable/ic_baseline_camera_alt_24.xml:
--------------------------------------------------------------------------------
(vector drawable markup not preserved in this dump)
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
(vector drawable markup not preserved in this dump)
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
(vector drawable markup not preserved in this dump)
--------------------------------------------------------------------------------
/android/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
(layout XML not preserved in this dump)
--------------------------------------------------------------------------------
/android/app/src/main/res/layout/settings.xml:
--------------------------------------------------------------------------------
(layout XML not preserved in this dump)
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
(adaptive icon XML not preserved in this dump)
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
(adaptive icon XML not preserved in this dump)
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/araobp/android-camera/efc0a66ebe5060b0604d4df8e50a380f95bd34bc/android/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/araobp/android-camera/efc0a66ebe5060b0604d4df8e50a380f95bd34bc/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/araobp/android-camera/efc0a66ebe5060b0604d4df8e50a380f95bd34bc/android/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/araobp/android-camera/efc0a66ebe5060b0604d4df8e50a380f95bd34bc/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/araobp/android-camera/efc0a66ebe5060b0604d4df8e50a380f95bd34bc/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/araobp/android-camera/efc0a66ebe5060b0604d4df8e50a380f95bd34bc/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/araobp/android-camera/efc0a66ebe5060b0604d4df8e50a380f95bd34bc/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/araobp/android-camera/efc0a66ebe5060b0604d4df8e50a380f95bd34bc/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/araobp/android-camera/efc0a66ebe5060b0604d4df8e50a380f95bd34bc/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/araobp/android-camera/efc0a66ebe5060b0604d4df8e50a380f95bd34bc/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/android/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 | <color name="colorPrimary">#6200EE</color>
4 | <color name="colorPrimaryDark">#3700B3</color>
5 | <color name="colorAccent">#03DAC5</color>
6 | </resources>
--------------------------------------------------------------------------------
/android/app/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
(dimension resource markup not preserved in this dump; defined values: 280sp, 180dp, 16dp, 12dp)
--------------------------------------------------------------------------------
/android/app/src/main/res/values/ic_launcher_background.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 | <color name="ic_launcher_background">#030935</color>
4 | </resources>
--------------------------------------------------------------------------------
/android/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 | <string name="app_name">Camera</string>
3 | </resources>
--------------------------------------------------------------------------------
/android/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
(style resource markup not preserved in this dump)
--------------------------------------------------------------------------------
/android/app/src/test/java/jp/araobp/camera/ExampleUnitTest.kt:
--------------------------------------------------------------------------------
1 | package jp.araobp.camera
2 |
3 | import org.junit.Test
4 |
5 | import org.junit.Assert.*
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 | * See [testing documentation](http://d.android.com/tools/testing).
11 | */
12 | class ExampleUnitTest {
13 | @Test
14 | fun addition_isCorrect() {
15 | assertEquals(4, 2 + 2)
16 | }
17 | }
--------------------------------------------------------------------------------
/android/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 | buildscript {
3 | ext.kotlin_version = "1.4.0"
4 | repositories {
5 | google()
6 | jcenter()
7 | }
8 | dependencies {
9 | classpath "com.android.tools.build:gradle:4.0.1"
10 | classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
11 |
12 | // NOTE: Do not place your application dependencies here; they belong
13 | // in the individual module build.gradle files
14 | }
15 | }
16 |
17 | allprojects {
18 | repositories {
19 | google()
20 | jcenter()
21 | maven {
22 | url "https://repo.eclipse.org/content/repositories/paho-snapshots/"
23 | }
24 | }
25 | }
26 |
27 | task clean(type: Delete) {
28 | delete rootProject.buildDir
29 | }
--------------------------------------------------------------------------------
/android/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx2048m
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | # AndroidX package structure to make it clearer which packages are bundled with the
15 | # Android operating system, and which are packaged with your app's APK
16 | # https://developer.android.com/topic/libraries/support-library/androidx-rn
17 | android.useAndroidX=true
18 | # Automatically convert third-party libraries to use AndroidX
19 | android.enableJetifier=true
20 | # Kotlin code style for this project: "official" or "obsolete":
21 | kotlin.code.style=official
--------------------------------------------------------------------------------
/android/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/araobp/android-camera/efc0a66ebe5060b0604d4df8e50a380f95bd34bc/android/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/android/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Sun Sep 20 10:03:34 JST 2020
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.1.1-all.zip
7 |
--------------------------------------------------------------------------------
/android/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Attempt to set APP_HOME
10 | # Resolve links: $0 may be a link
11 | PRG="$0"
12 | # Need this for relative symlinks.
13 | while [ -h "$PRG" ] ; do
14 | ls=`ls -ld "$PRG"`
15 | link=`expr "$ls" : '.*-> \(.*\)$'`
16 | if expr "$link" : '/.*' > /dev/null; then
17 | PRG="$link"
18 | else
19 | PRG=`dirname "$PRG"`"/$link"
20 | fi
21 | done
22 | SAVED="`pwd`"
23 | cd "`dirname \"$PRG\"`/" >/dev/null
24 | APP_HOME="`pwd -P`"
25 | cd "$SAVED" >/dev/null
26 |
27 | APP_NAME="Gradle"
28 | APP_BASE_NAME=`basename "$0"`
29 |
30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
31 | DEFAULT_JVM_OPTS=""
32 |
33 | # Use the maximum available, or set MAX_FD != -1 to use that value.
34 | MAX_FD="maximum"
35 |
36 | warn () {
37 | echo "$*"
38 | }
39 |
40 | die () {
41 | echo
42 | echo "$*"
43 | echo
44 | exit 1
45 | }
46 |
47 | # OS specific support (must be 'true' or 'false').
48 | cygwin=false
49 | msys=false
50 | darwin=false
51 | nonstop=false
52 | case "`uname`" in
53 | CYGWIN* )
54 | cygwin=true
55 | ;;
56 | Darwin* )
57 | darwin=true
58 | ;;
59 | MINGW* )
60 | msys=true
61 | ;;
62 | NONSTOP* )
63 | nonstop=true
64 | ;;
65 | esac
66 |
67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
68 |
69 | # Determine the Java command to use to start the JVM.
70 | if [ -n "$JAVA_HOME" ] ; then
71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
72 | # IBM's JDK on AIX uses strange locations for the executables
73 | JAVACMD="$JAVA_HOME/jre/sh/java"
74 | else
75 | JAVACMD="$JAVA_HOME/bin/java"
76 | fi
77 | if [ ! -x "$JAVACMD" ] ; then
78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
79 |
80 | Please set the JAVA_HOME variable in your environment to match the
81 | location of your Java installation."
82 | fi
83 | else
84 | JAVACMD="java"
85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
86 |
87 | Please set the JAVA_HOME variable in your environment to match the
88 | location of your Java installation."
89 | fi
90 |
91 | # Increase the maximum file descriptors if we can.
92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
93 | MAX_FD_LIMIT=`ulimit -H -n`
94 | if [ $? -eq 0 ] ; then
95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
96 | MAX_FD="$MAX_FD_LIMIT"
97 | fi
98 | ulimit -n $MAX_FD
99 | if [ $? -ne 0 ] ; then
100 | warn "Could not set maximum file descriptor limit: $MAX_FD"
101 | fi
102 | else
103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
104 | fi
105 | fi
106 |
107 | # For Darwin, add options to specify how the application appears in the dock
108 | if $darwin; then
109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
110 | fi
111 |
112 | # For Cygwin, switch paths to Windows format before running java
113 | if $cygwin ; then
114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
116 | JAVACMD=`cygpath --unix "$JAVACMD"`
117 |
118 | # We build the pattern for arguments to be converted via cygpath
119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 | SEP=""
121 | for dir in $ROOTDIRSRAW ; do
122 | ROOTDIRS="$ROOTDIRS$SEP$dir"
123 | SEP="|"
124 | done
125 | OURCYGPATTERN="(^($ROOTDIRS))"
126 | # Add a user-defined pattern to the cygpath arguments
127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 | fi
130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 | i=0
132 | for arg in "$@" ; do
133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 |
136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 | else
139 | eval `echo args$i`="\"$arg\""
140 | fi
141 | i=$((i+1))
142 | done
143 | case $i in
144 | (0) set -- ;;
145 | (1) set -- "$args0" ;;
146 | (2) set -- "$args0" "$args1" ;;
147 | (3) set -- "$args0" "$args1" "$args2" ;;
148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 | esac
155 | fi
156 |
157 | # Escape application args
158 | save () {
159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
160 | echo " "
161 | }
162 | APP_ARGS=$(save "$@")
163 |
164 | # Collect all arguments for the java command, following the shell quoting and substitution rules
165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
166 |
167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
169 | cd "$(dirname "$0")"
170 | fi
171 |
172 | exec "$JAVACMD" "$@"
173 |
--------------------------------------------------------------------------------
/android/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 |
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS=
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 |
53 | :win9xME_args
54 | @rem Slurp the command line arguments.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 |
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 |
61 | set CMD_LINE_ARGS=%*
62 |
63 | :execute
64 | @rem Setup the command line
65 |
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 |
68 | @rem Execute Gradle
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 |
71 | :end
72 | @rem End local scope for the variables with windows NT shell
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 |
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 |
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 |
84 | :omega
85 |
--------------------------------------------------------------------------------
/android/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':sdk'
2 | include ':app'
3 | rootProject.name = "My Application"
--------------------------------------------------------------------------------