├── .gitattributes
├── AndroidApplication
├── .gitignore
├── .idea
│ ├── caches
│ │ └── build_file_checksums.ser
│ ├── compiler.xml
│ ├── encodings.xml
│ ├── gradle.xml
│ ├── libraries
│ │ ├── Gradle____local_aars____home_pooja_Desktop_tmp_DeepLabV3_AndroidApplication_app_libs_snpe_release_aar_unspecified_jar.xml
│ │ ├── Gradle__android_arch_core_common_1_1_1_jar.xml
│ │ ├── Gradle__android_arch_core_runtime_1_1_1_aar.xml
│ │ ├── Gradle__android_arch_lifecycle_common_1_1_1_jar.xml
│ │ ├── Gradle__android_arch_lifecycle_livedata_1_1_1_aar.xml
│ │ ├── Gradle__android_arch_lifecycle_livedata_core_1_1_1_aar.xml
│ │ ├── Gradle__android_arch_lifecycle_runtime_1_1_1_aar.xml
│ │ ├── Gradle__android_arch_lifecycle_viewmodel_1_1_1_aar.xml
│ │ ├── Gradle__com_android_support_animated_vector_drawable_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_appcompat_v7_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_asynclayoutinflater_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_cardview_v7_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_collections_28_0_0_jar.xml
│ │ ├── Gradle__com_android_support_constraint_constraint_layout_1_1_3_aar.xml
│ │ ├── Gradle__com_android_support_constraint_constraint_layout_solver_1_1_3_jar.xml
│ │ ├── Gradle__com_android_support_coordinatorlayout_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_cursoradapter_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_customview_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_design_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_documentfile_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_drawerlayout_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_interpolator_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_loader_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_localbroadcastmanager_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_print_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_recyclerview_v7_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_slidingpanelayout_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_support_annotations_28_0_0_jar.xml
│ │ ├── Gradle__com_android_support_support_compat_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_support_core_ui_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_support_core_utils_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_support_fragment_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_support_media_compat_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_support_v4_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_support_vector_drawable_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_swiperefreshlayout_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_test_espresso_espresso_contrib_3_0_1_aar.xml
│ │ ├── Gradle__com_android_support_test_espresso_espresso_core_3_0_1_aar.xml
│ │ ├── Gradle__com_android_support_test_espresso_espresso_idling_resource_3_0_1_aar.xml
│ │ ├── Gradle__com_android_support_test_rules_1_0_1_aar.xml
│ │ ├── Gradle__com_android_support_test_runner_1_0_1_aar.xml
│ │ ├── Gradle__com_android_support_transition_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_versionedparcelable_28_0_0_aar.xml
│ │ ├── Gradle__com_android_support_viewpager_28_0_0_aar.xml
│ │ ├── Gradle__com_google_android_apps_common_testing_accessibility_framework_accessibility_test_framework_2_0_jar.xml
│ │ ├── Gradle__com_google_code_findbugs_jsr305_2_0_1_jar.xml
│ │ ├── Gradle__com_squareup_javawriter_2_1_1_jar.xml
│ │ ├── Gradle__javax_inject_javax_inject_1_jar.xml
│ │ ├── Gradle__junit_junit_4_12_jar.xml
│ │ ├── Gradle__net_sf_kxml_kxml2_2_3_0_jar.xml
│ │ ├── Gradle__org_hamcrest_hamcrest_core_1_3_jar.xml
│ │ ├── Gradle__org_hamcrest_hamcrest_integration_1_3_jar.xml
│ │ └── Gradle__org_hamcrest_hamcrest_library_1_3_jar.xml
│ ├── misc.xml
│ ├── modules.xml
│ ├── runConfigurations.xml
│ ├── vcs.xml
│ └── workspace.xml
├── README.md
├── app
│ ├── .gitignore
│ ├── build.gradle
│ ├── libs
│ │ ├── SNPE-AAR-GOES-HERE.txt
│ │ └── snpe-release.aar
│ ├── proguard-rules.pro
│ └── src
│ │ ├── androidTest
│ │ └── java
│ │ │ └── com
│ │ │ └── qdn
│ │ │ └── segmentation
│ │ │ ├── CameraFragmentTest.java
│ │ │ ├── ImageSegmentationFragmentTest.java
│ │ │ └── MainActivityTest.java
│ │ └── main
│ │ ├── AndroidManifest.xml
│ │ ├── ic_launcher-web.png
│ │ ├── java
│ │ └── com
│ │ │ └── qdn
│ │ │ └── segmentation
│ │ │ ├── Activity
│ │ │ ├── CameraActivity.java
│ │ │ └── MainActivity.java
│ │ │ ├── Fragments
│ │ │ ├── CameraPreviewFragment.java
│ │ │ └── ImageSegmentationFragment.java
│ │ │ ├── Helpers
│ │ │ ├── AutoFitTextureView.java
│ │ │ ├── BitmapToFloatArrayHelper.java
│ │ │ └── SNPEHelper.java
│ │ │ ├── Interfaces
│ │ │ ├── IBitmapLoader.java
│ │ │ └── INetworkLoader.java
│ │ │ ├── Utils
│ │ │ ├── Constants.java
│ │ │ └── Logger.java
│ │ │ └── tasks
│ │ │ ├── LoadNetworkTask.java
│ │ │ └── SegmentImageTask.java
│ │ └── res
│ │ ├── drawable-v24
│ │ ├── camera.png
│ │ ├── camera_mode.png
│ │ └── ic_launcher_foreground.xml
│ │ ├── drawable
│ │ ├── button.xml
│ │ ├── classification_background.xml
│ │ ├── ic_launcher_background.xml
│ │ ├── ic_splash.png
│ │ ├── ic_splash1.png
│ │ ├── person1.jpg
│ │ ├── person2.jpeg
│ │ ├── person3.jpg
│ │ ├── rose.jpg
│ │ ├── roses.jpg
│ │ ├── screenshot_segmentation.png
│ │ └── snapdragon_hdk.jpg
│ │ ├── layout
│ │ ├── activity_camera.xml
│ │ ├── activity_main.xml
│ │ ├── fragment_camera_preview.xml
│ │ └── fragment_image_segmentation.xml
│ │ ├── mipmap-anydpi-v26
│ │ ├── ic_launcher.xml
│ │ └── ic_launcher_round.xml
│ │ ├── mipmap-hdpi
│ │ ├── ic_launcher.png
│ │ ├── ic_launcher_foreground.png
│ │ └── ic_launcher_round.png
│ │ ├── mipmap-mdpi
│ │ ├── ic_launcher.png
│ │ ├── ic_launcher_foreground.png
│ │ └── ic_launcher_round.png
│ │ ├── mipmap-xhdpi
│ │ ├── ic_launcher.png
│ │ ├── ic_launcher_foreground.png
│ │ └── ic_launcher_round.png
│ │ ├── mipmap-xxhdpi
│ │ ├── ic_launcher.png
│ │ ├── ic_launcher_foreground.png
│ │ └── ic_launcher_round.png
│ │ ├── mipmap-xxxhdpi
│ │ ├── ic_launcher.png
│ │ ├── ic_launcher_foreground.png
│ │ └── ic_launcher_round.png
│ │ ├── raw
│ │ └── model.dlc
│ │ └── values
│ │ ├── colors.xml
│ │ ├── dimens.xml
│ │ ├── strings.xml
│ │ └── styles.xml
├── build.gradle
├── gradle.properties
├── gradle
│ └── wrapper
│ │ ├── gradle-wrapper.jar
│ │ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── output
│ └── segmentation.apk
└── settings.gradle
├── BoardApplication
├── .gitignore
├── README.md
├── converted_model
│ └── model.dlc
├── dependencies
│ └── requirement.txt
└── src
│ ├── deep_input_raw.py
│ └── post_process_deeplab_output.py
└── README.md
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/AndroidApplication/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/caches/build_file_checksums.ser
5 | /.idea/libraries
6 | /.idea/modules.xml
7 | /.idea/workspace.xml
8 | /.idea/tasks.xml
9 | /.idea/assetWizardSettings.xml
10 | /.idea/dictionaries
11 | /.idea/gradle.xml
12 | /.idea/*
13 |
14 |
15 | .DS_Store
16 | /build
17 | /captures
18 | .externalNativeBuild
19 | # (removed stray merge-conflict marker)
20 | # Files for the ART/Dalvik VM
21 | *.dex
22 |
23 | # Java class files
24 | *.class
25 |
26 | # Generated files
27 | bin/
28 | gen/
29 | out/
30 |
31 | # Built application files
32 | *.apk
33 | *.ap_
34 |
35 | # Gradle files
36 | .gradle/
37 | build/
38 |
39 | # Local configuration file (sdk path, etc)
40 | local.properties
41 |
42 | # Proguard folder generated
43 | proguard/
44 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/caches/build_file_checksums.ser:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/.idea/caches/build_file_checksums.ser
--------------------------------------------------------------------------------
/AndroidApplication/.idea/compiler.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/encodings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/gradle.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
17 |
18 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle____local_aars____home_pooja_Desktop_tmp_DeepLabV3_AndroidApplication_app_libs_snpe_release_aar_unspecified_jar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__android_arch_core_common_1_1_1_jar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__android_arch_core_runtime_1_1_1_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__android_arch_lifecycle_common_1_1_1_jar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__android_arch_lifecycle_livedata_1_1_1_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__android_arch_lifecycle_livedata_core_1_1_1_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__android_arch_lifecycle_runtime_1_1_1_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__android_arch_lifecycle_viewmodel_1_1_1_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_animated_vector_drawable_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_appcompat_v7_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_asynclayoutinflater_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_cardview_v7_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_collections_28_0_0_jar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_constraint_constraint_layout_1_1_3_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_constraint_constraint_layout_solver_1_1_3_jar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_coordinatorlayout_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_cursoradapter_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_customview_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_design_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_documentfile_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_drawerlayout_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_interpolator_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_loader_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_localbroadcastmanager_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_print_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_recyclerview_v7_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_slidingpanelayout_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_support_annotations_28_0_0_jar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_support_compat_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_support_core_ui_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_support_core_utils_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_support_fragment_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_support_media_compat_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_support_v4_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_support_vector_drawable_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_swiperefreshlayout_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_test_espresso_espresso_contrib_3_0_1_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_test_espresso_espresso_core_3_0_1_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_test_espresso_espresso_idling_resource_3_0_1_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_test_rules_1_0_1_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_test_runner_1_0_1_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_transition_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_versionedparcelable_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_android_support_viewpager_28_0_0_aar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_google_android_apps_common_testing_accessibility_framework_accessibility_test_framework_2_0_jar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_2_0_1_jar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__com_squareup_javawriter_2_1_1_jar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__javax_inject_javax_inject_1_jar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__junit_junit_4_12_jar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__net_sf_kxml_kxml2_2_3_0_jar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3_jar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__org_hamcrest_hamcrest_integration_1_3_jar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/libraries/Gradle__org_hamcrest_hamcrest_library_1_3_jar.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/runConfigurations.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/AndroidApplication/.idea/workspace.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 |
226 |
227 |
228 |
229 |
230 |
231 |
232 |
233 |
234 |
235 |
236 |
237 |
238 |
239 |
240 |
241 |
242 |
243 |
244 | 1560231628310
245 |
246 |
247 | 1560231628310
248 |
249 |
250 |
251 |
252 |
253 |
254 |
255 |
256 |
257 |
258 |
259 |
260 |
261 |
262 |
263 |
264 |
265 |
266 |
267 |
268 |
269 |
270 |
271 |
272 |
273 |
274 |
275 |
276 |
277 |
278 |
279 |
280 |
281 |
282 |
283 |
284 |
285 |
286 |
287 |
288 |
289 |
290 |
291 |
292 | 1.8
293 |
294 |
295 |
296 |
297 |
298 |
299 |
300 |
301 |
302 |
303 |
304 |
--------------------------------------------------------------------------------
/AndroidApplication/README.md:
--------------------------------------------------------------------------------
1 | # DeepLabV3 Android Application
2 |
3 | The project is designed to utilize the Qualcomm Neural Processing SDK, a deep learning software from Qualcomm Snapdragon Platforms. The Neural Processing SDK is used to convert trained models from Caffe, Caffe2, ONNX, TensorFlow to Snapdragon supported format (.dlc format). We further utilize these models in Android application to perform semantic segmentation using DeepLab V3 support in SDK.
4 | ## Pre-requisites
5 | * Before starting the Android application, please follow the instructions for setting up Qualcomm Neural Processing SDK using the link provided.
6 | https://developer.qualcomm.com/docs/snpe/setup.html.
7 | * An Android device running Android 6.0 or above with one of the Snapdragon processors listed below, or a Snapdragon HDK with a display, can be used to test the application.
8 |
9 | ## List of Supported Snapdragon Devices
10 |
11 | - Qualcomm Snapdragon 855
12 | - Qualcomm Snapdragon 845
13 | - Qualcomm Snapdragon 835
14 | - Qualcomm Snapdragon 821
15 | - Qualcomm Snapdragon 820
16 | - Qualcomm Snapdragon 710
17 | - Qualcomm Snapdragon 660
18 | - Qualcomm Snapdragon 652
19 | - Qualcomm Snapdragon 636
20 | - Qualcomm Snapdragon 630
21 | - Qualcomm Snapdragon 625
22 | - Qualcomm Snapdragon 605
23 | - Qualcomm Snapdragon 450
24 |
25 | The above list supports the application with CPU and GPU. For more information on the supported devices, please follow this link: https://developer.qualcomm.com/docs/snpe/overview.html
26 |
27 | ## Components
28 | Below are the items used in the project.
29 | 1. Mobile Display with DeepLabV3 app
30 | 2. HDK Snapdragon board with GPU enabled
31 | 3. USB Type-C cable
32 | 4. External camera setup
33 | 5. Power Cable
34 |
35 | ## Hardware Setup
36 | 
37 |
38 |
39 | ## How does it work?
40 | Image Segmentation application opens a camera preview, clicks a picture and converts it to bitmap. The network is built via Neural Network builder by passing deeplabv3.dlc as the input. The bitmap is then given to model for inference, which returns FloatTensor output. The output is again set for post-processing to achieve background manipulation (changing the background color to black and white) of the original input image.
41 |
42 |
43 | ## Steps to Install and Run the Application
44 | * Firstly set up the hardware as shown above in the hardware setup section.
45 | * Power on the Snapdragon HDK board.
46 | * Connect the Dev-Board/Android phone via USB to the device.
47 | * Switch on the display and choose the USB connection option to File Transfer.
48 | * Check whether ADB is installed on the Windows/Linux machine; if not, follow the instructions in the link below to install it:
49 | https://developer.android.com/studio/command-line/adb.html.
50 | * Use the below command to install the apk with the connected device with help of adb. [Download APK(Debug)](https://github.com/jinka2015/DeepLabV3/blob/master/AndroidApplication/output/segmentation.apk)
51 |
52 | $ adb install segmentation.apk
53 | * Search the Image Segmentation in the app menu and launch the application
54 |
55 | ## Screenshot of the application
56 |
57 |
--------------------------------------------------------------------------------
/AndroidApplication/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/AndroidApplication/app/build.gradle:
--------------------------------------------------------------------------------
apply plugin: 'com.android.application'

android {
    compileSdkVersion 28
    defaultConfig {
        applicationId "com.qdn.segmentation"
        minSdkVersion 23
        targetSdkVersion 28
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
    }
    buildTypes {
        release {
            // Code shrinking is disabled; enable only with a tested ProGuard config.
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }
}

dependencies {
    // One fileTree covers both the SNPE .aar and any bundled .jar libraries
    // (was two separate fileTree entries over the same directory).
    implementation fileTree(dir: 'libs', include: ['*.aar', '*.jar'])
    implementation 'com.android.support:appcompat-v7:28.0.0'
    implementation 'com.android.support.constraint:constraint-layout:1.1.3'
    implementation 'com.android.support:design:28.0.0'
    implementation 'com.android.support:support-v4:28.0.0'
    testImplementation 'junit:junit:4.12'
    androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.1'
    androidTestImplementation 'com.android.support.test.espresso:espresso-contrib:3.0.1'
}
35 |
--------------------------------------------------------------------------------
/AndroidApplication/app/libs/SNPE-AAR-GOES-HERE.txt:
--------------------------------------------------------------------------------
1 | The Qualcomm Neural Processing SDK aar must be dropped in this file's parent folder.
2 |
--------------------------------------------------------------------------------
/AndroidApplication/app/libs/snpe-release.aar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/libs/snpe-release.aar
--------------------------------------------------------------------------------
/AndroidApplication/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/androidTest/java/com/qdn/segmentation/CameraFragmentTest.java:
--------------------------------------------------------------------------------
package com.qdn.segmentation;

import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.rule.ActivityTestRule;
import android.support.test.runner.AndroidJUnit4;

import com.qdn.segmentation.Activity.CameraActivity;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;

import java.io.File;

import static android.support.test.espresso.Espresso.onView;

import static android.support.test.espresso.assertion.ViewAssertions.matches;
import static android.support.test.espresso.matcher.ViewMatchers.withId;
import static org.junit.Assert.assertTrue;

/**
 * Instrumented UI tests for the camera preview screen hosted by {@link CameraActivity}.
 */
@RunWith(AndroidJUnit4.class)
public class CameraFragmentTest {

    private static final String TAG = CameraFragmentTest.class.getSimpleName();

    /** Target-app context, resolved in {@link #setUp()}. */
    private Context mContext;

    // Typed rule (was a raw ActivityTestRule): removes the unchecked-call warning
    // and lets getActivity() return CameraActivity without a cast.
    @Rule
    public ActivityTestRule<CameraActivity> mCameraActivityRule =
            new ActivityTestRule<>(CameraActivity.class);

    @Before
    public void setUp() {
        mContext = InstrumentationRegistry.getTargetContext();
    }

    /**
     * Verifies that the preview TextureView (R.id.texture) is present once the
     * activity has launched.
     * NOTE(review): the fixed 2 s sleep gives the fragment time to attach; an
     * IdlingResource would be more robust, but the wait preserves the original intent.
     */
    @Test
    public void Android_UT_test_cameraPreview() throws InterruptedException {
        Thread.sleep(2000);
        onView(withId(R.id.texture)).check(matches(withId(R.id.texture)));
    }

    /**
     * Verifies that the captured photo file ("pic.jpg") exists in the app's
     * external files directory.
     */
    @Test
    public void Android_UT_test_photoFilePath() throws InterruptedException {
        Thread.sleep(2000);
        File photoFile = new File(mCameraActivityRule.getActivity().getExternalFilesDir(null), "pic.jpg");
        assertTrue(photoFile.exists());
    }
}
49 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/androidTest/java/com/qdn/segmentation/ImageSegmentationFragmentTest.java:
--------------------------------------------------------------------------------
1 | package com.qdn.segmentation;
2 |
3 | import android.app.Application;
4 | import android.content.Context;
5 | import android.graphics.Bitmap;
6 | import android.graphics.BitmapFactory;
7 | import android.support.test.InstrumentationRegistry;
8 | import android.support.test.runner.AndroidJUnit4;
9 |
10 | import com.qdn.segmentation.Helpers.SNPEHelper;
11 | import com.qdn.segmentation.Interfaces.IBitmapLoader;
12 | import com.qdn.segmentation.Interfaces.INetworkLoader;
13 | import com.qdn.segmentation.Utils.Logger;
14 | import com.qualcomm.qti.snpe.NeuralNetwork;
15 |
16 | import org.junit.Before;
17 | import org.junit.Test;
18 | import org.junit.runner.RunWith;
19 |
20 | import static junit.framework.TestCase.assertTrue;
21 |
22 | @RunWith(AndroidJUnit4.class)
23 | public class ImageSegmentationFragmentTest implements INetworkLoader, IBitmapLoader {
24 |
25 | private static final String TAG = ImageSegmentationFragmentTest.class.getSimpleName();
26 | private Context mContext;
27 | private SNPEHelper mSNPEHelper;
28 | private INetworkLoader mCallbackINetworkLoader;
29 | private IBitmapLoader mCallbackBitmapLoader;
30 | private boolean isNetworkBuilt;
31 | private boolean isBitmapLoaded;
32 |
33 | @Before
34 | public void setUp() {
35 | mContext = InstrumentationRegistry.getTargetContext();
36 | mCallbackINetworkLoader = this;
37 | mCallbackBitmapLoader = this;
38 | mSNPEHelper = new SNPEHelper((Application) mContext.getApplicationContext());
39 |
40 | }
41 |
42 | public void buildNetwork() {
43 | mSNPEHelper.loadNetwork((Application) mContext.getApplicationContext(), mCallbackINetworkLoader);
44 | }
45 |
46 | @Test
47 | public void Android_UT_test_imageSegmentation() throws InterruptedException {
48 | buildNetwork();
49 | Thread.sleep(10000);
50 | if (isNetworkBuilt) {
51 | Bitmap mBitmap = BitmapFactory.decodeResource(mContext.getResources(),R.drawable.person3);
52 | mSNPEHelper.loadSegmentImageTask(mContext,mSNPEHelper.getNeuralNetwork(),mBitmap,mCallbackBitmapLoader);
53 | Thread.sleep(20000);
54 | assertTrue(isBitmapLoaded);
55 | }
56 | }
57 |
58 | @Override
59 | public void onNetworkBuilt(NeuralNetwork neuralNetwork) {
60 | Logger.d(TAG, "onNetworkBuilt");
61 | if (neuralNetwork != null) {
62 | mSNPEHelper.setNeuralNetwork(neuralNetwork);
63 | isNetworkBuilt = true;
64 | }
65 | else isNetworkBuilt = false;
66 | }
67 |
68 | @Override
69 | public void loadResultBitmap(Bitmap bitmap) {
70 | Logger.d(TAG, "loadResultBitmap");
71 | if(bitmap != null)
72 | isBitmapLoaded = true;
73 | else isBitmapLoaded = false;
74 | }
75 | }
76 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/androidTest/java/com/qdn/segmentation/MainActivityTest.java:
--------------------------------------------------------------------------------
1 | package com.qdn.segmentation;
2 |
3 | import android.app.Application;
4 | import android.content.Context;
5 | import android.support.test.InstrumentationRegistry;
6 | import android.support.test.runner.AndroidJUnit4;
7 |
8 |
9 | import com.qdn.segmentation.Helpers.SNPEHelper;
10 | import com.qdn.segmentation.Interfaces.INetworkLoader;
11 | import com.qdn.segmentation.Utils.Logger;
12 | import com.qualcomm.qti.snpe.NeuralNetwork;
13 |
14 | import org.junit.Before;
15 | import org.junit.Test;
16 | import org.junit.runner.RunWith;
17 |
18 | import static org.junit.Assert.assertFalse;
19 | import static org.junit.Assert.assertTrue;
20 |
21 | @RunWith(AndroidJUnit4.class)
22 | public class MainActivityTest implements INetworkLoader {
23 |
24 | private static final String TAG = MainActivityTest.class.getSimpleName();
25 | private Context mContext;
26 | private SNPEHelper mSNPEHelper;
27 | private INetworkLoader mCallbackINetworkLoader;
28 | private boolean isNetworkBuilt;
29 |
30 | @Before
31 | public void setUp() {
32 | mContext = InstrumentationRegistry.getTargetContext();
33 | mCallbackINetworkLoader = this;
34 | mSNPEHelper = new SNPEHelper((Application)mContext.getApplicationContext());
35 | }
36 |
37 | @Test
38 | public void Android_UT_test_buildNetwork_positive() throws InterruptedException {
39 | mSNPEHelper.loadNetwork((Application)mContext.getApplicationContext(), mCallbackINetworkLoader);
40 | Thread.sleep(10000);
41 | Logger.d(TAG,"isNetworkBuilt "+isNetworkBuilt);
42 | assertTrue(isNetworkBuilt);
43 | }
44 |
45 | @Test
46 | public void Android_UT_test_buildNetwork_negative() {
47 | mSNPEHelper.loadNetwork((Application)mContext.getApplicationContext(), mCallbackINetworkLoader);
48 | Logger.d(TAG,"isNetworkBuilt "+isNetworkBuilt);
49 | assertFalse(isNetworkBuilt);
50 | }
51 | @Override
52 | public void onNetworkBuilt(NeuralNetwork neuralNetwork) {
53 | if(neuralNetwork != null)
54 | isNetworkBuilt = true;
55 | else isNetworkBuilt = false;
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
8 |
9 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
28 |
29 |
30 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/ic_launcher-web.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/ic_launcher-web.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/java/com/qdn/segmentation/Activity/CameraActivity.java:
--------------------------------------------------------------------------------
1 | package com.qdn.segmentation.Activity;
2 |
3 | import android.os.Bundle;
4 | import android.support.v7.app.AppCompatActivity;
5 |
6 | import com.qdn.segmentation.Fragments.CameraPreviewFragment;
7 | import com.qdn.segmentation.R;
8 |
9 | public class CameraActivity extends AppCompatActivity {
10 |
11 | @Override
12 | protected void onCreate(Bundle savedInstanceState) {
13 | super.onCreate(savedInstanceState);
14 | setContentView(R.layout.activity_camera);
15 | if (null == savedInstanceState) {
16 |
17 | //Go to camera Preview Fragment
18 | getSupportFragmentManager().beginTransaction()
19 | .replace(R.id.container, CameraPreviewFragment.newInstance())
20 | .commit();
21 | }
22 | }
23 |
24 | }
25 |
26 |
27 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/java/com/qdn/segmentation/Activity/MainActivity.java:
--------------------------------------------------------------------------------
1 | package com.qdn.segmentation.Activity;
2 |
3 | import android.content.Intent;
4 | import android.support.v7.app.AppCompatActivity;
5 | import android.os.Bundle;
6 |
7 | import com.qdn.segmentation.Utils.Logger;
8 | import com.qualcomm.qti.snpe.NeuralNetwork;
9 | import com.qdn.segmentation.Interfaces.INetworkLoader;
10 | import com.qdn.segmentation.R;
11 | import com.qdn.segmentation.Helpers.SNPEHelper;
12 |
13 | public class MainActivity extends AppCompatActivity implements INetworkLoader {
14 |
15 | private static final String TAG = MainActivity.class.getSimpleName();
16 | public static SNPEHelper mSNPEHelper;
17 | private INetworkLoader mCallbackINetworkLoader;
18 |
19 | @Override
20 | protected void onCreate(Bundle savedInstanceState) {
21 | super.onCreate(savedInstanceState);
22 | setContentView(R.layout.activity_main);
23 | initViews();
24 | }
25 |
26 |
27 | private void initViews() {
28 | mCallbackINetworkLoader = this;
29 | }
30 |
31 | @Override
32 | protected void onStart() {
33 | super.onStart();
34 | Logger.d(TAG, "onstart");
35 | mSNPEHelper = new SNPEHelper(getApplication());
36 | mSNPEHelper.loadNetwork(getApplication(), mCallbackINetworkLoader);
37 |
38 |
39 | }
40 |
41 | @Override
42 | protected void onResume() {
43 | super.onResume();
44 |
45 | }
46 |
47 | @Override
48 | protected void onPause() {
49 | super.onPause();
50 | Logger.d(TAG, "onPause()");
51 | }
52 |
53 | @Override
54 | protected void onStop() {
55 | super.onStop();
56 | Logger.d(TAG, "onStop()");
57 | }
58 |
59 |
60 | @Override
61 | public void onNetworkBuilt(NeuralNetwork neuralNetwork) {
62 | mSNPEHelper.setNeuralNetwork(neuralNetwork);
63 | Logger.d(TAG, "Network built successfully");
64 | Intent intent = new Intent(this, CameraActivity.class);
65 | intent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP);
66 | startActivity(intent);
67 | finish();
68 |
69 | }
70 |
71 | public enum SupportedTensorFormat {
72 | FLOAT,
73 | UB_TF8
74 | }
75 | }
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/java/com/qdn/segmentation/Fragments/CameraPreviewFragment.java:
--------------------------------------------------------------------------------
1 |
2 | package com.qdn.segmentation.Fragments;
3 |
4 | import android.Manifest;
5 | import android.app.Activity;
6 | import android.app.AlertDialog;
7 | import android.app.Dialog;
8 | import android.content.Context;
9 | import android.content.DialogInterface;
10 | import android.content.pm.PackageManager;
11 | import android.content.res.Configuration;
12 | import android.graphics.ImageFormat;
13 | import android.graphics.Matrix;
14 | import android.graphics.Point;
15 | import android.graphics.RectF;
16 | import android.graphics.SurfaceTexture;
17 | import android.hardware.camera2.CameraAccessException;
18 | import android.hardware.camera2.CameraCaptureSession;
19 | import android.hardware.camera2.CameraCharacteristics;
20 | import android.hardware.camera2.CameraDevice;
21 | import android.hardware.camera2.CameraManager;
22 | import android.hardware.camera2.CameraMetadata;
23 | import android.hardware.camera2.CaptureRequest;
24 | import android.hardware.camera2.CaptureResult;
25 | import android.hardware.camera2.TotalCaptureResult;
26 | import android.hardware.camera2.params.StreamConfigurationMap;
27 | import android.media.Image;
28 | import android.media.ImageReader;
29 | import android.os.Bundle;
30 | import android.os.Handler;
31 | import android.os.HandlerThread;
32 | import android.support.annotation.NonNull;
33 | import android.support.v4.app.ActivityCompat;
34 | import android.support.v4.app.DialogFragment;
35 | import android.support.v4.app.Fragment;
36 | import android.support.v4.app.FragmentManager;
37 | import android.support.v4.app.FragmentTransaction;
38 | import android.support.v4.content.ContextCompat;
39 | import android.util.Size;
40 | import android.util.SparseIntArray;
41 | import android.view.LayoutInflater;
42 | import android.view.Surface;
43 | import android.view.TextureView;
44 | import android.view.View;
45 | import android.view.ViewGroup;
46 | import android.widget.Toast;
47 |
48 | import com.qdn.segmentation.Helpers.AutoFitTextureView;
49 | import com.qdn.segmentation.R;
50 | import com.qdn.segmentation.Utils.Constants;
51 | import com.qdn.segmentation.Utils.Logger;
52 |
53 | import java.io.File;
54 | import java.io.FileOutputStream;
55 | import java.io.IOException;
56 | import java.nio.ByteBuffer;
57 | import java.util.ArrayList;
58 | import java.util.Arrays;
59 | import java.util.Collections;
60 | import java.util.Comparator;
61 | import java.util.List;
62 | import java.util.concurrent.Semaphore;
63 | import java.util.concurrent.TimeUnit;
64 |
65 | import static com.qdn.segmentation.Utils.Constants.BUNDLE_KEY_IMAGE;
66 | import static com.qdn.segmentation.Utils.Constants.REQUEST_CAMERA_PERMISSION;
67 |
68 | public class CameraPreviewFragment extends Fragment
69 | implements View.OnClickListener, ActivityCompat.OnRequestPermissionsResultCallback {
70 |
71 |
72 | /**
73 | * Conversion from screen rotation to JPEG orientation.
74 | */
75 | private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
76 |
77 | private static final String FRAGMENT_DIALOG = "Dialog";
78 |
79 | static {
80 | ORIENTATIONS.append(Surface.ROTATION_0, 90);
81 | ORIENTATIONS.append(Surface.ROTATION_90, 0);
82 | ORIENTATIONS.append(Surface.ROTATION_180, 270);
83 | ORIENTATIONS.append(Surface.ROTATION_270, 180);
84 | }
85 |
86 | /**
87 | * Tag for the {@link Logger}.
88 | */
89 | private static final String TAG = CameraPreviewFragment.class.getSimpleName();
90 |
91 | /**
92 | * Camera state: Showing camera preview.
93 | */
94 | private static final int STATE_PREVIEW = 0;
95 |
96 | /**
97 | * Camera state: Waiting for the focus to be locked.
98 | */
99 | private static final int STATE_WAITING_LOCK = 1;
100 |
101 | /**
102 | * Camera state: Waiting for the exposure to be precapture state.
103 | */
104 | private static final int STATE_WAITING_PRECAPTURE = 2;
105 |
106 | /**
107 | * Camera state: Waiting for the exposure state to be something other than precapture.
108 | */
109 | private static final int STATE_WAITING_NON_PRECAPTURE = 3;
110 |
111 | /**
112 | * Camera state: Picture was taken.
113 | */
114 | private static final int STATE_PICTURE_TAKEN = 4;
115 |
116 | /**
117 | * Max preview width that is guaranteed by Camera2 API
118 | */
119 | private static final int MAX_PREVIEW_WIDTH = 1920;
120 |
121 | /**
122 | * Max preview height that is guaranteed by Camera2 API
123 | */
124 | private static final int MAX_PREVIEW_HEIGHT = 1080;
125 |
    /**
     * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a
     * {@link TextureView}.
     */
    private final TextureView.SurfaceTextureListener mSurfaceTextureListener
            = new TextureView.SurfaceTextureListener() {

        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
            // Surface is ready: open the camera sized to the view.
            openCamera(width, height);
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
            // Recompute the preview transform for the new view dimensions.
            configureTransform(width, height);
        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
            // Returning true lets the TextureView release the SurfaceTexture itself.
            return true;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture texture) {
            // No per-frame work needed.
        }

    };
153 |
154 | /**
155 | * ID of the current {@link CameraDevice}.
156 | */
157 | private String mCameraId;
158 |
159 | /**
160 | * An {@link AutoFitTextureView} for camera preview.
161 | */
162 | private AutoFitTextureView mTextureView;
163 |
164 | /**
165 | * A {@link CameraCaptureSession } for camera preview.
166 | */
167 | private CameraCaptureSession mCaptureSession;
168 |
169 | /**
170 | * A reference to the opened {@link CameraDevice}.
171 | */
172 | private CameraDevice mCameraDevice;
173 |
174 | /**
175 | * The {@link android.util.Size} of camera preview.
176 | */
177 | private Size mPreviewSize;
178 |
    /**
     * {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state.
     */
    private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {

        @Override
        public void onOpened(@NonNull CameraDevice cameraDevice) {
            // This method is called when the camera is opened. We start camera preview here.
            mCameraOpenCloseLock.release();
            mCameraDevice = cameraDevice;
            createCameraPreviewSession();
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice cameraDevice) {
            // Release the open/close semaphore and drop our reference to the device.
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
        }

        @Override
        public void onError(@NonNull CameraDevice cameraDevice, int error) {
            // Unrecoverable camera error: clean up and finish the hosting activity.
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
            Activity activity = getActivity();
            if (null != activity) {
                activity.finish();
            }
        }

    };
211 |
212 | /**
213 | * An additional thread for running tasks that shouldn't block the UI.
214 | */
215 | private HandlerThread mBackgroundThread;
216 |
217 | /**
218 | * A {@link Handler} for running tasks in the background.
219 | */
220 | private Handler mBackgroundHandler;
221 |
222 | /**
223 | * An {@link ImageReader} that handles still image capture.
224 | */
225 | private ImageReader mImageReader;
226 |
227 | /**
228 | * This is the output file for our picture.
229 | */
230 | private File mFile;
231 |
    /**
     * This a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
     * still image is ready to be saved.
     */
    private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
            = new ImageReader.OnImageAvailableListener() {

        @Override
        public void onImageAvailable(ImageReader reader) {
            // Hand the captured image off to the background thread so the file
            // write never blocks this callback.
            mBackgroundHandler.post(new ImageSaver(reader.acquireNextImage(), mFile));
        }

    };
245 |
246 | /**
247 | * {@link CaptureRequest.Builder} for the camera preview
248 | */
249 | private CaptureRequest.Builder mPreviewRequestBuilder;
250 |
251 | /**
252 | * {@link CaptureRequest} generated by {@link #mPreviewRequestBuilder}
253 | */
254 | private CaptureRequest mPreviewRequest;
255 |
256 | /**
257 | * The current state of camera state for taking pictures.
258 | *
259 | * @see #mCaptureCallback
260 | */
261 | private int mState = STATE_PREVIEW;
262 |
263 | /**
264 | * A {@link Semaphore} to prevent the app from exiting before closing the camera.
265 | */
266 | private Semaphore mCameraOpenCloseLock = new Semaphore(1);
267 |
268 | /**
269 | * Whether the current camera device supports Flash or not.
270 | */
271 | private boolean mFlashSupported;
272 |
273 | /**
274 | * Orientation of the camera sensor
275 | */
276 | private int mSensorOrientation;
277 |
    /**
     * A {@link CameraCaptureSession.CaptureCallback} that handles events related to JPEG capture.
     * Drives the still-capture state machine (the STATE_* constants) from the
     * partial/total capture results delivered during preview.
     */
    private CameraCaptureSession.CaptureCallback mCaptureCallback
            = new CameraCaptureSession.CaptureCallback() {

        // Advances mState based on the auto-focus/auto-exposure values in the result.
        private void process(CaptureResult result) {
            switch (mState) {
                case STATE_PREVIEW: {
                    // We have nothing to do when the camera preview is working normally.
                    break;
                }
                case STATE_WAITING_LOCK: {
                    Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
                    if (afState == null) {
                        // Device reports no AF state: capture immediately.
                        captureStillPicture();
                    } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState ||
                            CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) {
                        // CONTROL_AE_STATE can be null on some devices
                        Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                        if (aeState == null ||
                                aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
                            // Exposure already converged: take the picture now.
                            mState = STATE_PICTURE_TAKEN;
                            captureStillPicture();
                        } else {
                            // Exposure not ready: run the AE precapture sequence first.
                            runPrecaptureSequence();
                        }
                    }
                    break;
                }
                case STATE_WAITING_PRECAPTURE: {
                    // CONTROL_AE_STATE can be null on some devices
                    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                    // NOTE(review): FLASH_REQUIRED is accessed via CaptureRequest here but via
                    // CaptureResult elsewhere in this callback — confirm both resolve to the
                    // same inherited constant.
                    if (aeState == null ||
                            aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE ||
                            aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) {
                        mState = STATE_WAITING_NON_PRECAPTURE;
                    }
                    break;
                }
                case STATE_WAITING_NON_PRECAPTURE: {
                    // CONTROL_AE_STATE can be null on some devices
                    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                    if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                        // Precapture finished: take the picture.
                        mState = STATE_PICTURE_TAKEN;
                        captureStillPicture();
                    }
                    break;
                }
            }
        }

        @Override
        public void onCaptureProgressed(@NonNull CameraCaptureSession session,
                                        @NonNull CaptureRequest request,
                                        @NonNull CaptureResult partialResult) {
            process(partialResult);
        }

        @Override
        public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                                       @NonNull CaptureRequest request,
                                       @NonNull TotalCaptureResult result) {
            process(result);
        }

    };
345 |
346 | /**
347 | * Shows a {@link Toast} on the UI thread.
348 | *
349 | * @param text The message to show
350 | */
351 | private void showToast(final String text) {
352 | final Activity activity = getActivity();
353 | if (activity != null) {
354 | activity.runOnUiThread(new Runnable() {
355 | @Override
356 | public void run() {
357 | Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
358 | }
359 | });
360 | }
361 | }
362 |
363 | /**
364 | * Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that
365 | * is at least as large as the respective texture view size, and that is at most as large as the
366 | * respective max size, and whose aspect ratio matches with the specified value. If such size
367 | * doesn't exist, choose the largest one that is at most as large as the respective max size,
368 | * and whose aspect ratio matches with the specified value.
369 | *
370 | * @param choices The list of sizes that the camera supports for the intended output
371 | * class
372 | * @param textureViewWidth The width of the texture view relative to sensor coordinate
373 | * @param textureViewHeight The height of the texture view relative to sensor coordinate
374 | * @param maxWidth The maximum width that can be chosen
375 | * @param maxHeight The maximum height that can be chosen
376 | * @param aspectRatio The aspect ratio
377 | * @return The optimal {@code Size}, or an arbitrary one if none were big enough
378 | */
379 | private static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
380 | int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
381 |
382 | // Collect the supported resolutions that are at least as big as the preview Surface
383 | List bigEnough = new ArrayList<>();
384 | // Collect the supported resolutions that are smaller than the preview Surface
385 | List notBigEnough = new ArrayList<>();
386 | int w = aspectRatio.getWidth();
387 | int h = aspectRatio.getHeight();
388 | for (Size option : choices) {
389 | if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight &&
390 | option.getHeight() == option.getWidth() * h / w) {
391 | if (option.getWidth() >= textureViewWidth &&
392 | option.getHeight() >= textureViewHeight) {
393 | bigEnough.add(option);
394 | } else {
395 | notBigEnough.add(option);
396 | }
397 | }
398 | }
399 |
400 | // Pick the smallest of those big enough. If there is no one big enough, pick the
401 | // largest of those not big enough.
402 | if (bigEnough.size() > 0) {
403 | return Collections.min(bigEnough, new CompareSizesByArea());
404 | } else if (notBigEnough.size() > 0) {
405 | return Collections.max(notBigEnough, new CompareSizesByArea());
406 | } else {
407 | Logger.e(TAG, "Couldn't find any suitable preview size");
408 | return choices[0];
409 | }
410 | }
411 |
    /**
     * Factory method: creates a new instance of this fragment.
     *
     * @return a fresh {@link CameraPreviewFragment}
     */
    public static CameraPreviewFragment newInstance() {
        return new CameraPreviewFragment();
    }
415 |
    /** Inflates the camera-preview layout for this fragment. */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_camera_preview, container, false);
    }
421 |
    /** Wires up the capture button's click listener and grabs the preview TextureView. */
    @Override
    public void onViewCreated(final View view, Bundle savedInstanceState) {
        view.findViewById(R.id.picture).setOnClickListener(this);
        mTextureView = view.findViewById(R.id.texture);
    }
427 |
    /**
     * Prepares the output file ("pic.jpg") in the app's external files directory;
     * captured JPEGs are written there by the ImageSaver posted from the
     * ImageReader callback.
     */
    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        mFile = new File(getActivity().getExternalFilesDir(null), "pic.jpg");
    }
433 |
    /**
     * Restarts the background thread and (re)opens the camera: immediately if the
     * TextureView's surface already exists, otherwise via the surface listener.
     */
    @Override
    public void onResume() {
        super.onResume();
        startBackgroundThread();

        // When the screen is turned off and turned back on, the SurfaceTexture is already
        // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
        // a camera and start preview from here (otherwise, we wait until the surface is ready in
        // the SurfaceTextureListener).
        if (mTextureView.isAvailable()) {
            openCamera(mTextureView.getWidth(), mTextureView.getHeight());
        } else {
            mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
        }
    }
449 |
    /**
     * Releases the camera and stops the background thread before pausing; cleanup
     * runs before super.onPause() so the device is freed promptly.
     */
    @Override
    public void onPause() {
        closeCamera();
        stopBackgroundThread();
        super.onPause();
    }
456 |
    /**
     * Requests the CAMERA runtime permission, showing a rationale dialog first when
     * the platform indicates one should be shown.
     */
    private void requestCameraPermission() {
        if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
            new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG);
        } else {
            requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
        }
    }
464 |
    /**
     * Handles the CAMERA permission result: shows an error dialog when the permission
     * was denied, and delegates any other request code to the superclass.
     */
    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                           @NonNull int[] grantResults) {
        if (requestCode == REQUEST_CAMERA_PERMISSION) {
            if (grantResults.length != 1 || grantResults[0] != PackageManager.PERMISSION_GRANTED) {
                ErrorDialog.newInstance(getString(R.string.request_permission))
                        .show(getChildFragmentManager(), FRAGMENT_DIALOG);
            }
        } else {
            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        }
    }
477 |
478 | /**
479 | * Sets up member variables related to camera.
480 | *
481 | * @param width The width of available size for camera preview
482 | * @param height The height of available size for camera preview
483 | */
    @SuppressWarnings("SuspiciousNameCombination")
    private void setUpCameraOutputs(int width, int height) {
        Activity activity = getActivity();
        CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
        try {
            // Pick the first suitable (non-front-facing) camera and configure every output for it.
            for (String cameraId : manager.getCameraIdList()) {
                CameraCharacteristics characteristics
                        = manager.getCameraCharacteristics(cameraId);

                // We don't use a front facing camera in this sample.
                Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
                if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                    continue;
                }

                StreamConfigurationMap map = characteristics.get(
                        CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                if (map == null) {
                    continue;
                }

                // For still image captures, we use the largest available size.
                Size largest = Collections.max(
                        Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                        new CompareSizesByArea());
                mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                        ImageFormat.JPEG, /*maxImages*/2);
                mImageReader.setOnImageAvailableListener(
                        mOnImageAvailableListener, mBackgroundHandler);

                // Find out if we need to swap dimension to get the preview size relative to sensor
                // coordinate.
                // NOTE(review): the display rotation is hard-coded to ROTATION_90 instead of
                // querying the window manager (the original query is kept in the trailing
                // comment) — presumably the app is pinned to landscape; confirm before changing.
                int displayRotation = Surface.ROTATION_90;//activity.getWindowManager().getDefaultDisplay().getRotation();
                //noinspection ConstantConditions
                mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
                boolean swappedDimensions = false;
                switch (displayRotation) {
                    case Surface.ROTATION_0:
                    case Surface.ROTATION_180:
                        if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                            swappedDimensions = true;
                        }
                        break;
                    case Surface.ROTATION_90:
                    case Surface.ROTATION_270:
                        if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                            swappedDimensions = true;
                        }
                        break;
                    default:
                        Logger.e(TAG, "Display rotation is invalid: " + displayRotation);
                }

                Point displaySize = new Point();
                activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
                int rotatedPreviewWidth = width;
                int rotatedPreviewHeight = height;
                int maxPreviewWidth = displaySize.x;
                int maxPreviewHeight = displaySize.y;

                if (swappedDimensions) {
                    // Sensor is rotated relative to the display: swap both the requested
                    // preview size and the display-size caps.
                    rotatedPreviewWidth = height;
                    rotatedPreviewHeight = width;
                    maxPreviewWidth = displaySize.y;
                    maxPreviewHeight = displaySize.x;
                }

                if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                    maxPreviewWidth = MAX_PREVIEW_WIDTH;
                }

                if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                    maxPreviewHeight = MAX_PREVIEW_HEIGHT;
                }

                // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
                // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
                // garbage capture data.
                mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                        rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
                        maxPreviewHeight, largest);

                // We fit the aspect ratio of TextureView to the size of preview we picked.
                int orientation = getResources().getConfiguration().orientation;
                if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                    mTextureView.setAspectRatio(
                            mPreviewSize.getWidth(), mPreviewSize.getHeight());
                } else {
                    mTextureView.setAspectRatio(
                            mPreviewSize.getHeight(), mPreviewSize.getWidth());
                }

                // Check if the flash is supported.
                Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
                mFlashSupported = available == null ? false : available;

                mCameraId = cameraId;
                return;
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        } catch (NullPointerException e) {
            // Currently an NPE is thrown when the Camera2API is used but not supported on the
            // device this code runs.
            ErrorDialog.newInstance(getString(R.string.camera_error))
                    .show(getChildFragmentManager(), FRAGMENT_DIALOG);
        }
    }
592 |
593 | /**
594 | * Opens the camera specified by {@link CameraPreviewFragment#mCameraId}.
595 | */
596 | private void openCamera(int width, int height) {
597 | if (ContextCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA)
598 | != PackageManager.PERMISSION_GRANTED) {
599 | requestCameraPermission();
600 | return;
601 | }
602 | setUpCameraOutputs(width, height);
603 | configureTransform(width, height);
604 | Activity activity = getActivity();
605 | CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
606 | try {
607 | if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
608 | throw new RuntimeException("Time out waiting to lock camera opening.");
609 | }
610 | manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
611 | } catch (CameraAccessException e) {
612 | e.printStackTrace();
613 | } catch (InterruptedException e) {
614 | throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
615 | }
616 | }
617 |
618 | /**
619 | * Closes the current {@link CameraDevice}.
620 | */
621 | private void closeCamera() {
622 | try {
623 | mCameraOpenCloseLock.acquire();
624 | if (null != mCaptureSession) {
625 | mCaptureSession.close();
626 | mCaptureSession = null;
627 | }
628 | if (null != mCameraDevice) {
629 | mCameraDevice.close();
630 | mCameraDevice = null;
631 | }
632 | if (null != mImageReader) {
633 | mImageReader.close();
634 | mImageReader = null;
635 | }
636 | } catch (InterruptedException e) {
637 | throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
638 | } finally {
639 | mCameraOpenCloseLock.release();
640 | }
641 | }
642 |
643 | /**
644 | * Starts a background thread and its {@link Handler}.
645 | */
646 | private void startBackgroundThread() {
647 | mBackgroundThread = new HandlerThread("CameraBackground");
648 | mBackgroundThread.start();
649 | mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
650 | }
651 |
652 | /**
653 | * Stops the background thread and its {@link Handler}.
654 | */
655 | private void stopBackgroundThread() {
656 | mBackgroundThread.quitSafely();
657 | try {
658 | mBackgroundThread.join();
659 | mBackgroundThread = null;
660 | mBackgroundHandler = null;
661 | } catch (InterruptedException e) {
662 | e.printStackTrace();
663 | }
664 | }
665 |
    /**
     * Creates a new {@link CameraCaptureSession} for camera preview.
     * Expects mTextureView to have a SurfaceTexture, mPreviewSize to be chosen, and
     * mCameraDevice/mImageReader to be set up (see setUpCameraOutputs / openCamera).
     */
    private void createCameraPreviewSession() {
        try {
            SurfaceTexture texture = mTextureView.getSurfaceTexture();
            assert texture != null;

            // We configure the size of default buffer to be the size of camera preview we want.
            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());

            // This is the output Surface we need to start preview.
            Surface surface = new Surface(texture);

            // We set up a CaptureRequest.Builder with the output Surface.
            mPreviewRequestBuilder
                    = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mPreviewRequestBuilder.addTarget(surface);

            // Here, we create a CameraCaptureSession for camera preview. The ImageReader's
            // surface is registered up front so still captures can reuse this session.
            mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
                    new CameraCaptureSession.StateCallback() {

                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                            // The camera is already closed
                            if (null == mCameraDevice) {
                                return;
                            }

                            // When the session is ready, we start displaying the preview.
                            mCaptureSession = cameraCaptureSession;
                            try {
                                // Auto focus should be continuous for camera preview.
                                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                                        CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                                // Flash is automatically enabled when necessary.
                                setAutoFlash(mPreviewRequestBuilder);

                                // Finally, we start displaying the camera preview.
                                mPreviewRequest = mPreviewRequestBuilder.build();
                                mCaptureSession.setRepeatingRequest(mPreviewRequest,
                                        mCaptureCallback, mBackgroundHandler);
                            } catch (CameraAccessException e) {
                                e.printStackTrace();
                            }
                        }

                        @Override
                        public void onConfigureFailed(
                                @NonNull CameraCaptureSession cameraCaptureSession) {
                            showToast("Failed");
                        }
                    }, null
            );
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
725 |
    /**
     * Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
     * This method should be called after the camera preview size is determined in
     * setUpCameraOutputs and also the size of `mTextureView` is fixed.
     *
     * @param viewWidth  The width of `mTextureView`
     * @param viewHeight The height of `mTextureView`
     */
    private void configureTransform(int viewWidth, int viewHeight) {
        Activity activity = getActivity();
        if (null == mTextureView || null == mPreviewSize || null == activity) {
            return;
        }
        // NOTE(review): rotation is hard-coded to ROTATION_90 (original query kept in the
        // trailing comment), matching the same hard-coding in setUpCameraOutputs — confirm
        // the app is locked to landscape before changing either place.
        int rotation = Surface.ROTATION_90;//activity.getWindowManager().getDefaultDisplay().getRotation();
        Matrix matrix = new Matrix();
        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
        // Preview buffer is in sensor coordinates, hence width/height are swapped here.
        RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();
        if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
            // Center the buffer on the view, map view onto buffer, then scale so the
            // preview fills the view, and finally rotate to compensate the display.
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
            float scale = Math.max(
                    (float) viewHeight / mPreviewSize.getHeight(),
                    (float) viewWidth / mPreviewSize.getWidth());
            matrix.postScale(scale, scale, centerX, centerY);
            // ROTATION_90 -> -90 degrees, ROTATION_270 -> +90 degrees.
            matrix.postRotate(90 * (rotation - 2), centerX, centerY);
        } else if (Surface.ROTATION_180 == rotation) {
            matrix.postRotate(180, centerX, centerY);
        }
        mTextureView.setTransform(matrix);
    }
758 |
759 | /**
760 | * Initiate a still image capture.
761 | */
762 | private void takePicture() {
763 | lockFocus();
764 | }
765 |
766 | /**
767 | * Lock the focus as the first step for a still image capture.
768 | */
769 | private void lockFocus() {
770 | try {
771 | // This is how to tell the camera to lock focus.
772 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
773 | CameraMetadata.CONTROL_AF_TRIGGER_START);
774 | // Tell #mCaptureCallback to wait for the lock.
775 | mState = STATE_WAITING_LOCK;
776 | mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
777 | mBackgroundHandler);
778 | } catch (CameraAccessException e) {
779 | e.printStackTrace();
780 | }
781 | }
782 |
783 | /**
784 | * Run the precapture sequence for capturing a still image. This method should be called when
785 | * we get a response in {@link #mCaptureCallback} from {@link #lockFocus()}.
786 | */
787 | private void runPrecaptureSequence() {
788 | try {
789 | // This is how to tell the camera to trigger.
790 | mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
791 | CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
792 | // Tell #mCaptureCallback to wait for the precapture sequence to be set.
793 | mState = STATE_WAITING_PRECAPTURE;
794 | mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
795 | mBackgroundHandler);
796 | } catch (CameraAccessException e) {
797 | e.printStackTrace();
798 | }
799 | }
800 |
    /**
     * Capture a still picture. This method should be called when we get a response in
     * {@link #mCaptureCallback} from both {@link #lockFocus()}.
     */
    private void captureStillPicture() {
        try {
            final Activity activity = getActivity();
            if (null == activity || null == mCameraDevice) {
                return;
            }
            // This is the CaptureRequest.Builder that we use to take a picture.
            final CaptureRequest.Builder captureBuilder =
                    mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            // The still frame goes to the ImageReader, whose listener saves it to mFile.
            captureBuilder.addTarget(mImageReader.getSurface());

            // Use the same AE and AF modes as the preview.
            captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            setAutoFlash(captureBuilder);

            // Orientation
            int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));

            CameraCaptureSession.CaptureCallback CaptureCallback
                    = new CameraCaptureSession.CaptureCallback() {

                @Override
                public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                                               @NonNull CaptureRequest request,
                                               @NonNull TotalCaptureResult result) {
                    //showToast("Saved: " + mFile);
                    Logger.d(TAG, mFile.toString());
                    // Resume preview state, then navigate to the segmentation screen.
                    unlockFocus();
                    goToSegmentedImageFragment();

                }
            };

            // Halt the repeating preview and discard in-flight work before the still
            // capture; a null handler runs the callback on the current thread's looper.
            mCaptureSession.stopRepeating();
            mCaptureSession.abortCaptures();
            mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
847 |
848 | /**
849 | * Retrieves the JPEG orientation from the specified screen rotation.
850 | *
851 | * @param rotation The screen rotation.
852 | * @return The JPEG orientation (one of 0, 90, 270, and 360)
853 | */
854 | private int getOrientation(int rotation) {
855 | // Sensor orientation is 90 for most devices, or 270 for some devices (eg. Nexus 5X)
856 | // We have to take that into account and rotate JPEG properly.
857 | // For devices with orientation of 90, we simply return our mapping from ORIENTATIONS.
858 | // For devices with orientation of 270, we need to rotate the JPEG 180 degrees.
859 | return (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360;
860 | }
861 |
    /**
     * Unlock the focus. This method should be called when still image capture sequence is
     * finished.
     */
    private void unlockFocus() {
        try {
            // Reset the auto-focus trigger
            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                    CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
            setAutoFlash(mPreviewRequestBuilder);
            // One-shot capture applies the cancel before the repeating preview resumes.
            mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback,
                    mBackgroundHandler);
            // After this, the camera will go back to the normal state of preview.
            mState = STATE_PREVIEW;
            mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback,
                    mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
882 |
883 | @Override
884 | public void onClick(View view) {
885 | switch (view.getId()) {
886 | case R.id.picture: {
887 | takePicture();
888 | break;
889 | }
890 | }
891 | }
892 |
893 | private void setAutoFlash(CaptureRequest.Builder requestBuilder) {
894 | if (mFlashSupported) {
895 | requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
896 | CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
897 | }
898 | }
899 |
900 | /**
901 | * Saves a JPEG {@link Image} into the specified {@link File}.
902 | */
903 | private static class ImageSaver implements Runnable {
904 |
905 | /**
906 | * The JPEG image
907 | */
908 | private final Image mImage;
909 | /**
910 | * The file we save the image into.
911 | */
912 | private final File mFile;
913 |
914 | ImageSaver(Image image, File file) {
915 | mImage = image;
916 | mFile = file;
917 | }
918 |
919 | @Override
920 | public void run() {
921 | ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
922 | byte[] bytes = new byte[buffer.remaining()];
923 | buffer.get(bytes);
924 | FileOutputStream output = null;
925 | try {
926 | output = new FileOutputStream(mFile);
927 | output.write(bytes);
928 | } catch (IOException e) {
929 | e.printStackTrace();
930 | } finally {
931 | mImage.close();
932 | if (null != output) {
933 | try {
934 | output.close();
935 | } catch (IOException e) {
936 | e.printStackTrace();
937 | }
938 | }
939 | }
940 | }
941 |
942 | }
943 |
944 | /**
945 | * Compares two {@code Size}s based on their areas.
946 | */
947 | static class CompareSizesByArea implements Comparator {
948 |
949 | @Override
950 | public int compare(Size lhs, Size rhs) {
951 | // We cast here to ensure the multiplications won't overflow
952 | return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
953 | (long) rhs.getWidth() * rhs.getHeight());
954 | }
955 |
956 | }
957 |
958 | /**
959 | * Shows an error message dialog.
960 | */
961 | public static class ErrorDialog extends DialogFragment {
962 |
963 | private static final String ARG_MESSAGE = "message";
964 |
965 | public static ErrorDialog newInstance(String message) {
966 | ErrorDialog dialog = new ErrorDialog();
967 | Bundle args = new Bundle();
968 | args.putString(ARG_MESSAGE, message);
969 | dialog.setArguments(args);
970 | return dialog;
971 | }
972 |
973 | @NonNull
974 | @Override
975 | public Dialog onCreateDialog(Bundle savedInstanceState) {
976 | final Activity activity = getActivity();
977 | return new AlertDialog.Builder(activity)
978 | .setMessage(getArguments().getString(ARG_MESSAGE))
979 | .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
980 | @Override
981 | public void onClick(DialogInterface dialogInterface, int i) {
982 | activity.finish();
983 | }
984 | })
985 | .create();
986 | }
987 |
988 | }
989 |
990 | /**
991 | * Shows OK/Cancel confirmation dialog about camera permission.
992 | */
993 | public static class ConfirmationDialog extends DialogFragment {
994 |
995 | @NonNull
996 | @Override
997 | public Dialog onCreateDialog(Bundle savedInstanceState) {
998 | final Fragment parent = getParentFragment();
999 | return new AlertDialog.Builder(getActivity())
1000 | .setMessage(R.string.request_permission)
1001 | .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
1002 | @Override
1003 | public void onClick(DialogInterface dialog, int which) {
1004 | parent.requestPermissions(new String[]{Manifest.permission.CAMERA},
1005 | REQUEST_CAMERA_PERMISSION);
1006 | }
1007 | })
1008 | .setNegativeButton(android.R.string.cancel,
1009 | new DialogInterface.OnClickListener() {
1010 | @Override
1011 | public void onClick(DialogInterface dialog, int which) {
1012 | Activity activity = parent.getActivity();
1013 | if (activity != null) {
1014 | activity.finish();
1015 | }
1016 | }
1017 | })
1018 | .create();
1019 | }
1020 | }
1021 |
1022 | public void goToSegmentedImageFragment() {
1023 | String stringUri = mFile.getAbsolutePath();
1024 | Bundle args = new Bundle();
1025 | args.putString(BUNDLE_KEY_IMAGE, stringUri);
1026 | Fragment fragment = new ImageSegmentationFragment();
1027 | fragment.setArguments(args);
1028 | FragmentManager fragmentManager = getActivity().getSupportFragmentManager();
1029 | FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
1030 | fragmentTransaction.replace(R.id.container, fragment);
1031 | fragmentTransaction.addToBackStack(null);
1032 | fragmentTransaction.commit();
1033 |
1034 | }
1035 |
1036 | }
1037 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/java/com/qdn/segmentation/Fragments/ImageSegmentationFragment.java:
--------------------------------------------------------------------------------
1 | package com.qdn.segmentation.Fragments;
2 |
3 |
4 | import android.app.ProgressDialog;
5 | import android.graphics.Bitmap;
6 | import android.graphics.BitmapFactory;
7 | import android.os.AsyncTask;
8 | import android.os.Bundle;
9 | import android.support.v4.app.Fragment;
10 | import android.support.v7.app.AppCompatActivity;
11 | import android.support.v7.widget.Toolbar;
12 | import android.view.LayoutInflater;
13 | import android.view.MenuItem;
14 | import android.view.View;
15 | import android.view.ViewGroup;
16 | import android.widget.ImageView;
17 | import android.widget.TextView;
18 |
19 | import com.qdn.segmentation.Utils.Constants;
20 | import com.qualcomm.qti.snpe.NeuralNetwork;
21 | import com.qdn.segmentation.Activity.MainActivity;
22 | import com.qdn.segmentation.Helpers.SNPEHelper;
23 | import com.qdn.segmentation.Interfaces.IBitmapLoader;
24 | import com.qdn.segmentation.R;
25 | import com.qdn.segmentation.tasks.SegmentImageTask;
26 |
27 | public class ImageSegmentationFragment extends Fragment implements IBitmapLoader {
28 |
29 | private ImageView mImageViewSegmented;
30 | private SNPEHelper mSNPEHelper;
31 | private Bitmap mBitmap;
32 | private IBitmapLoader mCallBackBitmapLoader;
33 | private ProgressDialog mProgressDialog;
34 | private AppCompatActivity mActivity;
35 | private Toolbar mToolbar;
36 | private TextView mTextViewImageType;
37 |
38 | @Override
39 | public View onCreateView(LayoutInflater inflater, ViewGroup container,
40 | Bundle savedInstanceState) {
41 | // Inflate the layout for this fragment
42 | return inflater.inflate(R.layout.fragment_image_segmentation, container, false);
43 | }
44 |
45 |
46 | @Override
47 | public void onStart() {
48 | super.onStart();
49 | mSNPEHelper = new SNPEHelper(getActivity().getApplication());
50 | initViews();
51 | initToolbar();
52 | String imgPath = getArguments().getString(Constants.BUNDLE_KEY_IMAGE);
53 | if (imgPath != null) {
54 | mBitmap = BitmapFactory.decodeFile(imgPath);
55 | mImageViewSegmented.setImageBitmap(mBitmap);
56 | mTextViewImageType.setText(getString(R.string.original_image));
57 | mProgressDialog = ProgressDialog.show(mActivity, getString(R.string.dialog_image_segmentation),
58 | getString(R.string.dialog_segmentation_process), true);
59 | mProgressDialog.show();
60 | mSNPEHelper.loadSegmentImageTask(getContext(), mSNPEHelper.getNeuralNetwork(), mBitmap, mCallBackBitmapLoader);
61 | }
62 | }
63 |
64 | private void initToolbar() {
65 | mToolbar = (Toolbar) mActivity.findViewById(R.id.toolbar_home);
66 | setHasOptionsMenu(true);
67 | mActivity.setSupportActionBar(mToolbar);
68 | mActivity.getSupportActionBar().setTitle(R.string.image_segmentation);
69 | mActivity.getSupportActionBar().setDisplayHomeAsUpEnabled(true);
70 | mActivity.getSupportActionBar().setDisplayShowHomeEnabled(true);
71 | }
72 |
73 | @Override
74 | public boolean onOptionsItemSelected(MenuItem item) {
75 | switch (item.getItemId()) {
76 | case android.R.id.home:
77 | mActivity.onBackPressed();
78 | }
79 | return super.onOptionsItemSelected(item);
80 | }
81 |
82 |
83 | private void initViews() {
84 | mActivity = (AppCompatActivity) getActivity();
85 | mCallBackBitmapLoader = this;
86 | mImageViewSegmented = (ImageView) mActivity.findViewById(R.id.image_view_segment);
87 | mTextViewImageType = (TextView) mActivity.findViewById(R.id.textView_imagetype);
88 | mSNPEHelper = MainActivity.mSNPEHelper;
89 | }
90 |
91 |
92 | @Override
93 | public void loadResultBitmap(Bitmap bitmap) {
94 | mProgressDialog.dismiss();
95 | mImageViewSegmented.setImageBitmap(bitmap);
96 | mTextViewImageType.setText(getString(R.string.segmented_image));
97 | }
98 | }
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/java/com/qdn/segmentation/Helpers/AutoFitTextureView.java:
--------------------------------------------------------------------------------
1 |
2 | package com.qdn.segmentation.Helpers;
3 |
4 | import android.content.Context;
5 | import android.util.AttributeSet;
6 | import android.view.TextureView;
7 |
8 | /**
9 | * A {@link TextureView} that can be adjusted to a specified aspect ratio.
10 | */
11 | public class AutoFitTextureView extends TextureView {
12 |
13 | private int mRatioWidth = 0;
14 | private int mRatioHeight = 0;
15 |
16 | public AutoFitTextureView(Context context) {
17 | this(context, null);
18 | }
19 |
20 | public AutoFitTextureView(Context context, AttributeSet attrs) {
21 | this(context, attrs, 0);
22 | }
23 |
24 | public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
25 | super(context, attrs, defStyle);
26 | }
27 |
28 | /**
29 | * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
30 | * calculated from the parameters. Note that the actual sizes of parameters don't matter, that
31 | * is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result.
32 | *
33 | * @param width Relative horizontal size
34 | * @param height Relative vertical size
35 | */
36 | public void setAspectRatio(int width, int height) {
37 | if (width < 0 || height < 0) {
38 | throw new IllegalArgumentException("Size cannot be negative.");
39 | }
40 | mRatioWidth = width;
41 | mRatioHeight = height;
42 | requestLayout();
43 | }
44 |
45 | @Override
46 | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
47 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
48 | int width = MeasureSpec.getSize(widthMeasureSpec);
49 | int height = MeasureSpec.getSize(heightMeasureSpec);
50 | if (0 == mRatioWidth || 0 == mRatioHeight) {
51 | setMeasuredDimension(width, height);
52 | } else {
53 | if (width < height * mRatioWidth / mRatioHeight) {
54 | setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
55 | } else {
56 | setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
57 | }
58 | }
59 | }
60 |
61 | }
62 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/java/com/qdn/segmentation/Helpers/BitmapToFloatArrayHelper.java:
--------------------------------------------------------------------------------
1 | package com.qdn.segmentation.Helpers;
2 |
3 | import android.graphics.Bitmap;
4 |
5 |
6 | import com.qdn.segmentation.Utils.Logger;
7 |
8 | import java.nio.ByteBuffer;
9 |
10 | public class BitmapToFloatArrayHelper {
11 |
12 | private static final String TAG = BitmapToFloatArrayHelper.class.getSimpleName();
13 | private ByteBuffer mByteBufferHW4;
14 | private float[] mFloatBufferHW3;
15 | private boolean mIsFloatBufferBlack;
16 |
17 | /**
18 | * This will assume the geometry of both buffers from the first input bitmap.
19 | */
20 | public void bitmapToBuffer(final Bitmap inputBitmap) {
21 | final int inputBitmapBytesSize = inputBitmap.getRowBytes() * inputBitmap.getHeight();
22 | if (mByteBufferHW4 == null || mByteBufferHW4.capacity() != inputBitmapBytesSize) {
23 | mByteBufferHW4 = ByteBuffer.allocate(inputBitmapBytesSize);
24 | mFloatBufferHW3 = new float[1 * inputBitmap.getWidth() * inputBitmap.getHeight() * 3];
25 | }
26 | mByteBufferHW4.rewind();
27 | Logger.d(TAG, "mFloatBufferHW3" + mFloatBufferHW3.length + "");
28 | inputBitmap.copyPixelsToBuffer(mByteBufferHW4);
29 | }
30 |
31 | /**
32 | * This will process pixels RGBA(0..255) to BGR(-1..1)
33 | */
34 | public float[] bufferToNormalFloatsBGR() {
35 | final byte[] inputArrayHW4 = mByteBufferHW4.array();
36 | final int area = mFloatBufferHW3.length / 3;
37 | long sumG = 0;
38 | int srcIdx = 0, dstIdx = 0;
39 | final float inputScale = 0.00784313771874f;
40 | for (int i = 0; i < area; i++) {
41 | // NOTE: the 0xFF a "cast" to unsigned int (otherwise it will be negative numbers for bright colors)
42 | final int pixelR = inputArrayHW4[srcIdx] & 0xFF;
43 | final int pixelG = inputArrayHW4[srcIdx + 1] & 0xFF;
44 | final int pixelB = inputArrayHW4[srcIdx + 2] & 0xFF;
45 | mFloatBufferHW3[dstIdx] = inputScale * (float) pixelB - 1;
46 | mFloatBufferHW3[dstIdx + 1] = inputScale * (float) pixelG - 1;
47 | mFloatBufferHW3[dstIdx + 2] = inputScale * (float) pixelR - 1;
48 | srcIdx += 4;
49 | dstIdx += 3;
50 | sumG += pixelG;
51 | }
52 | // the buffer is black if on average on average Green < 13/255 (aka: 5%)
53 | mIsFloatBufferBlack = sumG < (area * 13);
54 | return mFloatBufferHW3;
55 | }
56 |
57 | public boolean isFloatBufferBlack() {
58 | return mIsFloatBufferBlack;
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/java/com/qdn/segmentation/Helpers/SNPEHelper.java:
--------------------------------------------------------------------------------
1 | package com.qdn.segmentation.Helpers;
2 |
3 | import android.app.Application;
4 | import android.content.Context;
5 | import android.graphics.Bitmap;
6 | import android.os.AsyncTask;
7 |
8 | import com.qdn.segmentation.Interfaces.IBitmapLoader;
9 | import com.qdn.segmentation.tasks.SegmentImageTask;
10 | import com.qualcomm.qti.snpe.NeuralNetwork;
11 | import com.qdn.segmentation.Activity.MainActivity;
12 | import com.qdn.segmentation.Interfaces.INetworkLoader;
13 | import com.qdn.segmentation.tasks.LoadNetworkTask;
14 |
15 | import java.lang.*;
16 |
17 | public class SNPEHelper {
18 | private static final String TAG = SNPEHelper.class.getSimpleName();
19 | public NeuralNetwork mNeuralnetwork;
20 | BitmapToFloatArrayHelper mBitmapToFloatHelper;
21 |
22 | public SNPEHelper(Application application) {
23 | mBitmapToFloatHelper = new BitmapToFloatArrayHelper();
24 | }
25 |
26 |
27 | public static void loadNetwork(Application mApplication, INetworkLoader mCallbackINetworkLoader) {
28 | LoadNetworkTask mLoadTask = new LoadNetworkTask(mApplication, NeuralNetwork.Runtime.GPU_FLOAT16, MainActivity.SupportedTensorFormat.FLOAT, mCallbackINetworkLoader);
29 | mLoadTask.executeOnExecutor(AsyncTask.SERIAL_EXECUTOR);
30 | }
31 |
32 | public void setNeuralNetwork(NeuralNetwork neuralNetwork) {
33 | this.mNeuralnetwork = neuralNetwork;
34 | }
35 |
36 | public NeuralNetwork getNeuralNetwork() {
37 | return mNeuralnetwork;
38 | }
39 | public static void loadSegmentImageTask(Context context, NeuralNetwork neuralNetwork, Bitmap bitmap, IBitmapLoader iBitmapLoader) {
40 | SegmentImageTask task = new SegmentImageTask(context, neuralNetwork, bitmap, iBitmapLoader);
41 | task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
42 | }
43 |
44 | }
45 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/java/com/qdn/segmentation/Interfaces/IBitmapLoader.java:
--------------------------------------------------------------------------------
1 | package com.qdn.segmentation.Interfaces;
2 |
3 | import android.graphics.Bitmap;
4 |
/**
 * Callback used to deliver the segmented result bitmap back to the UI layer.
 */
public interface IBitmapLoader {
    // Invoked when segmentation has produced a bitmap ready for display.
    void loadResultBitmap(Bitmap bitmap);
}
8 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/java/com/qdn/segmentation/Interfaces/INetworkLoader.java:
--------------------------------------------------------------------------------
1 | package com.qdn.segmentation.Interfaces;
2 |
3 | import com.qualcomm.qti.snpe.NeuralNetwork;
4 |
/**
 * Callback used to deliver a freshly built SNPE {@link NeuralNetwork}.
 */
public interface INetworkLoader {
    // Invoked once the network builder has finished.
    void onNetworkBuilt(NeuralNetwork neuralNetwork);
}
8 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/java/com/qdn/segmentation/Utils/Constants.java:
--------------------------------------------------------------------------------
1 | package com.qdn.segmentation.Utils;
2 |
3 | import android.Manifest;
4 |
/**
 * Application-wide constants shared between the camera, segmentation and permission code.
 */
public class Constants {

    //Permission related constants
    public static final String PERMISSION_CAMERA = Manifest.permission.CAMERA;
    public static final int REQUEST_CAMERA_PERMISSION = 1;

    //Image related constants
    // 513x513 — presumably the segmentation model's fixed input resolution; confirm against the model.
    public static final int BITMAP_WIDTH = 513;
    public static final int BITMAP_HEIGHT = 513;

    //Bundle related constants
    // Key under which the captured image's file path travels between fragments.
    public static final String BUNDLE_KEY_IMAGE = "Image";

    //SNPE constants
    // Tensor names of the network's output and input layers.
    public static final String MNETSSD_OUTPUT_LAYER = "ArgMax:0";
    public static final String MNETSSD_INPUT_LAYER = "sub_7:0";
}
22 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/java/com/qdn/segmentation/Utils/Logger.java:
--------------------------------------------------------------------------------
1 | package com.qdn.segmentation.Utils;
2 |
3 | import android.util.Log;
4 |
5 | public class Logger {
6 |
7 | private static boolean isLogEnabled = false;
8 |
9 | /**
10 | * Method to show Debug Logs
11 | *
12 | * @param tag
13 | * @param msg
14 | */
15 |
16 | public static void d(String tag, String msg) {
17 | if (isLogEnabled)
18 | Log.d(tag, msg);
19 |
20 | }
21 |
22 |
23 | /**
24 | * Method to show warning Logs
25 | *
26 | * @param tag
27 | * @param msg
28 | */
29 | public static void w(String tag, String msg) {
30 | if (isLogEnabled)
31 | Log.w(tag, msg);
32 |
33 | }
34 |
35 | /**
36 | * Method to show Verbose Logs
37 | *
38 | * @param tag
39 | * @param msg
40 | */
41 | public static void v(String tag, String msg) {
42 | if (isLogEnabled)
43 | Log.v(tag, msg);
44 |
45 | }
46 |
47 | /**
48 | * Method to show Error Logs
49 | *
50 | * @param tag
51 | * @param msg
52 | * @param t
53 | */
54 | public static void e(String tag, String msg, Throwable t) {
55 | if (isLogEnabled)
56 | Log.e(tag, msg, t);
57 |
58 | }
59 |
60 | /**
61 | * Method to show Error Logs
62 | *
63 | * @param tag
64 | * @param msg
65 | */
66 |
67 | public static void e(String tag, String msg) {
68 | if (isLogEnabled)
69 | Log.e(tag, msg);
70 |
71 | }
72 | }
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/java/com/qdn/segmentation/tasks/LoadNetworkTask.java:
--------------------------------------------------------------------------------
1 |
2 | package com.qdn.segmentation.tasks;
3 |
4 | import android.app.Application;
5 | import android.os.AsyncTask;
6 | import android.widget.Toast;
7 |
8 | import com.qdn.segmentation.Utils.Logger;
9 | import com.qualcomm.qti.snpe.NeuralNetwork;
10 | import com.qualcomm.qti.snpe.SNPE;
11 | import com.qdn.segmentation.Activity.MainActivity;
12 | import com.qdn.segmentation.Interfaces.INetworkLoader;
13 | import com.qdn.segmentation.R;
14 |
15 | import java.io.File;
16 | import java.io.IOException;
17 | import java.io.InputStream;
18 | public class LoadNetworkTask extends AsyncTask {
19 |
20 | private static final String TAG = LoadNetworkTask.class.getSimpleName();
21 | private final Application mApplication;
22 | private final NeuralNetwork.Runtime mTargetRuntime;
23 | private final MainActivity.SupportedTensorFormat mTensorFormat;
24 | private INetworkLoader mCallbackINetworkLoader;
25 | private InputStream mInputstream;
26 |
27 | public LoadNetworkTask(final Application application,
28 | final NeuralNetwork.Runtime targetRuntime,
29 | final MainActivity.SupportedTensorFormat tensorFormat,
30 | final INetworkLoader iNetworkLoader) {
31 | mApplication = application;
32 | mTargetRuntime = targetRuntime;
33 | mTensorFormat = tensorFormat;
34 | this.mCallbackINetworkLoader = iNetworkLoader;
35 | mInputstream = getFileByResourceId(R.raw.model);
36 |
37 | }
38 |
39 | @Override
40 | protected NeuralNetwork doInBackground(File... params) {
41 | NeuralNetwork network = null;
42 | try {
43 |
44 | final SNPE.NeuralNetworkBuilder builder = new SNPE.NeuralNetworkBuilder(mApplication)
45 | .setDebugEnabled(false)
46 | .setRuntimeOrder(mTargetRuntime)
47 | .setModel(mInputstream, mInputstream.available())
48 | .setCpuFallbackEnabled(true)
49 | .setUseUserSuppliedBuffers(mTensorFormat != MainActivity.SupportedTensorFormat.FLOAT);
50 | network = builder.build();
51 | } catch (IllegalStateException | IOException e) {
52 | Logger.e(TAG, e.getMessage(), e);
53 | }
54 | return network;
55 | }
56 |
57 | @Override
58 | protected void onPostExecute(NeuralNetwork neuralNetwork) {
59 | super.onPostExecute(neuralNetwork);
60 |
61 | if (neuralNetwork != null) {
62 | mCallbackINetworkLoader.onNetworkBuilt(neuralNetwork);
63 | } else
64 | Toast.makeText(mApplication, "Failed Building network !!!", Toast.LENGTH_SHORT).show();
65 |
66 | }
67 |
68 |
69 | private InputStream getFileByResourceId(int id) {
70 | InputStream ins = mApplication.getResources().openRawResource(id);
71 | return ins;
72 | }
73 | }
74 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/java/com/qdn/segmentation/tasks/SegmentImageTask.java:
--------------------------------------------------------------------------------
1 | package com.qdn.segmentation.tasks;
2 |
3 | import android.content.Context;
4 | import android.graphics.Bitmap;
5 | import android.os.AsyncTask;
6 |
7 | import com.qdn.segmentation.Utils.Logger;
8 | import com.qualcomm.qti.snpe.FloatTensor;
9 | import com.qualcomm.qti.snpe.NeuralNetwork;
10 | import com.qdn.segmentation.Helpers.BitmapToFloatArrayHelper;
11 | import com.qdn.segmentation.Utils.Constants;
12 | import com.qdn.segmentation.Interfaces.IBitmapLoader;
13 |
14 | import java.util.HashMap;
15 | import java.util.Map;
16 |
17 | import static com.qdn.segmentation.Utils.Constants.MNETSSD_INPUT_LAYER;
18 | import static com.qdn.segmentation.Utils.Constants.MNETSSD_OUTPUT_LAYER;
19 |
20 | public class SegmentImageTask extends AsyncTask {
21 |
22 | private static String TAG = SegmentImageTask.class.getSimpleName();
23 | private static int MNETSSD_NUM_BOXES = Constants.BITMAP_WIDTH * Constants.BITMAP_WIDTH;
24 | private final float[] floatOutput = new float[MNETSSD_NUM_BOXES];
25 | private Map mOutputs;
26 | private BitmapToFloatArrayHelper mBitmapToFloatHelper;
27 |
28 |
29 | public int originalBitmapW, originalBitmapH;
30 | private int[] mInputTensorShapeHWC;
31 | private FloatTensor mInputTensorReused;
32 | private Map mInputTensorsMap;
33 | private NeuralNetwork mNeuralnetwork;
34 | private Bitmap mScaledBitmap, mOutputBitmap;
35 | private IBitmapLoader mCallbackBitmapLoader;
36 |
37 | public int getInputTensorWidth() {
38 | return mInputTensorShapeHWC == null ? 0 : mInputTensorShapeHWC[1];
39 | }
40 |
41 | public int getInputTensorHeight() {
42 | return mInputTensorShapeHWC == null ? 0 : mInputTensorShapeHWC[2];
43 | }
44 |
45 |
46 | public SegmentImageTask(final Context context,
47 | final NeuralNetwork neuralNetwork,
48 | final Bitmap bitmap,
49 | final IBitmapLoader iBitmapLoader) {
50 | this.mNeuralnetwork = neuralNetwork;
51 | this.mCallbackBitmapLoader = iBitmapLoader;
52 | mBitmapToFloatHelper = new BitmapToFloatArrayHelper();
53 | originalBitmapH = bitmap.getHeight();
54 | originalBitmapW = bitmap.getWidth();
55 | mScaledBitmap = Bitmap.createScaledBitmap(bitmap, Constants.BITMAP_WIDTH, Constants.BITMAP_HEIGHT, false);
56 | }
57 |
58 | @Override
59 | protected Bitmap doInBackground(Void... params) {
60 | return deeplabV3Inference();
61 | }
62 |
63 | @Override
64 | protected void onPostExecute(Bitmap bitmap) {
65 | super.onPostExecute(bitmap);
66 | Logger.d(TAG, "onPostExecute " + bitmap);
67 | mCallbackBitmapLoader.loadResultBitmap(bitmap);
68 |
69 | }
70 |
71 | public Bitmap deeplabV3Inference() {
72 | try {
73 |
74 | mInputTensorShapeHWC = mNeuralnetwork.getInputTensorsShapes().get(MNETSSD_INPUT_LAYER);
75 | // allocate the single input tensor
76 | mInputTensorReused = mNeuralnetwork.createFloatTensor(mInputTensorShapeHWC);
77 | // add it to the map of inputs, even if it's a single input
78 | mInputTensorsMap = new HashMap<>();
79 | mInputTensorsMap.put(MNETSSD_INPUT_LAYER, mInputTensorReused);
80 | // execute the inference, and get 3 tensors as outputs
81 | mOutputs = inferenceOnBitmap(mScaledBitmap);
82 | if (mOutputs != null) {
83 | MNETSSD_NUM_BOXES = mOutputs.get(MNETSSD_OUTPUT_LAYER).getSize();
84 | // convert tensors to boxes - Note: Optimized to read-all upfront
85 | mOutputs.get(MNETSSD_OUTPUT_LAYER).read(floatOutput, 0, MNETSSD_NUM_BOXES);
86 | //for black/white image
87 | int w = mScaledBitmap.getWidth();
88 | int h = mScaledBitmap.getHeight();
89 | int b = 0xFF;
90 | int out = 0xFF;
91 |
92 | for (int y = 0; y < h; y++) {
93 | for (int x = 0; x < w; x++) {
94 | b = b & mScaledBitmap.getPixel(x, y);
95 | for (int i = 1; i <= 3 && floatOutput[y * w + x] != 15; i++) {
96 | out = out << (8) | b;
97 | }
98 | mScaledBitmap.setPixel(x, y, floatOutput[y * w + x] != 15 ? out : mScaledBitmap.getPixel(x, y));
99 | out = 0xFF;
100 | b = 0xFF;
101 | }
102 | }
103 |
104 | mOutputBitmap = Bitmap.createScaledBitmap(mScaledBitmap, originalBitmapW,
105 | originalBitmapH, true);
106 | Logger.d(TAG, mOutputBitmap.getWidth() + "");
107 | }
108 | } catch (Exception e) {
109 | e.printStackTrace();
110 | }
111 |
112 |
113 | return mOutputBitmap;
114 | }
115 |
116 |
117 |
118 | /* Generic functions, for typical image models */
119 |
120 | private Map inferenceOnBitmap(Bitmap scaledBitmap) {
121 | final Map outputs;
122 |
123 | try {
124 | // safety check
125 | if (mNeuralnetwork == null || mInputTensorReused == null || scaledBitmap.getWidth() != getInputTensorWidth() || scaledBitmap.getHeight() != getInputTensorHeight()) {
126 | Logger.d("SNPEHelper", "No NN loaded, or image size different than tensor size");
127 | return null;
128 | }
129 |
130 | // [0.3ms] Bitmap to RGBA byte array (size: 300*300*3 (RGBA..))
131 | mBitmapToFloatHelper.bitmapToBuffer(scaledBitmap);
132 |
133 | // [2ms] Pre-processing: Bitmap (300,300,4 ints) -> Float Input Tensor (300,300,3 floats)
134 |
135 | final float[] inputFloatsHW3 = mBitmapToFloatHelper.bufferToNormalFloatsBGR();
136 | if (mBitmapToFloatHelper.isFloatBufferBlack())
137 | return null;
138 | mInputTensorReused.write(inputFloatsHW3, 0, inputFloatsHW3.length, 0, 0);
139 | // [31ms on GPU16, 50ms on GPU] execute the inference
140 |
141 | outputs = mNeuralnetwork.execute(mInputTensorsMap);
142 |
143 | } catch (Exception e) {
144 | e.printStackTrace();
145 | Logger.d("SNPEHelper", e.getCause() + "");
146 | return null;
147 | }
148 |
149 | return outputs;
150 | }
151 | }
152 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/drawable-v24/camera.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/drawable-v24/camera.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/drawable-v24/camera_mode.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/drawable-v24/camera_mode.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
1 |
7 |
12 |
13 |
19 |
22 |
25 |
26 |
27 |
28 |
34 |
35 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/drawable/button.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/drawable/classification_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
10 |
12 |
14 |
16 |
18 |
20 |
22 |
24 |
26 |
28 |
30 |
32 |
34 |
36 |
38 |
40 |
42 |
44 |
46 |
48 |
50 |
52 |
54 |
56 |
58 |
60 |
62 |
64 |
66 |
68 |
70 |
72 |
74 |
75 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/drawable/ic_splash.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/drawable/ic_splash.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/drawable/ic_splash1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/drawable/ic_splash1.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/drawable/person1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/drawable/person1.jpg
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/drawable/person2.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/drawable/person2.jpeg
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/drawable/person3.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/drawable/person3.jpg
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/drawable/rose.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/drawable/rose.jpg
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/drawable/roses.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/drawable/roses.jpg
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/drawable/screenshot_segmentation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/drawable/screenshot_segmentation.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/drawable/snapdragon_hdk.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/drawable/snapdragon_hdk.jpg
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/layout/activity_camera.xml:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
11 |
12 |
17 |
18 |
19 |
20 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/layout/fragment_camera_preview.xml:
--------------------------------------------------------------------------------
1 |
3 |
6 |
7 |
12 |
13 |
19 |
20 |
27 |
28 |
34 |
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/layout/fragment_image_segmentation.xml:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
13 |
14 |
22 |
23 |
24 |
25 |
26 |
32 |
33 |
43 |
44 |
45 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/raw/model.dlc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/app/src/main/res/raw/model.dlc
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #03A9F4
4 | #0288D1
5 | #FF9800
6 | #B3E5FC
7 | #212121
8 | #757575
9 | #BDBDBD
10 | #FFFFFF
11 |
12 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 | 16dp
3 | 112dp
4 | 15dp
5 | 5dp
6 | 20dp
7 |
8 |
9 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | Image Segmentation
3 | Take Picture
4 | This sample needs camera permission.
5 | This device doesn\'t support Camera2 API.
6 | Image Segmentation
7 | Segmentation in Process...
8 | Image Segmentation
9 | Original Image
10 | Segmented Image
11 |
12 |
--------------------------------------------------------------------------------
/AndroidApplication/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
8 |
13 |
14 |
18 |
19 |
23 |
24 |
28 |
29 |
30 |
31 |
32 |
37 |
38 |
--------------------------------------------------------------------------------
/AndroidApplication/build.gradle:
--------------------------------------------------------------------------------
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {
    repositories {
        // NOTE(review): jcenter() has been read-only since 2021 and artifacts
        // may disappear — consider migrating to mavenCentral().
        jcenter()
        maven {
            url 'https://maven.google.com/'
            name 'Google'
        }
    }
    dependencies {
        // Android Gradle plugin; keep compatible with the Gradle wrapper (5.1.1).
        classpath 'com.android.tools.build:gradle:3.4.0'
    }
}

allprojects {
    // Repositories used to resolve dependencies of every module.
    repositories {
        jcenter()
        maven {
            url 'https://maven.google.com/'
            name 'Google'
        }
    }
}

// `gradlew clean` removes the root build directory.
task clean(type: Delete) {
    delete rootProject.buildDir
}
--------------------------------------------------------------------------------
/AndroidApplication/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx1536m
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 |
15 |
16 |
--------------------------------------------------------------------------------
/AndroidApplication/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/AndroidApplication/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Tue May 14 13:53:36 IST 2019
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-all.zip
7 |
--------------------------------------------------------------------------------
/AndroidApplication/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Attempt to set APP_HOME
10 | # Resolve links: $0 may be a link
11 | PRG="$0"
12 | # Need this for relative symlinks.
13 | while [ -h "$PRG" ] ; do
14 | ls=`ls -ld "$PRG"`
15 | link=`expr "$ls" : '.*-> \(.*\)$'`
16 | if expr "$link" : '/.*' > /dev/null; then
17 | PRG="$link"
18 | else
19 | PRG=`dirname "$PRG"`"/$link"
20 | fi
21 | done
22 | SAVED="`pwd`"
23 | cd "`dirname \"$PRG\"`/" >/dev/null
24 | APP_HOME="`pwd -P`"
25 | cd "$SAVED" >/dev/null
26 |
27 | APP_NAME="Gradle"
28 | APP_BASE_NAME=`basename "$0"`
29 |
30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
31 | DEFAULT_JVM_OPTS=""
32 |
33 | # Use the maximum available, or set MAX_FD != -1 to use that value.
34 | MAX_FD="maximum"
35 |
# Print all arguments as a single line on stdout (non-fatal diagnostic).
warn () {
    echo "$*"
}
39 |
# Print all arguments framed by blank lines, then abort with exit status 1.
die () {
    echo
    echo "$*"
    echo
    exit 1
}
46 |
47 | # OS specific support (must be 'true' or 'false').
48 | cygwin=false
49 | msys=false
50 | darwin=false
51 | nonstop=false
52 | case "`uname`" in
53 | CYGWIN* )
54 | cygwin=true
55 | ;;
56 | Darwin* )
57 | darwin=true
58 | ;;
59 | MINGW* )
60 | msys=true
61 | ;;
62 | NONSTOP* )
63 | nonstop=true
64 | ;;
65 | esac
66 |
67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
68 |
69 | # Determine the Java command to use to start the JVM.
70 | if [ -n "$JAVA_HOME" ] ; then
71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
72 | # IBM's JDK on AIX uses strange locations for the executables
73 | JAVACMD="$JAVA_HOME/jre/sh/java"
74 | else
75 | JAVACMD="$JAVA_HOME/bin/java"
76 | fi
77 | if [ ! -x "$JAVACMD" ] ; then
78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
79 |
80 | Please set the JAVA_HOME variable in your environment to match the
81 | location of your Java installation."
82 | fi
83 | else
84 | JAVACMD="java"
85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
86 |
87 | Please set the JAVA_HOME variable in your environment to match the
88 | location of your Java installation."
89 | fi
90 |
91 | # Increase the maximum file descriptors if we can.
92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
93 | MAX_FD_LIMIT=`ulimit -H -n`
94 | if [ $? -eq 0 ] ; then
95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
96 | MAX_FD="$MAX_FD_LIMIT"
97 | fi
98 | ulimit -n $MAX_FD
99 | if [ $? -ne 0 ] ; then
100 | warn "Could not set maximum file descriptor limit: $MAX_FD"
101 | fi
102 | else
103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
104 | fi
105 | fi
106 |
107 | # For Darwin, add options to specify how the application appears in the dock
108 | if $darwin; then
109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
110 | fi
111 |
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`

# We build the pattern for arguments to be converted via cygpath
# (an ERE matching any top-level directory under the Cygwin root, e.g. ^(/usr|/bin|...)).
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
# (POSIX sh has no arrays, so each argument is stored in args0..args8 via eval).
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option

# Only arguments that match a root-dir pattern AND do not start with '-'
# (i.e. look like Unix paths, not options) are converted to Windows form.
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
# Rebuild the positional parameters from the converted argsN variables.
# NOTE: this scheme supports at most 9 arguments.
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
156 |
# Escape application args: emit each argument single-quoted on its own line
# (embedded single quotes become '\''), each line ending in a backslash so
# the combined output can be safely re-parsed by the later 'eval set --'.
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
162 | APP_ARGS=$(save "$@")
163 |
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi

# Replace this shell with the JVM running the Gradle wrapper main class.
exec "$JAVACMD" "$@"
173 |
--------------------------------------------------------------------------------
/AndroidApplication/gradlew.bat:
--------------------------------------------------------------------------------
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

@rem APP_HOME = directory this script lives in; APP_BASE_NAME = its file name.
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

@rem No JAVA_HOME set: fall back to java.exe on the PATH and probe that it runs.
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
@rem Strip any surrounding quotes from JAVA_HOME so the java.exe path is well-formed.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windows variants

if not "%OS%" == "Windows_NT" goto win9xME_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2

:win9xME_args_slurp
if "x%~1" == "x" goto execute

set CMD_LINE_ARGS=%*

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega
85 |
--------------------------------------------------------------------------------
/AndroidApplication/output/segmentation.apk:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/AndroidApplication/output/segmentation.apk
--------------------------------------------------------------------------------
/AndroidApplication/settings.gradle:
--------------------------------------------------------------------------------
// Modules that are part of this Gradle build.
include ':app'
2 |
--------------------------------------------------------------------------------
/BoardApplication/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
.idea/caches/build_file_checksums.ser
.idea/libraries
.idea/modules.xml
.idea/workspace.xml
.idea/tasks.xml
.idea/assetWizardSettings.xml
.idea/dictionaries
.idea/gradle.xml
.idea/*
13 |
14 |
15 | .DS_Store
16 | /build
17 | /captures
18 | .externalNativeBuild
20 | # Files for the ART/Dalvik VM
21 | *.dex
22 |
23 | # Java class files
24 | *.class
25 |
26 | # Generated files
27 | bin/
28 | gen/
29 | out/
30 |
31 | # Built application files
32 | *.apk
33 | *.ap_
34 |
35 | # Gradle files
36 | .gradle/
37 | build/
38 |
39 | # Local configuration file (sdk path, etc)
40 | local.properties
41 |
42 | # Proguard folder generated
43 | proguard/
44 |
--------------------------------------------------------------------------------
/BoardApplication/README.md:
--------------------------------------------------------------------------------
1 | # DeepLabV3 Board Application
2 |
The application demonstrates image semantic segmentation. It uses DeepLabV3, an algorithm implemented by Google for semantic segmentation.
4 |
5 | This will classify the object in each pixel from given portrait image and assigns the label to each of them. Image semantic segmentation is helpful in applications like background change, portrait mode, etc.
6 |
7 | To develop a DeepLabV3 model for image segmentation we use Snapdragon mobile platforms(SD835) and Qualcomm Neural Processing SDK.
8 | ## Recommended setup for model training
9 | ### Hardware prerequisite
10 | 1. Intel i5 or greater
11 | 2. NVIDIA 10 series or greater
12 | 3. Ram 16 GB or more
13 |
14 | ### System software requirements
15 | 1. Ubuntu 14.04 LTS or above
16 | 2. Cuda
17 | 3. CuDNN
18 |
19 | ## How to train the model
20 |
21 | Tensorflow’s DeepLab API setup is the prerequisite for training the DeepLabV3 model.
22 | For installation steps follow the below instruction,
23 |
24 | - Execute the following commands for installing the dependencies,
25 |
26 | ```bash
27 | # Run this command from
28 | $ sudo pip install -r dependencies/requirement.txt
29 | ```
30 | - Clone the tensorflow and its model repositories by executing the below commands,
31 |
32 | ```bash
33 | $ git clone https://github.com/tensorflow/tensorflow.git
34 | $ cd tensorflow
35 | $ git clone https://github.com/tensorflow/models.git
36 | ```
37 | - Update the Python Environment setup
38 |
39 | ```bash
40 | $ cd /tensorflow/models/research
41 | $ export PYTHONPATH=$PYTHONPATH:`pwd`:`pwd`/slim
42 | ```
43 | Note: The environment setup needed to be made for every new terminal session.
44 |
45 | To avoid updating the environment path for every new session, add above lines at the end of .bashrc file with an absolute path.
46 |
47 | ## Testing the Installation
48 |
49 | Run the below commands to test if installation is successful,
50 | ```bash
51 | $ cd /tensorflow/models/research
52 | $ python deeplab/model_test.py
53 | ```
54 | After completion of installation, execute the below commands to start the training,
55 | ```bash
56 | $ cd /tensorflow/models/research/deeplab
57 | $ sh local_test.sh
58 | ```
59 | Note: By default the iteration count in local_test.sh is 10, if required kindly modify.
60 |
61 | ## How to convert Tensorflow's model into DLC?
62 |
63 | Prerequisites: Qualcomm Neural Processing SDK setup. Use the instructions from the below link to make the setup,
64 |
65 | https://developer.qualcomm.com/software/qualcomm-neural-processing-sdk/getting-started
66 |
67 | - Initialize the environmental variables of Qualcomm Neural Processing SDK with tensorflow.
68 |
69 | - The model is trained using TensorFlow framework and exported to graph file with .pb extension.
70 |
71 | - Once you’ve .pb file convert it into .dlc using the following command:
72 | ```bash
$ snpe-tensorflow-to-dlc --graph deeplabv3_mnv2_pascal_train_aug/frozen_inference_graph.pb -i sub_7 1,513,513,3 --out_node ArgMax --dlc deeplabv3.dlc --allow_unconsumed_nodes
74 | ```
75 |
76 | ## Inference on Ubuntu using Qualcomm Neural Processing SDK
77 |
78 | Qualcomm Neural Processing SDK doesn’t support direct images as an input to the model for inference.
79 |
80 | Neural Processing SDK requires the NumPy array stored as a raw file. In order to run the application in SDK we need to preprocess the input image.
81 |
82 | Following are the details of pre-processing steps followed in src/deep_input_raw.py file,
83 |
84 | (We have used the opencv for pre-processing the image)
85 | 1. Resize the image with the dimensions of 513x513x3.
86 | 2. Pad the smaller dimension with the mean value of 128 to produce an image of 513x513x3.
87 | 3. Convert the image to type float32.
88 | 4. Multiply the image element-wise with 0.00784313771874 and subtract 1.0 respectively.
89 | 5. Store this pre-processed array as a raw file (blob.raw).
90 |
91 | On executing the src/deep_input_raw.py script, blob.raw file is generated.
92 |
93 |
94 | # Procedure to change the background using DeepLab
95 |
96 | Following is the detailed description of steps followed to change the background for a pre-processed image using src/post_process_deeplab_output.py,
97 |
98 | 1. The output (output/Result_0/ArgMax:0.raw) from the previous section is a NumPy array of dimension 513x513x1.
99 | 2. Each element in the array contains the predicted class index of the corresponding pixels for the given input image.
100 | 3. The index number of person is 15.
101 | 4. Read the NumPy array into a data buffer of type float32.
102 | 5. To change the background to grayscale, the pixel values are assigned such that R, G, and B components have identical values.
103 | For example, the pixel values R(123), G(93) and B(49) are modified to R(123), G(123), B(123).
104 | 6. Loop through the NumPy array and change the pixel values in the original resized image (as per step 5 above) other than pixels of class index 15.
105 | 7. Resize the image to the original size.
106 |
107 | post_process_deeplab_output.py script will change the background to grayscale for a pre-processed image.
108 |
109 |
--------------------------------------------------------------------------------
/BoardApplication/converted_model/model.dlc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/globaledgesoft/deeplabv3-application-using-neural-processing-sdk/b70dbe0d33083489ad6de241c929a607850ad52d/BoardApplication/converted_model/model.dlc
--------------------------------------------------------------------------------
/BoardApplication/dependencies/requirement.txt:
--------------------------------------------------------------------------------
1 | numpy
2 | pillow
3 | jupyter
4 | matplotlib
5 | tensorflow-gpu
6 | PrettyTable
--------------------------------------------------------------------------------
/BoardApplication/src/deep_input_raw.py:
--------------------------------------------------------------------------------
1 | '''Qualcomm Neural Processing SDK Doesn’t support direct images as an input for the model for inferencing.
2 | This SDK requires the Numpy array which is stored in raw form on secondary storage.
3 | In order to run the application in Neural Processing SDK we should firstly have to do
4 | some basic image pre-processing to pass the input to the SDK.'''
5 |
6 |
7 | # Library Import
8 | import numpy as np
9 | import cv2
10 | import os
11 | import argparse
12 |
13 |
# Command-line interface: a single required --img_path argument pointing at
# the image to pre-process.
# BUG FIX: the original help string literal was split across two physical
# source lines without continuation, which is a SyntaxError in Python; it is
# joined into one literal here.
parser = argparse.ArgumentParser()
parser.add_argument(
    "--img_path",
    metavar="ImagePath",
    help="Give image path to change the background",
    required=True,
    type=str,
)

# Parsed at import time; exits with usage text if --img_path is missing.
args = parser.parse_args()
19 |
class PreprocessingInputImage:
    """Pre-process an input image into the raw NumPy blob expected by SNPE.

    The Qualcomm Neural Processing SDK cannot consume images directly; it
    needs a float32 NumPy array serialized to a .raw file. This class resizes
    the image to 513x513, normalizes it and writes the result to 'blob.raw'.
    """

    def __init__(self):
        # Image path comes from the module-level argparse result.
        self.img_path = args.img_path
        if not os.path.isfile(self.img_path):
            # Fail fast instead of letting cv2.imread() return None and crash
            # later inside preprocess_image(); matches the sibling
            # post-processing script, which also exits on a missing file.
            import sys
            print("File not found!")
            sys.exit(1)

    def preprocess_image(self):
        """Resize, normalize and dump the image as 'blob.raw' (float32, NHWC)."""
        print(self.img_path)
        frame = cv2.imread(self.img_path)
        # Resize to the fixed 513x513 input resolution of DeepLabV3.
        frame_resized = cv2.resize(frame, (513, 513))  # resize frame for prediction
        # Scale by ~1/127.5 and subtract a per-channel mean of 127.5, mapping
        # pixel values into roughly [-1, 1]; swapRB converts BGR -> RGB.
        blob = cv2.dnn.blobFromImage(frame_resized, 0.007843, (513, 513),
                                     (127.5, 127.5, 127.5), swapRB=True)
        # SNPE expects NHWC layout: 1 x 513 x 513 x 3.
        blob = np.reshape(blob, (1, 513, 513, 3))
        # BUG FIX: the original wrote binary data through a file object opened
        # in TEXT mode ('w') and never closed it. tofile() accepts a path and
        # writes (and closes) the file in binary mode itself.
        blob.tofile("blob.raw")
33 |
34 |
if __name__ == '__main__':
    # Build the pre-processor and emit blob.raw next to the script.
    PreprocessingInputImage().preprocess_image()
38 |
39 |
40 |
--------------------------------------------------------------------------------
/BoardApplication/src/post_process_deeplab_output.py:
--------------------------------------------------------------------------------
1 | '''Executes the steps to change the background of the original image with the help of raw
2 | input generated by the snpe-tensorflow-to-dlc tool provided in Qualcomm Neural Processing SDK '''
3 |
4 | # Library Import
5 | import numpy as np
6 | import cv2
7 | import os
8 | import argparse
9 |
# Command-line interface: the snpe-net-run raw output plus the original image.
# BUG FIX: in the original, both help string literals were split across two
# physical source lines without continuation, which is a SyntaxError in
# Python; they are joined into single literals here.
parser = argparse.ArgumentParser()
parser.add_argument(
    "--output_raw",
    metavar="RawOutput",
    help="Provide the output generated raw file presents in output/Result_0 "
         "directory after running snpe-net-run",
    required=True,
    type=str,
)
parser.add_argument(
    "--img_file_path",
    metavar="Imagepath",
    help="Give original image file path to post-process(Background changing) "
         "the output of snpe-net-run",
    required=True,
    type=str,
)

# Parsed at import time; exits with usage text if either argument is missing.
args = parser.parse_args()
17 |
18 |
class ChangeBackGroundImage:
    """Turn the background of the original image to grayscale.

    Uses the ArgMax raw output produced by snpe-net-run for DeepLabV3: pixels
    classified as 'person' (class index 15) keep their color, every other
    pixel is collapsed to a single gray value.
    """

    def __init__(self):
        # BUG FIX: the original called sys.exit() without importing sys, so a
        # missing file raised NameError instead of exiting cleanly.
        import sys

        self.img_path = args.img_file_path
        self.raw_input = args.output_raw
        if not os.path.isfile(self.img_path):
            print("File not found!")
            sys.exit(1)
        if not os.path.isfile(self.raw_input):
            print("File not found!")
            sys.exit(1)

    def change_background(self):
        """Write 'image_with_background_changed.jpg' with a grayscale background."""
        # BUG FIX: the raw file is binary float32 data; the original opened it
        # in text mode ('r') and leaked the handle. np.fromfile() accepts the
        # path directly and reads in binary mode.
        arr = np.fromfile(self.raw_input, dtype="float32")
        # snpe-net-run emits one predicted class index per pixel: 513x513x1.
        arr = np.reshape(arr, (513, 513, 1))
        # NOTE(review): only the bottom-right 171x171 corner of the mask is
        # used below, although the README describes looping over the whole
        # array — presumably tuned for the sample image; confirm intent.
        segment = arr[342:, 342:]
        # Mark 'person' pixels (class index 15) with the sentinel value 255.
        # 'segment' is a view of 'arr', so it observes this update too.
        arr[arr == 15] = 255
        original_img = cv2.imread(self.img_path)

        # Stretch the mask to the original image resolution.
        arr2 = cv2.resize(segment, (original_img.shape[1], original_img.shape[0]))
        print(arr.shape)
        for i in range(arr2.shape[0]):
            for j in range(arr2.shape[1]):
                if arr2[i][j] != 255:
                    # Non-person pixel: broadcast the blue channel's value to
                    # all three channels, i.e. turn the pixel gray. (The
                    # original's chained assignment had the same net effect.)
                    original_img[i][j] = original_img[i][j][2]
        cv2.imwrite('image_with_background_changed.jpg', original_img)
45 |
46 |
if __name__ == '__main__':
    # Run the post-processing step end to end.
    ChangeBackGroundImage().change_background()
50 |
51 |
52 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## DeepLabV3 Application
2 |
3 | The application demonstrates the Image semantic segmentation using DeepLabV3, which is an algorithm implemented by Google for semantic segmentation.
4 |
5 | * [Android App](/AndroidApplication)
6 | * [Board Application](/BoardApplication)
7 |
--------------------------------------------------------------------------------