├── MobileNetSSD_demo ├── .gitignore ├── .idea │ ├── codeStyles │ │ └── Project.xml │ ├── gradle.xml │ └── misc.xml ├── app │ ├── .gitignore │ ├── CMakeLists.txt │ ├── build.gradle │ ├── proguard-rules.pro │ └── src │ │ ├── androidTest │ │ └── java │ │ │ └── com │ │ │ └── example │ │ │ └── che │ │ │ └── mobilenetssd_demo │ │ │ └── ExampleInstrumentedTest.java │ │ ├── main │ │ ├── AndroidManifest.xml │ │ ├── assets │ │ │ ├── MobileNetSSD_deploy.bin │ │ │ ├── MobileNetSSD_deploy.param.bin │ │ │ └── words.txt │ │ ├── cpp │ │ │ ├── MobileNetSSD_deploy.id.h │ │ │ ├── MobileNetssd.cpp │ │ │ └── include │ │ │ │ ├── allocator.h │ │ │ │ ├── benchmark.h │ │ │ │ ├── blob.h │ │ │ │ ├── cpu.h │ │ │ │ ├── layer.h │ │ │ │ ├── layer_type.h │ │ │ │ ├── layer_type_enum.h │ │ │ │ ├── mat.h │ │ │ │ ├── modelbin.h │ │ │ │ ├── net.h │ │ │ │ ├── opencv.h │ │ │ │ ├── paramdict.h │ │ │ │ └── platform.h │ │ ├── java │ │ │ └── com │ │ │ │ └── example │ │ │ │ └── che │ │ │ │ └── mobilenetssd_demo │ │ │ │ ├── MainActivity.java │ │ │ │ ├── MobileNetssd.java │ │ │ │ └── PhotoUtil.java │ │ ├── jniLibs │ │ │ └── armeabi-v7a │ │ │ │ └── libncnn.a │ │ └── res │ │ │ ├── drawable-v24 │ │ │ └── ic_launcher_foreground.xml │ │ │ ├── drawable │ │ │ └── ic_launcher_background.xml │ │ │ ├── layout │ │ │ └── activity_main.xml │ │ │ ├── mipmap-anydpi-v26 │ │ │ ├── ic_launcher.xml │ │ │ └── ic_launcher_round.xml │ │ │ ├── mipmap-hdpi │ │ │ ├── ic_launcher.png │ │ │ └── ic_launcher_round.png │ │ │ ├── mipmap-mdpi │ │ │ ├── ic_launcher.png │ │ │ └── ic_launcher_round.png │ │ │ ├── mipmap-xhdpi │ │ │ ├── ic_launcher.png │ │ │ └── ic_launcher_round.png │ │ │ ├── mipmap-xxhdpi │ │ │ ├── ic_launcher.png │ │ │ └── ic_launcher_round.png │ │ │ ├── mipmap-xxxhdpi │ │ │ ├── ic_launcher.png │ │ │ └── ic_launcher_round.png │ │ │ └── values │ │ │ ├── colors.xml │ │ │ ├── strings.xml │ │ │ └── styles.xml │ │ └── test │ │ └── java │ │ └── com │ │ └── example │ │ └── che │ │ └── mobilenetssd_demo │ │ └── ExampleUnitTest.java ├── build.gradle ├── gradle.properties ├── gradle │ └── wrapper │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat └── settings.gradle ├── MobileNetSSD_demo_single ├── .gitignore ├── .idea │ ├── codeStyles │ │ └── Project.xml │ ├── gradle.xml │ └── misc.xml ├── app │ ├── .gitignore │ ├── CMakeLists.txt │ ├── build.gradle │ ├── proguard-rules.pro │ └── src │ │ ├── androidTest │ │ └── java │ │ │ └── com │ │ │ └── example │ │ │ └── che │ │ │ └── mobilenetssd_demo │ │ │ └── ExampleInstrumentedTest.java │ │ ├── main │ │ ├── AndroidManifest.xml │ │ ├── assets │ │ │ ├── MobileNetSSD_deploy.bin │ │ │ ├── MobileNetSSD_deploy.param.bin │ │ │ └── words.txt │ │ ├── cpp │ │ │ ├── MobileNetSSD_deploy.id.h │ │ │ ├── MobileNetssd.cpp │ │ │ └── include │ │ │ │ ├── allocator.h │ │ │ │ ├── benchmark.h │ │ │ │ ├── blob.h │ │ │ │ ├── cpu.h │ │ │ │ ├── layer.h │ │ │ │ ├── layer_type.h │ │ │ │ ├── layer_type_enum.h │ │ │ │ ├── mat.h │ │ │ │ ├── modelbin.h │ │ │ │ ├── net.h │ │ │ │ ├── opencv.h │ │ │ │ ├── paramdict.h │ │ │ │ └── platform.h │ │ ├── java │ │ │ └── com │ │ │ │ └── example │ │ │ │ └── che │ │ │ │ └── mobilenetssd_demo │ │ │ │ ├── MainActivity.java │ │ │ │ ├── MobileNetssd.java │ │ │ │ └── PhotoUtil.java │ │ ├── jniLibs │ │ │ └── armeabi-v7a │ │ │ │ └── libncnn.a │ │ └── res │ │ │ ├── drawable-v24 │ │ │ └── ic_launcher_foreground.xml │ │ │ ├── drawable │ │ │ └── ic_launcher_background.xml │ │ │ ├── layout │ │ │ └── activity_main.xml │ │ │ ├── mipmap-anydpi-v26 │ │ │ ├── ic_launcher.xml │ │ │ └── 
ic_launcher_round.xml
│ │ │ ├── mipmap-hdpi
│ │ │ ├── ic_launcher.png
│ │ │ └── ic_launcher_round.png
│ │ │ ├── mipmap-mdpi
│ │ │ ├── ic_launcher.png
│ │ │ └── ic_launcher_round.png
│ │ │ ├── mipmap-xhdpi
│ │ │ ├── ic_launcher.png
│ │ │ └── ic_launcher_round.png
│ │ │ ├── mipmap-xxhdpi
│ │ │ ├── ic_launcher.png
│ │ │ └── ic_launcher_round.png
│ │ │ ├── mipmap-xxxhdpi
│ │ │ ├── ic_launcher.png
│ │ │ └── ic_launcher_round.png
│ │ │ └── values
│ │ │ ├── colors.xml
│ │ │ ├── strings.xml
│ │ │ └── styles.xml
│ │ └── test
│ │ └── java
│ │ └── com
│ │ └── example
│ │ └── che
│ │ └── mobilenetssd_demo
│ │ └── ExampleUnitTest.java
├── build.gradle
├── gradle.properties
├── gradle
│ └── wrapper
│ │ ├── gradle-wrapper.jar
│ │ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
└── README.md
/MobileNetSSD_demo/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/caches/build_file_checksums.ser
5 | /.idea/libraries
6 | /.idea/modules.xml
7 | /.idea/workspace.xml
8 | .DS_Store
9 | /build
10 | /captures
11 | .externalNativeBuild
12 |
--------------------------------------------------------------------------------
/MobileNetSSD_demo/.idea/codeStyles/Project.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/MobileNetSSD_demo/.idea/gradle.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/MobileNetSSD_demo/.idea/misc.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/MobileNetSSD_demo/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/MobileNetSSD_demo/app/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | # For more information about using CMake with Android Studio, read the
2 | # documentation: https://d.android.com/studio/projects/add-native-code.html
3 |
4 | # Sets the minimum version of CMake required to build the native library.
5 |
6 | cmake_minimum_required(VERSION 3.4.1)
7 |
8 | # Creates and names a library, sets it as either STATIC
9 | # or SHARED, and provides the relative paths to its source code.
10 | # You can define multiple libraries, and CMake builds them for you.
11 | # Gradle automatically packages shared libraries with your APK.
12 |
13 | ## Required: import the prebuilt ncnn static library
14 | set(ncnn_lib ${CMAKE_SOURCE_DIR}/src/main/jniLibs/armeabi-v7a/libncnn.a)
15 | add_library (ncnn_lib STATIC IMPORTED)
16 | set_target_properties(ncnn_lib PROPERTIES IMPORTED_LOCATION ${ncnn_lib})
17 |
18 | add_library( # Sets the name of the library.
19 | MobileNetssd ## name of the generated .so; best kept identical to the .cpp file name, change as needed
20 |
21 | # Sets the library as a shared library.
22 | SHARED
23 |
24 | # Provides a relative path to your source file(s).
25 | src/main/cpp/MobileNetssd.cpp) ## the .cpp source file
26 |
27 | # Searches for a specified prebuilt library and stores the path as a
28 | # variable. Because CMake includes system libraries in the search path by
29 | # default, you only need to specify the name of the public NDK library
30 | # you want to add. CMake verifies that the library exists before
31 | # completing its build.
32 |
33 | find_library( # Sets the name of the path variable.
34 | log-lib
35 |
36 | # Specifies the name of the NDK library that
37 | # you want CMake to locate.
38 | log)
39 |
40 | # Specifies libraries CMake should link to your target library. You
41 | # can link multiple libraries, such as libraries you define in this
42 | # build script, prebuilt third-party libraries, or system libraries.
43 |
44 | target_link_libraries( # Specifies the target library.
45 | ## all three entries below are required
46 | MobileNetssd # same name as the add_library target above
47 | ncnn_lib # the imported ncnn static library
48 | jnigraphics # needed for the JNI bitmap API
49 |
50 | # Links the target library to the log library
51 | # included in the NDK.
52 | ${log-lib})
--------------------------------------------------------------------------------
/MobileNetSSD_demo/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 |
3 | android {
4 | compileSdkVersion 28
5 | defaultConfig {
6 | applicationId "com.example.che.mobilenetssd_demo"
7 | minSdkVersion 15
8 | targetSdkVersion 28
9 | versionCode 1
10 | versionName "1.0"
11 | testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
12 | externalNativeBuild {
13 | cmake {
14 | cppFlags "-std=c++11 -fopenmp" // C++11 plus OpenMP multithreading; needs to be added
15 | abiFilters "armeabi-v7a" // target CPU architecture; compatible with almost all phones
16 | }
17 | }
18 | }
19 | buildTypes {
20 | release {
21 | minifyEnabled false
22 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
23 | }
24 | }
25 | externalNativeBuild {
26 | cmake {
27 | path "CMakeLists.txt"
28 | }
29 | }
30 |
31 | // Required: point to the imported .a file, which is produced by ncnn's make install
32 | sourceSets {
33 | main {
34 | jniLibs.srcDirs = ["src/main/jniLibs"]
35 | jni.srcDirs = ['src/cpp']
36 | }
37 | }
38 | }
39 |
40 | dependencies {
41 | implementation fileTree(dir: 'libs', include: ['*.jar'])
42 | implementation 'com.android.support:appcompat-v7:28.0.0'
43 | implementation 'com.android.support.constraint:constraint-layout:1.1.3'
44 | testImplementation 'junit:junit:4.12'
45 | implementation 'com.github.bumptech.glide:glide:4.3.1' // needs to be added: Glide image-loading library; the red underline clears after a Gradle sync
46 | androidTestImplementation 'com.android.support.test:runner:1.0.2'
47 | androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
48 | }
49 |
--------------------------------------------------------------------------------
/MobileNetSSD_demo/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable 18 | 19 | # If you keep the line number information, uncomment this to 20 | # hide the original source file name. 21 | #-renamesourcefileattribute SourceFile 22 | -------------------------------------------------------------------------------- /MobileNetSSD_demo/app/src/androidTest/java/com/example/che/mobilenetssd_demo/ExampleInstrumentedTest.java: -------------------------------------------------------------------------------- 1 | package com.example.che.mobilenetssd_demo; 2 | 3 | import android.content.Context; 4 | import android.support.test.InstrumentationRegistry; 5 | import android.support.test.runner.AndroidJUnit4; 6 | 7 | import org.junit.Test; 8 | import org.junit.runner.RunWith; 9 | 10 | import static org.junit.Assert.*; 11 | 12 | /** 13 | * Instrumented test, which will execute on an Android device. 14 | * 15 | * @see Testing documentation 16 | */ 17 | @RunWith(AndroidJUnit4.class) 18 | public class ExampleInstrumentedTest { 19 | @Test 20 | public void useAppContext() { 21 | // Context of the app under test. 22 | Context appContext = InstrumentationRegistry.getTargetContext(); 23 | 24 | assertEquals("com.example.che.mobilenetssd_demo", appContext.getPackageName()); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /MobileNetSSD_demo/app/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 7 | 8 | 9 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /MobileNetSSD_demo/app/src/main/assets/MobileNetSSD_deploy.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chehongshu/ncnnforandroid_objectiondetection_Mobilenetssd/c87964b9ddecf7f9890416f70e9009f5ffc58b3e/MobileNetSSD_demo/app/src/main/assets/MobileNetSSD_deploy.bin -------------------------------------------------------------------------------- /MobileNetSSD_demo/app/src/main/assets/MobileNetSSD_deploy.param.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chehongshu/ncnnforandroid_objectiondetection_Mobilenetssd/c87964b9ddecf7f9890416f70e9009f5ffc58b3e/MobileNetSSD_demo/app/src/main/assets/MobileNetSSD_deploy.param.bin -------------------------------------------------------------------------------- /MobileNetSSD_demo/app/src/main/assets/words.txt: -------------------------------------------------------------------------------- 1 | background 2 | aeroplane 3 | bicycle 4 | bird 5 | boat 6 | bottle 7 | bus 8 | car 9 | cat 10 | chair 11 | cow 12 | diningtable 13 | dog 14 | horse 15 | motorbike 16 | person 17 | pottedplant 18 | sheep 19 | sofa 20 | train 21 | tvmonitor -------------------------------------------------------------------------------- /MobileNetSSD_demo/app/src/main/cpp/MobileNetSSD_deploy.id.h: -------------------------------------------------------------------------------- 1 | #ifndef NCNN_INCLUDE_GUARD_MobileNetSSD_deploy_id_h 2 | #define NCNN_INCLUDE_GUARD_MobileNetSSD_deploy_id_h 3 | namespace MobileNetSSD_deploy_param_id { 4 | const int LAYER_input = 0; 5 | const int BLOB_data = 0; 6 | const int LAYER_splitncnn_0 = 1; 7 | const int BLOB_data_splitncnn_0 = 1; 8 | const int BLOB_data_splitncnn_1 = 2; 9 | const int BLOB_data_splitncnn_2 = 3; 10 | const int BLOB_data_splitncnn_3 = 4; 11 | const int BLOB_data_splitncnn_4 = 5; 
12 | const int BLOB_data_splitncnn_5 = 6; 13 | const int BLOB_data_splitncnn_6 = 7; 14 | const int LAYER_conv0 = 2; 15 | const int BLOB_conv0 = 8; 16 | const int LAYER_conv0_relu = 3; 17 | const int BLOB_conv0_conv0_relu = 9; 18 | const int LAYER_conv1_dw = 4; 19 | const int BLOB_conv1_dw = 10; 20 | const int LAYER_conv1_dw_relu = 5; 21 | const int BLOB_conv1_dw_conv1_dw_relu = 11; 22 | const int LAYER_conv1 = 6; 23 | const int BLOB_conv1 = 12; 24 | const int LAYER_conv1_relu = 7; 25 | const int BLOB_conv1_conv1_relu = 13; 26 | const int LAYER_conv2_dw = 8; 27 | const int BLOB_conv2_dw = 14; 28 | const int LAYER_conv2_dw_relu = 9; 29 | const int BLOB_conv2_dw_conv2_dw_relu = 15; 30 | const int LAYER_conv2 = 10; 31 | const int BLOB_conv2 = 16; 32 | const int LAYER_conv2_relu = 11; 33 | const int BLOB_conv2_conv2_relu = 17; 34 | const int LAYER_conv3_dw = 12; 35 | const int BLOB_conv3_dw = 18; 36 | const int LAYER_conv3_dw_relu = 13; 37 | const int BLOB_conv3_dw_conv3_dw_relu = 19; 38 | const int LAYER_conv3 = 14; 39 | const int BLOB_conv3 = 20; 40 | const int LAYER_conv3_relu = 15; 41 | const int BLOB_conv3_conv3_relu = 21; 42 | const int LAYER_conv4_dw = 16; 43 | const int BLOB_conv4_dw = 22; 44 | const int LAYER_conv4_dw_relu = 17; 45 | const int BLOB_conv4_dw_conv4_dw_relu = 23; 46 | const int LAYER_conv4 = 18; 47 | const int BLOB_conv4 = 24; 48 | const int LAYER_conv4_relu = 19; 49 | const int BLOB_conv4_conv4_relu = 25; 50 | const int LAYER_conv5_dw = 20; 51 | const int BLOB_conv5_dw = 26; 52 | const int LAYER_conv5_dw_relu = 21; 53 | const int BLOB_conv5_dw_conv5_dw_relu = 27; 54 | const int LAYER_conv5 = 22; 55 | const int BLOB_conv5 = 28; 56 | const int LAYER_conv5_relu = 23; 57 | const int BLOB_conv5_conv5_relu = 29; 58 | const int LAYER_conv6_dw = 24; 59 | const int BLOB_conv6_dw = 30; 60 | const int LAYER_conv6_dw_relu = 25; 61 | const int BLOB_conv6_dw_conv6_dw_relu = 31; 62 | const int LAYER_conv6 = 26; 63 | const int BLOB_conv6 = 32; 64 | const int LAYER_conv6_relu = 27; 65 | const int BLOB_conv6_conv6_relu = 33; 66 | const int LAYER_conv7_dw = 28; 67 | const int BLOB_conv7_dw = 34; 68 | const int LAYER_conv7_dw_relu = 29; 69 | const int BLOB_conv7_dw_conv7_dw_relu = 35; 70 | const int LAYER_conv7 = 30; 71 | const int BLOB_conv7 = 36; 72 | const int LAYER_conv7_relu = 31; 73 | const int BLOB_conv7_conv7_relu = 37; 74 | const int LAYER_conv8_dw = 32; 75 | const int BLOB_conv8_dw = 38; 76 | const int LAYER_conv8_dw_relu = 33; 77 | const int BLOB_conv8_dw_conv8_dw_relu = 39; 78 | const int LAYER_conv8 = 34; 79 | const int BLOB_conv8 = 40; 80 | const int LAYER_conv8_relu = 35; 81 | const int BLOB_conv8_conv8_relu = 41; 82 | const int LAYER_conv9_dw = 36; 83 | const int BLOB_conv9_dw = 42; 84 | const int LAYER_conv9_dw_relu = 37; 85 | const int BLOB_conv9_dw_conv9_dw_relu = 43; 86 | const int LAYER_conv9 = 38; 87 | const int BLOB_conv9 = 44; 88 | const int LAYER_conv9_relu = 39; 89 | const int BLOB_conv9_conv9_relu = 45; 90 | const int LAYER_conv10_dw = 40; 91 | const int BLOB_conv10_dw = 46; 92 | const int LAYER_conv10_dw_relu = 41; 93 | const int BLOB_conv10_dw_conv10_dw_relu = 47; 94 | const int LAYER_conv10 = 42; 95 | const int BLOB_conv10 = 48; 96 | const int LAYER_conv10_relu = 43; 97 | const int BLOB_conv10_conv10_relu = 49; 98 | const int LAYER_conv11_dw = 44; 99 | const int BLOB_conv11_dw = 50; 100 | const int LAYER_conv11_dw_relu = 45; 101 | const int BLOB_conv11_dw_conv11_dw_relu = 51; 102 | const int LAYER_conv11 = 46; 103 | const int BLOB_conv11 = 52; 104 | const int 
LAYER_conv11_relu = 47; 105 | const int BLOB_conv11_conv11_relu = 53; 106 | const int LAYER_splitncnn_1 = 48; 107 | const int BLOB_conv11_conv11_relu_splitncnn_0 = 54; 108 | const int BLOB_conv11_conv11_relu_splitncnn_1 = 55; 109 | const int BLOB_conv11_conv11_relu_splitncnn_2 = 56; 110 | const int BLOB_conv11_conv11_relu_splitncnn_3 = 57; 111 | const int LAYER_conv12_dw = 49; 112 | const int BLOB_conv12_dw = 58; 113 | const int LAYER_conv12_dw_relu = 50; 114 | const int BLOB_conv12_dw_conv12_dw_relu = 59; 115 | const int LAYER_conv12 = 51; 116 | const int BLOB_conv12 = 60; 117 | const int LAYER_conv12_relu = 52; 118 | const int BLOB_conv12_conv12_relu = 61; 119 | const int LAYER_conv13_dw = 53; 120 | const int BLOB_conv13_dw = 62; 121 | const int LAYER_conv13_dw_relu = 54; 122 | const int BLOB_conv13_dw_conv13_dw_relu = 63; 123 | const int LAYER_conv13 = 55; 124 | const int BLOB_conv13 = 64; 125 | const int LAYER_conv13_relu = 56; 126 | const int BLOB_conv13_conv13_relu = 65; 127 | const int LAYER_splitncnn_2 = 57; 128 | const int BLOB_conv13_conv13_relu_splitncnn_0 = 66; 129 | const int BLOB_conv13_conv13_relu_splitncnn_1 = 67; 130 | const int BLOB_conv13_conv13_relu_splitncnn_2 = 68; 131 | const int BLOB_conv13_conv13_relu_splitncnn_3 = 69; 132 | const int LAYER_conv14_1 = 58; 133 | const int BLOB_conv14_1 = 70; 134 | const int LAYER_conv14_1_relu = 59; 135 | const int BLOB_conv14_1_conv14_1_relu = 71; 136 | const int LAYER_conv14_2 = 60; 137 | const int BLOB_conv14_2 = 72; 138 | const int LAYER_conv14_2_relu = 61; 139 | const int BLOB_conv14_2_conv14_2_relu = 73; 140 | const int LAYER_splitncnn_3 = 62; 141 | const int BLOB_conv14_2_conv14_2_relu_splitncnn_0 = 74; 142 | const int BLOB_conv14_2_conv14_2_relu_splitncnn_1 = 75; 143 | const int BLOB_conv14_2_conv14_2_relu_splitncnn_2 = 76; 144 | const int BLOB_conv14_2_conv14_2_relu_splitncnn_3 = 77; 145 | const int LAYER_conv15_1 = 63; 146 | const int BLOB_conv15_1 = 78; 147 | const int LAYER_conv15_1_relu = 64; 148 | const int BLOB_conv15_1_conv15_1_relu = 79; 149 | const int LAYER_conv15_2 = 65; 150 | const int BLOB_conv15_2 = 80; 151 | const int LAYER_conv15_2_relu = 66; 152 | const int BLOB_conv15_2_conv15_2_relu = 81; 153 | const int LAYER_splitncnn_4 = 67; 154 | const int BLOB_conv15_2_conv15_2_relu_splitncnn_0 = 82; 155 | const int BLOB_conv15_2_conv15_2_relu_splitncnn_1 = 83; 156 | const int BLOB_conv15_2_conv15_2_relu_splitncnn_2 = 84; 157 | const int BLOB_conv15_2_conv15_2_relu_splitncnn_3 = 85; 158 | const int LAYER_conv16_1 = 68; 159 | const int BLOB_conv16_1 = 86; 160 | const int LAYER_conv16_1_relu = 69; 161 | const int BLOB_conv16_1_conv16_1_relu = 87; 162 | const int LAYER_conv16_2 = 70; 163 | const int BLOB_conv16_2 = 88; 164 | const int LAYER_conv16_2_relu = 71; 165 | const int BLOB_conv16_2_conv16_2_relu = 89; 166 | const int LAYER_splitncnn_5 = 72; 167 | const int BLOB_conv16_2_conv16_2_relu_splitncnn_0 = 90; 168 | const int BLOB_conv16_2_conv16_2_relu_splitncnn_1 = 91; 169 | const int BLOB_conv16_2_conv16_2_relu_splitncnn_2 = 92; 170 | const int BLOB_conv16_2_conv16_2_relu_splitncnn_3 = 93; 171 | const int LAYER_conv17_1 = 73; 172 | const int BLOB_conv17_1 = 94; 173 | const int LAYER_conv17_1_relu = 74; 174 | const int BLOB_conv17_1_conv17_1_relu = 95; 175 | const int LAYER_conv17_2 = 75; 176 | const int BLOB_conv17_2 = 96; 177 | const int LAYER_conv17_2_relu = 76; 178 | const int BLOB_conv17_2_conv17_2_relu = 97; 179 | const int LAYER_splitncnn_6 = 77; 180 | const int BLOB_conv17_2_conv17_2_relu_splitncnn_0 = 98; 181 
| const int BLOB_conv17_2_conv17_2_relu_splitncnn_1 = 99; 182 | const int BLOB_conv17_2_conv17_2_relu_splitncnn_2 = 100; 183 | const int LAYER_conv11_mbox_loc = 78; 184 | const int BLOB_conv11_mbox_loc = 101; 185 | const int LAYER_conv11_mbox_loc_perm = 79; 186 | const int BLOB_conv11_mbox_loc_perm = 102; 187 | const int LAYER_conv11_mbox_loc_flat = 80; 188 | const int BLOB_conv11_mbox_loc_flat = 103; 189 | const int LAYER_conv11_mbox_conf = 81; 190 | const int BLOB_conv11_mbox_conf = 104; 191 | const int LAYER_conv11_mbox_conf_perm = 82; 192 | const int BLOB_conv11_mbox_conf_perm = 105; 193 | const int LAYER_conv11_mbox_conf_flat = 83; 194 | const int BLOB_conv11_mbox_conf_flat = 106; 195 | const int LAYER_conv11_mbox_priorbox = 84; 196 | const int BLOB_conv11_mbox_priorbox = 107; 197 | const int LAYER_conv13_mbox_loc = 85; 198 | const int BLOB_conv13_mbox_loc = 108; 199 | const int LAYER_conv13_mbox_loc_perm = 86; 200 | const int BLOB_conv13_mbox_loc_perm = 109; 201 | const int LAYER_conv13_mbox_loc_flat = 87; 202 | const int BLOB_conv13_mbox_loc_flat = 110; 203 | const int LAYER_conv13_mbox_conf = 88; 204 | const int BLOB_conv13_mbox_conf = 111; 205 | const int LAYER_conv13_mbox_conf_perm = 89; 206 | const int BLOB_conv13_mbox_conf_perm = 112; 207 | const int LAYER_conv13_mbox_conf_flat = 90; 208 | const int BLOB_conv13_mbox_conf_flat = 113; 209 | const int LAYER_conv13_mbox_priorbox = 91; 210 | const int BLOB_conv13_mbox_priorbox = 114; 211 | const int LAYER_conv14_2_mbox_loc = 92; 212 | const int BLOB_conv14_2_mbox_loc = 115; 213 | const int LAYER_conv14_2_mbox_loc_perm = 93; 214 | const int BLOB_conv14_2_mbox_loc_perm = 116; 215 | const int LAYER_conv14_2_mbox_loc_flat = 94; 216 | const int BLOB_conv14_2_mbox_loc_flat = 117; 217 | const int LAYER_conv14_2_mbox_conf = 95; 218 | const int BLOB_conv14_2_mbox_conf = 118; 219 | const int LAYER_conv14_2_mbox_conf_perm = 96; 220 | const int BLOB_conv14_2_mbox_conf_perm = 119; 221 | const int LAYER_conv14_2_mbox_conf_flat = 97; 222 | const int BLOB_conv14_2_mbox_conf_flat = 120; 223 | const int LAYER_conv14_2_mbox_priorbox = 98; 224 | const int BLOB_conv14_2_mbox_priorbox = 121; 225 | const int LAYER_conv15_2_mbox_loc = 99; 226 | const int BLOB_conv15_2_mbox_loc = 122; 227 | const int LAYER_conv15_2_mbox_loc_perm = 100; 228 | const int BLOB_conv15_2_mbox_loc_perm = 123; 229 | const int LAYER_conv15_2_mbox_loc_flat = 101; 230 | const int BLOB_conv15_2_mbox_loc_flat = 124; 231 | const int LAYER_conv15_2_mbox_conf = 102; 232 | const int BLOB_conv15_2_mbox_conf = 125; 233 | const int LAYER_conv15_2_mbox_conf_perm = 103; 234 | const int BLOB_conv15_2_mbox_conf_perm = 126; 235 | const int LAYER_conv15_2_mbox_conf_flat = 104; 236 | const int BLOB_conv15_2_mbox_conf_flat = 127; 237 | const int LAYER_conv15_2_mbox_priorbox = 105; 238 | const int BLOB_conv15_2_mbox_priorbox = 128; 239 | const int LAYER_conv16_2_mbox_loc = 106; 240 | const int BLOB_conv16_2_mbox_loc = 129; 241 | const int LAYER_conv16_2_mbox_loc_perm = 107; 242 | const int BLOB_conv16_2_mbox_loc_perm = 130; 243 | const int LAYER_conv16_2_mbox_loc_flat = 108; 244 | const int BLOB_conv16_2_mbox_loc_flat = 131; 245 | const int LAYER_conv16_2_mbox_conf = 109; 246 | const int BLOB_conv16_2_mbox_conf = 132; 247 | const int LAYER_conv16_2_mbox_conf_perm = 110; 248 | const int BLOB_conv16_2_mbox_conf_perm = 133; 249 | const int LAYER_conv16_2_mbox_conf_flat = 111; 250 | const int BLOB_conv16_2_mbox_conf_flat = 134; 251 | const int LAYER_conv16_2_mbox_priorbox = 112; 252 | const int 
BLOB_conv16_2_mbox_priorbox = 135;
253 | const int LAYER_conv17_2_mbox_loc = 113;
254 | const int BLOB_conv17_2_mbox_loc = 136;
255 | const int LAYER_conv17_2_mbox_loc_perm = 114;
256 | const int BLOB_conv17_2_mbox_loc_perm = 137;
257 | const int LAYER_conv17_2_mbox_loc_flat = 115;
258 | const int BLOB_conv17_2_mbox_loc_flat = 138;
259 | const int LAYER_conv17_2_mbox_conf = 116;
260 | const int BLOB_conv17_2_mbox_conf = 139;
261 | const int LAYER_conv17_2_mbox_conf_perm = 117;
262 | const int BLOB_conv17_2_mbox_conf_perm = 140;
263 | const int LAYER_conv17_2_mbox_conf_flat = 118;
264 | const int BLOB_conv17_2_mbox_conf_flat = 141;
265 | const int LAYER_conv17_2_mbox_priorbox = 119;
266 | const int BLOB_conv17_2_mbox_priorbox = 142;
267 | const int LAYER_mbox_loc = 120;
268 | const int BLOB_mbox_loc = 143;
269 | const int LAYER_mbox_conf = 121;
270 | const int BLOB_mbox_conf = 144;
271 | const int LAYER_mbox_priorbox = 122;
272 | const int BLOB_mbox_priorbox = 145;
273 | const int LAYER_mbox_conf_reshape = 123;
274 | const int BLOB_mbox_conf_reshape = 146;
275 | const int LAYER_mbox_conf_softmax = 124;
276 | const int BLOB_mbox_conf_softmax = 147;
277 | const int LAYER_mbox_conf_flatten = 125;
278 | const int BLOB_mbox_conf_flatten = 148;
279 | const int LAYER_detection_out = 126;
280 | const int BLOB_detection_out = 149;
281 | } // namespace MobileNetSSD_deploy_param_id
282 | #endif // NCNN_INCLUDE_GUARD_MobileNetSSD_deploy_id_h
283 |
--------------------------------------------------------------------------------
/MobileNetSSD_demo/app/src/main/cpp/MobileNetssd.cpp:
--------------------------------------------------------------------------------
1 | #include <android/bitmap.h>
2 | #include <android/log.h>
3 | #include <jni.h>
4 | #include <string>
5 | #include <vector>
6 |
7 | // ncnn
8 | #include "include/opencv.h"
9 | #include "MobileNetSSD_deploy.id.h" // replace this with your own generated id.h
10 | #include
11 | #include
12 | #include "include/net.h"
13 |
14 | static ncnn::UnlockedPoolAllocator g_blob_pool_allocator;
15 | static ncnn::PoolAllocator g_workspace_pool_allocator;
16 |
17 | static ncnn::Mat ncnn_param;
18 | static ncnn::Mat ncnn_bin;
19 | static ncnn::Net ncnn_net;
20 |
21 | extern "C" {
22 |
23 |
24 | // Java-side declaration: public native boolean Init(byte[] param, byte[] bin); the JNI (NDK C++) form follows
25 | JNIEXPORT jboolean JNICALL
26 | Java_com_example_che_mobilenetssd_1demo_MobileNetssd_Init(JNIEnv *env, jobject obj, jbyteArray param, jbyteArray bin) {
27 | __android_log_print(ANDROID_LOG_DEBUG, "MobileNetssd", "enter the jni func");
28 | // init param
29 | {
30 | int len = env->GetArrayLength(param);
31 | ncnn_param.create(len, (size_t) 1u);
32 | env->GetByteArrayRegion(param, 0, len, (jbyte *) ncnn_param);
33 | int ret = ncnn_net.load_param((const unsigned char *) ncnn_param);
34 | __android_log_print(ANDROID_LOG_DEBUG, "MobileNetssd", "load_param %d %d", ret, len);
35 | }
36 |
37 | // init bin
38 | {
39 | int len = env->GetArrayLength(bin);
40 | ncnn_bin.create(len, (size_t) 1u);
41 | env->GetByteArrayRegion(bin, 0, len, (jbyte *) ncnn_bin);
42 | int ret = ncnn_net.load_model((const unsigned char *) ncnn_bin);
43 | __android_log_print(ANDROID_LOG_DEBUG, "MobileNetssd", "load_model %d %d", ret, len);
44 | }
45 |
46 | ncnn::Option opt;
47 | opt.lightmode = true;
48 | opt.num_threads = 4; // thread count; adjust as needed
49 | opt.blob_allocator = &g_blob_pool_allocator;
50 | opt.workspace_allocator = &g_workspace_pool_allocator;
51 |
52 | ncnn::set_default_option(opt);
53 |
54 | return JNI_TRUE;
55 | }
56 |
57 | // Java-side declaration: public native float[] Detect(Bitmap bitmap);
58 | JNIEXPORT jfloatArray JNICALL Java_com_example_che_mobilenetssd_1demo_MobileNetssd_Detect(JNIEnv* env, jobject thiz, jobject bitmap)
59 | {
60 | // ncnn from bitmap
61 | ncnn::Mat in;
62 | {
63 | AndroidBitmapInfo info;
64 | AndroidBitmap_getInfo(env, bitmap, &info);
65 | // int origin_w = info.width;
66 | // int origin_h = info.height;
67 | // int width = 300;
68 | // int height = 300;
69 | int width = info.width;
70 | int height = info.height;
71 | if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
72 | return NULL;
73 |
74 | void* indata;
75 | AndroidBitmap_lockPixels(env, bitmap, &indata);
76 | // convert the bitmap pixels to an ncnn::Mat and specify the channel order
77 | // every network expects a fixed input size (typically 300x300, 128x128, etc.), so a resize is needed; it can be done here in C++ (from_pixels_resize) or on the Java side when the image is read in
78 | // in = ncnn::Mat::from_pixels_resize((const unsigned char*)indata, ncnn::Mat::PIXEL_RGBA2RGB, origin_w, origin_h, width, height);
79 |
80 | in = ncnn::Mat::from_pixels((const unsigned char*)indata, ncnn::Mat::PIXEL_RGBA2RGB, width, height);
81 |
82 | // the next line is debug logging
83 | //__android_log_print(ANDROID_LOG_DEBUG, "MobilenetssdJniIn", "Mobilenetssd_predict_has_input1, in.w: %d; in.h: %d", in.w, in.h);
84 | AndroidBitmap_unlockPixels(env, bitmap);
85 | }
86 |
87 | // ncnn_net
88 | std::vector<float> cls_scores;
89 | {
90 | // subtract the mean and multiply by the scale (these values must match the normalization used for the preprocessed training images)
91 | const float mean_vals[3] = {127.5f, 127.5f, 127.5f};
92 | const float scale[3] = {0.007843f, 0.007843f, 0.007843f};
93 |
94 | in.substract_mean_normalize(mean_vals, scale); // normalization
95 |
96 | ncnn::Extractor ex = ncnn_net.create_extractor(); // forward pass
97 |
98 | // with a plain (non-converted) param file this would be ex.input("data", in);
99 | // BLOB_data is defined in id.h and is the id of the data input blob
100 | ex.input(MobileNetSSD_deploy_param_id::BLOB_data, in);
101 | //ex.set_num_threads(4); same effect as the option set above
102 |
103 | ncnn::Mat out;
104 | // with a plain (non-converted) param file this would be ex.extract("prob", out);
105 | // BLOB_detection_out is defined in id.h and is the id of the detection output blob, which holds the detection results
106 | ex.extract(MobileNetSSD_deploy_param_id::BLOB_detection_out, out);
107 |
108 | int output_wsize = out.w;
109 | int output_hsize = out.h;
110 |
111 | // collect the output
112 | jfloat *output[output_wsize * output_hsize]; // array of float pointers
113 | for(int i = 0; i< out.h; i++) {
114 | for (int j = 0; j < out.w; j++) {
115 | output[i*output_wsize + j] = &out.row(i)[j];
116 | }
117 | }
118 | // build a float array of length output_wsize * output_hsize; each row of out is the data for one detected object
119 | jfloatArray jOutputData = env->NewFloatArray(output_wsize * output_hsize);
120 | if (jOutputData == nullptr) return nullptr;
121 | env->SetFloatArrayRegion(jOutputData, 0, output_wsize * output_hsize,
122 | reinterpret_cast<const jfloat *>(*output));
123 | return jOutputData;
124 | }
125 | }
126 | }
127 |
--------------------------------------------------------------------------------
/MobileNetSSD_demo/app/src/main/cpp/include/allocator.h:
--------------------------------------------------------------------------------
1 | // Tencent is pleased to support the open source community by making ncnn available.
2 | //
3 | // Copyright (C) 2018 THL A29 Limited, a Tencent company. All rights reserved.
4 | //
5 | // Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
6 | // in compliance with the License. You may obtain a copy of the License at
7 | //
8 | // https://opensource.org/licenses/BSD-3-Clause
9 | //
10 | // Unless required by applicable law or agreed to in writing, software distributed
11 | // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
12 | // CONDITIONS OF ANY KIND, either express or implied.
See the License for the 13 | // specific language governing permissions and limitations under the License. 14 | 15 | #ifndef NCNN_ALLOCATOR_H 16 | #define NCNN_ALLOCATOR_H 17 | 18 | #ifdef _WIN32 19 | #define WIN32_LEAN_AND_MEAN 20 | #include 21 | #else 22 | #include 23 | #endif 24 | 25 | #include 26 | #include 27 | 28 | namespace ncnn { 29 | 30 | // the alignment of all the allocated buffers 31 | #define MALLOC_ALIGN 16 32 | 33 | // Aligns a pointer to the specified number of bytes 34 | // ptr Aligned pointer 35 | // n Alignment size that must be a power of two 36 | template static inline _Tp* alignPtr(_Tp* ptr, int n=(int)sizeof(_Tp)) 37 | { 38 | return (_Tp*)(((size_t)ptr + n-1) & -n); 39 | } 40 | 41 | // Aligns a buffer size to the specified number of bytes 42 | // The function returns the minimum number that is greater or equal to sz and is divisible by n 43 | // sz Buffer size to align 44 | // n Alignment size that must be a power of two 45 | static inline size_t alignSize(size_t sz, int n) 46 | { 47 | return (sz + n-1) & -n; 48 | } 49 | 50 | static inline void* fastMalloc(size_t size) 51 | { 52 | unsigned char* udata = (unsigned char*)malloc(size + sizeof(void*) + MALLOC_ALIGN); 53 | if (!udata) 54 | return 0; 55 | unsigned char** adata = alignPtr((unsigned char**)udata + 1, MALLOC_ALIGN); 56 | adata[-1] = udata; 57 | return adata; 58 | } 59 | 60 | static inline void fastFree(void* ptr) 61 | { 62 | if (ptr) 63 | { 64 | unsigned char* udata = ((unsigned char**)ptr)[-1]; 65 | free(udata); 66 | } 67 | } 68 | 69 | // exchange-add operation for atomic operations on reference counters 70 | #if defined __INTEL_COMPILER && !(defined WIN32 || defined _WIN32) 71 | // atomic increment on the linux version of the Intel(tm) compiler 72 | # define NCNN_XADD(addr, delta) (int)_InterlockedExchangeAdd(const_cast(reinterpret_cast(addr)), delta) 73 | #elif defined __GNUC__ 74 | # if defined __clang__ && __clang_major__ >= 3 && !defined __ANDROID__ && !defined __EMSCRIPTEN__ && !defined(__CUDACC__) 75 | # ifdef __ATOMIC_ACQ_REL 76 | # define NCNN_XADD(addr, delta) __c11_atomic_fetch_add((_Atomic(int)*)(addr), delta, __ATOMIC_ACQ_REL) 77 | # else 78 | # define NCNN_XADD(addr, delta) __atomic_fetch_add((_Atomic(int)*)(addr), delta, 4) 79 | # endif 80 | # else 81 | # if defined __ATOMIC_ACQ_REL && !defined __clang__ 82 | // version for gcc >= 4.7 83 | # define NCNN_XADD(addr, delta) (int)__atomic_fetch_add((unsigned*)(addr), (unsigned)(delta), __ATOMIC_ACQ_REL) 84 | # else 85 | # define NCNN_XADD(addr, delta) (int)__sync_fetch_and_add((unsigned*)(addr), (unsigned)(delta)) 86 | # endif 87 | # endif 88 | #elif defined _MSC_VER && !defined RC_INVOKED 89 | # include 90 | # define NCNN_XADD(addr, delta) (int)_InterlockedExchangeAdd((long volatile*)addr, delta) 91 | #else 92 | // thread-unsafe branch 93 | static inline int NCNN_XADD(int* addr, int delta) { int tmp = *addr; *addr += delta; return tmp; } 94 | #endif 95 | 96 | #ifdef _WIN32 97 | class Mutex 98 | { 99 | public: 100 | Mutex() { InitializeSRWLock(&srwlock); } 101 | ~Mutex() {} 102 | void lock() { AcquireSRWLockExclusive(&srwlock); } 103 | void unlock() { ReleaseSRWLockExclusive(&srwlock); } 104 | private: 105 | // NOTE SRWLock is available from windows vista 106 | SRWLOCK srwlock; 107 | }; 108 | #else // _WIN32 109 | class Mutex 110 | { 111 | public: 112 | Mutex() { pthread_mutex_init(&mutex, 0); } 113 | ~Mutex() { pthread_mutex_destroy(&mutex); } 114 | void lock() { pthread_mutex_lock(&mutex); } 115 | void unlock() { pthread_mutex_unlock(&mutex); } 
116 | private: 117 | pthread_mutex_t mutex; 118 | }; 119 | #endif // _WIN32 120 | 121 | class Allocator 122 | { 123 | public: 124 | virtual ~Allocator() = 0; 125 | virtual void* fastMalloc(size_t size) = 0; 126 | virtual void fastFree(void* ptr) = 0; 127 | }; 128 | 129 | class PoolAllocator : public Allocator 130 | { 131 | public: 132 | PoolAllocator(); 133 | ~PoolAllocator(); 134 | 135 | // ratio range 0 ~ 1 136 | // default cr = 0.75 137 | void set_size_compare_ratio(float scr); 138 | 139 | // release all budgets immediately 140 | void clear(); 141 | 142 | virtual void* fastMalloc(size_t size); 143 | virtual void fastFree(void* ptr); 144 | 145 | private: 146 | Mutex budgets_lock; 147 | Mutex payouts_lock; 148 | unsigned int size_compare_ratio;// 0~256 149 | std::list< std::pair > budgets; 150 | std::list< std::pair > payouts; 151 | }; 152 | 153 | class UnlockedPoolAllocator : public Allocator 154 | { 155 | public: 156 | UnlockedPoolAllocator(); 157 | ~UnlockedPoolAllocator(); 158 | 159 | // ratio range 0 ~ 1 160 | // default cr = 0.75 161 | void set_size_compare_ratio(float scr); 162 | 163 | // release all budgets immediately 164 | void clear(); 165 | 166 | virtual void* fastMalloc(size_t size); 167 | virtual void fastFree(void* ptr); 168 | 169 | private: 170 | unsigned int size_compare_ratio;// 0~256 171 | std::list< std::pair > budgets; 172 | std::list< std::pair > payouts; 173 | }; 174 | 175 | } // namespace ncnn 176 | 177 | #endif // NCNN_ALLOCATOR_H 178 | -------------------------------------------------------------------------------- /MobileNetSSD_demo/app/src/main/cpp/include/benchmark.h: -------------------------------------------------------------------------------- 1 | // Tencent is pleased to support the open source community by making ncnn available. 2 | // 3 | // Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. 4 | // 5 | // Licensed under the BSD 3-Clause License (the "License"); you may not use this file except 6 | // in compliance with the License. You may obtain a copy of the License at 7 | // 8 | // https://opensource.org/licenses/BSD-3-Clause 9 | // 10 | // Unless required by applicable law or agreed to in writing, software distributed 11 | // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR 12 | // CONDITIONS OF ANY KIND, either express or implied. See the License for the 13 | // specific language governing permissions and limitations under the License. 14 | 15 | #ifndef NCNN_BENCHMARK_H 16 | #define NCNN_BENCHMARK_H 17 | 18 | #include "platform.h" 19 | #include "mat.h" 20 | #include "layer.h" 21 | 22 | namespace ncnn { 23 | 24 | // get now timestamp in ms 25 | double get_current_time(); 26 | 27 | #if NCNN_BENCHMARK 28 | 29 | void benchmark(const Layer* layer, double start, double end); 30 | void benchmark(const Layer* layer, const Mat& bottom_blob, Mat& top_blob, double start, double end); 31 | 32 | #endif // NCNN_BENCHMARK 33 | 34 | } // namespace ncnn 35 | 36 | #endif // NCNN_BENCHMARK_H 37 | -------------------------------------------------------------------------------- /MobileNetSSD_demo/app/src/main/cpp/include/blob.h: -------------------------------------------------------------------------------- 1 | // Tencent is pleased to support the open source community by making ncnn available. 2 | // 3 | // Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. 
4 | // 5 | // Licensed under the BSD 3-Clause License (the "License"); you may not use this file except 6 | // in compliance with the License. You may obtain a copy of the License at 7 | // 8 | // https://opensource.org/licenses/BSD-3-Clause 9 | // 10 | // Unless required by applicable law or agreed to in writing, software distributed 11 | // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR 12 | // CONDITIONS OF ANY KIND, either express or implied. See the License for the 13 | // specific language governing permissions and limitations under the License. 14 | 15 | #ifndef NCNN_BLOB_H 16 | #define NCNN_BLOB_H 17 | 18 | #include 19 | #include 20 | #include "platform.h" 21 | 22 | namespace ncnn { 23 | 24 | class Blob 25 | { 26 | public: 27 | // empty 28 | Blob(); 29 | 30 | public: 31 | #if NCNN_STRING 32 | // blob name 33 | std::string name; 34 | #endif // NCNN_STRING 35 | // layer index which produce this blob as output 36 | int producer; 37 | // layer index which need this blob as input 38 | std::vector consumers; 39 | }; 40 | 41 | } // namespace ncnn 42 | 43 | #endif // NCNN_BLOB_H 44 | -------------------------------------------------------------------------------- /MobileNetSSD_demo/app/src/main/cpp/include/cpu.h: -------------------------------------------------------------------------------- 1 | // Tencent is pleased to support the open source community by making ncnn available. 2 | // 3 | // Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. 4 | // 5 | // Licensed under the BSD 3-Clause License (the "License"); you may not use this file except 6 | // in compliance with the License. You may obtain a copy of the License at 7 | // 8 | // https://opensource.org/licenses/BSD-3-Clause 9 | // 10 | // Unless required by applicable law or agreed to in writing, software distributed 11 | // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR 12 | // CONDITIONS OF ANY KIND, either express or implied. See the License for the 13 | // specific language governing permissions and limitations under the License. 14 | 15 | #ifndef NCNN_CPU_H 16 | #define NCNN_CPU_H 17 | 18 | namespace ncnn { 19 | 20 | // test optional cpu features 21 | // neon = armv7 neon or aarch64 asimd 22 | int cpu_support_arm_neon(); 23 | // vfpv4 = armv7 fp16 + fma 24 | int cpu_support_arm_vfpv4(); 25 | // asimdhp = aarch64 asimd half precision 26 | int cpu_support_arm_asimdhp(); 27 | 28 | // cpu info 29 | int get_cpu_count(); 30 | 31 | // bind all threads on little clusters if powersave enabled 32 | // affacts HMP arch cpu like ARM big.LITTLE 33 | // only implemented on android at the moment 34 | // switching powersave is expensive and not thread-safe 35 | // 0 = all cores enabled(default) 36 | // 1 = only little clusters enabled 37 | // 2 = only big clusters enabled 38 | // return 0 if success for setter function 39 | int get_cpu_powersave(); 40 | int set_cpu_powersave(int powersave); 41 | 42 | // misc function wrapper for openmp routines 43 | int get_omp_num_threads(); 44 | void set_omp_num_threads(int num_threads); 45 | 46 | int get_omp_dynamic(); 47 | void set_omp_dynamic(int dynamic); 48 | 49 | } // namespace ncnn 50 | 51 | #endif // NCNN_CPU_H 52 | -------------------------------------------------------------------------------- /MobileNetSSD_demo/app/src/main/cpp/include/layer.h: -------------------------------------------------------------------------------- 1 | // Tencent is pleased to support the open source community by making ncnn available. 
2 | // 3 | // Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. 4 | // 5 | // Licensed under the BSD 3-Clause License (the "License"); you may not use this file except 6 | // in compliance with the License. You may obtain a copy of the License at 7 | // 8 | // https://opensource.org/licenses/BSD-3-Clause 9 | // 10 | // Unless required by applicable law or agreed to in writing, software distributed 11 | // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR 12 | // CONDITIONS OF ANY KIND, either express or implied. See the License for the 13 | // specific language governing permissions and limitations under the License. 14 | 15 | #ifndef NCNN_LAYER_H 16 | #define NCNN_LAYER_H 17 | 18 | #include 19 | #include 20 | #include 21 | #include "mat.h" 22 | #include "modelbin.h" 23 | #include "paramdict.h" 24 | #include "platform.h" 25 | 26 | namespace ncnn { 27 | 28 | class Allocator; 29 | class Option 30 | { 31 | public: 32 | // default option 33 | Option(); 34 | 35 | public: 36 | // light mode 37 | // intermediate blob will be recycled when enabled 38 | // enabled by default 39 | bool lightmode; 40 | 41 | // thread count 42 | // default value is the one returned by get_cpu_count() 43 | int num_threads; 44 | 45 | // blob memory allocator 46 | Allocator* blob_allocator; 47 | 48 | // workspace memory allocator 49 | Allocator* workspace_allocator; 50 | }; 51 | 52 | // the global default option 53 | const Option& get_default_option(); 54 | int set_default_option(const Option& opt); 55 | 56 | class Layer 57 | { 58 | public: 59 | // empty 60 | Layer(); 61 | // virtual destructor 62 | virtual ~Layer(); 63 | 64 | // load layer specific parameter from parsed dict 65 | // return 0 if success 66 | virtual int load_param(const ParamDict& pd); 67 | 68 | // load layer specific weight data from model binary 69 | // return 0 if success 70 | virtual int load_model(const ModelBin& mb); 71 | 72 | public: 73 | // one input and one output blob 74 | bool one_blob_only; 75 | 76 | // support inplace inference 77 | bool support_inplace; 78 | 79 | public: 80 | // implement inference 81 | // return 0 if success 82 | virtual int forward(const std::vector& bottom_blobs, std::vector& top_blobs, const Option& opt = get_default_option()) const; 83 | virtual int forward(const Mat& bottom_blob, Mat& top_blob, const Option& opt = get_default_option()) const; 84 | 85 | // implement inplace inference 86 | // return 0 if success 87 | virtual int forward_inplace(std::vector& bottom_top_blobs, const Option& opt = get_default_option()) const; 88 | virtual int forward_inplace(Mat& bottom_top_blob, const Option& opt = get_default_option()) const; 89 | 90 | public: 91 | #if NCNN_STRING 92 | // layer type name 93 | std::string type; 94 | // layer name 95 | std::string name; 96 | #endif // NCNN_STRING 97 | // blob index which this layer needs as input 98 | std::vector bottoms; 99 | // blob index which this layer produces as output 100 | std::vector tops; 101 | }; 102 | 103 | // layer factory function 104 | typedef Layer* (*layer_creator_func)(); 105 | 106 | struct layer_registry_entry 107 | { 108 | #if NCNN_STRING 109 | // layer type name 110 | const char* name; 111 | #endif // NCNN_STRING 112 | // layer factory entry 113 | layer_creator_func creator; 114 | }; 115 | 116 | #if NCNN_STRING 117 | // get layer type from type name 118 | int layer_to_index(const char* type); 119 | // create layer from type name 120 | Layer* create_layer(const char* type); 121 | #endif // NCNN_STRING 122 | // create 
layer from layer type 123 | Layer* create_layer(int index); 124 | 125 | #define DEFINE_LAYER_CREATOR(name) \ 126 | ::ncnn::Layer* name##_layer_creator() { return new name; } 127 | 128 | } // namespace ncnn 129 | 130 | #endif // NCNN_LAYER_H 131 | -------------------------------------------------------------------------------- /MobileNetSSD_demo/app/src/main/cpp/include/layer_type.h: -------------------------------------------------------------------------------- 1 | // Tencent is pleased to support the open source community by making ncnn available. 2 | // 3 | // Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. 4 | // 5 | // Licensed under the BSD 3-Clause License (the "License"); you may not use this file except 6 | // in compliance with the License. You may obtain a copy of the License at 7 | // 8 | // https://opensource.org/licenses/BSD-3-Clause 9 | // 10 | // Unless required by applicable law or agreed to in writing, software distributed 11 | // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR 12 | // CONDITIONS OF ANY KIND, either express or implied. See the License for the 13 | // specific language governing permissions and limitations under the License. 14 | 15 | #ifndef NCNN_LAYER_TYPE_H 16 | #define NCNN_LAYER_TYPE_H 17 | 18 | namespace ncnn { 19 | 20 | namespace LayerType { 21 | enum 22 | { 23 | #include "layer_type_enum.h" 24 | CustomBit = (1<<8), 25 | }; 26 | } // namespace LayerType 27 | 28 | } // namespace ncnn 29 | 30 | #endif // NCNN_LAYER_TYPE_H 31 | -------------------------------------------------------------------------------- /MobileNetSSD_demo/app/src/main/cpp/include/layer_type_enum.h: -------------------------------------------------------------------------------- 1 | // Layer Type Enum header 2 | // 3 | // This file is auto-generated by cmake, don't edit it. 4 | 5 | AbsVal = 0, 6 | ArgMax = 1, 7 | BatchNorm = 2, 8 | Bias = 3, 9 | BNLL = 4, 10 | Concat = 5, 11 | Convolution = 6, 12 | Crop = 7, 13 | Deconvolution = 8, 14 | Dropout = 9, 15 | Eltwise = 10, 16 | ELU = 11, 17 | Embed = 12, 18 | Exp = 13, 19 | Flatten = 14, 20 | InnerProduct = 15, 21 | Input = 16, 22 | Log = 17, 23 | LRN = 18, 24 | MemoryData = 19, 25 | MVN = 20, 26 | Pooling = 21, 27 | Power = 22, 28 | PReLU = 23, 29 | Proposal = 24, 30 | Reduction = 25, 31 | ReLU = 26, 32 | Reshape = 27, 33 | ROIPooling = 28, 34 | Scale = 29, 35 | Sigmoid = 30, 36 | Slice = 31, 37 | Softmax = 32, 38 | Split = 33, 39 | SPP = 34, 40 | TanH = 35, 41 | Threshold = 36, 42 | Tile = 37, 43 | RNN = 38, 44 | LSTM = 39, 45 | BinaryOp = 40, 46 | UnaryOp = 41, 47 | ConvolutionDepthWise = 42, 48 | Padding = 43, 49 | Squeeze = 44, 50 | ExpandDims = 45, 51 | Normalize = 46, 52 | Permute = 47, 53 | PriorBox = 48, 54 | DetectionOutput = 49, 55 | Interp = 50, 56 | DeconvolutionDepthWise = 51, 57 | ShuffleChannel = 52, 58 | InstanceNorm = 53, 59 | Clip = 54, 60 | Reorg = 55, 61 | YoloDetectionOutput = 56, 62 | Quantize = 57, 63 | Dequantize = 58, 64 | Yolov3DetectionOutput = 59, 65 | PSROIPooling = 60, 66 | ROIAlign = 61, 67 | 68 | -------------------------------------------------------------------------------- /MobileNetSSD_demo/app/src/main/cpp/include/modelbin.h: -------------------------------------------------------------------------------- 1 | // Tencent is pleased to support the open source community by making ncnn available. 2 | // 3 | // Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. 
4 | // 5 | // Licensed under the BSD 3-Clause License (the "License"); you may not use this file except 6 | // in compliance with the License. You may obtain a copy of the License at 7 | // 8 | // https://opensource.org/licenses/BSD-3-Clause 9 | // 10 | // Unless required by applicable law or agreed to in writing, software distributed 11 | // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR 12 | // CONDITIONS OF ANY KIND, either express or implied. See the License for the 13 | // specific language governing permissions and limitations under the License. 14 | 15 | #ifndef NCNN_MODELBIN_H 16 | #define NCNN_MODELBIN_H 17 | 18 | #include 19 | #include "mat.h" 20 | #include "platform.h" 21 | 22 | namespace ncnn { 23 | 24 | class Net; 25 | class ModelBin 26 | { 27 | public: 28 | // element type 29 | // 0 = auto 30 | // 1 = float32 31 | // 2 = float16 32 | // 3 = int8 33 | // load vec 34 | virtual Mat load(int w, int type) const = 0; 35 | // load image 36 | virtual Mat load(int w, int h, int type) const; 37 | // load dim 38 | virtual Mat load(int w, int h, int c, int type) const; 39 | }; 40 | 41 | #if NCNN_STDIO 42 | class ModelBinFromStdio : public ModelBin 43 | { 44 | public: 45 | // construct from file 46 | ModelBinFromStdio(FILE* binfp); 47 | 48 | virtual Mat load(int w, int type) const; 49 | 50 | protected: 51 | FILE* binfp; 52 | }; 53 | #endif // NCNN_STDIO 54 | 55 | class ModelBinFromMemory : public ModelBin 56 | { 57 | public: 58 | // construct from external memory 59 | ModelBinFromMemory(const unsigned char*& mem); 60 | 61 | virtual Mat load(int w, int type) const; 62 | 63 | protected: 64 | const unsigned char*& mem; 65 | }; 66 | 67 | class ModelBinFromMatArray : public ModelBin 68 | { 69 | public: 70 | // construct from weight blob array 71 | ModelBinFromMatArray(const Mat* weights); 72 | 73 | virtual Mat load(int w, int type) const; 74 | 75 | protected: 76 | mutable const Mat* weights; 77 | }; 78 | 79 | } // namespace ncnn 80 | 81 | #endif // NCNN_MODELBIN_H 82 | -------------------------------------------------------------------------------- /MobileNetSSD_demo/app/src/main/cpp/include/net.h: -------------------------------------------------------------------------------- 1 | // Tencent is pleased to support the open source community by making ncnn available. 2 | // 3 | // Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. 4 | // 5 | // Licensed under the BSD 3-Clause License (the "License"); you may not use this file except 6 | // in compliance with the License. You may obtain a copy of the License at 7 | // 8 | // https://opensource.org/licenses/BSD-3-Clause 9 | // 10 | // Unless required by applicable law or agreed to in writing, software distributed 11 | // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR 12 | // CONDITIONS OF ANY KIND, either express or implied. See the License for the 13 | // specific language governing permissions and limitations under the License. 
14 | 15 | #ifndef NCNN_NET_H 16 | #define NCNN_NET_H 17 | 18 | #include 19 | #include 20 | #include "blob.h" 21 | #include "layer.h" 22 | #include "mat.h" 23 | #include "platform.h" 24 | 25 | namespace ncnn { 26 | 27 | class Extractor; 28 | class Net 29 | { 30 | public: 31 | // empty init 32 | Net(); 33 | // clear and destroy 34 | ~Net(); 35 | 36 | #if NCNN_STRING 37 | // register custom layer by layer type name 38 | // return 0 if success 39 | int register_custom_layer(const char* type, layer_creator_func creator); 40 | #endif // NCNN_STRING 41 | // register custom layer by layer type 42 | // return 0 if success 43 | int register_custom_layer(int index, layer_creator_func creator); 44 | 45 | #if NCNN_STDIO 46 | #if NCNN_STRING 47 | // load network structure from plain param file 48 | // return 0 if success 49 | int load_param(FILE* fp); 50 | int load_param(const char* protopath); 51 | int load_param_mem(const char* mem); 52 | #endif // NCNN_STRING 53 | // load network structure from binary param file 54 | // return 0 if success 55 | int load_param_bin(FILE* fp); 56 | int load_param_bin(const char* protopath); 57 | 58 | // load network weight data from model file 59 | // return 0 if success 60 | int load_model(FILE* fp); 61 | int load_model(const char* modelpath); 62 | #endif // NCNN_STDIO 63 | 64 | // load network structure from external memory 65 | // memory pointer must be 32-bit aligned 66 | // return bytes consumed 67 | int load_param(const unsigned char* mem); 68 | 69 | // reference network weight data from external memory 70 | // weight data is not copied but referenced 71 | // so external memory should be retained when used 72 | // memory pointer must be 32-bit aligned 73 | // return bytes consumed 74 | int load_model(const unsigned char* mem); 75 | 76 | // unload network structure and weight data 77 | void clear(); 78 | 79 | // construct an Extractor from network 80 | Extractor create_extractor() const; 81 | 82 | public: 83 | // enable winograd convolution optimization 84 | // improve convolution 3x3 stride1 performace, may consume more memory 85 | // changes should be applied before loading network structure and weight 86 | // enabled by default 87 | int use_winograd_convolution; 88 | 89 | // enable sgemm convolution optimization 90 | // improve convolution 1x1 stride1 performace, may consume more memory 91 | // changes should be applied before loading network structure and weight 92 | // enabled by default 93 | int use_sgemm_convolution; 94 | 95 | // enable quantized int8 inference 96 | // use low-precision int8 path for quantized model 97 | // changes should be applied before loading network structure and weight 98 | // enabled by default 99 | int use_int8_inference; 100 | 101 | protected: 102 | friend class Extractor; 103 | #if NCNN_STRING 104 | int find_blob_index_by_name(const char* name) const; 105 | int find_layer_index_by_name(const char* name) const; 106 | int custom_layer_to_index(const char* type); 107 | Layer* create_custom_layer(const char* type); 108 | #endif // NCNN_STRING 109 | Layer* create_custom_layer(int index); 110 | int forward_layer(int layer_index, std::vector& blob_mats, Option& opt) const; 111 | 112 | protected: 113 | std::vector blobs; 114 | std::vector layers; 115 | 116 | std::vector custom_layer_registry; 117 | }; 118 | 119 | class Extractor 120 | { 121 | public: 122 | // enable light mode 123 | // intermediate blob will be recycled when enabled 124 | // enabled by default 125 | void set_light_mode(bool enable); 126 | 127 | // set thread count for 
this extractor 128 | // this will overwrite the global setting 129 | // default count is system depended 130 | void set_num_threads(int num_threads); 131 | 132 | // set blob memory allocator 133 | void set_blob_allocator(Allocator* allocator); 134 | 135 | // set workspace memory allocator 136 | void set_workspace_allocator(Allocator* allocator); 137 | 138 | #if NCNN_STRING 139 | // set input by blob name 140 | // return 0 if success 141 | int input(const char* blob_name, const Mat& in); 142 | 143 | // get result by blob name 144 | // return 0 if success 145 | int extract(const char* blob_name, Mat& feat); 146 | #endif // NCNN_STRING 147 | 148 | // set input by blob index 149 | // return 0 if success 150 | int input(int blob_index, const Mat& in); 151 | 152 | // get result by blob index 153 | // return 0 if success 154 | int extract(int blob_index, Mat& feat); 155 | 156 | protected: 157 | friend Extractor Net::create_extractor() const; 158 | Extractor(const Net* net, int blob_count); 159 | 160 | private: 161 | const Net* net; 162 | std::vector blob_mats; 163 | Option opt; 164 | }; 165 | 166 | } // namespace ncnn 167 | 168 | #endif // NCNN_NET_H 169 | -------------------------------------------------------------------------------- /MobileNetSSD_demo/app/src/main/cpp/include/opencv.h: -------------------------------------------------------------------------------- 1 | // Tencent is pleased to support the open source community by making ncnn available. 2 | // 3 | // Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. 4 | // 5 | // Licensed under the BSD 3-Clause License (the "License"); you may not use this file except 6 | // in compliance with the License. You may obtain a copy of the License at 7 | // 8 | // https://opensource.org/licenses/BSD-3-Clause 9 | // 10 | // Unless required by applicable law or agreed to in writing, software distributed 11 | // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR 12 | // CONDITIONS OF ANY KIND, either express or implied. See the License for the 13 | // specific language governing permissions and limitations under the License. 
#ifndef NCNN_OPENCV_H
#define NCNN_OPENCV_H

#include "platform.h"

#if NCNN_OPENCV

#include <algorithm>
#include <string>
#include "mat.h"

// minimal opencv style data structure implementation
namespace cv
{

struct Size
{
    Size() : width(0), height(0) {}
    Size(int _w, int _h) : width(_w), height(_h) {}

    int width;
    int height;
};

template<typename _Tp>
struct Rect_
{
    Rect_() : x(0), y(0), width(0), height(0) {}
    Rect_(_Tp _x, _Tp _y, _Tp _w, _Tp _h) : x(_x), y(_y), width(_w), height(_h) {}

    _Tp x;
    _Tp y;
    _Tp width;
    _Tp height;

    // area
    _Tp area() const
    {
        return width * height;
    }
};

template<typename _Tp> static inline Rect_<_Tp>& operator &= ( Rect_<_Tp>& a, const Rect_<_Tp>& b )
{
    _Tp x1 = std::max(a.x, b.x), y1 = std::max(a.y, b.y);
    a.width = std::min(a.x + a.width, b.x + b.width) - x1;
    a.height = std::min(a.y + a.height, b.y + b.height) - y1;
    a.x = x1; a.y = y1;
    if( a.width <= 0 || a.height <= 0 )
        a = Rect_<_Tp>();
    return a;
}

template<typename _Tp> static inline Rect_<_Tp>& operator |= ( Rect_<_Tp>& a, const Rect_<_Tp>& b )
{
    _Tp x1 = std::min(a.x, b.x), y1 = std::min(a.y, b.y);
    a.width = std::max(a.x + a.width, b.x + b.width) - x1;
    a.height = std::max(a.y + a.height, b.y + b.height) - y1;
    a.x = x1; a.y = y1;
    return a;
}

template<typename _Tp> static inline Rect_<_Tp> operator & (const Rect_<_Tp>& a, const Rect_<_Tp>& b)
{
    Rect_<_Tp> c = a;
    return c &= b;
}

template<typename _Tp> static inline Rect_<_Tp> operator | (const Rect_<_Tp>& a, const Rect_<_Tp>& b)
{
    Rect_<_Tp> c = a;
    return c |= b;
}

typedef Rect_<int> Rect;
typedef Rect_<float> Rect2f;

template<typename _Tp>
struct Point_
{
    Point_() : x(0), y(0) {}
    Point_(_Tp _x, _Tp _y) : x(_x), y(_y) {}

    _Tp x;
    _Tp y;
};

typedef Point_<int> Point;
typedef Point_<float> Point2f;

#define CV_8UC1  1
#define CV_8UC3  3
#define CV_8UC4  4
#define CV_32FC1 4

struct Mat
{
    Mat() : data(0), refcount(0), rows(0), cols(0), c(0) {}

    Mat(int _rows, int _cols, int flags) : data(0), refcount(0)
    {
        create(_rows, _cols, flags);
    }

    // copy
    Mat(const Mat& m) : data(m.data), refcount(m.refcount)
    {
        if (refcount)
            NCNN_XADD(refcount, 1);

        rows = m.rows;
        cols = m.cols;
        c = m.c;
    }

    Mat(int _rows, int _cols, int flags, void* _data) : data((unsigned char*)_data), refcount(0)
    {
        rows = _rows;
        cols = _cols;
        c = flags;
    }

    ~Mat()
    {
        release();
    }

    // assign
    Mat& operator=(const Mat& m)
    {
        if (this == &m)
            return *this;

        if (m.refcount)
            NCNN_XADD(m.refcount, 1);

        release();

        data = m.data;
        refcount = m.refcount;

        rows = m.rows;
        cols = m.cols;
        c = m.c;

        return *this;
    }

    void create(int _rows, int _cols, int flags)
    {
        release();

        rows = _rows;
        cols = _cols;
        c = flags;

        if (total() > 0)
        {
            // refcount address must be aligned, so we expand totalsize here
            size_t totalsize = (total() + 3) >> 2 << 2;
            data = (unsigned char*)ncnn::fastMalloc(totalsize + (int)sizeof(*refcount));
            refcount = (int*)(((unsigned char*)data) + totalsize);
            *refcount = 1;
        }
    }

    void release()
    {
        if (refcount && NCNN_XADD(refcount, -1) == 1)
            ncnn::fastFree(data);

        data = 0;

        rows = 0;
        cols = 0;
        c = 0;

        refcount = 0;
    }

    Mat clone() const
    {
        if (empty())
            return Mat();

        Mat m(rows, cols, c);

        if (total() > 0)
        {
            memcpy(m.data, data, total());
        }

        return m;
    }

    bool empty() const { return data == 0 || total() == 0; }

    int channels() const { return c; }

    size_t total() const { return cols * rows * c; }

    const unsigned char* ptr(int y) const { return data + y * cols * c; }

    unsigned char* ptr(int y) { return data + y * cols * c; }

    // roi
    Mat operator()( const Rect& roi ) const
    {
        if (empty())
            return Mat();

        Mat m(roi.height, roi.width, c);

        int sy = roi.y;
        for (int y = 0; y < roi.height; y++)
        {
            const unsigned char* sptr = ptr(sy) + roi.x * c;
            unsigned char* dptr = m.ptr(y);
            memcpy(dptr, sptr, roi.width * c);
            sy++;
        }

        return m;
    }

    unsigned char* data;

    // pointer to the reference counter;
    // when points to user-allocated data, the pointer is NULL
    int* refcount;

    int rows;
    int cols;

    int c;

};

#define CV_LOAD_IMAGE_GRAYSCALE 1
#define CV_LOAD_IMAGE_COLOR     3
Mat imread(const std::string& path, int flags);
void imwrite(const std::string& path, const Mat& m);

#if NCNN_PIXEL
void resize(const Mat& src, Mat& dst, const Size& size, float sw = 0.f, float sh = 0.f, int flags = 0);
#endif // NCNN_PIXEL

} // namespace cv

#endif // NCNN_OPENCV

#endif // NCNN_OPENCV_H
--------------------------------------------------------------------------------
/MobileNetSSD_demo/app/src/main/cpp/include/paramdict.h:
--------------------------------------------------------------------------------
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#ifndef NCNN_PARAMDICT_H
#define NCNN_PARAMDICT_H

#include <stdio.h>
#include "mat.h"
#include "platform.h"

// at most 20 parameters
#define NCNN_MAX_PARAM_COUNT 20

namespace ncnn {

class Net;
class ParamDict
{
public:
    // empty
    ParamDict();

    // get int
    int get(int id, int def) const;
    // get float
    float get(int id, float def) const;
    // get array
    Mat get(int id, const Mat& def) const;

    // set int
    void set(int id, int i);
    // set float
    void set(int id, float f);
    // set array
    void set(int id, const Mat& v);

public:
    int use_winograd_convolution;
    int use_sgemm_convolution;
    int use_int8_inference;

protected:
    friend class Net;

    void clear();

#if NCNN_STDIO
#if NCNN_STRING
    int load_param(FILE* fp);
    int load_param_mem(const char*& mem);
#endif // NCNN_STRING
    int load_param_bin(FILE* fp);
#endif // NCNN_STDIO
    int load_param(const unsigned char*& mem);

protected:
    struct
    {
        int loaded;
        union { int i; float f; };
        Mat v;
    } params[NCNN_MAX_PARAM_COUNT];
};

} // namespace ncnn

#endif // NCNN_PARAMDICT_H
--------------------------------------------------------------------------------
/MobileNetSSD_demo/app/src/main/cpp/include/platform.h:
--------------------------------------------------------------------------------
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#ifndef NCNN_PLATFORM_H
#define NCNN_PLATFORM_H

#define NCNN_STDIO 1
#define NCNN_STRING 1
#define NCNN_OPENCV 0
#define NCNN_BENCHMARK 0
#define NCNN_PIXEL 1
#define NCNN_PIXEL_ROTATE 0

#endif // NCNN_PLATFORM_H
--------------------------------------------------------------------------------
/MobileNetSSD_demo/app/src/main/java/com/example/che/mobilenetssd_demo/MainActivity.java:
--------------------------------------------------------------------------------
package com.example.che.mobilenetssd_demo;

import android.Manifest;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.net.Uri;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import com.bumptech.glide.Glide;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.bumptech.glide.request.RequestOptions;


public class MainActivity extends AppCompatActivity {

    private static final String TAG = MainActivity.class.getName();
    private static final int USE_PHOTO = 1001;
    private String camera_image_path;
    private ImageView show_image;
    private TextView result_text;
    private boolean load_result = false;
    private int[] ddims = {1, 3, 300, 300}; // these dimensions must match the input shape of the trained model
    private int model_index = 1;
    private List<String> resultLabel = new ArrayList<>();
    private MobileNetssd mobileNetssd = new MobileNetssd(); // Java wrapper instance; its native methods call the NDK C++ code directly

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        try
        {
            initMobileNetSSD();
        } catch (IOException e) {
            Log.e("MainActivity", "initMobileNetSSD error");
        }
        init_view();
        readCacheLabelFromLocalFile();
    }

    /**
     * Initialize MobileNetssd, i.e. load the model files.
     */
    private void initMobileNetSSD() throws IOException {
        byte[] param = null;
        byte[] bin = null;
        {
            // read the binary file with an input stream and store it in a byte[] array
            InputStream assetsInputStream = getAssets().open("MobileNetSSD_deploy.param.bin"); // param: network structure file
            int available = assetsInputStream.available();
            param = new byte[available];
            int byteCode = assetsInputStream.read(param);
            assetsInputStream.close();
        }
        {
            // read the binary weight file the same way into a byte[] array
            InputStream assetsInputStream = getAssets().open("MobileNetSSD_deploy.bin"); // bin: model weight file
            int available = assetsInputStream.available();
            bin = new byte[available];
            int byteCode = assetsInputStream.read(bin);
            assetsInputStream.close();
        }

        load_result = mobileNetssd.Init(param, bin); // hand both buffers to the native Init() implemented in C++
        Log.d("load model", "MobileNetSSD_load_model_result:" + load_result);
    }


    // initialize view
    private void init_view() {
        request_permissions();
        show_image = (ImageView) findViewById(R.id.show_image);
        result_text = (TextView) findViewById(R.id.result_text);
        result_text.setMovementMethod(ScrollingMovementMethod.getInstance());
        Button use_photo = (Button) findViewById(R.id.use_photo);
        // use photo click
        use_photo.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (!load_result) {
                    Toast.makeText(MainActivity.this, "never load model", Toast.LENGTH_SHORT).show();
                    return;
                }
                PhotoUtil.use_photo(MainActivity.this, USE_PHOTO);
            }
        });
    }

    // load label names
    private void readCacheLabelFromLocalFile() {
        try {
            AssetManager assetManager = getApplicationContext().getAssets();
            BufferedReader reader = new BufferedReader(new InputStreamReader(assetManager.open("words.txt"))); // label file
            String readLine = null;
            while ((readLine = reader.readLine()) != null) {
                resultLabel.add(readLine);
            }
            reader.close();
        } catch (Exception e) {
            Log.e("labelCache", "error " + e);
        }
    }


    @Override
    protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
        String image_path;
        RequestOptions options = new RequestOptions().skipMemoryCache(true).diskCacheStrategy(DiskCacheStrategy.NONE);
        if (resultCode == Activity.RESULT_OK) {
            switch (requestCode) {
                case USE_PHOTO:
                    if (data == null) {
                        Log.w(TAG, "user photo data is null");
                        return;
                    }
                    Uri image_uri = data.getData();

                    //Glide.with(MainActivity.this).load(image_uri).apply(options).into(show_image);

                    // get image path from uri
                    image_path = PhotoUtil.get_path_from_URI(MainActivity.this, image_uri);
                    // predict image
                    predict_image(image_path);
                    break;
            }
        }
    }

    // predict image
    private void predict_image(String image_path) {
        // decode and downscale the picture
        Bitmap bmp = PhotoUtil.getScaleBitmap(image_path);
        Bitmap rgba = bmp.copy(Bitmap.Config.ARGB_8888, true);
        // resize to the network input size
        Bitmap input_bmp = Bitmap.createScaledBitmap(rgba, ddims[2], ddims[3], false);
        try {
            // Data format conversion takes too long
            // Log.d("inputData", Arrays.toString(inputData));
            long start = System.currentTimeMillis();
            // get predict result
            float[] result = mobileNetssd.Detect(input_bmp);
            // time end
            long end = System.currentTimeMillis();
            Log.d(TAG, "origin predict result:" + Arrays.toString(result));
            long time = end - start;
            Log.d("result length", "length of result: " + String.valueOf(result.length));
            // show predict result and time
            // float[] r = get_max_result(result);

            String show_text = "result:" + Arrays.toString(result) + "\nname:" + resultLabel.get((int) result[0]) + "\nprobability:" + result[1] + "\ntime:" + time + "ms";
            result_text.setText(show_text);

            // canvas over the bitmap
            Canvas canvas = new Canvas(rgba);
            // paint used to draw rectangles on the image
            Paint paint = new Paint();
            paint.setColor(Color.RED);
            paint.setStyle(Paint.Style.STROKE); // outline only, no fill
            paint.setStrokeWidth(5); // stroke width


            float[][] get_finalresult = TwoArry(result);
            Log.d("zhuanhuan", get_finalresult + "");
            int object_num = 0;
            int num = result.length / 6; // number of detected objects
            // draw a rect for each object
            for (object_num = 0; object_num < num; object_num++) {
                Log.d(TAG, "haha :" + Arrays.toString(get_finalresult));
                // draw the bounding box
                paint.setColor(Color.RED);
                paint.setStyle(Paint.Style.STROKE); // outline only, no fill
                paint.setStrokeWidth(5); // stroke width
                canvas.drawRect(get_finalresult[object_num][2] * rgba.getWidth(), get_finalresult[object_num][3] * rgba.getHeight(),
                        get_finalresult[object_num][4] * rgba.getWidth(), get_finalresult[object_num][5] * rgba.getHeight(), paint);

                // draw the label text
                paint.setColor(Color.YELLOW);
                paint.setStyle(Paint.Style.FILL); // filled, for the label text
                paint.setStrokeWidth(1);
                canvas.drawText(resultLabel.get((int) get_finalresult[object_num][0]) + "\n" + get_finalresult[object_num][1],
                        get_finalresult[object_num][2] * rgba.getWidth(), get_finalresult[object_num][3] * rgba.getHeight(), paint);
            }

            show_image.setImageBitmap(rgba);


        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // convert the flat 1-D result array into a 2-D array, one row of 6 values per detected object
    public static float[][] TwoArry(float[] inputfloat) {
        int n = inputfloat.length;
        int num = inputfloat.length / 6;
        float[][] outputfloat = new float[num][6];
        int k = 0;
        for (int i = 0; i < num; i++) {
            int j = 0;

            while (j < 6) {
                outputfloat[i][j] = inputfloat[k];
                k++;
                j++;
            }

        }

        return outputfloat;
    }

    /*
    // get max probability label
    private float[] get_max_result(float[] result) {
        int num_rs = result.length / 6;
        float maxProp = result[1];
        int maxI = 0;
        for (int i = 1; i < num_rs; i++)
        // ...
    */

    // request runtime permissions
    private void request_permissions() {
        List<String> permissionList = new ArrayList<>();
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
            permissionList.add(Manifest.permission.CAMERA);
        }
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
            permissionList.add(Manifest.permission.WRITE_EXTERNAL_STORAGE);
        }
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
            permissionList.add(Manifest.permission.READ_EXTERNAL_STORAGE);
        }
        // if the list is not empty, request the missing permissions
        if (!permissionList.isEmpty()) {
            ActivityCompat.requestPermissions(this, permissionList.toArray(new String[permissionList.size()]), 1);
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        switch (requestCode) {
            case 1:
                if (grantResults.length > 0) {
                    for (int i = 0; i < grantResults.length; i++) {
                        int grantResult = grantResults[i];
                        if (grantResult == PackageManager.PERMISSION_DENIED) {
                            String s = permissions[i];
                            Toast.makeText(this, s + " permission was denied", Toast.LENGTH_SHORT).show();
                        }
                    }
                }
                break;
        }
    }

}
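For reference, the drawing loop above relies on the layout of the array returned by Detect(): each detected object occupies six consecutive floats — class index, confidence, then the normalized box corners (xmin, ymin, xmax, ymax), which predict_image() scales by the bitmap's width and height. Below is a minimal, self-contained sketch of that parsing step; the Detection class and parseDetections helper are illustrative and not part of this repo.

```java
import java.util.ArrayList;
import java.util.List;

public class DetectionParser {

    // Simple value holder for one detection (illustrative, not part of the demo app).
    public static class Detection {
        public final int classIndex;
        public final float confidence;
        public final float xmin, ymin, xmax, ymax; // normalized to [0, 1]

        Detection(int classIndex, float confidence, float xmin, float ymin, float xmax, float ymax) {
            this.classIndex = classIndex;
            this.confidence = confidence;
            this.xmin = xmin;
            this.ymin = ymin;
            this.xmax = xmax;
            this.ymax = ymax;
        }
    }

    // Split the flat array returned by MobileNetssd.Detect() into one Detection per 6 floats.
    public static List<Detection> parseDetections(float[] result) {
        List<Detection> detections = new ArrayList<>();
        for (int i = 0; i + 6 <= result.length; i += 6) {
            detections.add(new Detection(
                    (int) result[i],   // class index into words.txt
                    result[i + 1],     // confidence score
                    result[i + 2],     // xmin (fraction of image width)
                    result[i + 3],     // ymin (fraction of image height)
                    result[i + 4],     // xmax
                    result[i + 5]));   // ymax
        }
        return detections;
    }
}
```

Multiplying xmin/xmax by getWidth() and ymin/ymax by getHeight() then reproduces the rectangles drawn in predict_image().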
--------------------------------------------------------------------------------
/MobileNetSSD_demo/app/src/main/java/com/example/che/mobilenetssd_demo/MobileNetssd.java:
--------------------------------------------------------------------------------
package com.example.che.mobilenetssd_demo;

import android.graphics.Bitmap;

/**
 * Java interface for MobileNetssd. Each native method here corresponds to a JNI function
 * implemented in MobileNetssd.cpp.
 */
public class MobileNetssd {

    public native boolean Init(byte[] param, byte[] bin); // initialization: load param and model data
    public native float[] Detect(Bitmap bitmap); // detection: run inference on a bitmap

    // Load the 'MobileNetssd' native library on application startup.
    static {
        System.loadLibrary("MobileNetssd");
    }
}

--------------------------------------------------------------------------------
/MobileNetSSD_demo/app/src/main/java/com/example/che/mobilenetssd_demo/PhotoUtil.java:
--------------------------------------------------------------------------------
package com.example.che.mobilenetssd_demo;

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.provider.MediaStore;

public class PhotoUtil {
    // pick a picture from the gallery
    public static void use_photo(Activity activity, int requestCode) {
        Intent intent = new Intent(Intent.ACTION_PICK);
        intent.setType("image/*");
        activity.startActivityForResult(intent, requestCode);
    }

    // resolve a file path from a content Uri
    public static String get_path_from_URI(Context context, Uri uri) {
        String result;
        Cursor cursor = context.getContentResolver().query(uri, null, null, null, null);
        if (cursor == null) {
            result = uri.getPath();
        } else {
            cursor.moveToFirst();
            int idx = cursor.getColumnIndex(MediaStore.Images.ImageColumns.DATA);
            result = cursor.getString(idx);
            cursor.close();
        }
        return result;
    }

    // compress picture
    public static Bitmap getScaleBitmap(String filePath) {
        BitmapFactory.Options opt = new BitmapFactory.Options();
        opt.inJustDecodeBounds = true;
        BitmapFactory.decodeFile(filePath, opt);

        int bmpWidth = opt.outWidth;
        int bmpHeight = opt.outHeight;

        int maxSize = 500;

        // compress picture with inSampleSize (power-of-two subsampling)
        opt.inSampleSize = 1;
        while (true) {
            if (bmpWidth / opt.inSampleSize < maxSize || bmpHeight / opt.inSampleSize < maxSize) {
                break;
            }
            opt.inSampleSize *= 2;
        }
        opt.inJustDecodeBounds = false;
        return BitmapFactory.decodeFile(filePath, opt);
    }
}

--------------------------------------------------------------------------------
/MobileNetSSD_demo/app/src/main/jniLibs/armeabi-v7a/libncnn.a:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chehongshu/ncnnforandroid_objectiondetection_Mobilenetssd/c87964b9ddecf7f9890416f70e9009f5ffc58b3e/MobileNetSSD_demo/app/src/main/jniLibs/armeabi-v7a/libncnn.a
--------------------------------------------------------------------------------
/MobileNetSSD_demo/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
(launcher icon foreground vector drawable; XML content not preserved in this dump)
--------------------------------------------------------------------------------
/MobileNetSSD_demo/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
(launcher icon background vector drawable; XML content not preserved in this dump)
--------------------------------------------------------------------------------
/MobileNetSSD_demo/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
(main activity layout declaring the show_image ImageView, the result_text TextView and the use_photo Button referenced by MainActivity; XML content not preserved in this dump)
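Taken together, the Java pieces above boil down to a short inference path: read the two model files from assets, hand them to MobileNetssd.Init(), downscale the chosen photo with PhotoUtil.getScaleBitmap(), resize it to the 300x300 network input, and call Detect(). The condensed sketch below mirrors what MainActivity already does; the SsdRunner class name and the readAsset helper are illustrative, not part of the repo.

```java
import android.content.Context;
import android.graphics.Bitmap;
import java.io.IOException;
import java.io.InputStream;

// Condensed sketch of the demo's inference path (same package as MobileNetssd and PhotoUtil).
public class SsdRunner {

    private final MobileNetssd net = new MobileNetssd();

    // Load the ncnn param and weight files shipped in assets/.
    public boolean loadModel(Context context) throws IOException {
        byte[] param = readAsset(context, "MobileNetSSD_deploy.param.bin"); // network structure
        byte[] bin = readAsset(context, "MobileNetSSD_deploy.bin");         // weights
        return net.Init(param, bin);
    }

    // Run detection on an image file and return the raw result array (6 floats per object).
    public float[] detect(String imagePath) {
        Bitmap scaled = PhotoUtil.getScaleBitmap(imagePath);              // downsample large photos first
        Bitmap rgba = scaled.copy(Bitmap.Config.ARGB_8888, true);
        Bitmap input = Bitmap.createScaledBitmap(rgba, 300, 300, false);  // SSD input is 1x3x300x300
        return net.Detect(input);
    }

    // Read an asset file fully into a byte array (illustrative helper).
    private static byte[] readAsset(Context context, String name) throws IOException {
        InputStream in = context.getAssets().open(name);
        byte[] buf = new byte[in.available()];
        in.read(buf);
        in.close();
        return buf;
    }
}
```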