├── .gitignore ├── README.md ├── app ├── build.gradle └── src │ └── main │ ├── AndroidManifest.xml │ ├── assets │ ├── yolov6n.bin │ └── yolov6n.param │ ├── java │ └── com │ │ └── tencent │ │ └── ncnnyolov6 │ │ ├── MainActivity.java │ │ └── NcnnYolov6.java │ ├── jni │ ├── CMakeLists.txt │ ├── ndkcamera.cpp │ ├── ndkcamera.h │ ├── yolo.cpp │ ├── yolo.h │ └── yoloncnn.cpp │ └── res │ ├── layout │ └── main.xml │ └── values │ └── strings.xml ├── build.gradle ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── local.properties ├── screenshot.png └── settings.gradle /.gitignore: -------------------------------------------------------------------------------- 1 | .gradle/ 2 | .idea/ 3 | app/.cxx/ 4 | build/ 5 | jni/ -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | The yolov6 object detection 3 | 4 | This is a sample ncnn Android project; it depends on the ncnn library and opencv 5 | 6 | https://github.com/Tencent/ncnn 7 | 8 | https://github.com/nihui/opencv-mobile 9 | 10 | 11 | ## how to build and run 12 | ### step1 13 | https://github.com/Tencent/ncnn/releases 14 | 15 | * Download ncnn-YYYYMMDD-android-vulkan.zip or build ncnn for android yourself 16 | * Extract ncnn-YYYYMMDD-android-vulkan.zip into **app/src/main/jni** and change the **ncnn_DIR** path to yours in **app/src/main/jni/CMakeLists.txt** 17 | 18 | ### step2 19 | https://github.com/nihui/opencv-mobile 20 | 21 | * Download opencv-mobile-XYZ-android.zip 22 | * Extract opencv-mobile-XYZ-android.zip into **app/src/main/jni** and change the **OpenCV_DIR** path to yours in **app/src/main/jni/CMakeLists.txt** 23 | 24 | ### step3 25 | * Open this project with Android Studio, build it and enjoy! 
26 | 27 | ## some notes 28 | * Android ndk camera is used for best efficiency 29 | * Crashes may happen on very old devices that lack the HAL3 camera interface 30 | * All models are manually modified to accept dynamic input shape 31 | * Most small models run slower on GPU than on CPU; this is common 32 | * FPS may be lower in a dark environment because of longer camera exposure time 33 | 34 | ## screenshot 35 | ![](screenshot.png) 36 | 37 | ## reference 38 | https://github.com/meituan/YOLOv6 39 | https://github.com/nihui/ncnn-android-nanodet 40 | https://github.com/Megvii-BaseDetection/YOLOX -------------------------------------------------------------------------------- /app/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'com.android.application' 2 | 3 | android { 4 | compileSdkVersion 24 5 | buildToolsVersion "29.0.2" 6 | 7 | defaultConfig { 8 | applicationId "com.tencent.ncnnyolov6" 9 | archivesBaseName = "$applicationId" 10 | 11 | minSdkVersion 24 12 | } 13 | 14 | externalNativeBuild { 15 | cmake { 16 | version "3.10.2" 17 | path file('src/main/jni/CMakeLists.txt') 18 | } 19 | } 20 | 21 | dependencies { 22 | implementation 'com.android.support:support-v4:24.0.0' 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /app/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | 8 | 9 | 10 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /app/src/main/assets/yolov6n.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FeiGeChuanShu/ncnn-android-yolov6/cdb14fceed18910177f8cc6e43647c8be3c9d3ce/app/src/main/assets/yolov6n.bin -------------------------------------------------------------------------------- /app/src/main/assets/yolov6n.param: 
-------------------------------------------------------------------------------- 1 | 7767517 2 | 102 117 3 | Input image_arrays 0 1 images 4 | Convolution Conv_0 1 1 images 148 0=16 1=3 3=2 4=1 5=1 6=432 9=1 5 | Convolution Conv_2 1 1 148 150 0=32 1=3 3=2 4=1 5=1 6=4608 9=1 6 | Convolution Conv_4 1 1 150 152 0=32 1=3 4=1 5=1 6=9216 9=1 7 | Convolution Conv_6 1 1 152 154 0=32 1=3 4=1 5=1 6=9216 9=1 8 | Convolution Conv_8 1 1 154 156 0=64 1=3 3=2 4=1 5=1 6=18432 9=1 9 | Convolution Conv_10 1 1 156 158 0=64 1=3 4=1 5=1 6=36864 9=1 10 | Convolution Conv_12 1 1 158 160 0=64 1=3 4=1 5=1 6=36864 9=1 11 | Convolution Conv_14 1 1 160 162 0=64 1=3 4=1 5=1 6=36864 9=1 12 | Convolution Conv_16 1 1 162 164 0=64 1=3 4=1 5=1 6=36864 9=1 13 | Split splitncnn_0 1 2 164 164_splitncnn_0 164_splitncnn_1 14 | Convolution Conv_18 1 1 164_splitncnn_1 166 0=128 1=3 3=2 4=1 5=1 6=73728 9=1 15 | Convolution Conv_20 1 1 166 168 0=128 1=3 4=1 5=1 6=147456 9=1 16 | Convolution Conv_22 1 1 168 170 0=128 1=3 4=1 5=1 6=147456 9=1 17 | Convolution Conv_24 1 1 170 172 0=128 1=3 4=1 5=1 6=147456 9=1 18 | Convolution Conv_26 1 1 172 174 0=128 1=3 4=1 5=1 6=147456 9=1 19 | Convolution Conv_28 1 1 174 176 0=128 1=3 4=1 5=1 6=147456 9=1 20 | Convolution Conv_30 1 1 176 178 0=128 1=3 4=1 5=1 6=147456 9=1 21 | Split splitncnn_1 1 2 178 178_splitncnn_0 178_splitncnn_1 22 | Convolution Conv_32 1 1 178_splitncnn_1 180 0=256 1=3 3=2 4=1 5=1 6=294912 9=1 23 | Convolution Conv_34 1 1 180 182 0=256 1=3 4=1 5=1 6=589824 9=1 24 | Convolution Conv_36 1 1 182 184 0=256 1=3 4=1 5=1 6=589824 9=1 25 | Convolution Conv_38 1 1 184 187 0=128 1=1 5=1 6=32768 9=1 26 | Split splitncnn_2 1 2 187 187_splitncnn_0 187_splitncnn_1 27 | Pooling MaxPool_40 1 1 187_splitncnn_1 188 1=5 3=2 5=1 28 | Split splitncnn_3 1 2 188 188_splitncnn_0 188_splitncnn_1 29 | Pooling MaxPool_41 1 1 188_splitncnn_1 189 1=5 3=2 5=1 30 | Split splitncnn_4 1 2 189 189_splitncnn_0 189_splitncnn_1 31 | Pooling MaxPool_42 1 1 189_splitncnn_1 190 1=5 3=2 
5=1 32 | Concat Concat_43 4 1 187_splitncnn_0 188_splitncnn_0 189_splitncnn_0 190 191 33 | Convolution Conv_44 1 1 191 194 0=256 1=1 5=1 6=131072 9=1 34 | Convolution Conv_46 1 1 194 197 0=64 1=1 5=1 6=16384 9=1 35 | Split splitncnn_5 1 2 197 197_splitncnn_0 197_splitncnn_1 36 | Deconvolution ConvTranspose_48 1 1 197_splitncnn_1 198 0=64 1=2 3=2 5=1 6=16384 37 | Concat Concat_49 2 1 198 178_splitncnn_0 199 38 | Convolution Conv_50 1 1 199 201 0=64 1=3 4=1 5=1 6=110592 9=1 39 | Convolution Conv_52 1 1 201 203 0=64 1=3 4=1 5=1 6=36864 9=1 40 | Convolution Conv_54 1 1 203 205 0=64 1=3 4=1 5=1 6=36864 9=1 41 | Convolution Conv_56 1 1 205 207 0=64 1=3 4=1 5=1 6=36864 9=1 42 | Convolution Conv_58 1 1 207 210 0=32 1=1 5=1 6=2048 9=1 43 | Split splitncnn_6 1 2 210 210_splitncnn_0 210_splitncnn_1 44 | Deconvolution ConvTranspose_60 1 1 210_splitncnn_1 211 0=32 1=2 3=2 5=1 6=4096 45 | Concat Concat_61 2 1 211 164_splitncnn_0 212 46 | Convolution Conv_62 1 1 212 214 0=32 1=3 4=1 5=1 6=27648 9=1 47 | Convolution Conv_64 1 1 214 216 0=32 1=3 4=1 5=1 6=9216 9=1 48 | Convolution Conv_66 1 1 216 218 0=32 1=3 4=1 5=1 6=9216 9=1 49 | Convolution Conv_68 1 1 218 220 0=32 1=3 4=1 5=1 6=9216 9=1 50 | Split splitncnn_7 1 2 220 220_splitncnn_0 220_splitncnn_1 51 | Convolution Conv_70 1 1 220_splitncnn_1 223 0=32 1=3 3=2 4=1 5=1 6=9216 9=1 52 | Concat Concat_72 2 1 223 210_splitncnn_0 224 53 | Convolution Conv_73 1 1 224 226 0=64 1=3 4=1 5=1 6=36864 9=1 54 | Convolution Conv_75 1 1 226 228 0=64 1=3 4=1 5=1 6=36864 9=1 55 | Convolution Conv_77 1 1 228 230 0=64 1=3 4=1 5=1 6=36864 9=1 56 | Convolution Conv_79 1 1 230 232 0=64 1=3 4=1 5=1 6=36864 9=1 57 | Split splitncnn_8 1 2 232 232_splitncnn_0 232_splitncnn_1 58 | Convolution Conv_81 1 1 232_splitncnn_1 235 0=64 1=3 3=2 4=1 5=1 6=36864 9=1 59 | Concat Concat_83 2 1 235 197_splitncnn_0 236 60 | Convolution Conv_84 1 1 236 238 0=128 1=3 4=1 5=1 6=147456 9=1 61 | Convolution Conv_86 1 1 238 240 0=128 1=3 4=1 5=1 6=147456 9=1 62 | Convolution 
Conv_88 1 1 240 242 0=128 1=3 4=1 5=1 6=147456 9=1 63 | Convolution Conv_90 1 1 242 244 0=128 1=3 4=1 5=1 6=147456 9=1 64 | Convolution Conv_92 1 1 220_splitncnn_0 245 0=32 1=1 5=1 6=1024 65 | Swish Mul_94 1 1 245 247 66 | Split splitncnn_9 1 2 247 247_splitncnn_0 247_splitncnn_1 67 | Convolution Conv_95 1 1 247_splitncnn_1 248 0=32 1=3 4=1 5=1 6=9216 68 | Swish Mul_97 1 1 248 250 69 | Convolution Conv_98 1 1 250 251 0=80 1=1 5=1 6=2560 9=4 70 | Convolution Conv_99 1 1 247_splitncnn_0 252 0=32 1=3 4=1 5=1 6=9216 71 | Swish Mul_101 1 1 252 254 72 | Split splitncnn_10 1 2 254 254_splitncnn_0 254_splitncnn_1 73 | Convolution Conv_102 1 1 254_splitncnn_1 255 0=4 1=1 5=1 6=128 74 | Convolution Conv_103 1 1 254_splitncnn_0 256 0=1 1=1 5=1 6=32 9=4 75 | Concat Concat_104 3 1 255 256 251 257 76 | Convolution Conv_120 1 1 232_splitncnn_0 277 0=64 1=1 5=1 6=4096 77 | Swish Mul_122 1 1 277 279 78 | Split splitncnn_11 1 2 279 279_splitncnn_0 279_splitncnn_1 79 | Convolution Conv_123 1 1 279_splitncnn_1 280 0=64 1=3 4=1 5=1 6=36864 80 | Swish Mul_125 1 1 280 282 81 | Convolution Conv_126 1 1 282 283 0=80 1=1 5=1 6=5120 9=4 82 | Convolution Conv_127 1 1 279_splitncnn_0 284 0=64 1=3 4=1 5=1 6=36864 83 | Swish Mul_129 1 1 284 286 84 | Split splitncnn_12 1 2 286 286_splitncnn_0 286_splitncnn_1 85 | Convolution Conv_130 1 1 286_splitncnn_1 287 0=4 1=1 5=1 6=256 86 | Convolution Conv_131 1 1 286_splitncnn_0 288 0=1 1=1 5=1 6=64 9=4 87 | Concat Concat_132 3 1 287 288 283 289 88 | Convolution Conv_148 1 1 244 309 0=128 1=1 5=1 6=16384 89 | Swish Mul_150 1 1 309 311 90 | Split splitncnn_13 1 2 311 311_splitncnn_0 311_splitncnn_1 91 | Convolution Conv_151 1 1 311_splitncnn_1 312 0=128 1=3 4=1 5=1 6=147456 92 | Swish Mul_153 1 1 312 314 93 | Convolution Conv_154 1 1 314 315 0=80 1=1 5=1 6=10240 9=4 94 | Convolution Conv_155 1 1 311_splitncnn_0 316 0=128 1=3 4=1 5=1 6=147456 95 | Swish Mul_157 1 1 316 318 96 | Split splitncnn_14 1 2 318 318_splitncnn_0 318_splitncnn_1 97 | Convolution 
Conv_158 1 1 318_splitncnn_1 319 0=4 1=1 5=1 6=512 98 | Convolution Conv_159 1 1 318_splitncnn_0 320 0=1 1=1 5=1 6=128 9=4 99 | Concat Concat_160 3 1 319 320 315 321 100 | Reshape Reshape_405 1 1 257 2000 0=-1 1=85 101 | Reshape Reshape_413 1 1 289 2001 0=-1 1=85 102 | Reshape Reshape_421 1 1 321 2002 0=-1 1=85 103 | Concat Concat_422 3 1 2000 2001 2002 2003 0=1 104 | Permute Transpose_423 1 1 2003 output 0=1 105 | -------------------------------------------------------------------------------- /app/src/main/java/com/tencent/ncnnyolov6/MainActivity.java: -------------------------------------------------------------------------------- 1 | // Tencent is pleased to support the open source community by making ncnn available. 2 | // 3 | // Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved. 4 | // 5 | // Licensed under the BSD 3-Clause License (the "License"); you may not use this file except 6 | // in compliance with the License. You may obtain a copy of the License at 7 | // 8 | // https://opensource.org/licenses/BSD-3-Clause 9 | // 10 | // Unless required by applicable law or agreed to in writing, software distributed 11 | // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR 12 | // CONDITIONS OF ANY KIND, either express or implied. See the License for the 13 | // specific language governing permissions and limitations under the License. 
14 | 15 | package com.tencent.ncnnyolov6; 16 | 17 | import android.Manifest; 18 | import android.app.Activity; 19 | import android.content.pm.PackageManager; 20 | import android.graphics.PixelFormat; 21 | import android.os.Bundle; 22 | import android.util.Log; 23 | import android.view.Surface; 24 | import android.view.SurfaceHolder; 25 | import android.view.SurfaceView; 26 | import android.view.View; 27 | import android.view.WindowManager; 28 | import android.widget.AdapterView; 29 | import android.widget.Button; 30 | import android.widget.Spinner; 31 | 32 | import android.support.v4.app.ActivityCompat; 33 | import android.support.v4.content.ContextCompat; 34 | 35 | public class MainActivity extends Activity implements SurfaceHolder.Callback 36 | { 37 | public static final int REQUEST_CAMERA = 100; 38 | 39 | private NcnnYolov6 ncnnyolov6 = new NcnnYolov6(); 40 | private int facing = 0; 41 | 42 | private Spinner spinnerModel; 43 | private Spinner spinnerCPUGPU; 44 | private int current_model = 0; 45 | private int current_cpugpu = 0; 46 | 47 | private SurfaceView cameraView; 48 | 49 | /** Called when the activity is first created. 
*/ 50 | @Override 51 | public void onCreate(Bundle savedInstanceState) 52 | { 53 | super.onCreate(savedInstanceState); 54 | setContentView(R.layout.main); 55 | 56 | getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); 57 | 58 | cameraView = (SurfaceView) findViewById(R.id.cameraview); 59 | 60 | cameraView.getHolder().setFormat(PixelFormat.RGBA_8888); 61 | cameraView.getHolder().addCallback(this); 62 | 63 | Button buttonSwitchCamera = (Button) findViewById(R.id.buttonSwitchCamera); 64 | buttonSwitchCamera.setOnClickListener(new View.OnClickListener() { 65 | @Override 66 | public void onClick(View arg0) { 67 | 68 | int new_facing = 1 - facing; 69 | 70 | ncnnyolov6.closeCamera(); 71 | 72 | ncnnyolov6.openCamera(new_facing); 73 | 74 | facing = new_facing; 75 | } 76 | }); 77 | 78 | spinnerModel = (Spinner) findViewById(R.id.spinnerModel); 79 | spinnerModel.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() { 80 | @Override 81 | public void onItemSelected(AdapterView arg0, View arg1, int position, long id) 82 | { 83 | if (position != current_model) 84 | { 85 | current_model = position; 86 | reload(); 87 | } 88 | } 89 | 90 | @Override 91 | public void onNothingSelected(AdapterView arg0) 92 | { 93 | } 94 | }); 95 | 96 | spinnerCPUGPU = (Spinner) findViewById(R.id.spinnerCPUGPU); 97 | spinnerCPUGPU.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() { 98 | @Override 99 | public void onItemSelected(AdapterView arg0, View arg1, int position, long id) 100 | { 101 | if (position != current_cpugpu) 102 | { 103 | current_cpugpu = position; 104 | reload(); 105 | } 106 | } 107 | 108 | @Override 109 | public void onNothingSelected(AdapterView arg0) 110 | { 111 | } 112 | }); 113 | 114 | reload(); 115 | } 116 | 117 | private void reload() 118 | { 119 | boolean ret_init = ncnnyolov6.loadModel(getAssets(), current_model, current_cpugpu); 120 | if (!ret_init) 121 | { 122 | Log.e("MainActivity", "ncnnyolov6 loadModel failed"); 123 | } 124 | 
} 125 | 126 | @Override 127 | public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) 128 | { 129 | ncnnyolov6.setOutputWindow(holder.getSurface()); 130 | } 131 | 132 | @Override 133 | public void surfaceCreated(SurfaceHolder holder) 134 | { 135 | } 136 | 137 | @Override 138 | public void surfaceDestroyed(SurfaceHolder holder) 139 | { 140 | } 141 | 142 | @Override 143 | public void onResume() 144 | { 145 | super.onResume(); 146 | 147 | if (ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.CAMERA) == PackageManager.PERMISSION_DENIED) 148 | { 149 | ActivityCompat.requestPermissions(this, new String[] {Manifest.permission.CAMERA}, REQUEST_CAMERA); 150 | } 151 | 152 | ncnnyolov6.openCamera(facing); 153 | } 154 | 155 | @Override 156 | public void onPause() 157 | { 158 | super.onPause(); 159 | 160 | ncnnyolov6.closeCamera(); 161 | } 162 | } 163 | -------------------------------------------------------------------------------- /app/src/main/java/com/tencent/ncnnyolov6/NcnnYolov6.java: -------------------------------------------------------------------------------- 1 | // Tencent is pleased to support the open source community by making ncnn available. 2 | // 3 | // Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved. 4 | // 5 | // Licensed under the BSD 3-Clause License (the "License"); you may not use this file except 6 | // in compliance with the License. You may obtain a copy of the License at 7 | // 8 | // https://opensource.org/licenses/BSD-3-Clause 9 | // 10 | // Unless required by applicable law or agreed to in writing, software distributed 11 | // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR 12 | // CONDITIONS OF ANY KIND, either express or implied. See the License for the 13 | // specific language governing permissions and limitations under the License. 
14 | 15 | package com.tencent.ncnnyolov6; 16 | 17 | import android.content.res.AssetManager; 18 | import android.view.Surface; 19 | 20 | public class NcnnYolov6 21 | { 22 | public native boolean loadModel(AssetManager mgr, int modelid, int cpugpu); 23 | public native boolean openCamera(int facing); 24 | public native boolean closeCamera(); 25 | public native boolean setOutputWindow(Surface surface); 26 | 27 | static { 28 | System.loadLibrary("ncnnyolov6"); 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /app/src/main/jni/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | project(ncnnyolov6) 2 | 3 | cmake_minimum_required(VERSION 3.10) 4 | 5 | set(OpenCV_DIR ${CMAKE_SOURCE_DIR}/opencv-mobile-4.5.3-android/sdk/native/jni) 6 | find_package(OpenCV REQUIRED core imgproc) 7 | 8 | set(ncnn_DIR ${CMAKE_SOURCE_DIR}/ncnn-20220216-android-vulkan/${ANDROID_ABI}/lib/cmake/ncnn) 9 | find_package(ncnn REQUIRED) 10 | 11 | add_library(ncnnyolov6 SHARED yoloncnn.cpp yolo.cpp ndkcamera.cpp) 12 | 13 | target_link_libraries(ncnnyolov6 ncnn ${OpenCV_LIBS} camera2ndk mediandk) 14 | -------------------------------------------------------------------------------- /app/src/main/jni/ndkcamera.cpp: -------------------------------------------------------------------------------- 1 | // Tencent is pleased to support the open source community by making ncnn available. 2 | // 3 | // Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved. 4 | // 5 | // Licensed under the BSD 3-Clause License (the "License"); you may not use this file except 6 | // in compliance with the License. 
You may obtain a copy of the License at 7 | // 8 | // https://opensource.org/licenses/BSD-3-Clause 9 | // 10 | // Unless required by applicable law or agreed to in writing, software distributed 11 | // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR 12 | // CONDITIONS OF ANY KIND, either express or implied. See the License for the 13 | // specific language governing permissions and limitations under the License. 14 | 15 | #include "ndkcamera.h" 16 | 17 | #include 18 | 19 | #include 20 | 21 | #include 22 | 23 | #include "mat.h" 24 | 25 | static void onDisconnected(void* context, ACameraDevice* device) 26 | { 27 | __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onDisconnected %p", device); 28 | } 29 | 30 | static void onError(void* context, ACameraDevice* device, int error) 31 | { 32 | __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onError %p %d", device, error); 33 | } 34 | 35 | static void onImageAvailable(void* context, AImageReader* reader) 36 | { 37 | // __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onImageAvailable %p", reader); 38 | 39 | AImage* image = 0; 40 | media_status_t status = AImageReader_acquireLatestImage(reader, &image); 41 | 42 | if (status != AMEDIA_OK) 43 | { 44 | // error 45 | return; 46 | } 47 | 48 | int32_t format; 49 | AImage_getFormat(image, &format); 50 | 51 | // assert format == AIMAGE_FORMAT_YUV_420_888 52 | 53 | int32_t width = 0; 54 | int32_t height = 0; 55 | AImage_getWidth(image, &width); 56 | AImage_getHeight(image, &height); 57 | 58 | int32_t y_pixelStride = 0; 59 | int32_t u_pixelStride = 0; 60 | int32_t v_pixelStride = 0; 61 | AImage_getPlanePixelStride(image, 0, &y_pixelStride); 62 | AImage_getPlanePixelStride(image, 1, &u_pixelStride); 63 | AImage_getPlanePixelStride(image, 2, &v_pixelStride); 64 | 65 | int32_t y_rowStride = 0; 66 | int32_t u_rowStride = 0; 67 | int32_t v_rowStride = 0; 68 | AImage_getPlaneRowStride(image, 0, &y_rowStride); 69 | AImage_getPlaneRowStride(image, 1, 
&u_rowStride); 70 | AImage_getPlaneRowStride(image, 2, &v_rowStride); 71 | 72 | uint8_t* y_data = 0; 73 | uint8_t* u_data = 0; 74 | uint8_t* v_data = 0; 75 | int y_len = 0; 76 | int u_len = 0; 77 | int v_len = 0; 78 | AImage_getPlaneData(image, 0, &y_data, &y_len); 79 | AImage_getPlaneData(image, 1, &u_data, &u_len); 80 | AImage_getPlaneData(image, 2, &v_data, &v_len); 81 | 82 | if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width) 83 | { 84 | // already nv21 :) 85 | ((NdkCamera*)context)->on_image((unsigned char*)y_data, (int)width, (int)height); 86 | } 87 | else 88 | { 89 | // construct nv21 90 | unsigned char* nv21 = new unsigned char[width * height + width * height / 2]; 91 | { 92 | // Y 93 | unsigned char* yptr = nv21; 94 | for (int y=0; yon_image((unsigned char*)nv21, (int)width, (int)height); 123 | 124 | delete[] nv21; 125 | } 126 | 127 | AImage_delete(image); 128 | } 129 | 130 | static void onSessionActive(void* context, ACameraCaptureSession *session) 131 | { 132 | __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onSessionActive %p", session); 133 | } 134 | 135 | static void onSessionReady(void* context, ACameraCaptureSession *session) 136 | { 137 | __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onSessionReady %p", session); 138 | } 139 | 140 | static void onSessionClosed(void* context, ACameraCaptureSession *session) 141 | { 142 | __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onSessionClosed %p", session); 143 | } 144 | 145 | void onCaptureFailed(void* context, ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure) 146 | { 147 | __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onCaptureFailed %p %p %p", session, request, failure); 148 | } 149 | 150 | void onCaptureSequenceCompleted(void* context, ACameraCaptureSession* session, int sequenceId, int64_t 
frameNumber) 151 | { 152 | __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onCaptureSequenceCompleted %p %d %ld", session, sequenceId, frameNumber); 153 | } 154 | 155 | void onCaptureSequenceAborted(void* context, ACameraCaptureSession* session, int sequenceId) 156 | { 157 | __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onCaptureSequenceAborted %p %d", session, sequenceId); 158 | } 159 | 160 | void onCaptureCompleted(void* context, ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result) 161 | { 162 | // __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "onCaptureCompleted %p %p %p", session, request, result); 163 | } 164 | 165 | NdkCamera::NdkCamera() 166 | { 167 | camera_facing = 0; 168 | camera_orientation = 0; 169 | 170 | camera_manager = 0; 171 | camera_device = 0; 172 | image_reader = 0; 173 | image_reader_surface = 0; 174 | image_reader_target = 0; 175 | capture_request = 0; 176 | capture_session_output_container = 0; 177 | capture_session_output = 0; 178 | capture_session = 0; 179 | 180 | 181 | // setup imagereader and its surface 182 | { 183 | AImageReader_new(640, 480, AIMAGE_FORMAT_YUV_420_888, /*maxImages*/2, &image_reader); 184 | 185 | AImageReader_ImageListener listener; 186 | listener.context = this; 187 | listener.onImageAvailable = onImageAvailable; 188 | 189 | AImageReader_setImageListener(image_reader, &listener); 190 | 191 | AImageReader_getWindow(image_reader, &image_reader_surface); 192 | 193 | ANativeWindow_acquire(image_reader_surface); 194 | } 195 | } 196 | 197 | NdkCamera::~NdkCamera() 198 | { 199 | close(); 200 | 201 | if (image_reader) 202 | { 203 | AImageReader_delete(image_reader); 204 | image_reader = 0; 205 | } 206 | 207 | if (image_reader_surface) 208 | { 209 | ANativeWindow_release(image_reader_surface); 210 | image_reader_surface = 0; 211 | } 212 | } 213 | 214 | int NdkCamera::open(int _camera_facing) 215 | { 216 | __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "open"); 217 | 218 | 
camera_facing = _camera_facing; 219 | 220 | camera_manager = ACameraManager_create(); 221 | 222 | // find front camera 223 | std::string camera_id; 224 | { 225 | ACameraIdList* camera_id_list = 0; 226 | ACameraManager_getCameraIdList(camera_manager, &camera_id_list); 227 | 228 | for (int i = 0; i < camera_id_list->numCameras; ++i) 229 | { 230 | const char* id = camera_id_list->cameraIds[i]; 231 | ACameraMetadata* camera_metadata = 0; 232 | ACameraManager_getCameraCharacteristics(camera_manager, id, &camera_metadata); 233 | 234 | // query faceing 235 | acamera_metadata_enum_android_lens_facing_t facing = ACAMERA_LENS_FACING_FRONT; 236 | { 237 | ACameraMetadata_const_entry e = { 0 }; 238 | ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_LENS_FACING, &e); 239 | facing = (acamera_metadata_enum_android_lens_facing_t)e.data.u8[0]; 240 | } 241 | 242 | if (camera_facing == 0 && facing != ACAMERA_LENS_FACING_FRONT) 243 | { 244 | ACameraMetadata_free(camera_metadata); 245 | continue; 246 | } 247 | 248 | if (camera_facing == 1 && facing != ACAMERA_LENS_FACING_BACK) 249 | { 250 | ACameraMetadata_free(camera_metadata); 251 | continue; 252 | } 253 | 254 | camera_id = id; 255 | 256 | // query orientation 257 | int orientation = 0; 258 | { 259 | ACameraMetadata_const_entry e = { 0 }; 260 | ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_ORIENTATION, &e); 261 | 262 | orientation = (int)e.data.i32[0]; 263 | } 264 | 265 | camera_orientation = orientation; 266 | 267 | ACameraMetadata_free(camera_metadata); 268 | 269 | break; 270 | } 271 | 272 | ACameraManager_deleteCameraIdList(camera_id_list); 273 | } 274 | 275 | __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "open %s %d", camera_id.c_str(), camera_orientation); 276 | 277 | // open camera 278 | { 279 | ACameraDevice_StateCallbacks camera_device_state_callbacks; 280 | camera_device_state_callbacks.context = this; 281 | camera_device_state_callbacks.onDisconnected = onDisconnected; 282 | 
camera_device_state_callbacks.onError = onError; 283 | 284 | ACameraManager_openCamera(camera_manager, camera_id.c_str(), &camera_device_state_callbacks, &camera_device); 285 | } 286 | 287 | // capture request 288 | { 289 | ACameraDevice_createCaptureRequest(camera_device, TEMPLATE_PREVIEW, &capture_request); 290 | 291 | ACameraOutputTarget_create(image_reader_surface, &image_reader_target); 292 | ACaptureRequest_addTarget(capture_request, image_reader_target); 293 | } 294 | 295 | // capture session 296 | { 297 | ACameraCaptureSession_stateCallbacks camera_capture_session_state_callbacks; 298 | camera_capture_session_state_callbacks.context = this; 299 | camera_capture_session_state_callbacks.onActive = onSessionActive; 300 | camera_capture_session_state_callbacks.onReady = onSessionReady; 301 | camera_capture_session_state_callbacks.onClosed = onSessionClosed; 302 | 303 | ACaptureSessionOutputContainer_create(&capture_session_output_container); 304 | 305 | ACaptureSessionOutput_create(image_reader_surface, &capture_session_output); 306 | 307 | ACaptureSessionOutputContainer_add(capture_session_output_container, capture_session_output); 308 | 309 | ACameraDevice_createCaptureSession(camera_device, capture_session_output_container, &camera_capture_session_state_callbacks, &capture_session); 310 | 311 | ACameraCaptureSession_captureCallbacks camera_capture_session_capture_callbacks; 312 | camera_capture_session_capture_callbacks.context = this; 313 | camera_capture_session_capture_callbacks.onCaptureStarted = 0; 314 | camera_capture_session_capture_callbacks.onCaptureProgressed = 0; 315 | camera_capture_session_capture_callbacks.onCaptureCompleted = onCaptureCompleted; 316 | camera_capture_session_capture_callbacks.onCaptureFailed = onCaptureFailed; 317 | camera_capture_session_capture_callbacks.onCaptureSequenceCompleted = onCaptureSequenceCompleted; 318 | camera_capture_session_capture_callbacks.onCaptureSequenceAborted = onCaptureSequenceAborted; 319 | 
camera_capture_session_capture_callbacks.onCaptureBufferLost = 0; 320 | 321 | ACameraCaptureSession_setRepeatingRequest(capture_session, &camera_capture_session_capture_callbacks, 1, &capture_request, nullptr); 322 | } 323 | 324 | return 0; 325 | } 326 | 327 | void NdkCamera::close() 328 | { 329 | __android_log_print(ANDROID_LOG_WARN, "NdkCamera", "close"); 330 | 331 | if (capture_session) 332 | { 333 | ACameraCaptureSession_stopRepeating(capture_session); 334 | ACameraCaptureSession_close(capture_session); 335 | capture_session = 0; 336 | } 337 | 338 | if (camera_device) 339 | { 340 | ACameraDevice_close(camera_device); 341 | camera_device = 0; 342 | } 343 | 344 | if (capture_session_output_container) 345 | { 346 | ACaptureSessionOutputContainer_free(capture_session_output_container); 347 | capture_session_output_container = 0; 348 | } 349 | 350 | if (capture_session_output) 351 | { 352 | ACaptureSessionOutput_free(capture_session_output); 353 | capture_session_output = 0; 354 | } 355 | 356 | if (capture_request) 357 | { 358 | ACaptureRequest_free(capture_request); 359 | capture_request = 0; 360 | } 361 | 362 | if (image_reader_target) 363 | { 364 | ACameraOutputTarget_free(image_reader_target); 365 | image_reader_target = 0; 366 | } 367 | 368 | if (camera_manager) 369 | { 370 | ACameraManager_delete(camera_manager); 371 | camera_manager = 0; 372 | } 373 | } 374 | 375 | void NdkCamera::on_image(const cv::Mat& rgb) const 376 | { 377 | } 378 | 379 | void NdkCamera::on_image(const unsigned char* nv21, int nv21_width, int nv21_height) const 380 | { 381 | // rotate nv21 382 | int w = 0; 383 | int h = 0; 384 | int rotate_type = 0; 385 | { 386 | if (camera_orientation == 0) 387 | { 388 | w = nv21_width; 389 | h = nv21_height; 390 | rotate_type = camera_facing == 0 ? 2 : 1; 391 | } 392 | if (camera_orientation == 90) 393 | { 394 | w = nv21_height; 395 | h = nv21_width; 396 | rotate_type = camera_facing == 0 ? 
5 : 6; 397 | } 398 | if (camera_orientation == 180) 399 | { 400 | w = nv21_width; 401 | h = nv21_height; 402 | rotate_type = camera_facing == 0 ? 4 : 3; 403 | } 404 | if (camera_orientation == 270) 405 | { 406 | w = nv21_height; 407 | h = nv21_width; 408 | rotate_type = camera_facing == 0 ? 7 : 8; 409 | } 410 | } 411 | 412 | cv::Mat nv21_rotated(h + h / 2, w, CV_8UC1); 413 | ncnn::kanna_rotate_yuv420sp(nv21, nv21_width, nv21_height, nv21_rotated.data, w, h, rotate_type); 414 | 415 | // nv21_rotated to rgb 416 | cv::Mat rgb(h, w, CV_8UC3); 417 | ncnn::yuv420sp2rgb(nv21_rotated.data, w, h, rgb.data); 418 | 419 | on_image(rgb); 420 | } 421 | 422 | static const int NDKCAMERAWINDOW_ID = 233; 423 | 424 | NdkCameraWindow::NdkCameraWindow() : NdkCamera() 425 | { 426 | sensor_manager = 0; 427 | sensor_event_queue = 0; 428 | accelerometer_sensor = 0; 429 | win = 0; 430 | 431 | accelerometer_orientation = 0; 432 | 433 | // sensor 434 | sensor_manager = ASensorManager_getInstance(); 435 | 436 | accelerometer_sensor = ASensorManager_getDefaultSensor(sensor_manager, ASENSOR_TYPE_ACCELEROMETER); 437 | } 438 | 439 | NdkCameraWindow::~NdkCameraWindow() 440 | { 441 | if (accelerometer_sensor) 442 | { 443 | ASensorEventQueue_disableSensor(sensor_event_queue, accelerometer_sensor); 444 | accelerometer_sensor = 0; 445 | } 446 | 447 | if (sensor_event_queue) 448 | { 449 | ASensorManager_destroyEventQueue(sensor_manager, sensor_event_queue); 450 | sensor_event_queue = 0; 451 | } 452 | 453 | if (win) 454 | { 455 | ANativeWindow_release(win); 456 | } 457 | } 458 | 459 | void NdkCameraWindow::set_window(ANativeWindow* _win) 460 | { 461 | if (win) 462 | { 463 | ANativeWindow_release(win); 464 | } 465 | 466 | win = _win; 467 | ANativeWindow_acquire(win); 468 | } 469 | 470 | void NdkCameraWindow::on_image_render(cv::Mat& rgb) const 471 | { 472 | } 473 | 474 | void NdkCameraWindow::on_image(const unsigned char* nv21, int nv21_width, int nv21_height) const 475 | { 476 | // resolve orientation 
from camera_orientation and accelerometer_sensor 477 | { 478 | if (!sensor_event_queue) 479 | { 480 | sensor_event_queue = ASensorManager_createEventQueue(sensor_manager, ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS), NDKCAMERAWINDOW_ID, 0, 0); 481 | 482 | ASensorEventQueue_enableSensor(sensor_event_queue, accelerometer_sensor); 483 | } 484 | 485 | int id = ALooper_pollAll(0, 0, 0, 0); 486 | if (id == NDKCAMERAWINDOW_ID) 487 | { 488 | ASensorEvent e[8]; 489 | ssize_t num_event = 0; 490 | while (ASensorEventQueue_hasEvents(sensor_event_queue) == 1) 491 | { 492 | num_event = ASensorEventQueue_getEvents(sensor_event_queue, e, 8); 493 | if (num_event < 0) 494 | break; 495 | } 496 | 497 | if (num_event > 0) 498 | { 499 | float acceleration_x = e[num_event - 1].acceleration.x; 500 | float acceleration_y = e[num_event - 1].acceleration.y; 501 | float acceleration_z = e[num_event - 1].acceleration.z; 502 | // __android_log_print(ANDROID_LOG_WARN, "NdkCameraWindow", "x = %f, y = %f, z = %f", x, y, z); 503 | 504 | if (acceleration_y > 7) 505 | { 506 | accelerometer_orientation = 0; 507 | } 508 | if (acceleration_x < -7) 509 | { 510 | accelerometer_orientation = 90; 511 | } 512 | if (acceleration_y < -7) 513 | { 514 | accelerometer_orientation = 180; 515 | } 516 | if (acceleration_x > 7) 517 | { 518 | accelerometer_orientation = 270; 519 | } 520 | } 521 | } 522 | } 523 | 524 | // roi crop and rotate nv21 525 | int nv21_roi_x = 0; 526 | int nv21_roi_y = 0; 527 | int nv21_roi_w = 0; 528 | int nv21_roi_h = 0; 529 | int roi_x = 0; 530 | int roi_y = 0; 531 | int roi_w = 0; 532 | int roi_h = 0; 533 | int rotate_type = 0; 534 | int render_w = 0; 535 | int render_h = 0; 536 | int render_rotate_type = 0; 537 | { 538 | int win_w = ANativeWindow_getWidth(win); 539 | int win_h = ANativeWindow_getHeight(win); 540 | 541 | if (accelerometer_orientation == 90 || accelerometer_orientation == 270) 542 | { 543 | std::swap(win_w, win_h); 544 | } 545 | 546 | const int final_orientation = 
(camera_orientation + accelerometer_orientation) % 360; 547 | 548 | if (final_orientation == 0 || final_orientation == 180) 549 | { 550 | if (win_w * nv21_height > win_h * nv21_width) 551 | { 552 | roi_w = nv21_width; 553 | roi_h = (nv21_width * win_h / win_w) / 2 * 2; 554 | roi_x = 0; 555 | roi_y = ((nv21_height - roi_h) / 2) / 2 * 2; 556 | } 557 | else 558 | { 559 | roi_h = nv21_height; 560 | roi_w = (nv21_height * win_w / win_h) / 2 * 2; 561 | roi_x = ((nv21_width - roi_w) / 2) / 2 * 2; 562 | roi_y = 0; 563 | } 564 | 565 | nv21_roi_x = roi_x; 566 | nv21_roi_y = roi_y; 567 | nv21_roi_w = roi_w; 568 | nv21_roi_h = roi_h; 569 | } 570 | if (final_orientation == 90 || final_orientation == 270) 571 | { 572 | if (win_w * nv21_width > win_h * nv21_height) 573 | { 574 | roi_w = nv21_height; 575 | roi_h = (nv21_height * win_h / win_w) / 2 * 2; 576 | roi_x = 0; 577 | roi_y = ((nv21_width - roi_h) / 2) / 2 * 2; 578 | } 579 | else 580 | { 581 | roi_h = nv21_width; 582 | roi_w = (nv21_width * win_w / win_h) / 2 * 2; 583 | roi_x = ((nv21_height - roi_w) / 2) / 2 * 2; 584 | roi_y = 0; 585 | } 586 | 587 | nv21_roi_x = roi_y; 588 | nv21_roi_y = roi_x; 589 | nv21_roi_w = roi_h; 590 | nv21_roi_h = roi_w; 591 | } 592 | 593 | if (camera_facing == 0) 594 | { 595 | if (camera_orientation == 0 && accelerometer_orientation == 0) 596 | { 597 | rotate_type = 2; 598 | } 599 | if (camera_orientation == 0 && accelerometer_orientation == 90) 600 | { 601 | rotate_type = 7; 602 | } 603 | if (camera_orientation == 0 && accelerometer_orientation == 180) 604 | { 605 | rotate_type = 4; 606 | } 607 | if (camera_orientation == 0 && accelerometer_orientation == 270) 608 | { 609 | rotate_type = 5; 610 | } 611 | if (camera_orientation == 90 && accelerometer_orientation == 0) 612 | { 613 | rotate_type = 5; 614 | } 615 | if (camera_orientation == 90 && accelerometer_orientation == 90) 616 | { 617 | rotate_type = 2; 618 | } 619 | if (camera_orientation == 90 && accelerometer_orientation == 180) 620 | { 621 
| rotate_type = 7; 622 | } 623 | if (camera_orientation == 90 && accelerometer_orientation == 270) 624 | { 625 | rotate_type = 4; 626 | } 627 | if (camera_orientation == 180 && accelerometer_orientation == 0) 628 | { 629 | rotate_type = 4; 630 | } 631 | if (camera_orientation == 180 && accelerometer_orientation == 90) 632 | { 633 | rotate_type = 5; 634 | } 635 | if (camera_orientation == 180 && accelerometer_orientation == 180) 636 | { 637 | rotate_type = 2; 638 | } 639 | if (camera_orientation == 180 && accelerometer_orientation == 270) 640 | { 641 | rotate_type = 7; 642 | } 643 | if (camera_orientation == 270 && accelerometer_orientation == 0) 644 | { 645 | rotate_type = 7; 646 | } 647 | if (camera_orientation == 270 && accelerometer_orientation == 90) 648 | { 649 | rotate_type = 4; 650 | } 651 | if (camera_orientation == 270 && accelerometer_orientation == 180) 652 | { 653 | rotate_type = 5; 654 | } 655 | if (camera_orientation == 270 && accelerometer_orientation == 270) 656 | { 657 | rotate_type = 2; 658 | } 659 | } 660 | else 661 | { 662 | if (final_orientation == 0) 663 | { 664 | rotate_type = 1; 665 | } 666 | if (final_orientation == 90) 667 | { 668 | rotate_type = 6; 669 | } 670 | if (final_orientation == 180) 671 | { 672 | rotate_type = 3; 673 | } 674 | if (final_orientation == 270) 675 | { 676 | rotate_type = 8; 677 | } 678 | } 679 | 680 | if (accelerometer_orientation == 0) 681 | { 682 | render_w = roi_w; 683 | render_h = roi_h; 684 | render_rotate_type = 1; 685 | } 686 | if (accelerometer_orientation == 90) 687 | { 688 | render_w = roi_h; 689 | render_h = roi_w; 690 | render_rotate_type = 8; 691 | } 692 | if (accelerometer_orientation == 180) 693 | { 694 | render_w = roi_w; 695 | render_h = roi_h; 696 | render_rotate_type = 3; 697 | } 698 | if (accelerometer_orientation == 270) 699 | { 700 | render_w = roi_h; 701 | render_h = roi_w; 702 | render_rotate_type = 6; 703 | } 704 | } 705 | 706 | // crop and rotate nv21 707 | cv::Mat nv21_croprotated(roi_h + 
roi_h / 2, roi_w, CV_8UC1); 708 | { 709 | const unsigned char* srcY = nv21 + nv21_roi_y * nv21_width + nv21_roi_x; 710 | unsigned char* dstY = nv21_croprotated.data; 711 | ncnn::kanna_rotate_c1(srcY, nv21_roi_w, nv21_roi_h, nv21_width, dstY, roi_w, roi_h, roi_w, rotate_type); 712 | 713 | const unsigned char* srcUV = nv21 + nv21_width * nv21_height + nv21_roi_y * nv21_width / 2 + nv21_roi_x; 714 | unsigned char* dstUV = nv21_croprotated.data + roi_w * roi_h; 715 | ncnn::kanna_rotate_c2(srcUV, nv21_roi_w / 2, nv21_roi_h / 2, nv21_width, dstUV, roi_w / 2, roi_h / 2, roi_w, rotate_type); 716 | } 717 | 718 | // nv21_croprotated to rgb 719 | cv::Mat rgb(roi_h, roi_w, CV_8UC3); 720 | ncnn::yuv420sp2rgb(nv21_croprotated.data, roi_w, roi_h, rgb.data); 721 | 722 | on_image_render(rgb); 723 | 724 | // rotate to native window orientation 725 | cv::Mat rgb_render(render_h, render_w, CV_8UC3); 726 | ncnn::kanna_rotate_c3(rgb.data, roi_w, roi_h, rgb_render.data, render_w, render_h, render_rotate_type); 727 | 728 | ANativeWindow_setBuffersGeometry(win, render_w, render_h, AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM); 729 | 730 | ANativeWindow_Buffer buf; 731 | ANativeWindow_lock(win, &buf, NULL); 732 | 733 | // scale to target size 734 | if (buf.format == AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM || buf.format == AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM) 735 | { 736 | for (int y = 0; y < render_h; y++) 737 | { 738 | const unsigned char* ptr = rgb_render.ptr(y); 739 | unsigned char* outptr = (unsigned char*)buf.bits + buf.stride * 4 * y; 740 | 741 | int x = 0; 742 | #if __ARM_NEON 743 | for (; x + 7 < render_w; x += 8) 744 | { 745 | uint8x8x3_t _rgb = vld3_u8(ptr); 746 | uint8x8x4_t _rgba; 747 | _rgba.val[0] = _rgb.val[0]; 748 | _rgba.val[1] = _rgb.val[1]; 749 | _rgba.val[2] = _rgb.val[2]; 750 | _rgba.val[3] = vdup_n_u8(255); 751 | vst4_u8(outptr, _rgba); 752 | 753 | ptr += 24; 754 | outptr += 32; 755 | } 756 | #endif // __ARM_NEON 757 | for (; x < render_w; x++) 758 | { 759 | outptr[0] = 
ptr[0]; 760 | outptr[1] = ptr[1]; 761 | outptr[2] = ptr[2]; 762 | outptr[3] = 255; 763 | 764 | ptr += 3; 765 | outptr += 4; 766 | } 767 | } 768 | } 769 | 770 | ANativeWindow_unlockAndPost(win); 771 | } 772 | -------------------------------------------------------------------------------- /app/src/main/jni/ndkcamera.h: -------------------------------------------------------------------------------- 1 | // Tencent is pleased to support the open source community by making ncnn available. 2 | // 3 | // Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved. 4 | // 5 | // Licensed under the BSD 3-Clause License (the "License"); you may not use this file except 6 | // in compliance with the License. You may obtain a copy of the License at 7 | // 8 | // https://opensource.org/licenses/BSD-3-Clause 9 | // 10 | // Unless required by applicable law or agreed to in writing, software distributed 11 | // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR 12 | // CONDITIONS OF ANY KIND, either express or implied. See the License for the 13 | // specific language governing permissions and limitations under the License. 
14 | 15 | #ifndef NDKCAMERA_H 16 | #define NDKCAMERA_H 17 | 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | 26 | #include 27 | 28 | class NdkCamera 29 | { 30 | public: 31 | NdkCamera(); 32 | virtual ~NdkCamera(); 33 | 34 | // facing 0=front 1=back 35 | int open(int camera_facing = 0); 36 | void close(); 37 | 38 | virtual void on_image(const cv::Mat& rgb) const; 39 | 40 | virtual void on_image(const unsigned char* nv21, int nv21_width, int nv21_height) const; 41 | 42 | public: 43 | int camera_facing; 44 | int camera_orientation; 45 | 46 | private: 47 | ACameraManager* camera_manager; 48 | ACameraDevice* camera_device; 49 | AImageReader* image_reader; 50 | ANativeWindow* image_reader_surface; 51 | ACameraOutputTarget* image_reader_target; 52 | ACaptureRequest* capture_request; 53 | ACaptureSessionOutputContainer* capture_session_output_container; 54 | ACaptureSessionOutput* capture_session_output; 55 | ACameraCaptureSession* capture_session; 56 | }; 57 | 58 | class NdkCameraWindow : public NdkCamera 59 | { 60 | public: 61 | NdkCameraWindow(); 62 | virtual ~NdkCameraWindow(); 63 | 64 | void set_window(ANativeWindow* win); 65 | 66 | virtual void on_image_render(cv::Mat& rgb) const; 67 | 68 | virtual void on_image(const unsigned char* nv21, int nv21_width, int nv21_height) const; 69 | 70 | public: 71 | mutable int accelerometer_orientation; 72 | 73 | private: 74 | ASensorManager* sensor_manager; 75 | mutable ASensorEventQueue* sensor_event_queue; 76 | const ASensor* accelerometer_sensor; 77 | ANativeWindow* win; 78 | }; 79 | 80 | #endif // NDKCAMERA_H 81 | -------------------------------------------------------------------------------- /app/src/main/jni/yolo.cpp: -------------------------------------------------------------------------------- 1 | #include "yolo.h" 2 | #include 3 | #include 4 | #include "cpu.h" 5 | 6 | struct GridAndStride 7 | { 8 | int grid0; 9 | int grid1; 10 | int stride; 11 | }; 12 | 13 | static 
inline float intersection_area(const Object& a, const Object& b) 14 | { 15 | cv::Rect_ inter = a.rect & b.rect; 16 | return inter.area(); 17 | } 18 | 19 | static void qsort_descent_inplace(std::vector& faceobjects, int left, int right) 20 | { 21 | int i = left; 22 | int j = right; 23 | float p = faceobjects[(left + right) / 2].prob; 24 | 25 | while (i <= j) 26 | { 27 | while (faceobjects[i].prob > p) 28 | i++; 29 | 30 | while (faceobjects[j].prob < p) 31 | j--; 32 | 33 | if (i <= j) 34 | { 35 | // swap 36 | std::swap(faceobjects[i], faceobjects[j]); 37 | 38 | i++; 39 | j--; 40 | } 41 | } 42 | 43 | #pragma omp parallel sections 44 | { 45 | #pragma omp section 46 | { 47 | if (left < j) qsort_descent_inplace(faceobjects, left, j); 48 | } 49 | #pragma omp section 50 | { 51 | if (i < right) qsort_descent_inplace(faceobjects, i, right); 52 | } 53 | } 54 | } 55 | 56 | static void qsort_descent_inplace(std::vector& objects) 57 | { 58 | if (objects.empty()) 59 | return; 60 | 61 | qsort_descent_inplace(objects, 0, objects.size() - 1); 62 | } 63 | 64 | static void nms_sorted_bboxes(const std::vector& faceobjects, std::vector& picked, float nms_threshold) 65 | { 66 | picked.clear(); 67 | 68 | const int n = faceobjects.size(); 69 | 70 | std::vector areas(n); 71 | for (int i = 0; i < n; i++) 72 | { 73 | areas[i] = faceobjects[i].rect.area(); 74 | } 75 | 76 | for (int i = 0; i < n; i++) 77 | { 78 | const Object& a = faceobjects[i]; 79 | 80 | int keep = 1; 81 | for (int j = 0; j < (int)picked.size(); j++) 82 | { 83 | const Object& b = faceobjects[picked[j]]; 84 | 85 | // intersection over union 86 | float inter_area = intersection_area(a, b); 87 | float union_area = areas[i] + areas[picked[j]] - inter_area; 88 | // float IoU = inter_area / union_area 89 | if (inter_area / union_area > nms_threshold) 90 | keep = 0; 91 | } 92 | 93 | if (keep) 94 | picked.push_back(i); 95 | } 96 | } 97 | 98 | static void generate_grids_and_stride(const int target_w, const int target_h, std::vector& 
strides, std::vector& grid_strides) 99 | { 100 | for (int i = 0; i < (int)strides.size(); i++) 101 | { 102 | int stride = strides[i]; 103 | int num_grid_w = target_w / stride; 104 | int num_grid_h = target_h / stride; 105 | for (int g1 = 0; g1 < num_grid_h; g1++) 106 | { 107 | for (int g0 = 0; g0 < num_grid_w; g0++) 108 | { 109 | GridAndStride gs; 110 | gs.grid0 = g0; 111 | gs.grid1 = g1; 112 | gs.stride = stride; 113 | grid_strides.push_back(gs); 114 | } 115 | } 116 | } 117 | } 118 | 119 | static void generate_yolox_proposals(std::vector grid_strides, const ncnn::Mat& feat_blob, float prob_threshold, std::vector& objects) 120 | { 121 | const int num_grid = feat_blob.h; 122 | 123 | const int num_class = feat_blob.w - 5; 124 | 125 | const int num_anchors = grid_strides.size(); 126 | 127 | const float* feat_ptr = feat_blob.channel(0); 128 | for (int anchor_idx = 0; anchor_idx < num_anchors; anchor_idx++) 129 | { 130 | const int grid0 = grid_strides[anchor_idx].grid0; 131 | const int grid1 = grid_strides[anchor_idx].grid1; 132 | const int stride = grid_strides[anchor_idx].stride; 133 | float x_center = (feat_ptr[0] + grid0) * stride; 134 | float y_center = (feat_ptr[1] + grid1) * stride; 135 | float w = exp(feat_ptr[2]) * stride; 136 | float h = exp(feat_ptr[3]) * stride; 137 | float x0 = x_center - w * 0.5f; 138 | float y0 = y_center - h * 0.5f; 139 | 140 | float box_objectness = feat_ptr[4]; 141 | for (int class_idx = 0; class_idx < num_class; class_idx++) 142 | { 143 | float box_cls_score = feat_ptr[5 + class_idx]; 144 | float box_prob = box_objectness * box_cls_score; 145 | if (box_prob > prob_threshold) 146 | { 147 | Object obj; 148 | obj.rect.x = x0; 149 | obj.rect.y = y0; 150 | obj.rect.width = w; 151 | obj.rect.height = h; 152 | obj.label = class_idx; 153 | obj.prob = box_prob; 154 | 155 | objects.push_back(obj); 156 | } 157 | 158 | } // class loop 159 | feat_ptr += feat_blob.w; 160 | 161 | } // point anchor loop 162 | } 163 | 164 | 165 | Yolo::Yolo() 166 | { 
167 | blob_pool_allocator.set_size_compare_ratio(0.f); 168 | workspace_pool_allocator.set_size_compare_ratio(0.f); 169 | } 170 | 171 | int Yolo::load(const char* modeltype, int _target_size, const float* _norm_vals, bool use_gpu) 172 | { 173 | yolo.clear(); 174 | blob_pool_allocator.clear(); 175 | workspace_pool_allocator.clear(); 176 | 177 | ncnn::set_cpu_powersave(2); 178 | ncnn::set_omp_num_threads(ncnn::get_big_cpu_count()); 179 | 180 | yolo.opt = ncnn::Option(); 181 | 182 | #if NCNN_VULKAN 183 | yolo.opt.use_vulkan_compute = use_gpu; 184 | #endif 185 | 186 | yolo.opt.num_threads = ncnn::get_big_cpu_count(); 187 | yolo.opt.blob_allocator = &blob_pool_allocator; 188 | yolo.opt.workspace_allocator = &workspace_pool_allocator; 189 | 190 | char parampath[256]; 191 | char modelpath[256]; 192 | sprintf(parampath, "%s.param", modeltype); 193 | sprintf(modelpath, "%s.bin", modeltype); 194 | 195 | yolo.load_param(parampath); 196 | yolo.load_model(modelpath); 197 | 198 | target_size = _target_size; 199 | norm_vals[0] = _norm_vals[0]; 200 | norm_vals[1] = _norm_vals[1]; 201 | norm_vals[2] = _norm_vals[2]; 202 | 203 | return 0; 204 | } 205 | 206 | int Yolo::load(AAssetManager* mgr, const char* modeltype, int _target_size, const float* _norm_vals, bool use_gpu) 207 | { 208 | yolo.clear(); 209 | blob_pool_allocator.clear(); 210 | workspace_pool_allocator.clear(); 211 | 212 | ncnn::set_cpu_powersave(2); 213 | ncnn::set_omp_num_threads(ncnn::get_big_cpu_count()); 214 | 215 | yolo.opt = ncnn::Option(); 216 | #if NCNN_VULKAN 217 | yolo.opt.use_vulkan_compute = use_gpu; 218 | #endif 219 | //yolo.register_custom_layer("YoloV5Focus", YoloV5Focus_layer_creator); 220 | yolo.opt.num_threads = ncnn::get_big_cpu_count(); 221 | yolo.opt.blob_allocator = &blob_pool_allocator; 222 | yolo.opt.workspace_allocator = &workspace_pool_allocator; 223 | 224 | char parampath[256]; 225 | char modelpath[256]; 226 | sprintf(parampath, "%s.param", modeltype); 227 | sprintf(modelpath, "%s.bin", 
modeltype); 228 | 229 | yolo.load_param(mgr, parampath); 230 | yolo.load_model(mgr, modelpath); 231 | 232 | 233 | target_size = _target_size; 234 | norm_vals[0] = _norm_vals[0]; 235 | norm_vals[1] = _norm_vals[1]; 236 | norm_vals[2] = _norm_vals[2]; 237 | 238 | return 0; 239 | } 240 | 241 | int Yolo::detect(const cv::Mat& rgb, std::vector& objects, float prob_threshold, float nms_threshold) 242 | { 243 | int img_w = rgb.cols; 244 | int img_h = rgb.rows; 245 | // letterbox pad to multiple of 32 246 | int w = img_w; 247 | int h = img_h; 248 | float scale = 1.f; 249 | if (w > h) 250 | { 251 | scale = (float)target_size / w; 252 | w = target_size; 253 | h = h * scale; 254 | } 255 | else 256 | { 257 | scale = (float)target_size / h; 258 | h = target_size; 259 | w = w * scale; 260 | } 261 | ncnn::Mat in = ncnn::Mat::from_pixels_resize(rgb.data, ncnn::Mat::PIXEL_RGB, img_w, img_h, w, h); 262 | 263 | // pad to target_size rectangle 264 | // yolov5/utils/datasets.py letterbox 265 | int wpad = (w + 31) / 32 * 32 - w; 266 | int hpad = (h + 31) / 32 * 32 - h; 267 | ncnn::Mat in_pad; 268 | ncnn::copy_make_border(in, in_pad, 0, hpad, 0, wpad, ncnn::BORDER_CONSTANT, 114.f); 269 | in_pad.substract_mean_normalize(0, norm_vals); 270 | 271 | ncnn::Extractor ex = yolo.create_extractor(); 272 | 273 | ex.input("images", in_pad); 274 | 275 | std::vector proposals; 276 | 277 | { 278 | ncnn::Mat out; 279 | ex.extract("output", out); 280 | std::vector strides = {8, 16, 32}; // might have stride=64 281 | std::vector grid_strides; 282 | generate_grids_and_stride(in_pad.w, in_pad.h, strides, grid_strides); 283 | generate_yolox_proposals(grid_strides, out, prob_threshold, proposals); 284 | } 285 | 286 | // sort all proposals by score from highest to lowest 287 | qsort_descent_inplace(proposals); 288 | 289 | // apply nms with nms_threshold 290 | std::vector picked; 291 | nms_sorted_bboxes(proposals, picked, nms_threshold); 292 | 293 | int count = picked.size(); 294 | 295 | objects.resize(count); 
296 | for (int i = 0; i < count; i++) 297 | { 298 | objects[i] = proposals[picked[i]]; 299 | 300 | // adjust offset to original unpadded 301 | float x0 = (objects[i].rect.x) / scale; 302 | float y0 = (objects[i].rect.y) / scale; 303 | float x1 = (objects[i].rect.x + objects[i].rect.width) / scale; 304 | float y1 = (objects[i].rect.y + objects[i].rect.height) / scale; 305 | 306 | // clip 307 | x0 = std::max(std::min(x0, (float)(img_w - 1)), 0.f); 308 | y0 = std::max(std::min(y0, (float)(img_h - 1)), 0.f); 309 | x1 = std::max(std::min(x1, (float)(img_w - 1)), 0.f); 310 | y1 = std::max(std::min(y1, (float)(img_h - 1)), 0.f); 311 | 312 | objects[i].rect.x = x0; 313 | objects[i].rect.y = y0; 314 | objects[i].rect.width = x1 - x0; 315 | objects[i].rect.height = y1 - y0; 316 | } 317 | 318 | return 0; 319 | } 320 | 321 | int Yolo::draw(cv::Mat& rgb, const std::vector& objects) 322 | { 323 | static const char* class_names[] = { 324 | "person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light", 325 | "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow", 326 | "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", 327 | "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", 328 | "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple", 329 | "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch", 330 | "potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone", 331 | "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", 332 | "hair drier", "toothbrush" 333 | }; 334 | static const unsigned char colors[19][3] = { 335 | { 54, 67, 244}, 336 | { 99, 30, 233}, 337 | {176, 39, 156}, 338 | {183, 58, 103}, 339 
| {181, 81, 63}, 340 | {243, 150, 33}, 341 | {244, 169, 3}, 342 | {212, 188, 0}, 343 | {136, 150, 0}, 344 | { 80, 175, 76}, 345 | { 74, 195, 139}, 346 | { 57, 220, 205}, 347 | { 59, 235, 255}, 348 | { 7, 193, 255}, 349 | { 0, 152, 255}, 350 | { 34, 87, 255}, 351 | { 72, 85, 121}, 352 | {158, 158, 158}, 353 | {139, 125, 96} 354 | }; 355 | 356 | int color_index = 0; 357 | 358 | for (size_t i = 0; i < objects.size(); i++) 359 | { 360 | const Object& obj = objects[i]; 361 | 362 | const unsigned char* color = colors[color_index % 19]; 363 | color_index++; 364 | 365 | cv::Scalar cc(color[0], color[1], color[2]); 366 | 367 | cv::rectangle(rgb,obj.rect, cc, 2); 368 | 369 | char text[256]; 370 | sprintf(text, "%s %.1f%%", class_names[obj.label], obj.prob * 100); 371 | 372 | int baseLine = 0; 373 | cv::Size label_size = cv::getTextSize(text, cv::FONT_HERSHEY_SIMPLEX, 0.5, 1, &baseLine); 374 | 375 | int x = obj.rect.x; 376 | int y = obj.rect.y - label_size.height - baseLine; 377 | if (y < 0) 378 | y = 0; 379 | if (x + label_size.width > rgb.cols) 380 | x = rgb.cols - label_size.width; 381 | cv::rectangle(rgb, cv::Rect(cv::Point(x, y), cv::Size(label_size.width, label_size.height + baseLine)), cc, -1); 382 | cv::Scalar textcc = (color[0] + color[1] + color[2] >= 381) ? 
cv::Scalar(0, 0, 0) : cv::Scalar(255, 255, 255); 383 | cv::putText(rgb, text, cv::Point(x, y + label_size.height), cv::FONT_HERSHEY_SIMPLEX, 0.5, textcc, 1); 384 | 385 | } 386 | 387 | 388 | return 0; 389 | } 390 | -------------------------------------------------------------------------------- /app/src/main/jni/yolo.h: -------------------------------------------------------------------------------- 1 | #ifndef YOLO_H 2 | #define YOLO_H 3 | 4 | #include 5 | 6 | #include 7 | 8 | struct Object 9 | { 10 | cv::Rect_ rect; 11 | int label; 12 | float prob; 13 | 14 | }; 15 | 16 | 17 | class Yolo 18 | { 19 | public: 20 | Yolo(); 21 | 22 | int load(const char* modeltype, int target_size, const float* norm_vals, bool use_gpu = false); 23 | 24 | int load(AAssetManager* mgr, const char* modeltype, int target_size, const float* norm_vals, bool use_gpu = false); 25 | 26 | int detect(const cv::Mat& rgb, std::vector& objects, float prob_threshold = 0.25f, float nms_threshold = 0.45f); 27 | 28 | int draw(cv::Mat& rgb, const std::vector& objects); 29 | 30 | private: 31 | 32 | ncnn::Net yolo; 33 | 34 | int target_size; 35 | float norm_vals[3]; 36 | int image_w; 37 | int image_h; 38 | int in_w; 39 | int in_h; 40 | 41 | ncnn::UnlockedPoolAllocator blob_pool_allocator; 42 | ncnn::PoolAllocator workspace_pool_allocator; 43 | }; 44 | 45 | #endif // YOLO_H 46 | -------------------------------------------------------------------------------- /app/src/main/jni/yoloncnn.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | 5 | #include 6 | 7 | #include 8 | 9 | #include 10 | #include 11 | 12 | #include 13 | #include 14 | 15 | #include "yolo.h" 16 | 17 | #include "ndkcamera.h" 18 | 19 | #include 20 | #include 21 | 22 | #if __ARM_NEON 23 | #include 24 | #endif // __ARM_NEON 25 | 26 | static int draw_unsupported(cv::Mat& rgb) 27 | { 28 | const char text[] = "unsupported"; 29 | 30 | int baseLine = 0; 31 | cv::Size label_size 
= cv::getTextSize(text, cv::FONT_HERSHEY_SIMPLEX, 1.0, 1, &baseLine); 32 | 33 | int y = (rgb.rows - label_size.height) / 2; 34 | int x = (rgb.cols - label_size.width) / 2; 35 | 36 | cv::rectangle(rgb, cv::Rect(cv::Point(x, y), cv::Size(label_size.width, label_size.height + baseLine)), 37 | cv::Scalar(255, 255, 255), -1); 38 | 39 | cv::putText(rgb, text, cv::Point(x, y + label_size.height), 40 | cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(0, 0, 0)); 41 | 42 | return 0; 43 | } 44 | 45 | static int draw_fps(cv::Mat& rgb) 46 | { 47 | // resolve moving average 48 | float avg_fps = 0.f; 49 | { 50 | static double t0 = 0.f; 51 | static float fps_history[10] = {0.f}; 52 | double t1 = ncnn::get_current_time(); 53 | if (t0 == 0.f) 54 | { 55 | t0 = t1; 56 | return 0; 57 | } 58 | float fps = 1000.f / (t1 - t0); 59 | t0 = t1; 60 | 61 | for (int i = 9; i >= 1; i--) 62 | { 63 | fps_history[i] = fps_history[i - 1]; 64 | } 65 | fps_history[0] = fps; 66 | 67 | if (fps_history[9] == 0.f) 68 | { 69 | return 0; 70 | } 71 | 72 | for (int i = 0; i < 10; i++) 73 | { 74 | avg_fps += fps_history[i]; 75 | } 76 | avg_fps /= 10.f; 77 | } 78 | char text[32]; 79 | sprintf(text, "FPS=%.2f", avg_fps); 80 | int baseLine = 0; 81 | cv::Size label_size = cv::getTextSize(text, cv::FONT_HERSHEY_SIMPLEX, 0.5, 1, &baseLine); 82 | int y = 0; 83 | int x = rgb.cols - label_size.width; 84 | cv::rectangle(rgb, cv::Rect(cv::Point(x, y), cv::Size(label_size.width, label_size.height + baseLine)), 85 | cv::Scalar(255, 255, 255), -1); 86 | cv::putText(rgb, text, cv::Point(x, y + label_size.height), 87 | cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 0)); 88 | 89 | return 0; 90 | } 91 | 92 | static Yolo* g_yolo = 0; 93 | static ncnn::Mutex lock; 94 | 95 | class MyNdkCamera : public NdkCameraWindow 96 | { 97 | public: 98 | virtual void on_image_render(cv::Mat& rgb) const; 99 | }; 100 | 101 | void MyNdkCamera::on_image_render(cv::Mat& rgb) const 102 | { 103 | // nanodet 104 | { 105 | ncnn::MutexLockGuard g(lock); 106 | 
107 | if (g_yolo) 108 | { 109 | std::vector objects; 110 | g_yolo->detect(rgb, objects); 111 | 112 | g_yolo->draw(rgb, objects); 113 | } 114 | else 115 | { 116 | draw_unsupported(rgb); 117 | } 118 | } 119 | 120 | draw_fps(rgb); 121 | } 122 | 123 | static MyNdkCamera* g_camera = 0; 124 | 125 | extern "C" { 126 | 127 | JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved) 128 | { 129 | __android_log_print(ANDROID_LOG_DEBUG, "ncnn", "JNI_OnLoad"); 130 | 131 | g_camera = new MyNdkCamera; 132 | 133 | return JNI_VERSION_1_4; 134 | } 135 | 136 | JNIEXPORT void JNI_OnUnload(JavaVM* vm, void* reserved) 137 | { 138 | __android_log_print(ANDROID_LOG_DEBUG, "ncnn", "JNI_OnUnload"); 139 | 140 | { 141 | ncnn::MutexLockGuard g(lock); 142 | 143 | delete g_yolo; 144 | g_yolo = 0; 145 | } 146 | 147 | delete g_camera; 148 | g_camera = 0; 149 | } 150 | 151 | // public native boolean loadModel(AssetManager mgr, int modelid, int cpugpu); 152 | JNIEXPORT jboolean JNICALL Java_com_tencent_ncnnyolov6_NcnnYolov6_loadModel(JNIEnv* env, jobject thiz, jobject assetManager, jint modelid, jint cpugpu) 153 | { 154 | if (modelid < 0 || modelid > 6 || cpugpu < 0 || cpugpu > 1) 155 | { 156 | return JNI_FALSE; 157 | } 158 | 159 | AAssetManager* mgr = AAssetManager_fromJava(env, assetManager); 160 | 161 | __android_log_print(ANDROID_LOG_DEBUG, "ncnn", "loadModel %p", mgr); 162 | 163 | const char* modeltypes[] = 164 | { 165 | "yolov6n", 166 | }; 167 | 168 | const int target_sizes[] = 169 | { 170 | 640, 171 | }; 172 | 173 | const float norm_vals[][3] = 174 | { 175 | {1 / 255.f, 1 / 255.f , 1 / 255.f}, 176 | }; 177 | 178 | const char* modeltype = modeltypes[(int)modelid]; 179 | int target_size = target_sizes[(int)modelid]; 180 | bool use_gpu = (int)cpugpu == 1; 181 | 182 | // reload 183 | { 184 | ncnn::MutexLockGuard g(lock); 185 | 186 | if (use_gpu && ncnn::get_gpu_count() == 0) 187 | { 188 | // no gpu 189 | delete g_yolo; 190 | g_yolo = 0; 191 | } 192 | else 193 | { 194 | if (!g_yolo) 195 | g_yolo = 
new Yolo; 196 | g_yolo->load(mgr, modeltype, target_size, norm_vals[(int)modelid], use_gpu); 197 | } 198 | } 199 | 200 | return JNI_TRUE; 201 | } 202 | 203 | // public native boolean openCamera(int facing); 204 | JNIEXPORT jboolean JNICALL Java_com_tencent_ncnnyolov6_NcnnYolov6_openCamera(JNIEnv* env, jobject thiz, jint facing) 205 | { 206 | if (facing < 0 || facing > 1) 207 | return JNI_FALSE; 208 | 209 | __android_log_print(ANDROID_LOG_DEBUG, "ncnn", "openCamera %d", facing); 210 | 211 | g_camera->open((int)facing); 212 | 213 | return JNI_TRUE; 214 | } 215 | 216 | // public native boolean closeCamera(); 217 | JNIEXPORT jboolean JNICALL Java_com_tencent_ncnnyolov6_NcnnYolov6_closeCamera(JNIEnv* env, jobject thiz) 218 | { 219 | __android_log_print(ANDROID_LOG_DEBUG, "ncnn", "closeCamera"); 220 | 221 | g_camera->close(); 222 | 223 | return JNI_TRUE; 224 | } 225 | 226 | // public native boolean setOutputWindow(Surface surface); 227 | JNIEXPORT jboolean JNICALL Java_com_tencent_ncnnyolov6_NcnnYolov6_setOutputWindow(JNIEnv* env, jobject thiz, jobject surface) 228 | { 229 | ANativeWindow* win = ANativeWindow_fromSurface(env, surface); 230 | 231 | __android_log_print(ANDROID_LOG_DEBUG, "ncnn", "setOutputWindow %p", win); 232 | 233 | g_camera->set_window(win); 234 | 235 | return JNI_TRUE; 236 | } 237 | 238 | } 239 | -------------------------------------------------------------------------------- /app/src/main/res/layout/main.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | 11 | 12 |