├── .flake8 ├── .github └── workflows │ └── workflow.yml ├── .gitignore ├── .isort.cfg ├── .pre-commit-config.yaml ├── LICENSE ├── README.md ├── config ├── EfficientNet-Lite │ ├── nanodet-EfficientNet-Lite0_320.yml │ ├── nanodet-EfficientNet-Lite1_416.yml │ └── nanodet-EfficientNet-Lite2_512.yml ├── RepVGG │ └── nanodet-RepVGG-A0_416.yml ├── Transformer │ └── nanodet-t.yml ├── nanodet-g.yml ├── nanodet-m-0.5x.yml ├── nanodet-m-1.5x-416.yml ├── nanodet-m-1.5x.yml ├── nanodet-m-416.yml ├── nanodet-m.yml └── nanodet_custom_xml_dataset.yml ├── demo ├── demo-inference-with-pytorch.ipynb └── demo.py ├── demo_android_ncnn ├── .gitignore ├── Android_demo.jpg ├── LICENSE ├── README.md ├── app │ ├── .gitignore │ ├── build.gradle │ ├── proguard-rules.pro │ └── src │ │ ├── androidTest │ │ └── java │ │ │ └── com │ │ │ └── rangi │ │ │ └── nanodet │ │ │ └── ExampleInstrumentedTest.java │ │ ├── main │ │ ├── AndroidManifest.xml │ │ ├── cpp │ │ │ ├── CMakeLists.txt │ │ │ ├── NanoDet.cpp │ │ │ ├── NanoDet.h │ │ │ ├── YoloV4.cpp │ │ │ ├── YoloV4.h │ │ │ ├── YoloV5.cpp │ │ │ ├── YoloV5.h │ │ │ └── jni_interface.cpp │ │ ├── java │ │ │ └── com │ │ │ │ └── rangi │ │ │ │ └── nanodet │ │ │ │ ├── AppCrashHandler.java │ │ │ │ ├── Box.java │ │ │ │ ├── MainActivity.java │ │ │ │ ├── NanoDet.java │ │ │ │ ├── NcnnApp.java │ │ │ │ ├── WelcomeActivity.java │ │ │ │ ├── YOLOv4.java │ │ │ │ └── YOLOv5.java │ │ └── res │ │ │ ├── drawable-v24 │ │ │ └── ic_launcher_foreground.xml │ │ │ ├── drawable-xxhdpi │ │ │ ├── cpu.png │ │ │ ├── gpu.png │ │ │ └── ncnn_icon.png │ │ │ ├── drawable │ │ │ ├── cpu_gpu_bg.xml │ │ │ └── ic_launcher_background.xml │ │ │ ├── layout │ │ │ ├── activity_main.xml │ │ │ └── activity_welcome.xml │ │ │ ├── mipmap-anydpi-v26 │ │ │ ├── ic_launcher.xml │ │ │ └── ic_launcher_round.xml │ │ │ ├── mipmap-hdpi │ │ │ ├── ic_launcher.png │ │ │ └── ic_launcher_round.png │ │ │ ├── mipmap-mdpi │ │ │ ├── ic_launcher.png │ │ │ └── ic_launcher_round.png │ │ │ ├── mipmap-xhdpi │ │ │ ├── ic_launcher.png │ │ │ └── ic_launcher_round.png │ │ │ ├── mipmap-xxhdpi │ │ │ ├── ic_launcher.png │ │ │ └── ic_launcher_round.png │ │ │ ├── mipmap-xxxhdpi │ │ │ ├── ic_launcher.png │ │ │ └── ic_launcher_round.png │ │ │ └── values │ │ │ ├── colors.xml │ │ │ ├── strings.xml │ │ │ └── styles.xml │ │ └── test │ │ └── java │ │ └── com │ │ └── rangi │ │ └── nanodet │ │ └── ExampleUnitTest.java ├── build.gradle ├── gradle.properties ├── gradle │ └── wrapper │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat └── settings.gradle ├── demo_libtorch ├── CMakeLists.txt ├── README.md ├── main.cpp ├── nanodet_libtorch.cpp └── nanodet_libtorch.h ├── demo_mnn ├── CMakeLists.txt ├── README.md ├── imgs │ ├── 000252.jpg │ └── 000258.jpg ├── main.cpp ├── nanodet_mnn.cpp ├── nanodet_mnn.hpp ├── python │ └── demo_mnn.py └── results │ ├── 000252.jpg │ └── 000258.jpg ├── demo_ncnn ├── CMakeLists.txt ├── README.md ├── benchmark.jpg ├── main.cpp ├── nanodet.cpp ├── nanodet.h └── python │ └── demo_ncnn.py ├── demo_openvino ├── CMakeLists.txt ├── README.md ├── main.cpp ├── nanodet_openvino.cpp └── nanodet_openvino.h ├── docs ├── config_file_detail.md ├── imgs │ ├── Android_demo.jpg │ ├── Model_arch.png │ └── Title.jpg └── update.md ├── nanodet ├── __about__.py ├── __init__.py ├── data │ ├── batch_process.py │ ├── collate.py │ ├── dataset │ │ ├── __init__.py │ │ ├── base.py │ │ ├── coco.py │ │ └── xml_dataset.py │ └── transform │ │ ├── __init__.py │ │ ├── color.py │ │ ├── mosaic.py │ │ ├── pipeline.py │ │ └── warp.py ├── evaluator │ 
├── __init__.py │ └── coco_detection.py ├── model │ ├── arch │ │ ├── __init__.py │ │ └── one_stage_detector.py │ ├── backbone │ │ ├── __init__.py │ │ ├── custom_csp.py │ │ ├── efficientnet_lite.py │ │ ├── ghostnet.py │ │ ├── mobilenetv2.py │ │ ├── repvgg.py │ │ ├── resnet.py │ │ └── shufflenetv2.py │ ├── fpn │ │ ├── __init__.py │ │ ├── fpn.py │ │ ├── pan.py │ │ └── tan.py │ ├── head │ │ ├── __init__.py │ │ ├── assigner │ │ │ ├── assign_result.py │ │ │ ├── atss_assigner.py │ │ │ └── base_assigner.py │ │ ├── gfl_head.py │ │ └── nanodet_head.py │ ├── loss │ │ ├── gfocal_loss.py │ │ ├── iou_loss.py │ │ └── utils.py │ └── module │ │ ├── activation.py │ │ ├── conv.py │ │ ├── init_weights.py │ │ ├── nms.py │ │ ├── norm.py │ │ ├── scale.py │ │ └── transformer.py ├── trainer │ ├── __init__.py │ ├── dist_trainer.py │ ├── task.py │ └── trainer.py └── util │ ├── __init__.py │ ├── box_transform.py │ ├── check_point.py │ ├── config.py │ ├── data_parallel.py │ ├── distributed_data_parallel.py │ ├── flops_counter.py │ ├── logger.py │ ├── misc.py │ ├── path.py │ ├── rank_filter.py │ ├── scatter_gather.py │ ├── util_mixins.py │ ├── visualization.py │ └── yacs.py ├── requirements.txt ├── setup.py ├── tests ├── data │ ├── batched_nms_data.pkl │ ├── dummy_coco.json │ ├── test_img.jpg │ └── test_img.xml ├── test_configs │ └── test_config.py ├── test_data │ ├── test_batch_process.py │ ├── test_collate.py │ ├── test_dataset │ │ ├── test_cocodataset.py │ │ └── test_xmldataset.py │ └── test_transform │ │ ├── test_color.py │ │ └── test_warp.py ├── test_evaluator │ └── test_coco_detection.py ├── test_models │ ├── test_backbone │ │ ├── test_custom_csp.py │ │ ├── test_efficient_lite.py │ │ ├── test_ghostnet.py │ │ ├── test_mobilenetv2.py │ │ ├── test_repvgg.py │ │ ├── test_resnet.py │ │ └── test_shufflenetv2.py │ ├── test_fpn │ │ ├── test_fpn.py │ │ ├── test_pan.py │ │ └── test_tan.py │ ├── test_head │ │ ├── test_gfl_head.py │ │ └── test_nanodet_head.py │ ├── test_loss │ │ ├── test_gfocal_loss.py │ │ └── test_iou_loss.py │ └── test_modules │ │ ├── test_conv.py │ │ ├── test_dwconv.py │ │ ├── test_init_weights.py │ │ ├── test_nms.py │ │ ├── test_norm.py │ │ ├── test_repvgg_conv.py │ │ ├── test_scale.py │ │ └── test_transformer.py ├── test_trainer │ ├── test_lightning_task.py │ └── test_trainer.py └── test_utils │ ├── test_flops.py │ └── test_logger.py └── tools ├── convert_old_checkpoint.py ├── deprecated ├── test.py └── train.py ├── export_onnx.py ├── export_torchscript.py ├── flops.py ├── inference.py ├── test.py └── train.py /.flake8: -------------------------------------------------------------------------------- 1 | # This is an example .flake8 config, used when developing *Black* itself. 2 | # Keep in sync with setup.cfg which is used for source packages. 
3 | 4 | [flake8] 5 | ignore = W503, E203, E221, C901, C408, E741, C407, E741, B006, B007, B017, B950, C416 6 | max-line-length = 88 7 | max-complexity = 18 8 | select = B,C,E,F,W,T4,B9 9 | exclude = build 10 | per-file-ignores = 11 | **/__init__.py:F401,F403,E402 12 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | 106 | .vscode 107 | .idea 108 | .DS_Store 109 | 110 | # custom 111 | *.pkl 112 | *.pkl.json 113 | *.log.json 114 | work_dirs/ 115 | 116 | # Pytorch 117 | *.pth 118 | *.py~ 119 | *.sh~ 120 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [tool.isort] 2 | profile = "black" 3 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v2.5.0 4 | hooks: 5 | - id: trailing-whitespace 6 | - id: end-of-file-fixer 7 | - id: check-docstring-first 8 | - id: check-yaml 9 | - id: debug-statements 10 | - id: requirements-txt-fixer 11 | 12 | - repo: https://github.com/pycqa/isort 13 | rev: 5.8.0 14 | hooks: 15 | - id: isort 16 | args: ["--profile", "black"] 17 | 18 | - repo: https://github.com/psf/black 19 | rev: 21.6b0 20 | hooks: 21 | - id: black 22 | 23 | - repo: https://gitlab.com/pycqa/flake8 24 | rev: 3.9.2 25 | hooks: 26 | - id: flake8 27 | -------------------------------------------------------------------------------- /config/RepVGG/nanodet-RepVGG-A0_416.yml: -------------------------------------------------------------------------------- 1 | # 
nanodet-EfficientNet-Lite1_416 2 | save_dir: workspace/RepVGG-A0-416 3 | model: 4 | arch: 5 | name: OneStageDetector 6 | backbone: 7 | name: RepVGG 8 | arch: A0 9 | out_stages: [2,3,4] 10 | activation: ReLU 11 | last_channel: 512 12 | deploy: False 13 | fpn: 14 | name: PAN 15 | in_channels: [96, 192, 512] 16 | out_channels: 128 17 | start_level: 0 18 | num_outs: 3 19 | head: 20 | name: NanoDetHead 21 | num_classes: 80 22 | conv_type: Conv 23 | input_channel: 128 24 | feat_channels: 128 25 | stacked_convs: 2 26 | activation: ReLU 27 | share_cls_reg: True 28 | octave_base_scale: 8 29 | scales_per_octave: 1 30 | strides: [8, 16, 32] 31 | reg_max: 10 32 | norm_cfg: 33 | type: BN 34 | loss: 35 | loss_qfl: 36 | name: QualityFocalLoss 37 | use_sigmoid: True 38 | beta: 2.0 39 | loss_weight: 1.0 40 | loss_dfl: 41 | name: DistributionFocalLoss 42 | loss_weight: 0.25 43 | loss_bbox: 44 | name: GIoULoss 45 | loss_weight: 2.0 46 | data: 47 | train: 48 | name: CocoDataset 49 | img_path: /coco/train2017 50 | ann_path: /coco/annotations/instances_train2017.json 51 | input_size: [416,416] #[w,h] 52 | keep_ratio: True 53 | pipeline: 54 | perspective: 0.0 55 | scale: [0.5, 1.5] 56 | stretch: [[1, 1], [1, 1]] 57 | rotation: 0 58 | shear: 0 59 | translate: 0.2 60 | flip: 0.5 61 | brightness: 0.2 62 | contrast: [0.6, 1.4] 63 | saturation: [0.5, 1.2] 64 | normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] 65 | val: 66 | name: CocoDataset 67 | img_path: /coco/val2017 68 | ann_path: /coco/annotations/instances_val2017.json 69 | input_size: [416,416] #[w,h] 70 | keep_ratio: True 71 | pipeline: 72 | normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] 73 | device: 74 | gpu_ids: [0] 75 | workers_per_gpu: 1 76 | batchsize_per_gpu: 100 77 | schedule: 78 | # resume: 79 | # load_model: YOUR_MODEL_PATH 80 | optimizer: 81 | name: SGD 82 | lr: 0.07 83 | momentum: 0.9 84 | weight_decay: 0.0001 85 | warmup: 86 | name: linear 87 | steps: 500 88 | ratio: 0.01 89 | total_epochs: 170 90 | lr_schedule: 91 | name: MultiStepLR 92 | milestones: [130,150,160,165] 93 | gamma: 0.1 94 | val_intervals: 5 95 | evaluator: 96 | name: CocoDetectionEvaluator 97 | save_key: mAP 98 | 99 | log: 100 | interval: 10 101 | 102 | class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 103 | 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', 104 | 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', 105 | 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', 106 | 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', 107 | 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', 108 | 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', 109 | 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 110 | 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', 111 | 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', 112 | 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', 113 | 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', 114 | 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 115 | 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] 116 | -------------------------------------------------------------------------------- /config/nanodet-m.yml: -------------------------------------------------------------------------------- 1 | #Config File example 2 | save_dir: workspace/nanodet_m 3 | model: 4 | arch: 5 | name: OneStageDetector 6 | backbone: 7 | name: ShuffleNetV2 8 | model_size: 1.0x 9 | 
out_stages: [2,3,4] 10 | activation: LeakyReLU 11 | fpn: 12 | name: PAN 13 | in_channels: [116, 232, 464] 14 | out_channels: 96 15 | start_level: 0 16 | num_outs: 3 17 | head: 18 | name: NanoDetHead 19 | num_classes: 80 20 | input_channel: 96 21 | feat_channels: 96 22 | stacked_convs: 2 23 | share_cls_reg: True 24 | octave_base_scale: 5 25 | scales_per_octave: 1 26 | strides: [8, 16, 32] 27 | reg_max: 7 28 | norm_cfg: 29 | type: BN 30 | loss: 31 | loss_qfl: 32 | name: QualityFocalLoss 33 | use_sigmoid: True 34 | beta: 2.0 35 | loss_weight: 1.0 36 | loss_dfl: 37 | name: DistributionFocalLoss 38 | loss_weight: 0.25 39 | loss_bbox: 40 | name: GIoULoss 41 | loss_weight: 2.0 42 | data: 43 | train: 44 | name: CocoDataset 45 | img_path: coco/train2017 46 | ann_path: coco/annotations/instances_train2017.json 47 | input_size: [320,320] #[w,h] 48 | keep_ratio: True 49 | pipeline: 50 | perspective: 0.0 51 | scale: [0.6, 1.4] 52 | stretch: [[1, 1], [1, 1]] 53 | rotation: 0 54 | shear: 0 55 | translate: 0.2 56 | flip: 0.5 57 | brightness: 0.2 58 | contrast: [0.6, 1.4] 59 | saturation: [0.5, 1.2] 60 | normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] 61 | val: 62 | name: CocoDataset 63 | img_path: coco/val2017 64 | ann_path: coco/annotations/instances_val2017.json 65 | input_size: [320,320] #[w,h] 66 | keep_ratio: True 67 | pipeline: 68 | normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] 69 | device: 70 | gpu_ids: [0] 71 | workers_per_gpu: 8 72 | batchsize_per_gpu: 192 73 | schedule: 74 | # resume: 75 | # load_model: YOUR_MODEL_PATH 76 | optimizer: 77 | name: SGD 78 | lr: 0.14 79 | momentum: 0.9 80 | weight_decay: 0.0001 81 | warmup: 82 | name: linear 83 | steps: 300 84 | ratio: 0.1 85 | total_epochs: 280 86 | lr_schedule: 87 | name: MultiStepLR 88 | milestones: [240,260,275] 89 | gamma: 0.1 90 | val_intervals: 10 91 | evaluator: 92 | name: CocoDetectionEvaluator 93 | save_key: mAP 94 | 95 | log: 96 | interval: 10 97 | 98 | class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 99 | 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', 100 | 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', 101 | 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', 102 | 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', 103 | 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', 104 | 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', 105 | 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 106 | 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', 107 | 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', 108 | 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', 109 | 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', 110 | 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 111 | 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] 112 | -------------------------------------------------------------------------------- /config/nanodet_custom_xml_dataset.yml: -------------------------------------------------------------------------------- 1 | #Config File example 2 | save_dir: workspace/nanodet_m 3 | model: 4 | arch: 5 | name: OneStageDetector 6 | backbone: 7 | name: ShuffleNetV2 8 | model_size: 1.0x 9 | out_stages: [2,3,4] 10 | activation: LeakyReLU 11 | fpn: 12 | name: PAN 13 | in_channels: [116, 232, 464] 14 | out_channels: 96 15 | start_level: 0 16 | num_outs: 3 17 | head: 18 | name: NanoDetHead 19 | num_classes: 80 #Please fill in the number of 
categories (not include background category) 20 | input_channel: 96 21 | feat_channels: 96 22 | stacked_convs: 2 23 | share_cls_reg: True 24 | octave_base_scale: 5 25 | scales_per_octave: 1 26 | strides: [8, 16, 32] 27 | reg_max: 7 28 | norm_cfg: 29 | type: BN 30 | loss: 31 | loss_qfl: 32 | name: QualityFocalLoss 33 | use_sigmoid: True 34 | beta: 2.0 35 | loss_weight: 1.0 36 | loss_dfl: 37 | name: DistributionFocalLoss 38 | loss_weight: 0.25 39 | loss_bbox: 40 | name: GIoULoss 41 | loss_weight: 2.0 42 | 43 | class_names: &class_names ['NAME1', 'NAME2', 'NAME3', 'NAME4', '...'] #Please fill in the category names (not include background category) 44 | data: 45 | train: 46 | name: XMLDataset 47 | class_names: *class_names 48 | img_path: TRAIN_IMAGE_FOLDER #Please fill in train image path 49 | ann_path: TRAIN_XML_FOLDER #Please fill in train xml path 50 | input_size: [320,320] #[w,h] 51 | keep_ratio: True 52 | pipeline: 53 | perspective: 0.0 54 | scale: [0.6, 1.4] 55 | stretch: [[1, 1], [1, 1]] 56 | rotation: 0 57 | shear: 0 58 | translate: 0.2 59 | flip: 0.5 60 | brightness: 0.2 61 | contrast: [0.8, 1.2] 62 | saturation: [0.8, 1.2] 63 | normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] 64 | val: 65 | name: XMLDataset 66 | class_names: *class_names 67 | img_path: VAL_IMAGE_FOLDER #Please fill in val image path 68 | ann_path: VAL_XML_FOLDER #Please fill in val xml path 69 | input_size: [320,320] #[w,h] 70 | keep_ratio: True 71 | pipeline: 72 | normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] 73 | device: 74 | gpu_ids: [0] 75 | workers_per_gpu: 12 76 | batchsize_per_gpu: 160 77 | schedule: 78 | # resume: 79 | # load_model: YOUR_MODEL_PATH 80 | optimizer: 81 | name: SGD 82 | lr: 0.14 83 | momentum: 0.9 84 | weight_decay: 0.0001 85 | warmup: 86 | name: linear 87 | steps: 300 88 | ratio: 0.1 89 | total_epochs: 190 90 | lr_schedule: 91 | name: MultiStepLR 92 | milestones: [130,160,175,185] 93 | gamma: 0.1 94 | val_intervals: 10 95 | evaluator: 96 | name: CocoDetectionEvaluator 97 | save_key: mAP 98 | 99 | log: 100 | interval: 10 101 | -------------------------------------------------------------------------------- /demo_android_ncnn/.gitignore: -------------------------------------------------------------------------------- 1 | *.iml 2 | .gradle 3 | /local.properties 4 | /.idea 5 | /.idea/caches 6 | /.idea/libraries 7 | /.idea/modules.xml 8 | /.idea/workspace.xml 9 | /.idea/navEditor.xml 10 | /.idea/assetWizardSettings.xml 11 | .DS_Store 12 | /build 13 | /captures 14 | .externalNativeBuild 15 | .cxx 16 | -------------------------------------------------------------------------------- /demo_android_ncnn/Android_demo.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nihui/nanodet/0f32d4a7d2817af30fe1a7139477fbadab165fa3/demo_android_ncnn/Android_demo.jpg -------------------------------------------------------------------------------- /demo_android_ncnn/README.md: -------------------------------------------------------------------------------- 1 | # NanoDet NCNN Android Demo 2 | 3 | This repo is an Android object detection demo of NanoDet using 4 | [Tencent's NCNN framework](https://github.com/Tencent/ncnn). 5 | 6 | # Tutorial 7 | 8 | ## Step1. 9 | Download ncnn-android-vulkan.zip from ncnn repo or build ncnn-android from source. 10 | 11 | - [ncnn-android-vulkan.zip download link](https://github.com/Tencent/ncnn/releases) 12 | 13 | ## Step2. 
14 | Unzip ncnn-android-vulkan.zip into demo_android_ncnn/app/src/main/cpp or change the ncnn_DIR path to yours in demo_android_ncnn/app/src/main/cpp/CMakeLists.txt 15 | 16 | ## Step3. 17 | Copy the NanoDet ncnn model file (nanodet_m.param and nanodet_m.bin) from models folder into demo_android_ncnn/app/src/main/assets 18 | 19 | * [NanoDet ncnn model download link](https://github.com/RangiLyu/nanodet/releases/download/v0.3.0/nanodet_m_ncnn_model.zip) 20 | 21 | If you want to run yolov4-tiny and yolov5s, download them and also put in demo_android_ncnn/app/src/main/assets. 22 | 23 | * [Yolov4 and v5 ncnn model download link](https://drive.google.com/file/d/1Qk_1fDvOcFmNppDnaMFW-xFpMgLDyeAs/view?usp=sharing) 24 | 25 | ## Step4. 26 | Open demo_android_ncnn folder with Android Studio and then build it. 27 | 28 | # Screenshot 29 | ![](Android_demo.jpg) 30 | 31 | 32 | # Reference 33 | 34 | * [ncnn](https://github.com/tencent/ncnn) 35 | * [YOLOv5_NCNN](https://github.com/WZTENG/YOLOv5_NCNN) 36 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/.gitignore: -------------------------------------------------------------------------------- 1 | /build 2 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'com.android.application' 2 | 3 | android { 4 | compileSdkVersion 29 5 | buildToolsVersion "29.0.3" 6 | defaultConfig { 7 | applicationId "com.rangi.nanodet" 8 | minSdkVersion 26 9 | targetSdkVersion 29 10 | versionCode 1 11 | versionName "1.0" 12 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" 13 | externalNativeBuild { 14 | cmake { 15 | cppFlags "" 16 | arguments '-DANDROID_PLATFORM=android-24', '-DANDROID_STL=c++_static', '-DANDROID_STL=c++_shared' 17 | } 18 | } 19 | 20 | ndk { 21 | moduleName "NcnnJniLog" 22 | ldLibs "log", "z", "m" 23 | abiFilters "armeabi-v7a", "arm64-v8a" 24 | } 25 | 26 | multiDexEnabled true 27 | } 28 | buildTypes { 29 | release { 30 | minifyEnabled false 31 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' 32 | } 33 | } 34 | externalNativeBuild { 35 | cmake { 36 | path "src/main/cpp/CMakeLists.txt" 37 | version "3.10.2" 38 | } 39 | } 40 | sourceSets { 41 | main { 42 | jniLibs.srcDirs = ['libs'] 43 | } 44 | } 45 | 46 | repositories { 47 | flatDir { 48 | dirs 'libs' 49 | } 50 | } 51 | } 52 | 53 | dependencies { 54 | implementation fileTree(dir: 'libs', include: ['*.jar']) 55 | implementation 'androidx.appcompat:appcompat:1.1.0' 56 | implementation 'androidx.constraintlayout:constraintlayout:1.1.3' 57 | testImplementation 'junit:junit:4.12' 58 | androidTestImplementation 'androidx.test.ext:junit:1.1.1' 59 | androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0' 60 | 61 | // Use the most recent version of CameraX, currently that is alpha04 62 | def camerax_version = "1.0.0-alpha05" 63 | //noinspection GradleDependency 64 | implementation "androidx.camera:camera-core:${camerax_version}" 65 | //noinspection GradleDependency 66 | implementation "androidx.camera:camera-camera2:${camerax_version}" 67 | 68 | implementation 'com.android.support:multidex:1.0.3' 69 | // crash 70 | implementation 'com.zxy.android:recovery:1.0.0' 71 | // photoview 72 | implementation 'com.github.chrisbanes:PhotoView:2.3.0' 73 | // implementation 'com.bm.photoview:library:1.4.1' 74 | 75 | } 76 | 
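Relating back to Step 3 of the README above: if `nanodet_m.param` / `nanodet_m.bin` are missing from `app/src/main/assets`, the native loader fails at runtime with little diagnostic output. Below is a minimal, hypothetical helper (not part of the demo sources; the class name and log tag are illustrative) that uses the standard `AssetManager.list` API to confirm the model files were actually packaged into the APK before `NanoDet.init` is called:

```java
// Hypothetical sanity check for README Step 3 -- not part of the demo sources.
import android.content.Context;
import android.content.res.AssetManager;
import android.util.Log;

import java.io.IOException;
import java.util.Arrays;

final class AssetCheck {
    private AssetCheck() {}

    /** Returns true if both NanoDet model files are packaged in the APK assets. */
    static boolean hasNanoDetModel(Context context) {
        AssetManager assets = context.getAssets();
        try {
            // "" lists the root of app/src/main/assets as packaged into the APK.
            String[] files = assets.list("");
            boolean ok = files != null
                    && Arrays.asList(files).contains("nanodet_m.param")
                    && Arrays.asList(files).contains("nanodet_m.bin");
            Log.d("AssetCheck", "assets: " + Arrays.toString(files) + ", nanodet model present: " + ok);
            return ok;
        } catch (IOException e) {
            Log.e("AssetCheck", "failed to list assets", e);
            return false;
        }
    }
}
```

Calling this from an Activity's `onCreate` before initializing the detector surfaces a missing-copy mistake in logcat instead of an opaque native load failure.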
-------------------------------------------------------------------------------- /demo_android_ncnn/app/proguard-rules.pro: -------------------------------------------------------------------------------- 1 | # Add project specific ProGuard rules here. 2 | # You can control the set of applied configuration files using the 3 | # proguardFiles setting in build.gradle. 4 | # 5 | # For more details, see 6 | # http://developer.android.com/guide/developing/tools/proguard.html 7 | 8 | # If your project uses WebView with JS, uncomment the following 9 | # and specify the fully qualified class name to the JavaScript interface 10 | # class: 11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview { 12 | # public *; 13 | #} 14 | 15 | # Uncomment this to preserve the line number information for 16 | # debugging stack traces. 17 | #-keepattributes SourceFile,LineNumberTable 18 | 19 | # If you keep the line number information, uncomment this to 20 | # hide the original source file name. 21 | #-renamesourcefileattribute SourceFile 22 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/androidTest/java/com/rangi/nanodet/ExampleInstrumentedTest.java: -------------------------------------------------------------------------------- 1 | package com.rangi.nanodet; 2 | 3 | import android.content.Context; 4 | 5 | import androidx.test.platform.app.InstrumentationRegistry; 6 | import androidx.test.ext.junit.runners.AndroidJUnit4; 7 | 8 | import org.junit.Test; 9 | import org.junit.runner.RunWith; 10 | 11 | import static org.junit.Assert.*; 12 | 13 | /** 14 | * Instrumented test, which will execute on an Android device. 15 | * 16 | * @see Testing documentation 17 | */ 18 | @RunWith(AndroidJUnit4.class) 19 | public class ExampleInstrumentedTest { 20 | @Test 21 | public void useAppContext() { 22 | // Context of the app under test. 
23 | Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext(); 24 | 25 | assertEquals("gd.hq.yolov5", appContext.getPackageName()); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 7 | 8 | 9 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 28 | 29 | 30 | 31 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/cpp/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.10) 2 | 3 | set(ncnn_DIR ${CMAKE_SOURCE_DIR}/ncnn-20201218-android-vulkan/${ANDROID_ABI}/lib/cmake/ncnn) 4 | find_package(ncnn REQUIRED) 5 | 6 | add_library(yolov5 SHARED 7 | jni_interface.cpp 8 | YoloV5.cpp 9 | YoloV4.cpp 10 | NanoDet.cpp 11 | ) 12 | 13 | target_link_libraries(yolov5 ncnn jnigraphics) 14 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/cpp/NanoDet.h: -------------------------------------------------------------------------------- 1 | // 2 | // Create by RangiLyu 3 | // 2020 / 10 / 2 4 | // 5 | 6 | #ifndef NANODET_H 7 | #define NANODET_H 8 | 9 | #include "net.h" 10 | #include "YoloV5.h" 11 | 12 | typedef struct HeadInfo 13 | { 14 | std::string cls_layer; 15 | std::string dis_layer; 16 | int stride; 17 | } HeadInfo; 18 | 19 | 20 | class NanoDet{ 21 | public: 22 | NanoDet(AAssetManager *mgr, const char *param, const char *bin, bool useGPU); 23 | 24 | ~NanoDet(); 25 | 26 | std::vector detect(JNIEnv *env, jobject image, float score_threshold, float nms_threshold); 27 | std::vector labels{"person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light", 28 | "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow", 29 | "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", 30 | "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", 31 | "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple", 32 | "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch", 33 | "potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone", 34 | "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", 35 | "hair drier", "toothbrush"}; 36 | private: 37 | void preprocess(JNIEnv *env, jobject image, ncnn::Mat& in); 38 | void decode_infer(ncnn::Mat& cls_pred, ncnn::Mat& dis_pred, int stride, float threshold, std::vector>& results, float width_ratio, float height_ratio); 39 | BoxInfo disPred2Bbox(const float*& dfl_det, int label, float score, int x, int y, int stride, float width_ratio, float height_ratio); 40 | 41 | static void nms(std::vector& result, float nms_threshold); 42 | 43 | ncnn::Net *Net; 44 | int input_size = 320; 45 | int num_class = 80; 46 | int reg_max = 7; 47 | std::vector heads_info{ 48 | // cls_pred|dis_pred|stride 49 | {"cls_pred_stride_8", "dis_pred_stride_8", 8}, 50 | {"cls_pred_stride_16", "dis_pred_stride_16", 16}, 51 | {"cls_pred_stride_32", "dis_pred_stride_32", 32}, 52 | }; 53 | 54 | public: 55 | static NanoDet *detector; 56 | 
static bool hasGPU; 57 | }; 58 | 59 | 60 | #endif //NANODET_H 61 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/cpp/YoloV4.cpp: -------------------------------------------------------------------------------- 1 | #include "YoloV4.h" 2 | 3 | bool YoloV4::hasGPU = true; 4 | YoloV4 *YoloV4::detector = nullptr; 5 | 6 | YoloV4::YoloV4(AAssetManager *mgr, const char *param, const char *bin, bool useGPU) { 7 | Net = new ncnn::Net(); 8 | // opt 需要在加载前设置 9 | hasGPU = ncnn::get_gpu_count() > 0; 10 | Net->opt.use_vulkan_compute = hasGPU && useGPU; // gpu 11 | Net->opt.use_fp16_arithmetic = true; // fp16运算加速 12 | Net->load_param(mgr, param); 13 | Net->load_model(mgr, bin); 14 | } 15 | 16 | YoloV4::~YoloV4() { 17 | delete Net; 18 | } 19 | 20 | std::vector YoloV4::detect(JNIEnv *env, jobject image, float threshold, float nms_threshold) { 21 | AndroidBitmapInfo img_size; 22 | AndroidBitmap_getInfo(env, image, &img_size); 23 | ncnn::Mat in_net = ncnn::Mat::from_android_bitmap_resize(env, image, ncnn::Mat::PIXEL_RGBA2RGB, input_size, 24 | input_size); 25 | float norm[3] = {1 / 255.f, 1 / 255.f, 1 / 255.f}; 26 | float mean[3] = {0, 0, 0}; 27 | in_net.substract_mean_normalize(mean, norm); 28 | auto ex = Net->create_extractor(); 29 | ex.set_light_mode(true); 30 | ex.set_num_threads(4); 31 | hasGPU = ncnn::get_gpu_count() > 0; 32 | ex.set_vulkan_compute(hasGPU); 33 | ex.input(0, in_net); 34 | std::vector result; 35 | ncnn::Mat blob; 36 | ex.extract("output", blob); 37 | auto boxes = decode_infer(blob, {(int) img_size.width, (int) img_size.height}, input_size, num_class, threshold); 38 | result.insert(result.begin(), boxes.begin(), boxes.end()); 39 | // nms(result,nms_threshold); 40 | return result; 41 | } 42 | 43 | inline float fast_exp(float x) { 44 | union { 45 | uint32_t i; 46 | float f; 47 | } v{}; 48 | v.i = (1 << 23) * (1.4426950409 * x + 126.93490512f); 49 | return v.f; 50 | } 51 | 52 | inline float sigmoid(float x) { 53 | return 1.0f / (1.0f + fast_exp(-x)); 54 | } 55 | 56 | std::vector 57 | YoloV4::decode_infer(ncnn::Mat &data, const yolocv::YoloSize &frame_size, int net_size, int num_classes, float threshold) { 58 | std::vector result; 59 | for (int i = 0; i < data.h; i++) { 60 | BoxInfo box; 61 | const float *values = data.row(i); 62 | box.label = values[0] - 1; 63 | box.score = values[1]; 64 | box.x1 = values[2] * (float) frame_size.width; 65 | box.y1 = values[3] * (float) frame_size.height; 66 | box.x2 = values[4] * (float) frame_size.width; 67 | box.y2 = values[5] * (float) frame_size.height; 68 | result.push_back(box); 69 | } 70 | return result; 71 | } 72 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/cpp/YoloV4.h: -------------------------------------------------------------------------------- 1 | #ifndef YOLOV4_H 2 | #define YOLOV4_H 3 | 4 | #include "net.h" 5 | #include "YoloV5.h" 6 | 7 | 8 | class YoloV4 { 9 | public: 10 | YoloV4(AAssetManager *mgr, const char *param, const char *bin, bool useGPU); 11 | 12 | ~YoloV4(); 13 | 14 | std::vector detect(JNIEnv *env, jobject image, float threshold, float nms_threshold); 15 | std::vector labels{"person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light", 16 | "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow", 17 | "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", 18 | 
"skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", 19 | "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple", 20 | "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch", 21 | "potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone", 22 | "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", 23 | "hair drier", "toothbrush"}; 24 | private: 25 | static std::vector 26 | decode_infer(ncnn::Mat &data, const yolocv::YoloSize &frame_size, int net_size, int num_classes, float threshold); 27 | 28 | // static void nms(std::vector& result,float nms_threshold); 29 | ncnn::Net *Net; 30 | int input_size = 640 / 2; 31 | int num_class = 80; 32 | public: 33 | static YoloV4 *detector; 34 | static bool hasGPU; 35 | }; 36 | 37 | 38 | #endif //YOLOV4_H 39 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/cpp/YoloV5.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by 邓昊晴 on 14/6/2020. 3 | // 4 | 5 | #ifndef YOLOV5_H 6 | #define YOLOV5_H 7 | 8 | #include "net.h" 9 | 10 | namespace yolocv { 11 | typedef struct { 12 | int width; 13 | int height; 14 | } YoloSize; 15 | } 16 | 17 | typedef struct { 18 | std::string name; 19 | int stride; 20 | std::vector anchors; 21 | } YoloLayerData; 22 | 23 | typedef struct BoxInfo { 24 | float x1; 25 | float y1; 26 | float x2; 27 | float y2; 28 | float score; 29 | int label; 30 | } BoxInfo; 31 | 32 | class YoloV5 { 33 | public: 34 | YoloV5(AAssetManager *mgr, const char *param, const char *bin, bool useGPU); 35 | 36 | ~YoloV5(); 37 | 38 | std::vector detect(JNIEnv *env, jobject image, float threshold, float nms_threshold); 39 | std::vector labels{"person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light", 40 | "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow", 41 | "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", 42 | "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", 43 | "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple", 44 | "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch", 45 | "potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone", 46 | "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", 47 | "hair drier", "toothbrush"}; 48 | private: 49 | static std::vector 50 | decode_infer(ncnn::Mat &data, int stride, const yolocv::YoloSize &frame_size, int net_size, int num_classes, 51 | const std::vector &anchors, float threshold); 52 | 53 | static void nms(std::vector &result, float nms_threshold); 54 | 55 | ncnn::Net *Net; 56 | int input_size = 640; 57 | int num_class = 80; 58 | std::vector layers{ 59 | {"394", 32, {{116, 90}, {156, 198}, {373, 326}}}, 60 | {"375", 16, {{30, 61}, {62, 45}, {59, 119}}}, 61 | {"output", 8, {{10, 13}, {16, 30}, {33, 23}}}, 62 | }; 63 | 64 | public: 65 | static YoloV5 *detector; 66 | static bool hasGPU; 67 | }; 68 | 69 | 70 | #endif //YOLOV5_H 71 | 
-------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/java/com/rangi/nanodet/AppCrashHandler.java: -------------------------------------------------------------------------------- 1 | package com.rangi.nanodet; 2 | 3 | import androidx.annotation.NonNull; 4 | 5 | class AppCrashHandler implements Thread.UncaughtExceptionHandler { 6 | 7 | private Thread.UncaughtExceptionHandler uncaughtExceptionHandler = Thread.getDefaultUncaughtExceptionHandler(); 8 | 9 | @Override 10 | public void uncaughtException(@NonNull Thread t, @NonNull Throwable e) { 11 | uncaughtExceptionHandler.uncaughtException(t, e); 12 | } 13 | 14 | public static void register() { 15 | Thread.setDefaultUncaughtExceptionHandler(new AppCrashHandler()); 16 | } 17 | 18 | } 19 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/java/com/rangi/nanodet/Box.java: -------------------------------------------------------------------------------- 1 | package com.rangi.nanodet; 2 | 3 | import android.graphics.Color; 4 | import android.graphics.RectF; 5 | 6 | import java.util.Random; 7 | 8 | public class Box { 9 | public float x0,y0,x1,y1; 10 | private int label; 11 | private float score; 12 | private static String[] labels={"person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light", 13 | "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow", 14 | "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", 15 | "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", 16 | "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple", 17 | "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch", 18 | "potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone", 19 | "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", 20 | "hair drier", "toothbrush"}; 21 | public Box(float x0,float y0, float x1, float y1, int label, float score){ 22 | this.x0 = x0; 23 | this.y0 = y0; 24 | this.x1 = x1; 25 | this.y1 = y1; 26 | this.label = label; 27 | this.score = score; 28 | } 29 | 30 | public RectF getRect(){ 31 | return new RectF(x0,y0,x1,y1); 32 | } 33 | 34 | public String getLabel(){ 35 | return labels[label]; 36 | } 37 | 38 | public float getScore(){ 39 | return score; 40 | } 41 | 42 | public int getColor(){ 43 | Random random = new Random(label); 44 | return Color.argb(255,random.nextInt(256),random.nextInt(256),random.nextInt(256)); 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/java/com/rangi/nanodet/NanoDet.java: -------------------------------------------------------------------------------- 1 | package com.rangi.nanodet; 2 | 3 | import android.content.res.AssetManager; 4 | import android.graphics.Bitmap; 5 | 6 | public class NanoDet { 7 | static { 8 | System.loadLibrary("yolov5"); 9 | } 10 | 11 | public static native void init(AssetManager manager, boolean useGPU); 12 | public static native Box[] detect(Bitmap bitmap, double threshold, double nms_threshold); 13 | } 14 | -------------------------------------------------------------------------------- 
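NanoDet.java above is the entire Java-side surface of the detector: a static `init(AssetManager, useGPU)` that loads the ncnn param/bin pair, and a static `detect(Bitmap, threshold, nms_threshold)` that returns the `Box[]` type defined in Box.java. A minimal usage sketch follows, assuming it sits in the same `com.rangi.nanodet` package as those two classes; the thresholds and the single-bitmap flow are illustrative (the demo's MainActivity, not reproduced above, works on a live camera feed, per the CameraX dependencies declared in build.gradle):

```java
package com.rangi.nanodet;

import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;

// Hypothetical one-shot helper -- not part of the demo sources.
final class DetectOnce {
    private DetectOnce() {}

    static Bitmap run(AssetManager assets, Bitmap frame) {
        NanoDet.init(assets, /* useGPU = */ false);      // loads nanodet_m.param / nanodet_m.bin from assets
        Box[] boxes = NanoDet.detect(frame, 0.4, 0.6);   // placeholder score and NMS thresholds

        // Draw the detections onto a mutable copy of the input frame.
        Bitmap annotated = frame.copy(Bitmap.Config.ARGB_8888, true);
        Canvas canvas = new Canvas(annotated);
        Paint paint = new Paint();
        paint.setStyle(Paint.Style.STROKE);
        paint.setStrokeWidth(4f);
        for (Box box : boxes) {
            paint.setColor(box.getColor());
            canvas.drawRect(box.getRect(), paint);       // Box exposes getRect(), getLabel(), getScore()
        }
        return annotated;
    }
}
```

The YOLOv4 and YOLOv5 wrappers later in this package expose the same `init`/`detect` pair, so the same pattern applies to them.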
/demo_android_ncnn/app/src/main/java/com/rangi/nanodet/NcnnApp.java: -------------------------------------------------------------------------------- 1 | package com.rangi.nanodet; 2 | 3 | import android.app.Application; 4 | import android.content.Context; 5 | import android.util.Log; 6 | 7 | import androidx.multidex.MultiDex; 8 | 9 | import com.zxy.recovery.callback.RecoveryCallback; 10 | import com.zxy.recovery.core.Recovery; 11 | 12 | 13 | public class NcnnApp extends Application { 14 | 15 | @Override 16 | public void onCreate() { 17 | super.onCreate(); 18 | 19 | //崩溃界面 20 | initRecovery(); 21 | } 22 | 23 | @Override 24 | protected void attachBaseContext(Context base) { 25 | super.attachBaseContext(base); 26 | MultiDex.install(base); 27 | } 28 | 29 | private void initRecovery() { 30 | Recovery.getInstance() 31 | .debug(BuildConfig.DEBUG) 32 | .recoverInBackground(true) 33 | .recoverStack(true) 34 | .mainPage(MainActivity.class) 35 | .recoverEnabled(true) 36 | .callback(new MyCrashCallback()) 37 | .silent(false, Recovery.SilentMode.RECOVER_ACTIVITY_STACK) 38 | // .skip(TestActivity.class) 39 | .init(this); 40 | AppCrashHandler.register(); 41 | } 42 | 43 | static final class MyCrashCallback implements RecoveryCallback { 44 | @Override 45 | public void stackTrace(String exceptionMessage) { 46 | Log.e("wzt", "exceptionMessage:" + exceptionMessage); 47 | } 48 | 49 | @Override 50 | public void cause(String cause) { 51 | Log.e("wzt", "cause:" + cause); 52 | } 53 | 54 | @Override 55 | public void exception(String exceptionType, String throwClassName, String throwMethodName, int throwLineNumber) { 56 | Log.e("wzt", "exceptionClassName:" + exceptionType); 57 | Log.e("wzt", "throwClassName:" + throwClassName); 58 | Log.e("wzt", "throwMethodName:" + throwMethodName); 59 | Log.e("wzt", "throwLineNumber:" + throwLineNumber); 60 | } 61 | 62 | @Override 63 | public void throwable(Throwable throwable) { 64 | 65 | } 66 | } 67 | 68 | } 69 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/java/com/rangi/nanodet/WelcomeActivity.java: -------------------------------------------------------------------------------- 1 | package com.rangi.nanodet; 2 | 3 | import androidx.appcompat.app.AlertDialog; 4 | import androidx.appcompat.app.AppCompatActivity; 5 | 6 | import android.content.Intent; 7 | import android.os.Bundle; 8 | import android.view.View; 9 | import android.widget.Button; 10 | import android.widget.CompoundButton; 11 | import android.widget.ToggleButton; 12 | 13 | 14 | public class WelcomeActivity extends AppCompatActivity { 15 | 16 | private ToggleButton tbUseGpu; 17 | private Button nanodet; 18 | private Button yolov5s; 19 | private Button yolov4tiny; 20 | 21 | private boolean useGPU = false; 22 | 23 | @Override 24 | protected void onCreate(Bundle savedInstanceState) { 25 | super.onCreate(savedInstanceState); 26 | setContentView(R.layout.activity_welcome); 27 | 28 | tbUseGpu = findViewById(R.id.tb_use_gpu); 29 | tbUseGpu.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { 30 | @Override 31 | public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { 32 | useGPU = isChecked; 33 | MainActivity.USE_GPU = useGPU; 34 | if (useGPU) { 35 | AlertDialog.Builder builder = new AlertDialog.Builder(WelcomeActivity.this); 36 | builder.setTitle("Warning"); 37 | builder.setMessage("It may not work well in GPU mode, or errors may occur."); 38 | builder.setCancelable(true); 39 | builder.setPositiveButton("OK", 
null); 40 | AlertDialog dialog = builder.create(); 41 | dialog.show(); 42 | } 43 | } 44 | }); 45 | 46 | nanodet = findViewById(R.id.btn_start_detect0); 47 | nanodet.setOnClickListener(new View.OnClickListener() { 48 | @Override 49 | public void onClick(View v) { 50 | MainActivity.USE_MODEL = MainActivity.NANODET; 51 | Intent intent = new Intent(WelcomeActivity.this, MainActivity.class); 52 | WelcomeActivity.this.startActivity(intent); 53 | } 54 | }); 55 | 56 | yolov5s = findViewById(R.id.btn_start_detect1); 57 | yolov5s.setOnClickListener(new View.OnClickListener() { 58 | @Override 59 | public void onClick(View v) { 60 | MainActivity.USE_MODEL = MainActivity.YOLOV5S; 61 | Intent intent = new Intent(WelcomeActivity.this, MainActivity.class); 62 | WelcomeActivity.this.startActivity(intent); 63 | } 64 | }); 65 | 66 | yolov4tiny = findViewById(R.id.btn_start_detect2); 67 | yolov4tiny.setOnClickListener(new View.OnClickListener() { 68 | @Override 69 | public void onClick(View v) { 70 | MainActivity.USE_MODEL = MainActivity.YOLOV4_TINY; 71 | Intent intent = new Intent(WelcomeActivity.this, MainActivity.class); 72 | WelcomeActivity.this.startActivity(intent); 73 | } 74 | }); 75 | 76 | } 77 | 78 | } 79 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/java/com/rangi/nanodet/YOLOv4.java: -------------------------------------------------------------------------------- 1 | package com.rangi.nanodet; 2 | 3 | import android.content.res.AssetManager; 4 | import android.graphics.Bitmap; 5 | 6 | public class YOLOv4 { 7 | static { 8 | System.loadLibrary("yolov5"); // 存放在yolov5.so中 9 | } 10 | 11 | public static native void init(AssetManager manager, boolean useGPU); 12 | public static native Box[] detect(Bitmap bitmap, double threshold, double nms_threshold); 13 | } 14 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/java/com/rangi/nanodet/YOLOv5.java: -------------------------------------------------------------------------------- 1 | package com.rangi.nanodet; 2 | 3 | import android.content.res.AssetManager; 4 | import android.graphics.Bitmap; 5 | 6 | public class YOLOv5 { 7 | static { 8 | System.loadLibrary("yolov5"); 9 | } 10 | 11 | public static native void init(AssetManager manager, boolean useGPU); 12 | public static native Box[] detect(Bitmap bitmap, double threshold, double nms_threshold); 13 | } 14 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/res/drawable-v24/ic_launcher_foreground.xml: -------------------------------------------------------------------------------- 1 | 7 | 12 | 13 | 19 | 22 | 25 | 26 | 27 | 28 | 34 | 35 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/res/drawable-xxhdpi/cpu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nihui/nanodet/0f32d4a7d2817af30fe1a7139477fbadab165fa3/demo_android_ncnn/app/src/main/res/drawable-xxhdpi/cpu.png -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/res/drawable-xxhdpi/gpu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nihui/nanodet/0f32d4a7d2817af30fe1a7139477fbadab165fa3/demo_android_ncnn/app/src/main/res/drawable-xxhdpi/gpu.png 
-------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/res/drawable-xxhdpi/ncnn_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nihui/nanodet/0f32d4a7d2817af30fe1a7139477fbadab165fa3/demo_android_ncnn/app/src/main/res/drawable-xxhdpi/ncnn_icon.png -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/res/drawable/cpu_gpu_bg.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /demo_android_ncnn/app/src/main/res/layout/activity_welcome.xml: -------------------------------------------------------------------------------- 1 | 2 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 30 | 31 | 38 | 39 | 47 | 48 |