├── .DS_Store
├── LICENSE
├── README.md
├── cocoapi
│   ├── .gitignore
│   ├── .travis.yml
│   ├── PythonAPI
│   │   ├── Makefile
│   │   ├── pycocoDemo.ipynb
│   │   ├── pycocoEvalDemo.ipynb
│   │   ├── pycocotools
│   │   │   ├── __init__.py
│   │   │   ├── _mask.pyx
│   │   │   ├── coco.py
│   │   │   ├── cocoeval.py
│   │   │   └── mask.py
│   │   └── setup.py
│   ├── README.txt
│   ├── common
│   │   ├── gason.cpp
│   │   ├── gason.h
│   │   ├── maskApi.c
│   │   └── maskApi.h
│   └── license.txt
├── config
│   ├── custom_faster_rcnn_inceptionv2.config
│   └── label_map.pbtxt
├── data
│   └── dataset
│       ├── image_1.jpg
│       ├── image_10.jpg
│       ├── image_11.jpg
│       ├── image_12.jpg
│       ├── image_13.jpg
│       ├── image_14.jpg
│       ├── image_15.jpg
│       ├── image_16.jpg
│       ├── image_17.jpg
│       ├── image_18.jpg
│       ├── image_19.jpg
│       ├── image_2.jpg
│       ├── image_20.jpg
│       ├── image_21.jpg
│       ├── image_22.jpg
│       ├── image_23.jpg
│       ├── image_24.jpg
│       ├── image_25.jpg
│       ├── image_26.jpg
│       ├── image_27.jpg
│       ├── image_28.jpg
│       ├── image_29.jpg
│       ├── image_3.jpg
│       ├── image_31.jpg
│       ├── image_32.jpg
│       ├── image_33.jpg
│       ├── image_34.jpg
│       ├── image_35.jpg
│       ├── image_37.jpg
│       ├── image_38.jpg
│       ├── image_39.jpg
│       ├── image_4.jpg
│       ├── image_40.jpg
│       ├── image_41.jpg
│       ├── image_42.jpg
│       ├── image_44.jpg
│       ├── image_45.jpg
│       ├── image_46.jpg
│       ├── image_5.jpg
│       ├── image_6.jpg
│       ├── image_7.jpg
│       ├── image_8.jpg
│       ├── image_9.jpg
│       ├── per_img_labels
│       │   ├── 02e00bc1b9b68ac7198bf5449e28ef26-asset.json
│       │   ├── 09eda920710aeac7fd806503829aead7-asset.json
│       │   ├── 0d3920127c8fa96c428f6a897de97141-asset.json
│       │   ├── 12d258e3acea75a420a93a11d1a06eab-asset.json
│       │   ├── 1665e44f43ab635ebdfd5cc4d23706d8-asset.json
│       │   ├── 1d9ced0f78abc8fa69c45e6eff59592f-asset.json
│       │   ├── 1feb953f4987564e020c6aff15fec83a-asset.json
│       │   ├── 22655bb2c4cba5393f437ca34fb94c77-asset.json
│       │   ├── 22b7b013a3d853d6ed962e60fde8bc72-asset.json
│       │   ├── 2338d4d6665b7c787ab59e657ad5f8a2-asset.json
│       │   ├── 243feb2b55c4363c906e30fc95b7543a-asset.json
│       │   ├── 2741eff2d159c91fd479197db5bedf5a-asset.json
│       │   ├── 2876353694512d97ce746aed6799cc46-asset.json
│       │   ├── 289e0c5761f92ca0a407ffed5146f1d3-asset.json
│       │   ├── 2d30d86f62170e9e489ea540e8b4a39f-asset.json
│       │   ├── 2dbe1038abcbff47e8acfc775ce6db21-asset.json
│       │   ├── 3222b42a5f6e8a6458326fffba292188-asset.json
│       │   ├── 35b5365cfbbe4b186dc24c2f995f8916-asset.json
│       │   ├── 35f22bbd45d5fffdcc0f7cc746493e4e-asset.json
│       │   ├── 369a007eeff07a61d2b046ff643e2fa6-asset.json
│       │   ├── 39469fc1e79e0a3e8235ea772be6dd72-asset.json
│       │   ├── 3aedd678daf035361c65e15d73d08793-asset.json
│       │   ├── 3e3cc0461e9c11e75602394fc7fbca95-asset.json
│       │   ├── 4e74618cdcf958b0cf108318c54a405e-asset.json
│       │   ├── 5f842697452d03175d79a4da15680256-asset.json
│       │   ├── 62db0598df57738f9884f2d587ed7631-asset.json
│       │   ├── 74b6542be280e332ef6986e2cfd0c2d1-asset.json
│       │   ├── 7c3047752836e4fd16c02024d2d9b220-asset.json
│       │   ├── 86af257b88349482e224a29a40c0d4c1-asset.json
│       │   ├── 9563f7136cf9e3cfd6b33bf886b1a3be-asset.json
│       │   ├── 99789719a193ba0ca5965bd3d9376b12-asset.json
│       │   ├── a0956876b476836630a9310ff51e88c4-asset.json
│       │   ├── c9428a2d6e8f5305a8558118d2b5f5d4-asset.json
│       │   ├── e2637bfe01168d4e028e28b72a1484e3-asset.json
│       │   ├── e3218a671baf8454c595b7947a3b3dbe-asset.json
│       │   ├── e5254f780f7afb1b2dbd86b37adcf8da-asset.json
│       │   ├── e72121c269e8a5f3d882e925b773d1b9-asset.json
│       │   ├── f0b47a56307f8dc2ef8d7fd152cad921-asset.json
│       │   ├── faecf8232185bead71569293253b7fa6-asset.json
│       │   └── ff2d61792d2d08620df8593cebfc2212-asset.json
│       ├── signature_detection_blog-export.json
│       └── signature_detection_blog.vott
├── dataset_utils
│   └── make_official_dataset.py
├── image_blog.jpg
├── inference
│   ├── HandwritingDetector.py
│   ├── instructions.txt
│   ├── run_detector_inference.sh
│   └── utils
│       ├── signature_detect_config.json
│       └── visualization_utils.py
├── models
│   ├── .gitignore
│   ├── .gitmodules
│   ├── CODEOWNERS
│   ├── CONTRIBUTING.md
│   ├── LICENSE
│   ├── README.md
│   └── research
│       ├── make_logs.py
│       ├── object_detection
│       │   ├── CONTRIBUTING.md
│       │   ├── README.md
│       │   ├── __init__.py
│       │   ├── anchor_generators
│       │   │   ├── __init__.py
│       │   │   ├── grid_anchor_generator.py
│       │   │   ├── grid_anchor_generator_test.py
│       │   │   ├── multiple_grid_anchor_generator.py
│       │   │   ├── multiple_grid_anchor_generator_test.py
│       │   │   ├── multiscale_grid_anchor_generator.py
│       │   │   └── multiscale_grid_anchor_generator_test.py
│       │   ├── box_coders
│       │   │   ├── __init__.py
│       │   │   ├── faster_rcnn_box_coder.py
│       │   │   ├── faster_rcnn_box_coder_test.py
│       │   │   ├── keypoint_box_coder.py
│       │   │   ├── keypoint_box_coder_test.py
│       │   │   ├── mean_stddev_box_coder.py
│       │   │   ├── mean_stddev_box_coder_test.py
│       │   │   ├── square_box_coder.py
│       │   │   └── square_box_coder_test.py
│       │   ├── builders
│       │   │   ├── __init__.py
│       │   │   ├── anchor_generator_builder.py
│       │   │   ├── anchor_generator_builder_test.py
│       │   │   ├── box_coder_builder.py
│       │   │   ├── box_coder_builder_test.py
│       │   │   ├── box_predictor_builder.py
│       │   │   ├── box_predictor_builder_test.py
│       │   │   ├── calibration_builder.py
│       │   │   ├── calibration_builder_test.py
│       │   │   ├── dataset_builder.py
│       │   │   ├── dataset_builder_test.py
│       │   │   ├── graph_rewriter_builder.py
│       │   │   ├── graph_rewriter_builder_test.py
│       │   │   ├── hyperparams_builder.py
│       │   │   ├── hyperparams_builder_test.py
│       │   │   ├── image_resizer_builder.py
│       │   │   ├── image_resizer_builder_test.py
│       │   │   ├── input_reader_builder.py
│       │   │   ├── input_reader_builder_test.py
│       │   │   ├── losses_builder.py
│       │   │   ├── losses_builder_test.py
│       │   │   ├── matcher_builder.py
│       │   │   ├── matcher_builder_test.py
│       │   │   ├── model_builder.py
│       │   │   ├── model_builder_test.py
│       │   │   ├── optimizer_builder.py
│       │   │   ├── optimizer_builder_test.py
│       │   │   ├── post_processing_builder.py
│       │   │   ├── post_processing_builder_test.py
│       │   │   ├── preprocessor_builder.py
│       │   │   ├── preprocessor_builder_test.py
│       │   │   ├── region_similarity_calculator_builder.py
│       │   │   └── region_similarity_calculator_builder_test.py
│       │   ├── core
│       │   │   ├── __init__.py
│       │   │   ├── anchor_generator.py
│       │   │   ├── balanced_positive_negative_sampler.py
│       │   │   ├── balanced_positive_negative_sampler_test.py
│       │   │   ├── batcher.py
│       │   │   ├── batcher_test.py
│       │   │   ├── box_coder.py
│       │   │   ├── box_coder_test.py
│       │   │   ├── box_list.py
│       │   │   ├── box_list_ops.py
│       │   │   ├── box_list_ops_test.py
│       │   │   ├── box_list_test.py
│       │   │   ├── box_predictor.py
│       │   │   ├── data_decoder.py
│       │   │   ├── data_parser.py
│       │   │   ├── freezable_batch_norm.py
│       │   │   ├── freezable_batch_norm_test.py
│       │   │   ├── keypoint_ops.py
│       │   │   ├── keypoint_ops_test.py
│       │   │   ├── losses.py
│       │   │   ├── losses_test.py
│       │   │   ├── matcher.py
│       │   │   ├── matcher_test.py
│       │   │   ├── minibatch_sampler.py
│       │   │   ├── minibatch_sampler_test.py
│       │   │   ├── model.py
│       │   │   ├── post_processing.py
│       │   │   ├── post_processing_test.py
│       │   │   ├── prefetcher.py
│       │   │   ├── prefetcher_test.py
│       │   │   ├── preprocessor.py
│       │   │   ├── preprocessor_cache.py
│       │   │   ├── preprocessor_test.py
│       │   │   ├── region_similarity_calculator.py
│       │   │   ├── region_similarity_calculator_test.py
│       │   │   ├── standard_fields.py
│       │   │   ├── target_assigner.py
│       │   │   └── target_assigner_test.py
│       │   ├── data_decoders
│       │   │   ├── __init__.py
│       │   │   ├── tf_example_decoder.py
│       │   │   └── tf_example_decoder_test.py
│       │   ├── dataset_tools
│       │   │   ├── __init__.py
│       │   │   ├── create_coco_tf_record.py
│       │   │   ├── create_coco_tf_record_test.py
│       │   │   ├── create_kitti_tf_record.py
│       │   │   ├── create_kitti_tf_record_test.py
│       │   │   ├── create_oid_tf_record.py
│       │   │   ├── create_pascal_tf_record.py
│       │   │   ├── create_pascal_tf_record_test.py
│       │   │   ├── create_pet_tf_record.py
│       │   │   ├── create_pycocotools_package.sh
│       │   │   ├── download_and_preprocess_mscoco.sh
│       │   │   ├── oid_hierarchical_labels_expansion.py
│       │   │   ├── oid_hierarchical_labels_expansion_test.py
│       │   │   ├── oid_tfrecord_creation.py
│       │   │   ├── oid_tfrecord_creation_test.py
│       │   │   ├── tf_record_creation_util.py
│       │   │   └── tf_record_creation_util_test.py
│       │   ├── eval_util.py
│       │   ├── eval_util_test.py
│       │   ├── export_inference_graph.py
│       │   ├── export_model.sh
│       │   ├── export_tflite_ssd_graph.py
│       │   ├── export_tflite_ssd_graph_lib.py
│       │   ├── export_tflite_ssd_graph_lib_test.py
│       │   ├── exporter.py
│       │   ├── exporter_test.py
│       │   ├── inference
│       │   │   ├── __init__.py
│       │   │   ├── detection_inference.py
│       │   │   ├── detection_inference_test.py
│       │   │   └── infer_detections.py
│       │   ├── inputs.py
│       │   ├── inputs_test.py
│       │   ├── legacy
│       │   │   ├── __init__.py
│       │   │   ├── eval.py
│       │   │   ├── evaluator.py
│       │   │   ├── train.py
│       │   │   ├── trainer.py
│       │   │   └── trainer_test.py
│       │   ├── matchers
│       │   │   ├── __init__.py
│       │   │   ├── argmax_matcher.py
│       │   │   ├── argmax_matcher_test.py
│       │   │   ├── bipartite_matcher.py
│       │   │   └── bipartite_matcher_test.py
│       │   ├── meta_architectures
│       │   │   ├── __init__.py
│       │   │   ├── faster_rcnn_meta_arch.py
│       │   │   ├── faster_rcnn_meta_arch_test.py
│       │   │   ├── faster_rcnn_meta_arch_test_lib.py
│       │   │   ├── rfcn_meta_arch.py
│       │   │   ├── rfcn_meta_arch_test.py
│       │   │   ├── ssd_meta_arch.py
│       │   │   ├── ssd_meta_arch_test.py
│       │   │   └── ssd_meta_arch_test_lib.py
│       │   ├── metrics
│       │   │   ├── __init__.py
│       │   │   ├── calibration_evaluation.py
│       │   │   ├── calibration_evaluation_test.py
│       │   │   ├── calibration_metrics.py
│       │   │   ├── calibration_metrics_test.py
│       │   │   ├── coco_evaluation.py
│       │   │   ├── coco_evaluation_test.py
│       │   │   ├── coco_tools.py
│       │   │   ├── coco_tools_test.py
│       │   │   ├── io_utils.py
│       │   │   ├── offline_eval_map_corloc.py
│       │   │   ├── offline_eval_map_corloc_test.py
│       │   │   ├── oid_od_challenge_evaluation.py
│       │   │   ├── oid_od_challenge_evaluation_utils.py
│       │   │   ├── oid_od_challenge_evaluation_utils_test.py
│       │   │   ├── oid_vrd_challenge_evaluation.py
│       │   │   ├── oid_vrd_challenge_evaluation_utils.py
│       │   │   ├── oid_vrd_challenge_evaluation_utils_test.py
│       │   │   ├── tf_example_parser.py
│       │   │   └── tf_example_parser_test.py
│       │   ├── model_hparams.py
│       │   ├── model_lib.py
│       │   ├── model_lib_test.py
│       │   ├── model_main.py
│       │   ├── model_tpu_main.py
│       │   ├── models
│       │   │   ├── __init__.py
│       │   │   ├── embedded_ssd_mobilenet_v1_feature_extractor.py
│       │   │   ├── embedded_ssd_mobilenet_v1_feature_extractor_test.py
│       │   │   ├── faster_rcnn_inception_resnet_v2_feature_extractor.py
│       │   │   ├── faster_rcnn_inception_resnet_v2_feature_extractor_test.py
│       │   │   ├── faster_rcnn_inception_v2_feature_extractor.py
│       │   │   ├── faster_rcnn_inception_v2_feature_extractor_test.py
│       │   │   ├── faster_rcnn_mobilenet_v1_feature_extractor.py
│       │   │   ├── faster_rcnn_mobilenet_v1_feature_extractor_test.py
│       │   │   ├── faster_rcnn_nas_feature_extractor.py
│       │   │   ├── faster_rcnn_nas_feature_extractor_test.py
│       │   │   ├── faster_rcnn_pnas_feature_extractor.py
│       │   │   ├── faster_rcnn_pnas_feature_extractor_test.py
│       │   │   ├── faster_rcnn_resnet_v1_feature_extractor.py
│       │   │   ├── faster_rcnn_resnet_v1_feature_extractor_test.py
│       │   │   ├── feature_map_generators.py
│       │   │   ├── feature_map_generators_test.py
│       │   │   ├── keras_models
│       │   │   │   ├── __init__.py
│       │   │   │   ├── mobilenet_v1.py
│       │   │   │   ├── mobilenet_v1_test.py
│       │   │   │   ├── mobilenet_v2.py
│       │   │   │   ├── mobilenet_v2_test.py
│       │   │   │   ├── original_mobilenet_v2.py
│       │   │   │   └── test_utils.py
│       │   │   ├── ssd_feature_extractor_test.py
│       │   │   ├── ssd_inception_v2_feature_extractor.py
│       │   │   ├── ssd_inception_v2_feature_extractor_test.py
│       │   │   ├── ssd_inception_v3_feature_extractor.py
│       │   │   ├── ssd_inception_v3_feature_extractor_test.py
│       │   │   ├── ssd_mobilenet_v1_feature_extractor.py
│       │   │   ├── ssd_mobilenet_v1_feature_extractor_test.py
│       │   │   ├── ssd_mobilenet_v1_fpn_feature_extractor.py
│       │   │   ├── ssd_mobilenet_v1_fpn_feature_extractor_test.py
│       │   │   ├── ssd_mobilenet_v1_keras_feature_extractor.py
│       │   │   ├── ssd_mobilenet_v1_ppn_feature_extractor.py
│       │   │   ├── ssd_mobilenet_v1_ppn_feature_extractor_test.py
│       │   │   ├── ssd_mobilenet_v2_feature_extractor.py
│       │   │   ├── ssd_mobilenet_v2_feature_extractor_test.py
│       │   │   ├── ssd_mobilenet_v2_fpn_feature_extractor.py
│       │   │   ├── ssd_mobilenet_v2_fpn_feature_extractor_test.py
│       │   │   ├── ssd_mobilenet_v2_keras_feature_extractor.py
│       │   │   ├── ssd_pnasnet_feature_extractor.py
│       │   │   ├── ssd_pnasnet_feature_extractor_test.py
│       │   │   ├── ssd_resnet_v1_fpn_feature_extractor.py
│       │   │   ├── ssd_resnet_v1_fpn_feature_extractor_test.py
│       │   │   ├── ssd_resnet_v1_fpn_feature_extractor_testbase.py
│       │   │   ├── ssd_resnet_v1_ppn_feature_extractor.py
│       │   │   ├── ssd_resnet_v1_ppn_feature_extractor_test.py
│       │   │   └── ssd_resnet_v1_ppn_feature_extractor_testbase.py
│       │   ├── predictors
│       │   │   ├── __init__.py
│       │   │   ├── convolutional_box_predictor.py
│       │   │   ├── convolutional_box_predictor_test.py
│       │   │   ├── convolutional_keras_box_predictor.py
│       │   │   ├── convolutional_keras_box_predictor_test.py
│       │   │   ├── heads
│       │   │   │   ├── __init__.py
│       │   │   │   ├── box_head.py
│       │   │   │   ├── box_head_test.py
│       │   │   │   ├── class_head.py
│       │   │   │   ├── class_head_test.py
│       │   │   │   ├── head.py
│       │   │   │   ├── keras_box_head.py
│       │   │   │   ├── keras_box_head_test.py
│       │   │   │   ├── keras_class_head.py
│       │   │   │   ├── keras_class_head_test.py
│       │   │   │   ├── keras_mask_head.py
│       │   │   │   ├── keras_mask_head_test.py
│       │   │   │   ├── keypoint_head.py
│       │   │   │   ├── keypoint_head_test.py
│       │   │   │   ├── mask_head.py
│       │   │   │   └── mask_head_test.py
│       │   │   ├── mask_rcnn_box_predictor.py
│       │   │   ├── mask_rcnn_box_predictor_test.py
│       │   │   ├── rfcn_box_predictor.py
│       │   │   └── rfcn_box_predictor_test.py
│       │   ├── protos
│       │   │   ├── __init__.py
│       │   │   ├── anchor_generator.proto
│       │   │   ├── anchor_generator_pb2.py
│       │   │   ├── argmax_matcher.proto
│       │   │   ├── argmax_matcher_pb2.py
│       │   │   ├── bipartite_matcher.proto
│       │   │   ├── bipartite_matcher_pb2.py
│       │   │   ├── box_coder.proto
│       │   │   ├── box_coder_pb2.py
│       │   │   ├── box_predictor.proto
│       │   │   ├── box_predictor_pb2.py
│       │   │   ├── calibration.proto
│       │   │   ├── calibration_pb2.py
│       │   │   ├── eval.proto
│       │   │   ├── eval_pb2.py
│       │   │   ├── faster_rcnn.proto
│       │   │   ├── faster_rcnn_box_coder.proto
│       │   │   ├── faster_rcnn_box_coder_pb2.py
│       │   │   ├── faster_rcnn_pb2.py
│       │   │   ├── graph_rewriter.proto
│       │   │   ├── graph_rewriter_pb2.py
│       │   │   ├── grid_anchor_generator.proto
│       │   │   ├── grid_anchor_generator_pb2.py
│       │   │   ├── hyperparams.proto
│       │   │   ├── hyperparams_pb2.py
│       │   │   ├── image_resizer.proto
│       │   │   ├── image_resizer_pb2.py
│       │   │   ├── input_reader.proto
│       │   │   ├── input_reader_pb2.py
│       │   │   ├── keypoint_box_coder.proto
│       │   │   ├── keypoint_box_coder_pb2.py
│       │   │   ├── losses.proto
│       │   │   ├── losses_pb2.py
│       │   │   ├── matcher.proto
│       │   │   ├── matcher_pb2.py
│       │   │   ├── mean_stddev_box_coder.proto
│       │   │   ├── mean_stddev_box_coder_pb2.py
│       │   │   ├── model.proto
│       │   │   ├── model_pb2.py
│       │   │   ├── multiscale_anchor_generator.proto
│       │   │   ├── multiscale_anchor_generator_pb2.py
│       │   │   ├── optimizer.proto
│       │   │   ├── optimizer_pb2.py
│       │   │   ├── pipeline.proto
│       │   │   ├── pipeline_pb2.py
│       │   │   ├── post_processing.proto
│       │   │   ├── post_processing_pb2.py
│       │   │   ├── preprocessor.proto
│       │   │   ├── preprocessor_pb2.py
│       │   │   ├── region_similarity_calculator.proto
│       │   │   ├── region_similarity_calculator_pb2.py
│       │   │   ├── square_box_coder.proto
│       │   │   ├── square_box_coder_pb2.py
│       │   │   ├── ssd.proto
│       │   │   ├── ssd_anchor_generator.proto
│       │   │   ├── ssd_anchor_generator_pb2.py
│       │   │   ├── ssd_pb2.py
│       │   │   ├── string_int_label_map.proto
│       │   │   ├── string_int_label_map_pb2.py
│       │   │   ├── train.proto
│       │   │   └── train_pb2.py
│       │   └── utils
│       │       ├── __init__.py
│       │       ├── category_util.py
│       │       ├── category_util_test.py
│       │       ├── config_util.py
│       │       ├── config_util_test.py
│       │       ├── context_manager.py
│       │       ├── context_manager_test.py
│       │       ├── dataset_util.py
│       │       ├── dataset_util_test.py
│       │       ├── json_utils.py
│       │       ├── json_utils_test.py
│       │       ├── label_map_util.py
│       │       ├── label_map_util_test.py
│       │       ├── learning_schedules.py
│       │       ├── learning_schedules_test.py
│       │       ├── metrics.py
│       │       ├── metrics_test.py
│       │       ├── np_box_list.py
│       │       ├── np_box_list_ops.py
│       │       ├── np_box_list_ops_test.py
│       │       ├── np_box_list_test.py
│       │       ├── np_box_mask_list.py
│       │       ├── np_box_mask_list_ops.py
│       │       ├── np_box_mask_list_ops_test.py
│       │       ├── np_box_mask_list_test.py
│       │       ├── np_box_ops.py
│       │       ├── np_box_ops_test.py
│       │       ├── np_mask_ops.py
│       │       ├── np_mask_ops_test.py
│       │       ├── object_detection_evaluation.py
│       │       ├── object_detection_evaluation_test.py
│       │       ├── ops.py
│       │       ├── ops_test.py
│       │       ├── per_image_evaluation.py
│       │       ├── per_image_evaluation_test.py
│       │       ├── per_image_vrd_evaluation.py
│       │       ├── per_image_vrd_evaluation_test.py
│       │       ├── shape_utils.py
│       │       ├── shape_utils_test.py
│       │       ├── static_shape.py
│       │       ├── static_shape_test.py
│       │       ├── test_case.py
│       │       ├── test_utils.py
│       │       ├── test_utils_test.py
│       │       ├── variables_helper.py
│       │       ├── variables_helper_test.py
│       │       ├── visualization_utils.py
│       │       ├── visualization_utils_test.py
│       │       ├── vrd_evaluation.py
│       │       └── vrd_evaluation_test.py
│       ├── protobuf.zip
│       ├── setup.py
│       ├── slim
│       │   ├── BUILD
│       │   ├── README.md
│       │   ├── WORKSPACE
│       │   ├── __init__.py
│       │   ├── datasets
│       │   │   ├── __init__.py
│       │   │   ├── build_imagenet_data.py
│       │   │   ├── cifar10.py
│       │   │   ├── dataset_factory.py
│       │   │   ├── dataset_utils.py
│       │   │   ├── download_and_convert_cifar10.py
│       │   │   ├── download_and_convert_flowers.py
│       │   │   ├── download_and_convert_imagenet.sh
│       │   │   ├── download_and_convert_mnist.py
│       │   │   ├── download_imagenet.sh
│       │   │   ├── flowers.py
│       │   │   ├── imagenet.py
│       │   │   ├── imagenet_2012_validation_synset_labels.txt
│       │   │   ├── imagenet_lsvrc_2015_synsets.txt
│       │   │   ├── imagenet_metadata.txt
│       │   │   ├── mnist.py
│       │   │   ├── preprocess_imagenet_validation_data.py
│       │   │   └── process_bounding_boxes.py
│       │   ├── deployment
│       │   │   ├── __init__.py
│       │   │   ├── model_deploy.py
│       │   │   └── model_deploy_test.py
│       │   ├── download_and_convert_data.py
│       │   ├── eval_image_classifier.py
│       │   ├── export_inference_graph.py
│       │   ├── export_inference_graph_test.py
│       │   ├── nets
│       │   │   ├── __init__.py
│       │   │   ├── alexnet.py
│       │   │   ├── alexnet_test.py
│       │   │   ├── cifarnet.py
│       │   │   ├── cyclegan.py
│       │   │   ├── cyclegan_test.py
│       │   │   ├── dcgan.py
│       │   │   ├── dcgan_test.py
│       │   │   ├── i3d.py
│       │   │   ├── i3d_test.py
│       │   │   ├── i3d_utils.py
│       │   │   ├── inception.py
│       │   │   ├── inception_resnet_v2.py
│       │   │   ├── inception_resnet_v2_test.py
│       │   │   ├── inception_utils.py
│       │   │   ├── inception_v1.py
│       │   │   ├── inception_v1_test.py
│       │   │   ├── inception_v2.py
│       │   │   ├── inception_v2_test.py
│       │   │   ├── inception_v3.py
│       │   │   ├── inception_v3_test.py
│       │   │   ├── inception_v4.py
│       │   │   ├── inception_v4_test.py
│       │   │   ├── lenet.py
│       │   │   ├── mobilenet
│       │   │   │   ├── README.md
│       │   │   │   ├── __init__.py
│       │   │   │   ├── conv_blocks.py
│       │   │   │   ├── madds_top1_accuracy.png
│       │   │   │   ├── mnet_v1_vs_v2_pixel1_latency.png
│       │   │   │   ├── mobilenet.py
│       │   │   │   ├── mobilenet_example.ipynb
│       │   │   │   ├── mobilenet_v2.py
│       │   │   │   └── mobilenet_v2_test.py
│       │   │   ├── mobilenet_v1.md
│       │   │   ├── mobilenet_v1.png
│       │   │   ├── mobilenet_v1.py
│       │   │   ├── mobilenet_v1_eval.py
│       │   │   ├── mobilenet_v1_test.py
│       │   │   ├── mobilenet_v1_train.py
│       │   │   ├── nasnet
│       │   │   │   ├── README.md
│       │   │   │   ├── __init__.py
│       │   │   │   ├── nasnet.py
│       │   │   │   ├── nasnet_test.py
│       │   │   │   ├── nasnet_utils.py
│       │   │   │   ├── nasnet_utils_test.py
│       │   │   │   ├── pnasnet.py
│       │   │   │   └── pnasnet_test.py
│       │   │   ├── nets_factory.py
│       │   │   ├── nets_factory_test.py
│       │   │   ├── overfeat.py
│       │   │   ├── overfeat_test.py
│       │   │   ├── pix2pix.py
│       │   │   ├── pix2pix_test.py
│       │   │   ├── resnet_utils.py
│       │   │   ├── resnet_v1.py
│       │   │   ├── resnet_v1_test.py
│       │   │   ├── resnet_v2.py
│       │   │   ├── resnet_v2_test.py
│       │   │   ├── s3dg.py
│       │   │   ├── s3dg_test.py
│       │   │   ├── vgg.py
│       │   │   └── vgg_test.py
│       │   ├── preprocessing
│       │   │   ├── __init__.py
│       │   │   ├── cifarnet_preprocessing.py
│       │   │   ├── inception_preprocessing.py
│       │   │   ├── lenet_preprocessing.py
│       │   │   ├── preprocessing_factory.py
│       │   │   └── vgg_preprocessing.py
│       │   ├── scripts
│       │   │   ├── export_mobilenet.sh
│       │   │   ├── finetune_inception_resnet_v2_on_flowers.sh
│       │   │   ├── finetune_inception_v1_on_flowers.sh
│       │   │   ├── finetune_inception_v3_on_flowers.sh
│       │   │   ├── finetune_resnet_v1_50_on_flowers.sh
│       │   │   ├── train_cifarnet_on_cifar10.sh
│       │   │   └── train_lenet_on_mnist.sh
│       │   ├── setup.py
│       │   ├── slim_walkthrough.ipynb
│       │   └── train_image_classifier.py
│       └── train_sign_detect_frcnn.sh
└── requirements.txt

/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/.DS_Store
--------------------------------------------------------------------------------
/cocoapi/.gitignore:
--------------------------------------------------------------------------------
images/
annotations/
results/
external/
.DS_Store

MatlabAPI/analyze*/
MatlabAPI/visualize*/
MatlabAPI/private/maskApiMex.*

PythonAPI/pycocotools/__init__.pyc
PythonAPI/pycocotools/_mask.c
PythonAPI/pycocotools/_mask.so
PythonAPI/pycocotools/coco.pyc
PythonAPI/pycocotools/cocoeval.pyc
PythonAPI/pycocotools/mask.pyc
--------------------------------------------------------------------------------
/cocoapi/.travis.yml:
--------------------------------------------------------------------------------
group: travis_latest
language: python
cache: pip
python:
  - 2.7
  - 3.6
install:
  - pip install --upgrade pip
  - pip install pycocotools
script:
  - true
--------------------------------------------------------------------------------
/cocoapi/PythonAPI/Makefile:
--------------------------------------------------------------------------------
all:
	# install pycocotools locally
	python setup.py build_ext --inplace
	rm -rf build

install:
	# install pycocotools to the Python site-packages
	python setup.py build_ext install
	rm -rf build
--------------------------------------------------------------------------------
/cocoapi/PythonAPI/pycocotools/__init__.py:
--------------------------------------------------------------------------------
__author__ = 'tylin'
--------------------------------------------------------------------------------
/cocoapi/PythonAPI/setup.py:
--------------------------------------------------------------------------------
from setuptools import setup, Extension
import numpy as np

# To compile and install locally run "python setup.py build_ext --inplace"
# To install library to Python site-packages run "python setup.py build_ext install"

ext_modules = [
    Extension(
        'pycocotools._mask',
        sources=['../common/maskApi.c', 'pycocotools/_mask.pyx'],
        include_dirs=[np.get_include(), '../common'],
        extra_compile_args=['-Wno-cpp', '-Wno-unused-function', '-std=c99'],
    )
]

setup(
    name='pycocotools',
    packages=['pycocotools'],
    package_dir={'pycocotools': 'pycocotools'},
    install_requires=[
        'setuptools>=18.0',
        'cython>=0.27.3',
        'matplotlib>=2.1.0'
    ],
    version='2.0',
    ext_modules=ext_modules
)
--------------------------------------------------------------------------------
/cocoapi/README.txt:
--------------------------------------------------------------------------------
COCO API - http://cocodataset.org/

COCO is a large image dataset designed for object detection, segmentation, person keypoints detection, stuff segmentation, and caption generation. This package provides Matlab, Python, and Lua APIs that assist in loading, parsing, and visualizing the annotations in COCO. Please visit http://cocodataset.org/ for more information on COCO, including the data, paper, and tutorials. The exact format of the annotations is also described on the COCO website. The Matlab and Python APIs are complete; the Lua API provides only basic functionality.

In addition to this API, please download both the COCO images and annotations in order to run the demos and use the API. Both are available on the project website.
-Please download, unzip, and place the images in: coco/images/
-Please download and place the annotations in: coco/annotations/
For substantially more details on the API please see http://cocodataset.org/#download.

After downloading the images and annotations, run the Matlab, Python, or Lua demos for example usage.

To install:
-For Matlab, add coco/MatlabApi to the Matlab path (OSX/Linux binaries provided)
-For Python, run "make" under coco/PythonAPI
-For Lua, run "luarocks make LuaAPI/rocks/coco-scm-1.rockspec" under coco/
--------------------------------------------------------------------------------
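Once "make" has been run under cocoapi/PythonAPI, the build can be smoke-tested from Python. A minimal sketch, assuming a COCO-format annotation file has been downloaded as the README above describes; the exact file name and category are assumptions, not part of this repository:

# Minimal pycocotools check after building; annotation path is hypothetical.
from pycocotools.coco import COCO

coco = COCO('../annotations/instances_val2017.json')
cat_ids = coco.getCatIds(catNms=['person'])               # category name -> ids
img_ids = coco.getImgIds(catIds=cat_ids)                  # images containing that category
anns = coco.loadAnns(coco.getAnnIds(imgIds=img_ids[:1]))  # annotations for the first hit
print(len(anns), 'annotations on the first matching image')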
/cocoapi/common/maskApi.h:
--------------------------------------------------------------------------------
/**************************************************************************
* Microsoft COCO Toolbox.      version 2.0
* Data, paper, and tutorials available at:  http://mscoco.org/
* Code written by Piotr Dollar and Tsung-Yi Lin, 2015.
* Licensed under the Simplified BSD License [see coco/license.txt]
**************************************************************************/
#pragma once

typedef unsigned int uint;
typedef unsigned long siz;
typedef unsigned char byte;
typedef double* BB;
typedef struct { siz h, w, m; uint *cnts; } RLE;

/* Initialize/destroy RLE. */
void rleInit( RLE *R, siz h, siz w, siz m, uint *cnts );
void rleFree( RLE *R );

/* Initialize/destroy RLE array. */
void rlesInit( RLE **R, siz n );
void rlesFree( RLE **R, siz n );

/* Encode binary masks using RLE. */
void rleEncode( RLE *R, const byte *mask, siz h, siz w, siz n );

/* Decode binary masks encoded via RLE. */
void rleDecode( const RLE *R, byte *mask, siz n );

/* Compute union or intersection of encoded masks. */
void rleMerge( const RLE *R, RLE *M, siz n, int intersect );

/* Compute area of encoded masks. */
void rleArea( const RLE *R, siz n, uint *a );

/* Compute intersection over union between masks. */
void rleIou( RLE *dt, RLE *gt, siz m, siz n, byte *iscrowd, double *o );

/* Compute non-maximum suppression between bounding masks */
void rleNms( RLE *dt, siz n, uint *keep, double thr );

/* Compute intersection over union between bounding boxes. */
void bbIou( BB dt, BB gt, siz m, siz n, byte *iscrowd, double *o );

/* Compute non-maximum suppression between bounding boxes */
void bbNms( BB dt, siz n, uint *keep, double thr );

/* Get bounding boxes surrounding encoded masks. */
void rleToBbox( const RLE *R, BB bb, siz n );

/* Convert bounding boxes to encoded masks. */
void rleFrBbox( RLE *R, const BB bb, siz h, siz w, siz n );

/* Convert polygon to encoded mask. */
void rleFrPoly( RLE *R, const double *xy, siz k, siz h, siz w );

/* Get compressed string representation of encoded mask. */
char* rleToString( const RLE *R );

/* Convert from compressed string representation of encoded mask. */
void rleFrString( RLE *R, char *s, siz h, siz w );
--------------------------------------------------------------------------------
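These C routines are compiled into pycocotools/_mask.pyx by the setup.py above and surface in Python as pycocotools.mask. A small round-trip sketch of the encode/decode/area/bbox entry points (toy mask values, not part of the repository):

import numpy as np
from pycocotools import mask as mask_util

m = np.zeros((240, 320), dtype=np.uint8)
m[50:100, 60:200] = 1                          # one rectangular blob
rle = mask_util.encode(np.asfortranarray(m))   # rleEncode: needs Fortran-ordered uint8
print(mask_util.area(rle))                     # rleArea   -> 7000 (50 * 140 pixels)
print(mask_util.toBbox(rle))                   # rleToBbox -> [60. 50. 140. 50.] as [x, y, w, h]
assert (mask_util.decode(rle) == m).all()      # rleDecode round-trips exactly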
/cocoapi/license.txt:
--------------------------------------------------------------------------------
Copyright (c) 2014, Piotr Dollar and Tsung-Yi Lin
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project.
--------------------------------------------------------------------------------
/config/label_map.pbtxt:
--------------------------------------------------------------------------------
item {
  id: 1
  name: "signature"
}
--------------------------------------------------------------------------------
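This is the detector's entire label map: one class, signature, with id 1 (id 0 is reserved for the background class in the TF Object Detection API). Note that the VoTT exports further down also tag paraphe (initials) regions, which this map does not register; how those are handled is left to dataset_utils/make_official_dataset.py (not shown here). A sketch of how such a file is typically loaded, assuming models/research is on PYTHONPATH:

# Build the category index used when decoding detector outputs.
from object_detection.utils import label_map_util

category_index = label_map_util.create_category_index_from_labelmap(
    'config/label_map.pbtxt', use_display_name=True)
print(category_index)  # expected: {1: {'id': 1, 'name': 'signature'}}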
/data/dataset/image_1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_1.jpg
--------------------------------------------------------------------------------
/data/dataset/image_10.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_10.jpg
--------------------------------------------------------------------------------
/data/dataset/image_11.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_11.jpg
--------------------------------------------------------------------------------
/data/dataset/image_12.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_12.jpg
--------------------------------------------------------------------------------
/data/dataset/image_13.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_13.jpg
--------------------------------------------------------------------------------
/data/dataset/image_14.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_14.jpg
--------------------------------------------------------------------------------
/data/dataset/image_15.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_15.jpg
--------------------------------------------------------------------------------
/data/dataset/image_16.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_16.jpg
--------------------------------------------------------------------------------
/data/dataset/image_17.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_17.jpg
--------------------------------------------------------------------------------
/data/dataset/image_18.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_18.jpg
--------------------------------------------------------------------------------
/data/dataset/image_19.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_19.jpg
--------------------------------------------------------------------------------
/data/dataset/image_2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_2.jpg
--------------------------------------------------------------------------------
/data/dataset/image_20.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_20.jpg
--------------------------------------------------------------------------------
/data/dataset/image_21.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_21.jpg
--------------------------------------------------------------------------------
/data/dataset/image_22.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_22.jpg
--------------------------------------------------------------------------------
/data/dataset/image_23.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_23.jpg
--------------------------------------------------------------------------------
/data/dataset/image_24.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_24.jpg
--------------------------------------------------------------------------------
/data/dataset/image_25.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_25.jpg
--------------------------------------------------------------------------------
/data/dataset/image_26.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_26.jpg
--------------------------------------------------------------------------------
/data/dataset/image_27.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_27.jpg
--------------------------------------------------------------------------------
/data/dataset/image_28.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_28.jpg
--------------------------------------------------------------------------------
/data/dataset/image_29.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_29.jpg
--------------------------------------------------------------------------------
/data/dataset/image_3.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_3.jpg
--------------------------------------------------------------------------------
/data/dataset/image_31.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_31.jpg
--------------------------------------------------------------------------------
/data/dataset/image_32.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_32.jpg
--------------------------------------------------------------------------------
/data/dataset/image_33.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_33.jpg
--------------------------------------------------------------------------------
/data/dataset/image_34.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_34.jpg
--------------------------------------------------------------------------------
/data/dataset/image_35.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_35.jpg
--------------------------------------------------------------------------------
/data/dataset/image_37.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_37.jpg
--------------------------------------------------------------------------------
/data/dataset/image_38.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_38.jpg
--------------------------------------------------------------------------------
/data/dataset/image_39.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_39.jpg
--------------------------------------------------------------------------------
/data/dataset/image_4.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_4.jpg
--------------------------------------------------------------------------------
/data/dataset/image_40.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_40.jpg
--------------------------------------------------------------------------------
/data/dataset/image_41.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_41.jpg
--------------------------------------------------------------------------------
/data/dataset/image_42.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_42.jpg
--------------------------------------------------------------------------------
/data/dataset/image_44.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_44.jpg
--------------------------------------------------------------------------------
/data/dataset/image_45.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_45.jpg
--------------------------------------------------------------------------------
/data/dataset/image_46.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_46.jpg
--------------------------------------------------------------------------------
/data/dataset/image_5.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_5.jpg
--------------------------------------------------------------------------------
/data/dataset/image_6.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_6.jpg
--------------------------------------------------------------------------------
/data/dataset/image_7.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_7.jpg
--------------------------------------------------------------------------------
/data/dataset/image_8.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_8.jpg
--------------------------------------------------------------------------------
/data/dataset/image_9.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/data/dataset/image_9.jpg
--------------------------------------------------------------------------------
/data/dataset/per_img_labels/02e00bc1b9b68ac7198bf5449e28ef26-asset.json:
--------------------------------------------------------------------------------
{
  "asset": {
    "format": "jpg",
    "id": "02e00bc1b9b68ac7198bf5449e28ef26",
    "name": "image_45.jpg",
    "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_45.jpg",
    "size": { "width": 1684, "height": 2359 },
    "state": 2,
    "type": 1
  },
  "regions": [
    {
      "id": "XHrIT9kDV",
      "type": "RECTANGLE",
      "tags": ["signature"],
      "boundingBox": { "height": 525.6608122941822, "width": 764.276923076923, "left": 838.5591346153846, "top": 939.9747530186607 },
      "points": [
        { "x": 838.5591346153846, "y": 939.9747530186607 },
        { "x": 1602.8360576923076, "y": 939.9747530186607 },
        { "x": 1602.8360576923076, "y": 1465.635565312843 },
        { "x": 838.5591346153846, "y": 1465.635565312843 }
      ]
    },
    {
      "id": "GlAY8vH2_",
      "type": "RECTANGLE",
      "tags": ["paraphe"],
      "boundingBox": { "height": 160.54665203073546, "width": 207.26153846153846, "left": 315.22375, "top": 813.0911086717892 },
      "points": [
        { "x": 315.22375, "y": 813.0911086717892 },
        { "x": 522.4852884615384, "y": 813.0911086717892 },
        { "x": 522.4852884615384, "y": 973.6377607025247 },
        { "x": 315.22375, "y": 973.6377607025247 }
      ]
    }
  ],
  "version": "2.1.0"
}
--------------------------------------------------------------------------------
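Every *-asset.json file in per_img_labels/ follows the same VoTT 2.1.0 schema seen above: asset metadata carrying the image's pixel dimensions, plus a list of RECTANGLE regions whose tags and absolute-pixel boundingBox (left/top/width/height) hold the ground truth; the points array just repeats the four box corners. A sketch converting one such file to the normalized [ymin, xmin, ymax, xmax] layout the TF Object Detection API expects (the file name below is only an example):

import json

def vott_boxes(asset_path):
    """Return each region's tags and its box as [ymin, xmin, ymax, xmax] fractions."""
    with open(asset_path) as f:
        doc = json.load(f)
    w = doc['asset']['size']['width']
    h = doc['asset']['size']['height']
    boxes = []
    for region in doc['regions']:
        bb = region['boundingBox']
        boxes.append({
            'tags': region['tags'],
            'box': [bb['top'] / h,                    # ymin
                    bb['left'] / w,                   # xmin
                    (bb['top'] + bb['height']) / h,   # ymax
                    (bb['left'] + bb['width']) / w],  # xmax
        })
    return boxes

print(vott_boxes('data/dataset/per_img_labels/'
                 '02e00bc1b9b68ac7198bf5449e28ef26-asset.json'))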
| "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/0d3920127c8fa96c428f6a897de97141-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "0d3920127c8fa96c428f6a897de97141", 5 | "name": "image_41.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_41.jpg", 7 | "size": { 8 | "width": 1665, 9 | "height": 2346 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "zVlgk8FZd", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 350.2261251372119, 23 | "width": 555.8578052550232, 24 | "left": 624.7770962132921, 25 | "top": 584.5686059275522 26 | }, 27 | "points": [ 28 | { 29 | "x": 624.7770962132921, 30 | "y": 584.5686059275522 31 | }, 32 | { 33 | "x": 1180.6349014683153, 34 | "y": 584.5686059275522 35 | }, 36 | { 37 | "x": 1180.6349014683153, 38 | "y": 934.794731064764 39 | }, 40 | { 41 | "x": 624.7770962132921, 42 | "y": 934.794731064764 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/12d258e3acea75a420a93a11d1a06eab-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "12d258e3acea75a420a93a11d1a06eab", 5 | "name": "image_16.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_16.jpg", 7 | "size": { 8 | "width": 1661, 9 | "height": 2343 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "DjKAuXaou", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 293.1964873765093, 23 | "width": 560.5232198142414, 24 | "left": 137.35872194272446, 25 | "top": 1797.7574094401757 26 | }, 27 | "points": [ 28 | { 29 | "x": 137.35872194272446, 30 | "y": 1797.7574094401757 31 | }, 32 | { 33 | "x": 697.881941756966, 34 | "y": 1797.7574094401757 35 | }, 36 | { 37 | "x": 697.881941756966, 38 | "y": 2090.953896816685 39 | }, 40 | { 41 | "x": 137.35872194272446, 42 | "y": 2090.953896816685 43 | } 44 | ] 45 | }, 46 | { 47 | "id": "QR_ly2n4L", 48 | "type": "RECTANGLE", 49 | "tags": [ 50 | "paraphe" 51 | ], 52 | "boundingBox": { 53 | "height": 105.44785949506037, 54 | "width": 84.8498452012384, 55 | "left": 240.20701915634675, 56 | "top": 2080.6663007683865 57 | }, 58 | "points": [ 59 | { 60 | "x": 240.20701915634675, 61 | "y": 2080.6663007683865 62 | }, 63 | { 64 | "x": 325.05686435758514, 65 | "y": 2080.6663007683865 66 | }, 67 | { 68 | "x": 325.05686435758514, 69 | "y": 2186.1141602634466 70 | }, 71 | { 72 | "x": 240.20701915634675, 73 | "y": 2186.1141602634466 74 | } 75 | ] 76 | } 77 | ], 78 | "version": "2.1.0" 79 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/1665e44f43ab635ebdfd5cc4d23706d8-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "1665e44f43ab635ebdfd5cc4d23706d8", 5 | "name": "image_20.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_20.jpg", 7 | "size": { 8 | "width": 1662, 9 | "height": 2344 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 
| "id": "ByjO_UGBN", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 283.02963776070254, 23 | "width": 110.62848297213623, 24 | "left": 1261.8963332043343, 25 | "top": 1934.893523600439 26 | }, 27 | "points": [ 28 | { 29 | "x": 1261.8963332043343, 30 | "y": 1934.893523600439 31 | }, 32 | { 33 | "x": 1372.5248161764705, 34 | "y": 1934.893523600439 35 | }, 36 | { 37 | "x": 1372.5248161764705, 38 | "y": 2217.923161361142 39 | }, 40 | { 41 | "x": 1261.8963332043343, 42 | "y": 2217.923161361142 43 | } 44 | ] 45 | }, 46 | { 47 | "id": "spQd5unZA", 48 | "type": "RECTANGLE", 49 | "tags": [ 50 | "paraphe" 51 | ], 52 | "boundingBox": { 53 | "height": 167.24478594950605, 54 | "width": 95.19195046439629, 55 | "left": 1395.6796149380805, 56 | "top": 2050.6783754116354 57 | }, 58 | "points": [ 59 | { 60 | "x": 1395.6796149380805, 61 | "y": 2050.6783754116354 62 | }, 63 | { 64 | "x": 1490.8715654024768, 65 | "y": 2050.6783754116354 66 | }, 67 | { 68 | "x": 1490.8715654024768, 69 | "y": 2217.923161361142 70 | }, 71 | { 72 | "x": 1395.6796149380805, 73 | "y": 2217.923161361142 74 | } 75 | ] 76 | }, 77 | { 78 | "id": "imAqm3_8T", 79 | "type": "RECTANGLE", 80 | "tags": [ 81 | "paraphe" 82 | ], 83 | "boundingBox": { 84 | "height": 92.6278814489572, 85 | "width": 128.6377708978328, 86 | "left": 1493.4443208204334, 87 | "top": 2127.8682766191 88 | }, 89 | "points": [ 90 | { 91 | "x": 1493.4443208204334, 92 | "y": 2127.8682766191 93 | }, 94 | { 95 | "x": 1622.0820917182662, 96 | "y": 2127.8682766191 97 | }, 98 | { 99 | "x": 1622.0820917182662, 100 | "y": 2220.496158068057 101 | }, 102 | { 103 | "x": 1493.4443208204334, 104 | "y": 2220.496158068057 105 | } 106 | ] 107 | } 108 | ], 109 | "version": "2.1.0" 110 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/1d9ced0f78abc8fa69c45e6eff59592f-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "1d9ced0f78abc8fa69c45e6eff59592f", 5 | "name": "image_46.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_46.jpg", 7 | "size": { 8 | "width": 1658, 9 | "height": 2342 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "vxNraCXTv", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 79.69484083424807, 23 | "width": 113.10387596899226, 24 | "left": 1408.5770348837211, 25 | "top": 1935.8133918770582 26 | }, 27 | "points": [ 28 | { 29 | "x": 1408.5770348837211, 30 | "y": 1935.8133918770582 31 | }, 32 | { 33 | "x": 1521.6809108527134, 34 | "y": 1935.8133918770582 35 | }, 36 | { 37 | "x": 1521.6809108527134, 38 | "y": 2015.5082327113062 39 | }, 40 | { 41 | "x": 1408.5770348837211, 42 | "y": 2015.5082327113062 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/1feb953f4987564e020c6aff15fec83a-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "1feb953f4987564e020c6aff15fec83a", 5 | "name": "image_15.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_15.jpg", 7 | "size": { 8 | "width": 1656, 9 | "height": 2341 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": 
"SC10BJ5QL", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 84.80021953896816, 23 | "width": 90, 24 | "left": 1467.5625, 25 | "top": 2233.072447859495 26 | }, 27 | "points": [ 28 | { 29 | "x": 1467.5625, 30 | "y": 2233.072447859495 31 | }, 32 | { 33 | "x": 1557.5625000000002, 34 | "y": 2233.072447859495 35 | }, 36 | { 37 | "x": 1557.5625000000002, 38 | "y": 2317.872667398463 39 | }, 40 | { 41 | "x": 1467.5625, 42 | "y": 2317.872667398463 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/22655bb2c4cba5393f437ca34fb94c77-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "22655bb2c4cba5393f437ca34fb94c77", 5 | "name": "image_28.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_28.jpg", 7 | "size": { 8 | "width": 1665, 9 | "height": 2346 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "nXPGIwVIH", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 301.2974753018661, 23 | "width": 756.5842349304482, 24 | "left": 135.82810085007728, 25 | "top": 450.6586169045006 26 | }, 27 | "points": [ 28 | { 29 | "x": 135.82810085007728, 30 | "y": 450.6586169045006 31 | }, 32 | { 33 | "x": 892.4123357805256, 34 | "y": 450.6586169045006 35 | }, 36 | { 37 | "x": 892.4123357805256, 38 | "y": 751.9560922063667 39 | }, 40 | { 41 | "x": 135.82810085007728, 42 | "y": 751.9560922063667 43 | } 44 | ] 45 | }, 46 | { 47 | "id": "YouUNXiMa", 48 | "type": "RECTANGLE", 49 | "tags": [ 50 | "paraphe" 51 | ], 52 | "boundingBox": { 53 | "height": 128.75960482985732, 54 | "width": 133.8176197836167, 55 | "left": 1365.9208365533232, 56 | "top": 1787.1833150384196 57 | }, 58 | "points": [ 59 | { 60 | "x": 1365.9208365533232, 61 | "y": 1787.1833150384196 62 | }, 63 | { 64 | "x": 1499.7384563369399, 65 | "y": 1787.1833150384196 66 | }, 67 | { 68 | "x": 1499.7384563369399, 69 | "y": 1915.9429198682767 70 | }, 71 | { 72 | "x": 1365.9208365533232, 73 | "y": 1915.9429198682767 74 | } 75 | ] 76 | } 77 | ], 78 | "version": "2.1.0" 79 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/22b7b013a3d853d6ed962e60fde8bc72-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "22b7b013a3d853d6ed962e60fde8bc72", 5 | "name": "image_21.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_21.jpg", 7 | "size": { 8 | "width": 1662, 9 | "height": 2344 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "X1RNHJR2a", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 79.7628979143798, 23 | "width": 115.77399380804954, 24 | "left": 1357.0882836687306, 25 | "top": 2264.2371020856203 26 | }, 27 | "points": [ 28 | { 29 | "x": 1357.0882836687306, 30 | "y": 2264.2371020856203 31 | }, 32 | { 33 | "x": 1472.8622774767803, 34 | "y": 2264.2371020856203 35 | }, 36 | { 37 | "x": 1472.8622774767803, 38 | "y": 2344 39 | }, 40 | { 41 | "x": 1357.0882836687306, 42 | "y": 2344 43 | } 44 | ] 45 | }, 46 | { 47 | "id": "Wt3zoHAcY", 48 | "type": "RECTANGLE", 49 | "tags": [ 50 | "paraphe" 51 | ], 52 | 
"boundingBox": { 53 | "height": 154.37980241492866, 54 | "width": 84.90092879256966, 55 | "left": 1488.2988099845202, 56 | "top": 2181.901207464325 57 | }, 58 | "points": [ 59 | { 60 | "x": 1488.2988099845202, 61 | "y": 2181.901207464325 62 | }, 63 | { 64 | "x": 1573.1997387770898, 65 | "y": 2181.901207464325 66 | }, 67 | { 68 | "x": 1573.1997387770898, 69 | "y": 2336.281009879254 70 | }, 71 | { 72 | "x": 1488.2988099845202, 73 | "y": 2336.281009879254 74 | } 75 | ] 76 | }, 77 | { 78 | "id": "yHbrkPEwH", 79 | "type": "RECTANGLE", 80 | "tags": [ 81 | "paraphe" 82 | ], 83 | "boundingBox": { 84 | "height": 236.71569703622393, 85 | "width": 102.91021671826626, 86 | "left": 1552.6176954334364, 87 | "top": 2094.419319429199 88 | }, 89 | "points": [ 90 | { 91 | "x": 1552.6176954334364, 92 | "y": 2094.419319429199 93 | }, 94 | { 95 | "x": 1655.5279121517028, 96 | "y": 2094.419319429199 97 | }, 98 | { 99 | "x": 1655.5279121517028, 100 | "y": 2331.1350164654227 101 | }, 102 | { 103 | "x": 1552.6176954334364, 104 | "y": 2331.1350164654227 105 | } 106 | ] 107 | } 108 | ], 109 | "version": "2.1.0" 110 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/2338d4d6665b7c787ab59e657ad5f8a2-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "2338d4d6665b7c787ab59e657ad5f8a2", 5 | "name": "image_9.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_9.jpg", 7 | "size": { 8 | "width": 1669, 9 | "height": 2349 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "s05jQaSs3", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 97.98243688254665, 23 | "width": 110.92272024729522, 24 | "left": 1305.639417503864, 25 | "top": 2232.968166849616 26 | }, 27 | "points": [ 28 | { 29 | "x": 1305.639417503864, 30 | "y": 2232.968166849616 31 | }, 32 | { 33 | "x": 1416.5621377511593, 34 | "y": 2232.968166849616 35 | }, 36 | { 37 | "x": 1416.5621377511593, 38 | "y": 2330.9506037321626 39 | }, 40 | { 41 | "x": 1305.639417503864, 42 | "y": 2330.9506037321626 43 | } 44 | ] 45 | }, 46 | { 47 | "id": "_OF0KQbwT", 48 | "type": "RECTANGLE", 49 | "tags": [ 50 | "paraphe" 51 | ], 52 | "boundingBox": { 53 | "height": 170.1800219538968, 54 | "width": 77.38794435857805, 55 | "left": 1434.6193247681608, 56 | "top": 2165.9275521405048 57 | }, 58 | "points": [ 59 | { 60 | "x": 1434.6193247681608, 61 | "y": 2165.9275521405048 62 | }, 63 | { 64 | "x": 1512.0072691267387, 65 | "y": 2165.9275521405048 66 | }, 67 | { 68 | "x": 1512.0072691267387, 69 | "y": 2336.1075740944016 70 | }, 71 | { 72 | "x": 1434.6193247681608, 73 | "y": 2336.1075740944016 74 | } 75 | ] 76 | }, 77 | { 78 | "id": "EmDqllkHJ", 79 | "type": "RECTANGLE", 80 | "tags": [ 81 | "paraphe" 82 | ], 83 | "boundingBox": { 84 | "height": 273.31942919868277, 85 | "width": 103.1839258114374, 86 | "left": 1486.2112876738795, 87 | "top": 2065.3666300768386 88 | }, 89 | "points": [ 90 | { 91 | "x": 1486.2112876738795, 92 | "y": 2065.3666300768386 93 | }, 94 | { 95 | "x": 1589.3952134853168, 96 | "y": 2065.3666300768386 97 | }, 98 | { 99 | "x": 1589.3952134853168, 100 | "y": 2338.6860592755215 101 | }, 102 | { 103 | "x": 1486.2112876738795, 104 | "y": 2338.6860592755215 105 | } 106 | ] 107 | } 108 | ], 109 | "version": "2.1.0" 110 | } -------------------------------------------------------------------------------- 
/data/dataset/per_img_labels/243feb2b55c4363c906e30fc95b7543a-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "243feb2b55c4363c906e30fc95b7543a", 5 | "name": "image_31.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_31.jpg", 7 | "size": { 8 | "width": 1660, 9 | "height": 2342 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "FdiABoQqG", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 59.1284302963776, 23 | "width": 97.6470588235294, 24 | "left": 1493.8956075851393, 25 | "top": 2252.0219538968167 26 | }, 27 | "points": [ 28 | { 29 | "x": 1493.8956075851393, 30 | "y": 2252.0219538968167 31 | }, 32 | { 33 | "x": 1591.5426664086688, 34 | "y": 2252.0219538968167 35 | }, 36 | { 37 | "x": 1591.5426664086688, 38 | "y": 2311.150384193194 39 | }, 40 | { 41 | "x": 1493.8956075851393, 42 | "y": 2311.150384193194 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/2741eff2d159c91fd479197db5bedf5a-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "2741eff2d159c91fd479197db5bedf5a", 5 | "name": "image_35.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_35.jpg", 7 | "size": { 8 | "width": 1662, 9 | "height": 2344 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "iWHXuaAV8", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 77.18990120746433, 23 | "width": 118.3467492260062, 24 | "left": 1336.5062403250774, 25 | "top": 2259.0911086717892 26 | }, 27 | "points": [ 28 | { 29 | "x": 1336.5062403250774, 30 | "y": 2259.0911086717892 31 | }, 32 | { 33 | "x": 1454.8529895510835, 34 | "y": 2259.0911086717892 35 | }, 36 | { 37 | "x": 1454.8529895510835, 38 | "y": 2336.281009879254 39 | }, 40 | { 41 | "x": 1336.5062403250774, 42 | "y": 2336.281009879254 43 | } 44 | ] 45 | }, 46 | { 47 | "id": "XSOBUblRk", 48 | "type": "RECTANGLE", 49 | "tags": [ 50 | "paraphe" 51 | ], 52 | "boundingBox": { 53 | "height": 174.96377607025246, 54 | "width": 82.328173374613, 55 | "left": 1465.1440112229102, 56 | "top": 2151.025246981339 57 | }, 58 | "points": [ 59 | { 60 | "x": 1465.1440112229102, 61 | "y": 2151.025246981339 62 | }, 63 | { 64 | "x": 1547.4721845975232, 65 | "y": 2151.025246981339 66 | }, 67 | { 68 | "x": 1547.4721845975232, 69 | "y": 2325.9890230515916 70 | }, 71 | { 72 | "x": 1465.1440112229102, 73 | "y": 2325.9890230515916 74 | } 75 | ] 76 | }, 77 | { 78 | "id": "qAp1XLHhi", 79 | "type": "RECTANGLE", 80 | "tags": [ 81 | "paraphe" 82 | ], 83 | "boundingBox": { 84 | "height": 228.9967069154775, 85 | "width": 108.05572755417957, 86 | "left": 1516.5991195820434, 87 | "top": 2089.2733260153677 88 | }, 89 | "points": [ 90 | { 91 | "x": 1516.5991195820434, 92 | "y": 2089.2733260153677 93 | }, 94 | { 95 | "x": 1624.6548471362228, 96 | "y": 2089.2733260153677 97 | }, 98 | { 99 | "x": 1624.6548471362228, 100 | "y": 2318.2700329308454 101 | }, 102 | { 103 | "x": 1516.5991195820434, 104 | "y": 2318.2700329308454 105 | } 106 | ] 107 | } 108 | ], 109 | "version": "2.1.0" 110 | } -------------------------------------------------------------------------------- 
/data/dataset/per_img_labels/2876353694512d97ce746aed6799cc46-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "2876353694512d97ce746aed6799cc46", 5 | "name": "image_5.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_5.jpg", 7 | "size": { 8 | "width": 1674, 9 | "height": 2353 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "GLQcPtIyO", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 193.71569703622393, 23 | "width": 209.25, 24 | "left": 272.703125, 25 | "top": 981.4928649835346 26 | }, 27 | "points": [ 28 | { 29 | "x": 272.703125, 30 | "y": 981.4928649835346 31 | }, 32 | { 33 | "x": 481.953125, 34 | "y": 981.4928649835346 35 | }, 36 | { 37 | "x": 481.953125, 38 | "y": 1175.2085620197583 39 | }, 40 | { 41 | "x": 272.703125, 42 | "y": 1175.2085620197583 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/289e0c5761f92ca0a407ffed5146f1d3-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "289e0c5761f92ca0a407ffed5146f1d3", 5 | "name": "image_17.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_17.jpg", 7 | "size": { 8 | "width": 1657, 9 | "height": 2340 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "KxJ-_jcoA", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 285.1152579582876, 23 | "width": 783.5426356589147, 24 | "left": 113.19622093023256, 25 | "top": 1073.6772777167948 26 | }, 27 | "points": [ 28 | { 29 | "x": 113.19622093023256, 30 | "y": 1073.6772777167948 31 | }, 32 | { 33 | "x": 896.7388565891473, 34 | "y": 1073.6772777167948 35 | }, 36 | { 37 | "x": 896.7388565891473, 38 | "y": 1358.7925356750825 39 | }, 40 | { 41 | "x": 113.19622093023256, 42 | "y": 1358.7925356750825 43 | } 44 | ] 45 | }, 46 | { 47 | "id": "ezxYdoQWJ", 48 | "type": "RECTANGLE", 49 | "tags": [ 50 | "paraphe" 51 | ], 52 | "boundingBox": { 53 | "height": 120.72447859495061, 54 | "width": 159.27751937984496, 55 | "left": 1451.6411821705426, 56 | "top": 1947.0032930845227 57 | }, 58 | "points": [ 59 | { 60 | "x": 1451.6411821705426, 61 | "y": 1947.0032930845227 62 | }, 63 | { 64 | "x": 1610.9187015503876, 65 | "y": 1947.0032930845227 66 | }, 67 | { 68 | "x": 1610.9187015503876, 69 | "y": 2067.7277716794733 70 | }, 71 | { 72 | "x": 1451.6411821705426, 73 | "y": 2067.7277716794733 74 | } 75 | ] 76 | } 77 | ], 78 | "version": "2.1.0" 79 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/2d30d86f62170e9e489ea540e8b4a39f-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "2d30d86f62170e9e489ea540e8b4a39f", 5 | "name": "image_6.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_6.jpg", 7 | "size": { 8 | "width": 1662, 9 | "height": 2344 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "1cQIBWUOo", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 74.61690450054886, 23 | 
"width": 126.06501547987615, 24 | "left": 1279.905621130031, 25 | "top": 2225.642151481888 26 | }, 27 | "points": [ 28 | { 29 | "x": 1279.905621130031, 30 | "y": 2225.642151481888 31 | }, 32 | { 33 | "x": 1405.9706366099072, 34 | "y": 2225.642151481888 35 | }, 36 | { 37 | "x": 1405.9706366099072, 38 | "y": 2300.259055982437 39 | }, 40 | { 41 | "x": 1279.905621130031, 42 | "y": 2300.259055982437 43 | } 44 | ] 45 | }, 46 | { 47 | "id": "XEzPo1AUb", 48 | "type": "RECTANGLE", 49 | "tags": [ 50 | "paraphe" 51 | ], 52 | "boundingBox": { 53 | "height": 247.00768386388586, 54 | "width": 100.3374613003096, 55 | "left": 1511.45360874613, 56 | "top": 2073.835345773875 57 | }, 58 | "points": [ 59 | { 60 | "x": 1511.45360874613, 61 | "y": 2073.835345773875 62 | }, 63 | { 64 | "x": 1611.7910700464397, 65 | "y": 2073.835345773875 66 | }, 67 | { 68 | "x": 1611.7910700464397, 69 | "y": 2320.8430296377605 70 | }, 71 | { 72 | "x": 1511.45360874613, 73 | "y": 2320.8430296377605 74 | } 75 | ] 76 | }, 77 | { 78 | "id": "brNIguMIU", 79 | "type": "RECTANGLE", 80 | "tags": [ 81 | "paraphe" 82 | ], 83 | "boundingBox": { 84 | "height": 169.8177826564215, 85 | "width": 113.20123839009288, 86 | "left": 1418.8344136996905, 87 | "top": 2145.8792535675084 88 | }, 89 | "points": [ 90 | { 91 | "x": 1418.8344136996905, 92 | "y": 2145.8792535675084 93 | }, 94 | { 95 | "x": 1532.0356520897833, 96 | "y": 2145.8792535675084 97 | }, 98 | { 99 | "x": 1532.0356520897833, 100 | "y": 2315.69703622393 101 | }, 102 | { 103 | "x": 1418.8344136996905, 104 | "y": 2315.69703622393 105 | } 106 | ] 107 | } 108 | ], 109 | "version": "2.1.0" 110 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/2dbe1038abcbff47e8acfc775ce6db21-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "2dbe1038abcbff47e8acfc775ce6db21", 5 | "name": "image_4.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_4.jpg", 7 | "size": { 8 | "width": 1660, 9 | "height": 2343 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "AQ9swiWvQ", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 105.44785949506037, 23 | "width": 164.71317829457365, 24 | "left": 1313.1211240310079, 25 | "top": 2152.6794731064765 26 | }, 27 | "points": [ 28 | { 29 | "x": 1313.1211240310079, 30 | "y": 2152.6794731064765 31 | }, 32 | { 33 | "x": 1477.8343023255816, 34 | "y": 2152.6794731064765 35 | }, 36 | { 37 | "x": 1477.8343023255816, 38 | "y": 2258.1273326015366 39 | }, 40 | { 41 | "x": 1313.1211240310079, 42 | "y": 2258.1273326015366 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/3222b42a5f6e8a6458326fffba292188-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "3222b42a5f6e8a6458326fffba292188", 5 | "name": "image_40.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_40.jpg", 7 | "size": { 8 | "width": 1663, 9 | "height": 2344 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "NPJsyOFY6", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 334.4895718990121, 23 | "width": 
728.5278637770898, 24 | "left": 138.16769059597524, 25 | "top": 1855.1306256860594 26 | }, 27 | "points": [ 28 | { 29 | "x": 138.16769059597524, 30 | "y": 1855.1306256860594 31 | }, 32 | { 33 | "x": 866.695554373065, 34 | "y": 1855.1306256860594 35 | }, 36 | { 37 | "x": 866.695554373065, 38 | "y": 2189.6201975850713 39 | }, 40 | { 41 | "x": 138.16769059597524, 42 | "y": 2189.6201975850713 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/35f22bbd45d5fffdcc0f7cc746493e4e-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "35f22bbd45d5fffdcc0f7cc746493e4e", 5 | "name": "image_3.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_3.jpg", 7 | "size": { 8 | "width": 1689, 9 | "height": 2362 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "dNnbchh9K", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 220.384193194292, 23 | "width": 241.28571428571428, 24 | "left": 228.88090437788017, 25 | "top": 1267.8572996706916 26 | }, 27 | "points": [ 28 | { 29 | "x": 228.88090437788017, 30 | "y": 1267.8572996706916 31 | }, 32 | { 33 | "x": 470.1666186635945, 34 | "y": 1267.8572996706916 35 | }, 36 | { 37 | "x": 470.1666186635945, 38 | "y": 1488.2414928649837 39 | }, 40 | { 41 | "x": 228.88090437788017, 42 | "y": 1488.2414928649837 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/369a007eeff07a61d2b046ff643e2fa6-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "369a007eeff07a61d2b046ff643e2fa6", 5 | "name": "image_13.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_13.jpg", 7 | "size": { 8 | "width": 1659, 9 | "height": 2342 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "ynlpefVxR", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 128.54006586169044, 23 | "width": 185.1906976744186, 24 | "left": 1196.4653343023256, 25 | "top": 2144.048298572997 26 | }, 27 | "points": [ 28 | { 29 | "x": 1196.4653343023256, 30 | "y": 2144.048298572997 31 | }, 32 | { 33 | "x": 1381.656031976744, 34 | "y": 2144.048298572997 35 | }, 36 | { 37 | "x": 1381.656031976744, 38 | "y": 2272.5883644346873 39 | }, 40 | { 41 | "x": 1196.4653343023256, 42 | "y": 2272.5883644346873 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/39469fc1e79e0a3e8235ea772be6dd72-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "39469fc1e79e0a3e8235ea772be6dd72", 5 | "name": "image_12.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_12.jpg", 7 | "size": { 8 | "width": 1655, 9 | "height": 2339 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "7yJzX-KS9", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 249.0482985729967, 23 | 
"width": 351.52713178294573, 24 | "left": 635.859980620155, 25 | "top": 1214.4313940724478 26 | }, 27 | "points": [ 28 | { 29 | "x": 635.859980620155, 30 | "y": 1214.4313940724478 31 | }, 32 | { 33 | "x": 987.3871124031008, 34 | "y": 1214.4313940724478 35 | }, 36 | { 37 | "x": 987.3871124031008, 38 | "y": 1463.4796926454446 39 | }, 40 | { 41 | "x": 635.859980620155, 42 | "y": 1463.4796926454446 43 | } 44 | ] 45 | }, 46 | { 47 | "id": "3oIlpFURO", 48 | "type": "RECTANGLE", 49 | "tags": [ 50 | "signature" 51 | ], 52 | "boundingBox": { 53 | "height": 310.6684961580681, 54 | "width": 418.2403100775194, 55 | "left": 987.3871124031008, 56 | "top": 1211.8638858397367 57 | }, 58 | "points": [ 59 | { 60 | "x": 987.3871124031008, 61 | "y": 1211.8638858397367 62 | }, 63 | { 64 | "x": 1405.6274224806202, 65 | "y": 1211.8638858397367 66 | }, 67 | { 68 | "x": 1405.6274224806202, 69 | "y": 1522.5323819978046 70 | }, 71 | { 72 | "x": 987.3871124031008, 73 | "y": 1522.5323819978046 74 | } 75 | ] 76 | } 77 | ], 78 | "version": "2.1.0" 79 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/3aedd678daf035361c65e15d73d08793-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "3aedd678daf035361c65e15d73d08793", 5 | "name": "image_19.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_19.jpg", 7 | "size": { 8 | "width": 1655, 9 | "height": 2339 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "HOjn7bk1v", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 359.45115257958287, 23 | "width": 618.3798449612403, 24 | "left": 692.3095930232558, 25 | "top": 1453.2096597145994 26 | }, 27 | "points": [ 28 | { 29 | "x": 692.3095930232558, 30 | "y": 1453.2096597145994 31 | }, 32 | { 33 | "x": 1310.6894379844962, 34 | "y": 1453.2096597145994 35 | }, 36 | { 37 | "x": 1310.6894379844962, 38 | "y": 1812.6608122941823 39 | }, 40 | { 41 | "x": 692.3095930232558, 42 | "y": 1812.6608122941823 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/3e3cc0461e9c11e75602394fc7fbca95-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "3e3cc0461e9c11e75602394fc7fbca95", 5 | "name": "image_26.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_26.jpg", 7 | "size": { 8 | "width": 1663, 9 | "height": 2344 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "SGW-BgyC1", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 82.33589462129528, 23 | "width": 126.14086687306501, 24 | "left": 1185.9091766640868, 25 | "top": 2228.2151481888036 26 | }, 27 | "points": [ 28 | { 29 | "x": 1185.9091766640868, 30 | "y": 2228.2151481888036 31 | }, 32 | { 33 | "x": 1312.0500435371516, 34 | "y": 2228.2151481888036 35 | }, 36 | { 37 | "x": 1312.0500435371516, 38 | "y": 2310.551042810099 39 | }, 40 | { 41 | "x": 1185.9091766640868, 42 | "y": 2310.551042810099 43 | } 44 | ] 45 | }, 46 | { 47 | "id": "mUx2bgWLx", 48 | "type": "RECTANGLE", 49 | "tags": [ 50 | "paraphe" 51 | ], 52 | "boundingBox": { 53 | "height": 97.77387486278815, 54 | "width": 
118.41795665634675, 55 | "left": 1512.8457091718267, 56 | "top": 2212.7771679473108 57 | }, 58 | "points": [ 59 | { 60 | "x": 1512.8457091718267, 61 | "y": 2212.7771679473108 62 | }, 63 | { 64 | "x": 1631.2636658281733, 65 | "y": 2212.7771679473108 66 | }, 67 | { 68 | "x": 1631.2636658281733, 69 | "y": 2310.551042810099 70 | }, 71 | { 72 | "x": 1512.8457091718267, 73 | "y": 2310.551042810099 74 | } 75 | ] 76 | } 77 | ], 78 | "version": "2.1.0" 79 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/4e74618cdcf958b0cf108318c54a405e-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "4e74618cdcf958b0cf108318c54a405e", 5 | "name": "image_42.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_42.jpg", 7 | "size": { 8 | "width": 1674, 9 | "height": 2352 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "g1eHG8Hd6", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 250.43249176728867, 23 | "width": 555.4166666666667, 24 | "left": 1022.2330729166667, 25 | "top": 2034.441273326015 26 | }, 27 | "points": [ 28 | { 29 | "x": 1022.2330729166667, 30 | "y": 2034.441273326015 31 | }, 32 | { 33 | "x": 1577.6497395833335, 34 | "y": 2034.441273326015 35 | }, 36 | { 37 | "x": 1577.6497395833335, 38 | "y": 2284.873765093304 39 | }, 40 | { 41 | "x": 1022.2330729166667, 42 | "y": 2284.873765093304 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/5f842697452d03175d79a4da15680256-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "5f842697452d03175d79a4da15680256", 5 | "name": "image_25.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_25.jpg", 7 | "size": { 8 | "width": 1661, 9 | "height": 2345 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "z1N_DUewj", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 236.8166849615807, 23 | "width": 499.58759689922476, 24 | "left": 986.6613614341085, 25 | "top": 1390.0109769484084 26 | }, 27 | "points": [ 28 | { 29 | "x": 986.6613614341085, 30 | "y": 1390.0109769484084 31 | }, 32 | { 33 | "x": 1486.2489583333333, 34 | "y": 1390.0109769484084 35 | }, 36 | { 37 | "x": 1486.2489583333333, 38 | "y": 1626.827661909989 39 | }, 40 | { 41 | "x": 986.6613614341085, 42 | "y": 1626.827661909989 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/62db0598df57738f9884f2d587ed7631-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "62db0598df57738f9884f2d587ed7631", 5 | "name": "image_39.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_39.jpg", 7 | "size": { 8 | "width": 1681, 9 | "height": 2357 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "LHNC7AFAO", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 77.61800219538969, 23 | 
"width": 80.17076923076922, 24 | "left": 1340.5974999999999, 25 | "top": 2142.2568605927554 26 | }, 27 | "points": [ 28 | { 29 | "x": 1340.5974999999999, 30 | "y": 2142.2568605927554 31 | }, 32 | { 33 | "x": 1420.7682692307692, 34 | "y": 2142.2568605927554 35 | }, 36 | { 37 | "x": 1420.7682692307692, 38 | "y": 2219.874862788145 39 | }, 40 | { 41 | "x": 1340.5974999999999, 42 | "y": 2219.874862788145 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/74b6542be280e332ef6986e2cfd0c2d1-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "74b6542be280e332ef6986e2cfd0c2d1", 5 | "name": "image_24.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_24.jpg", 7 | "size": { 8 | "width": 1659, 9 | "height": 2344 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "-mSnWjCMS", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 100.34687156970362, 23 | "width": 105.45581395348837, 24 | "left": 1434.9465843023256, 25 | "top": 2202.4851811196486 26 | }, 27 | "points": [ 28 | { 29 | "x": 1434.9465843023256, 30 | "y": 2202.4851811196486 31 | }, 32 | { 33 | "x": 1540.402398255814, 34 | "y": 2202.4851811196486 35 | }, 36 | { 37 | "x": 1540.402398255814, 38 | "y": 2302.8320526893526 39 | }, 40 | { 41 | "x": 1434.9465843023256, 42 | "y": 2302.8320526893526 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/7c3047752836e4fd16c02024d2d9b220-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "7c3047752836e4fd16c02024d2d9b220", 5 | "name": "image_33.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_33.jpg", 7 | "size": { 8 | "width": 1656, 9 | "height": 2341 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "XiGybgVWj", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 249.26125137211852, 23 | "width": 627.4285714285714, 24 | "left": 96.99107142857143, 25 | "top": 197.86717892425904 26 | }, 27 | "points": [ 28 | { 29 | "x": 96.99107142857143, 30 | "y": 197.86717892425904 31 | }, 32 | { 33 | "x": 724.4196428571429, 34 | "y": 197.86717892425904 35 | }, 36 | { 37 | "x": 724.4196428571429, 38 | "y": 447.1284302963776 39 | }, 40 | { 41 | "x": 96.99107142857143, 42 | "y": 447.1284302963776 43 | } 44 | ] 45 | }, 46 | { 47 | "id": "0TCfe1jer", 48 | "type": "RECTANGLE", 49 | "tags": [ 50 | "paraphe" 51 | ], 52 | "boundingBox": { 53 | "height": 100.21844127332601, 54 | "width": 108.00000000000001, 55 | "left": 1457.2767857142858, 56 | "top": 2220.22392974753 57 | }, 58 | "points": [ 59 | { 60 | "x": 1457.2767857142858, 61 | "y": 2220.22392974753 62 | }, 63 | { 64 | "x": 1565.2767857142858, 65 | "y": 2220.22392974753 66 | }, 67 | { 68 | "x": 1565.2767857142858, 69 | "y": 2320.442371020856 70 | }, 71 | { 72 | "x": 1457.2767857142858, 73 | "y": 2320.442371020856 74 | } 75 | ] 76 | } 77 | ], 78 | "version": "2.1.0" 79 | } -------------------------------------------------------------------------------- 
/data/dataset/per_img_labels/86af257b88349482e224a29a40c0d4c1-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "86af257b88349482e224a29a40c0d4c1", 5 | "name": "image_18.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_18.jpg", 7 | "size": { 8 | "width": 1671, 9 | "height": 2350 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "NqaVNQy4G", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 87.70581778265642, 23 | "width": 105.72685185185185, 24 | "left": 1499.234157986111, 25 | "top": 2223.6004390779362 26 | }, 27 | "points": [ 28 | { 29 | "x": 1499.234157986111, 30 | "y": 2223.6004390779362 31 | }, 32 | { 33 | "x": 1604.961009837963, 34 | "y": 2223.6004390779362 35 | }, 36 | { 37 | "x": 1604.961009837963, 38 | "y": 2311.306256860593 39 | }, 40 | { 41 | "x": 1499.234157986111, 42 | "y": 2311.306256860593 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/9563f7136cf9e3cfd6b33bf886b1a3be-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "9563f7136cf9e3cfd6b33bf886b1a3be", 5 | "name": "image_10.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_10.jpg", 7 | "size": { 8 | "width": 1657, 9 | "height": 2341 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "8Q1zXOqqN", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 77.09110867178924, 23 | "width": 136.15658914728684, 24 | "left": 1253.4675145348838, 25 | "top": 2114.866081229418 26 | }, 27 | "points": [ 28 | { 29 | "x": 1253.4675145348838, 30 | "y": 2114.866081229418 31 | }, 32 | { 33 | "x": 1389.6241036821705, 34 | "y": 2114.866081229418 35 | }, 36 | { 37 | "x": 1389.6241036821705, 38 | "y": 2191.957189901207 39 | }, 40 | { 41 | "x": 1253.4675145348838, 42 | "y": 2191.957189901207 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/99789719a193ba0ca5965bd3d9376b12-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "99789719a193ba0ca5965bd3d9376b12", 5 | "name": "image_8.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_8.jpg", 7 | "size": { 8 | "width": 1695, 9 | "height": 2368 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "LcgM92Bk2", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 215.74533479692647, 23 | "width": 231.3726993865031, 24 | "left": 261.39103240030676, 25 | "top": 1299.6706915477498 26 | }, 27 | "points": [ 28 | { 29 | "x": 261.39103240030676, 30 | "y": 1299.6706915477498 31 | }, 32 | { 33 | "x": 492.76373178680984, 34 | "y": 1299.6706915477498 35 | }, 36 | { 37 | "x": 492.76373178680984, 38 | "y": 1515.4160263446763 39 | }, 40 | { 41 | "x": 261.39103240030676, 42 | "y": 1515.4160263446763 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } 
-------------------------------------------------------------------------------- /data/dataset/per_img_labels/a0956876b476836630a9310ff51e88c4-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "a0956876b476836630a9310ff51e88c4", 5 | "name": "image_7.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_7.jpg", 7 | "size": { 8 | "width": 1692, 9 | "height": 2365 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "HdNTQxcAm", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 197.29967069154776, 23 | "width": 251.7239263803681, 24 | "left": 265.7130943251534, 25 | "top": 1399.2700329308452 26 | }, 27 | "points": [ 28 | { 29 | "x": 265.7130943251534, 30 | "y": 1399.2700329308452 31 | }, 32 | { 33 | "x": 517.4370207055215, 34 | "y": 1399.2700329308452 35 | }, 36 | { 37 | "x": 517.4370207055215, 38 | "y": 1596.569703622393 39 | }, 40 | { 41 | "x": 265.7130943251534, 42 | "y": 1596.569703622393 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/c9428a2d6e8f5305a8558118d2b5f5d4-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "c9428a2d6e8f5305a8558118d2b5f5d4", 5 | "name": "image_27.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_27.jpg", 7 | "size": { 8 | "width": 1665, 9 | "height": 2346 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "yRjt7_zTf", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 126.18441273326016, 23 | "width": 123.5239567233385, 24 | "left": 1175.4880699381763, 25 | "top": 2132.259055982437 26 | }, 27 | "points": [ 28 | { 29 | "x": 1175.4880699381763, 30 | "y": 2132.259055982437 31 | }, 32 | { 33 | "x": 1299.0120266615147, 34 | "y": 2132.259055982437 35 | }, 36 | { 37 | "x": 1299.0120266615147, 38 | "y": 2258.4434687156972 39 | }, 40 | { 41 | "x": 1175.4880699381763, 42 | "y": 2258.4434687156972 43 | } 44 | ] 45 | }, 46 | { 47 | "id": "Izd1ma4HI", 48 | "type": "RECTANGLE", 49 | "tags": [ 50 | "paraphe" 51 | ], 52 | "boundingBox": { 53 | "height": 159.66190998902306, 54 | "width": 131.24420401854715, 55 | "left": 1332.466431607419, 56 | "top": 2111.6575192096598 57 | }, 58 | "points": [ 59 | { 60 | "x": 1332.466431607419, 61 | "y": 2111.6575192096598 62 | }, 63 | { 64 | "x": 1463.710635625966, 65 | "y": 2111.6575192096598 66 | }, 67 | { 68 | "x": 1463.710635625966, 69 | "y": 2271.319429198683 70 | }, 71 | { 72 | "x": 1332.466431607419, 73 | "y": 2271.319429198683 74 | } 75 | ] 76 | } 77 | ], 78 | "version": "2.1.0" 79 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/e3218a671baf8454c595b7947a3b3dbe-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "e3218a671baf8454c595b7947a3b3dbe", 5 | "name": "image_11.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_11.jpg", 7 | "size": { 8 | "width": 1682, 9 | "height": 2358 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "NGKt5hmcO", 17 | 
"type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 481.43578485181115, 23 | "width": 753.0184615384616, 24 | "left": 541.9193750000001, 25 | "top": 1105.231613611416 26 | }, 27 | "points": [ 28 | { 29 | "x": 541.9193750000001, 30 | "y": 1105.231613611416 31 | }, 32 | { 33 | "x": 1294.9378365384616, 34 | "y": 1105.231613611416 35 | }, 36 | { 37 | "x": 1294.9378365384616, 38 | "y": 1586.667398463227 39 | }, 40 | { 41 | "x": 541.9193750000001, 42 | "y": 1586.667398463227 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/e5254f780f7afb1b2dbd86b37adcf8da-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "e5254f780f7afb1b2dbd86b37adcf8da", 5 | "name": "image_34.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_34.jpg", 7 | "size": { 8 | "width": 1655, 9 | "height": 2339 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "hZtuvqg77", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 164.3205268935236, 23 | "width": 269.4186046511628, 24 | "left": 1138.7747093023256, 25 | "top": 1635.5027442371022 26 | }, 27 | "points": [ 28 | { 29 | "x": 1138.7747093023256, 30 | "y": 1635.5027442371022 31 | }, 32 | { 33 | "x": 1408.1933139534885, 34 | "y": 1635.5027442371022 35 | }, 36 | { 37 | "x": 1408.1933139534885, 38 | "y": 1799.8232711306257 39 | }, 40 | { 41 | "x": 1138.7747093023256, 42 | "y": 1799.8232711306257 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/f0b47a56307f8dc2ef8d7fd152cad921-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "f0b47a56307f8dc2ef8d7fd152cad921", 5 | "name": "image_22.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_22.jpg", 7 | "size": { 8 | "width": 1656, 9 | "height": 2340 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "_vN2M5grg", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 372.4478594950604, 23 | "width": 543.9366279069767, 24 | "left": 231.06976744186045, 25 | "top": 349.33040614709114 26 | }, 27 | "points": [ 28 | { 29 | "x": 231.06976744186045, 30 | "y": 349.33040614709114 31 | }, 32 | { 33 | "x": 775.0063953488371, 34 | "y": 349.33040614709114 35 | }, 36 | { 37 | "x": 775.0063953488371, 38 | "y": 721.7782656421515 39 | }, 40 | { 41 | "x": 231.06976744186045, 42 | "y": 721.7782656421515 43 | } 44 | ] 45 | }, 46 | { 47 | "id": "pY_UYKWMf", 48 | "type": "RECTANGLE", 49 | "tags": [ 50 | "signature" 51 | ], 52 | "boundingBox": { 53 | "height": 380.15367727771684, 54 | "width": 457.00465116279065, 55 | "left": 934.1877906976744, 56 | "top": 385.2908891328211 57 | }, 58 | "points": [ 59 | { 60 | "x": 934.1877906976744, 61 | "y": 385.2908891328211 62 | }, 63 | { 64 | "x": 1391.1924418604651, 65 | "y": 385.2908891328211 66 | }, 67 | { 68 | "x": 1391.1924418604651, 69 | "y": 765.444566410538 70 | }, 71 | { 72 | "x": 934.1877906976744, 73 | "y": 765.444566410538 74 | } 75 | ] 76 | } 77 | ], 78 | "version": "2.1.0" 79 | } 
-------------------------------------------------------------------------------- /data/dataset/per_img_labels/faecf8232185bead71569293253b7fa6-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "faecf8232185bead71569293253b7fa6", 5 | "name": "image_29.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_29.jpg", 7 | "size": { 8 | "width": 1660, 9 | "height": 2342 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "QsEeAf0ic", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "signature" 20 | ], 21 | "boundingBox": { 22 | "height": 228.801317233809, 23 | "width": 488.235294117647, 24 | "left": 165.3816756965944, 25 | "top": 1182.5686059275522 26 | }, 27 | "points": [ 28 | { 29 | "x": 165.3816756965944, 30 | "y": 1182.5686059275522 31 | }, 32 | { 33 | "x": 653.6169698142414, 34 | "y": 1182.5686059275522 35 | }, 36 | { 37 | "x": 653.6169698142414, 38 | "y": 1411.3699231613612 39 | }, 40 | { 41 | "x": 165.3816756965944, 42 | "y": 1411.3699231613612 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /data/dataset/per_img_labels/ff2d61792d2d08620df8593cebfc2212-asset.json: -------------------------------------------------------------------------------- 1 | { 2 | "asset": { 3 | "format": "jpg", 4 | "id": "ff2d61792d2d08620df8593cebfc2212", 5 | "name": "image_37.jpg", 6 | "path": "file:/home/ouardinik/signature_detection_practical_guide/data/dataset/image_37.jpg", 7 | "size": { 8 | "width": 1660, 9 | "height": 2343 10 | }, 11 | "state": 2, 12 | "type": 1 13 | }, 14 | "regions": [ 15 | { 16 | "id": "d-JGHw1LK", 17 | "type": "RECTANGLE", 18 | "tags": [ 19 | "paraphe" 20 | ], 21 | "boundingBox": { 22 | "height": 69.44127332601536, 23 | "width": 123.53488372093024, 24 | "left": 605.3691860465117, 25 | "top": 1975.2184412733259 26 | }, 27 | "points": [ 28 | { 29 | "x": 605.3691860465117, 30 | "y": 1975.2184412733259 31 | }, 32 | { 33 | "x": 728.9040697674419, 34 | "y": 1975.2184412733259 35 | }, 36 | { 37 | "x": 728.9040697674419, 38 | "y": 2044.6597145993414 39 | }, 40 | { 41 | "x": 605.3691860465117, 42 | "y": 2044.6597145993414 43 | } 44 | ] 45 | } 46 | ], 47 | "version": "2.1.0" 48 | } -------------------------------------------------------------------------------- /image_blog.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/image_blog.jpg -------------------------------------------------------------------------------- /inference/instructions.txt: -------------------------------------------------------------------------------- 1 | Handwriting detector: 2 | 3 | For quick inference: 4 | 5 | 1. Use the argument parser to set the config file path, the model path (frozen graph) and the directory of images 6 | to run the inference on (see run_detector_inference.sh). 7 | 2. If the images are already loaded in memory, you can also pass a list of PIL.Image objects to the detector. 8 | In that case, directly change the inference_samples variable to pass the list of images as 9 | PIL.JpegImagePlugin.JpegImageFile objects. 10 | 3. The main function of the script outputs a dictionary (output_dict) with the bounding box coordinates 11 | and a matplotlib visualization of each image with the plotted bounding boxes.
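The HandwritingDetector wrapper itself is not reproduced in this dump, but a frozen TF1 detection graph such as the one referenced in the script below can be queried directly through the standard tensor names that the Object Detection API's export script writes into frozen_inference_graph.pb. The following is a minimal sketch of that generic pattern for step 2 above (images already in memory as PIL objects); it is an illustration under those assumptions, not the repository's actual implementation.

import numpy as np
import tensorflow as tf

def run_frozen_detector(pb_path, pil_images):
    # Load the frozen graph once.
    graph = tf.Graph()
    with graph.as_default():
        graph_def = tf.GraphDef()
        with tf.gfile.GFile(pb_path, 'rb') as f:
            graph_def.ParseFromString(f.read())
        tf.import_graph_def(graph_def, name='')
    outputs = ['detection_boxes', 'detection_scores', 'detection_classes', 'num_detections']
    results = []
    with tf.Session(graph=graph) as sess:
        image_tensor = graph.get_tensor_by_name('image_tensor:0')
        fetches = {name: graph.get_tensor_by_name(name + ':0') for name in outputs}
        for img in pil_images:
            # The exported graph expects a batched uint8 tensor of shape [1, H, W, 3].
            batch = np.expand_dims(np.array(img.convert('RGB')), axis=0)
            results.append(sess.run(fetches, feed_dict={image_tensor: batch}))
    return results

Detection boxes come back as (ymin, xmin, ymax, xmax) fractions of the image size, so they must be rescaled by the page width and height before being drawn.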
-------------------------------------------------------------------------------- /inference/run_detector_inference.sh: -------------------------------------------------------------------------------- 1 | python3 HandwritingDetector.py \ 2 | --config_path utils/signature_detect_config.json \ 3 | --model_path frozen_inference_graph/frozen_inference_graph.pb \ 4 | --img_dir ../data/dataset 5 | -------------------------------------------------------------------------------- /inference/utils/signature_detect_config.json: -------------------------------------------------------------------------------- 1 | {"module": "HandwritingDetector", 2 | "config": { 3 | "NeuralNetwork": "faster_rcnn_inceptionv2", 4 | "resizing": "No", 5 | "ImageSize": [50, 30], 6 | "MaxIterVisualization" : 10 7 | }, 8 | "LabelMap": { 9 | "1": 10 | { 11 | "id": 1, 12 | "name": "signature" 13 | } 14 | 15 | } 16 | 17 | } -------------------------------------------------------------------------------- /models/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | # Flask stuff: 57 | instance/ 58 | .webassets-cache 59 | 60 | # Scrapy stuff: 61 | .scrapy 62 | 63 | # Sphinx documentation 64 | docs/_build/ 65 | 66 | # PyBuilder 67 | target/ 68 | 69 | # IPython Notebook 70 | .ipynb_checkpoints 71 | 72 | # pyenv 73 | .python-version 74 | 75 | # celery beat schedule file 76 | celerybeat-schedule 77 | 78 | # dotenv 79 | .env 80 | 81 | # virtualenv 82 | venv/ 83 | ENV/ 84 | 85 | # Spyder project settings 86 | .spyderproject 87 | 88 | # Rope project settings 89 | .ropeproject 90 | 91 | # PyCharm 92 | .idea/ 93 | 94 | # For mac 95 | .DS_Store 96 | 97 | # Training data for word embedding tutorial 98 | tutorials/embedding/text8 99 | tutorials/embedding/questions-words.txt 100 | 101 | samples/outreach/blogs/segmentation_blogpost/carvana-image-masking-challenge/ 102 | -------------------------------------------------------------------------------- /models/.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "tensorflow"] 2 | path = research/syntaxnet/tensorflow 3 | url = https://github.com/tensorflow/tensorflow.git 4 | -------------------------------------------------------------------------------- /models/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @tensorflow/tf-garden-team 2 | /official/ @tensorflow/tf-garden-team @karmel 3 | /research/adversarial_crypto/ @dave-andersen 4 | /research/adversarial_logit_pairing/ @AlexeyKurakin 5 | /research/adversarial_text/ 
@rsepassi @a-dai 6 | /research/adv_imagenet_models/ @AlexeyKurakin 7 | /research/attention_ocr/ @alexgorban 8 | /research/audioset/ @plakal @dpwe 9 | /research/autoaugment/* @barretzoph 10 | /research/autoencoders/ @snurkabill 11 | /research/brain_coder/ @danabo 12 | /research/cognitive_mapping_and_planning/ @s-gupta 13 | /research/compression/ @nmjohn 14 | /research/cvt_text/ @clarkkev @lmthang 15 | /research/deep_contextual_bandits/ @rikel 16 | /research/deeplab/ @aquariusjay @yknzhu @gpapan 17 | /research/delf/ @andrefaraujo 18 | /research/differential_privacy/ @ilyamironov @ananthr 19 | /research/domain_adaptation/ @bousmalis @dmrd 20 | /research/efficient-hrl/ @ofirnachum 21 | /research/gan/ @joel-shor 22 | /research/global_objectives/ @mackeya-google 23 | /research/im2txt/ @cshallue 24 | /research/inception/ @shlens @vincentvanhoucke 25 | /research/keypointnet/ @mnorouzi 26 | /research/learned_optimizer/ @olganw @nirum 27 | /research/learning_to_remember_rare_events/ @lukaszkaiser @ofirnachum 28 | /research/learning_unsupervised_learning/ @lukemetz @nirum 29 | /research/lexnet_nc/ @vered1986 @waterson 30 | /research/lfads/ @jazcollins @susillo 31 | /research/lm_1b/ @oriolvinyals @panyx0718 32 | /research/lm_commonsense/ @thtrieu 33 | /research/lstm_object_detection/ @dreamdragon @masonliuw @yinxiaoli 34 | /research/marco/ @vincentvanhoucke 35 | /research/maskgan/ @a-dai 36 | /research/morph_net/ @gariel-google 37 | /research/namignizer/ @knathanieltucker 38 | /research/neural_gpu/ @lukaszkaiser 39 | /research/neural_programmer/ @arvind2505 40 | /research/next_frame_prediction/ @panyx0718 41 | /research/object_detection/ @jch1 @tombstone @derekjchow @jesu9 @dreamdragon @pkulzc 42 | /research/pcl_rl/ @ofirnachum 43 | /research/ptn/ @xcyan @arkanath @hellojas @honglaklee 44 | /research/real_nvp/ @laurent-dinh 45 | /research/rebar/ @gjtucker 46 | /research/resnet/ @panyx0718 47 | /research/seq2species/ @apbusia @depristo 48 | /research/skip_thoughts/ @cshallue 49 | /research/slim/ @sguada @nathansilberman 50 | /research/steve/ @buckman-google 51 | /research/street/ @theraysmith 52 | /research/struct2depth/ @aneliaangelova 53 | /research/swivel/ @waterson 54 | /research/syntaxnet/ @calberti @andorardo @bogatyy @markomernick 55 | /research/tcn/ @coreylynch @sermanet 56 | /research/tensorrt/ @karmel 57 | /research/textsum/ @panyx0718 @peterjliu 58 | /research/transformer/ @daviddao 59 | /research/vid2depth/ @rezama 60 | /research/video_prediction/ @cbfinn 61 | /research/fivo/ @dieterichlawson 62 | /samples/ @MarkDaoust @lamberta 63 | /samples/languages/java/ @asimshankar 64 | /tutorials/embedding/ @zffchen78 @a-dai 65 | /tutorials/image/ @sherrym @shlens 66 | /tutorials/image/cifar10_estimator/ @tfboyd @protoget 67 | /tutorials/rnn/ @lukaszkaiser @ebrevdo 68 | -------------------------------------------------------------------------------- /models/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing guidelines 2 | 3 | If you have created a model and would like to publish it here, please send us a 4 | pull request. For those just getting started with pull requests, GitHub has a 5 | [howto](https://help.github.com/articles/using-pull-requests/). 6 | 7 | The code for any model in this repository is licensed under the Apache License 8 | 2.0. 9 | 10 | In order to accept our code, we have to make sure that we can publish your code: 11 | You have to sign a Contributor License Agreement (CLA). 
12 | 13 | ### Contributor License Agreements 14 | 15 | Please fill out either the individual or corporate Contributor License Agreement (CLA). 16 | 17 | * If you are an individual writing original source code and you're sure you own the intellectual property, then you'll need to sign an [individual CLA](http://code.google.com/legal/individual-cla-v1.0.html). 18 | * If you work for a company that wants to allow you to contribute your work, then you'll need to sign a [corporate CLA](http://code.google.com/legal/corporate-cla-v1.0.html). 19 | 20 | Follow either of the two links above to access the appropriate CLA and instructions for how to sign and return it. Once we receive it, we'll be able to accept your pull requests. 21 | 22 | ***NOTE***: Only original source code from you and other people that have signed the CLA can be accepted into the repository. 23 | 24 | -------------------------------------------------------------------------------- /models/README.md: -------------------------------------------------------------------------------- 1 | # TensorFlow Models 2 | 3 | This repository contains a number of different models implemented in [TensorFlow](https://www.tensorflow.org): 4 | 5 | The [official models](official) are a collection of example models that use TensorFlow's high-level APIs. They are intended to be well-maintained, tested, and kept up to date with the latest stable TensorFlow API. They should also be reasonably optimized for fast performance while still being easy to read. We especially recommend newer TensorFlow users to start here. 6 | 7 | The [research models](https://github.com/tensorflow/models/tree/master/research) are a large collection of models implemented in TensorFlow by researchers. They are not officially supported or available in release branches; it is up to the individual researchers to maintain the models and/or provide support on issues and pull requests. 8 | 9 | The [samples folder](samples) contains code snippets and smaller models that demonstrate features of TensorFlow, including code presented in various blog posts. 10 | 11 | The [tutorials folder](tutorials) is a collection of models described in the [TensorFlow tutorials](https://www.tensorflow.org/tutorials/). 12 | 13 | ## Contribution guidelines 14 | 15 | If you want to contribute to models, be sure to review the [contribution guidelines](CONTRIBUTING.md). 
16 | 17 | ## License 18 | 19 | [Apache License 2.0](LICENSE) 20 | -------------------------------------------------------------------------------- /models/research/make_logs.py: -------------------------------------------------------------------------------- 1 | import os 2 | import datetime 3 | import argparse 4 | 5 | def make_logs(root_dir, model): 6 | """Create a timestamped log directory under root_dir/model and return its path.""" 7 | now = str(datetime.datetime.now()).replace(' ', '_') 8 | model_dir = os.path.join(root_dir, model) 9 | logdir_path = os.path.join(model_dir, 'logs_' + now) 10 | # Create the per-model directory on first use, then the timestamped log directory. 11 | if not os.path.exists(model_dir): 12 | os.mkdir(model_dir) 13 | os.mkdir(logdir_path) 14 | print(logdir_path) 15 | return logdir_path 16 | 17 | 18 | def str2bool(value): 19 | # argparse's type=bool treats any non-empty string (including 'False') as True, 20 | # so parse boolean flags explicitly. 21 | return str(value).lower() in ('true', '1', 'yes') 22 | 23 | 24 | if __name__ == '__main__': 25 | parser = argparse.ArgumentParser() 26 | parser.add_argument('--model', type=str, default='custom_faster_rcnn_inceptionv2', 27 | choices=['custom_faster_rcnn_inceptionv2', 'custom_ssdlite_mobilenet_v2_coco', 'lasaygues', 'arkea', 'arkea_checkbox', 'arkea_signature_poc'], 28 | help='model for object detection') 29 | parser.add_argument('--save_logs', type=str2bool, default=True, help='set to True to save the logs') 30 | 31 | # Parameters 32 | args = parser.parse_args() 33 | model = args.model 34 | save_logs = args.save_logs 35 | 36 | root_dir = '../../logs' 37 | if not os.path.exists(root_dir): 38 | os.mkdir(root_dir) 39 | if save_logs: 40 | logdir = make_logs(root_dir, model) 41 | print('save training logs in {} directory'.format(logdir)) 42 | -------------------------------------------------------------------------------- /models/research/object_detection/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to the Tensorflow Object Detection API 2 | 3 | Patches to Tensorflow Object Detection API are welcome! 4 | 5 | We require contributors to fill out either the individual or corporate 6 | Contributor License Agreement (CLA). 7 | 8 | * If you are an individual writing original source code and you're sure you own the intellectual property, then you'll need to sign an [individual CLA](http://code.google.com/legal/individual-cla-v1.0.html). 9 | * If you work for a company that wants to allow you to contribute your work, then you'll need to sign a [corporate CLA](http://code.google.com/legal/corporate-cla-v1.0.html). 10 | 11 | Please follow the 12 | [Tensorflow contributing guidelines](https://github.com/tensorflow/tensorflow/blob/master/CONTRIBUTING.md) 13 | when submitting pull requests.
14 | -------------------------------------------------------------------------------- /models/research/object_detection/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/anchor_generators/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/anchor_generators/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/box_coders/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/box_coders/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/box_coders/mean_stddev_box_coder.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | """Mean stddev box coder. 17 | 18 | This box coder use the following coding schema to encode boxes: 19 | rel_code = (box_corner - anchor_corner_mean) / anchor_corner_stddev. 20 | """ 21 | from object_detection.core import box_coder 22 | from object_detection.core import box_list 23 | 24 | 25 | class MeanStddevBoxCoder(box_coder.BoxCoder): 26 | """Mean stddev box coder.""" 27 | 28 | def __init__(self, stddev=0.01): 29 | """Constructor for MeanStddevBoxCoder. 30 | 31 | Args: 32 | stddev: The standard deviation used to encode and decode boxes. 33 | """ 34 | self._stddev = stddev 35 | 36 | @property 37 | def code_size(self): 38 | return 4 39 | 40 | def _encode(self, boxes, anchors): 41 | """Encode a box collection with respect to anchor collection. 42 | 43 | Args: 44 | boxes: BoxList holding N boxes to be encoded. 45 | anchors: BoxList of N anchors. 46 | 47 | Returns: 48 | a tensor representing N anchor-encoded boxes 49 | 50 | Raises: 51 | ValueError: if the anchors still have deprecated stddev field. 52 | """ 53 | box_corners = boxes.get() 54 | if anchors.has_field('stddev'): 55 | raise ValueError("'stddev' is a parameter of MeanStddevBoxCoder and " 56 | "should not be specified in the box list.") 57 | means = anchors.get() 58 | return (box_corners - means) / self._stddev 59 | 60 | def _decode(self, rel_codes, anchors): 61 | """Decode. 
62 | 63 | Args: 64 | rel_codes: a tensor representing N anchor-encoded boxes. 65 | anchors: BoxList of anchors. 66 | 67 | Returns: 68 | boxes: BoxList holding N bounding boxes 69 | 70 | Raises: 71 | ValueError: if the anchors still have deprecated stddev field and expects 72 | the decode method to use stddev value from that field. 73 | """ 74 | means = anchors.get() 75 | if anchors.has_field('stddev'): 76 | raise ValueError("'stddev' is a parameter of MeanStddevBoxCoder and " 77 | "should not be specified in the box list.") 78 | box_corners = rel_codes * self._stddev + means 79 | return box_list.BoxList(box_corners) 80 | -------------------------------------------------------------------------------- /models/research/object_detection/box_coders/mean_stddev_box_coder_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | """Tests for object_detection.box_coder.mean_stddev_boxcoder.""" 17 | 18 | import tensorflow as tf 19 | 20 | from object_detection.box_coders import mean_stddev_box_coder 21 | from object_detection.core import box_list 22 | 23 | 24 | class MeanStddevBoxCoderTest(tf.test.TestCase): 25 | 26 | def testGetCorrectRelativeCodesAfterEncoding(self): 27 | box_corners = [[0.0, 0.0, 0.5, 0.5], [0.0, 0.0, 0.5, 0.5]] 28 | boxes = box_list.BoxList(tf.constant(box_corners)) 29 | expected_rel_codes = [[0.0, 0.0, 0.0, 0.0], [-5.0, -5.0, -5.0, -3.0]] 30 | prior_means = tf.constant([[0.0, 0.0, 0.5, 0.5], [0.5, 0.5, 1.0, 0.8]]) 31 | priors = box_list.BoxList(prior_means) 32 | 33 | coder = mean_stddev_box_coder.MeanStddevBoxCoder(stddev=0.1) 34 | rel_codes = coder.encode(boxes, priors) 35 | with self.test_session() as sess: 36 | rel_codes_out = sess.run(rel_codes) 37 | self.assertAllClose(rel_codes_out, expected_rel_codes) 38 | 39 | def testGetCorrectBoxesAfterDecoding(self): 40 | rel_codes = tf.constant([[0.0, 0.0, 0.0, 0.0], [-5.0, -5.0, -5.0, -3.0]]) 41 | expected_box_corners = [[0.0, 0.0, 0.5, 0.5], [0.0, 0.0, 0.5, 0.5]] 42 | prior_means = tf.constant([[0.0, 0.0, 0.5, 0.5], [0.5, 0.5, 1.0, 0.8]]) 43 | priors = box_list.BoxList(prior_means) 44 | 45 | coder = mean_stddev_box_coder.MeanStddevBoxCoder(stddev=0.1) 46 | decoded_boxes = coder.decode(rel_codes, priors) 47 | decoded_box_corners = decoded_boxes.get() 48 | with self.test_session() as sess: 49 | decoded_out = sess.run(decoded_box_corners) 50 | self.assertAllClose(decoded_out, expected_box_corners) 51 | 52 | 53 | if __name__ == '__main__': 54 | tf.test.main() 55 | -------------------------------------------------------------------------------- /models/research/object_detection/builders/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/builders/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/builders/graph_rewriter_builder.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Functions for quantized training and evaluation.""" 16 | 17 | import tensorflow as tf 18 | 19 | 20 | def build(graph_rewriter_config, is_training): 21 | """Returns a function that modifies default graph based on options. 22 | 23 | Args: 24 | graph_rewriter_config: graph_rewriter_pb2.GraphRewriter proto. 25 | is_training: whether in training or eval mode. 26 | """ 27 | def graph_rewrite_fn(): 28 | """Function to quantize weights and activation of the default graph.""" 29 | if (graph_rewriter_config.quantization.weight_bits != 8 or 30 | graph_rewriter_config.quantization.activation_bits != 8): 31 | raise ValueError('Only 8bit quantization is supported') 32 | 33 | # Quantize the graph by inserting quantize ops for weights and activations 34 | if is_training: 35 | tf.contrib.quantize.create_training_graph( 36 | input_graph=tf.get_default_graph(), 37 | quant_delay=graph_rewriter_config.quantization.delay) 38 | else: 39 | tf.contrib.quantize.create_eval_graph(input_graph=tf.get_default_graph()) 40 | 41 | tf.contrib.layers.summarize_collection('quant_vars') 42 | return graph_rewrite_fn 43 | -------------------------------------------------------------------------------- /models/research/object_detection/builders/graph_rewriter_builder_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License.
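Beyond the mocked unit tests that follow, the builder above is typically invoked once, after the detection graph has been constructed. A hedged TF 1.x usage sketch (the delay value is illustrative, not from this repo's config):

from object_detection.builders import graph_rewriter_builder
from object_detection.protos import graph_rewriter_pb2

rewriter_proto = graph_rewriter_pb2.GraphRewriter()
rewriter_proto.quantization.delay = 2000      # illustrative: quantize after 2000 steps
rewriter_proto.quantization.weight_bits = 8   # only 8-bit is supported, per build()
rewriter_proto.quantization.activation_bits = 8

# ... build the detection model into the default graph first ...
graph_rewrite_fn = graph_rewriter_builder.build(rewriter_proto, is_training=True)
graph_rewrite_fn()  # inserts fake-quantization ops into the default graph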
14 | # ============================================================================== 15 | """Tests for graph_rewriter_builder.""" 16 | import mock 17 | import tensorflow as tf 18 | from object_detection.builders import graph_rewriter_builder 19 | from object_detection.protos import graph_rewriter_pb2 20 | 21 | 22 | class QuantizationBuilderTest(tf.test.TestCase): 23 | 24 | def testQuantizationBuilderSetsUpCorrectTrainArguments(self): 25 | with mock.patch.object( 26 | tf.contrib.quantize, 'create_training_graph') as mock_quant_fn: 27 | with mock.patch.object(tf.contrib.layers, 28 | 'summarize_collection') as mock_summarize_col: 29 | graph_rewriter_proto = graph_rewriter_pb2.GraphRewriter() 30 | graph_rewriter_proto.quantization.delay = 10 31 | graph_rewriter_proto.quantization.weight_bits = 8 32 | graph_rewriter_proto.quantization.activation_bits = 8 33 | graph_rewrite_fn = graph_rewriter_builder.build( 34 | graph_rewriter_proto, is_training=True) 35 | graph_rewrite_fn() 36 | _, kwargs = mock_quant_fn.call_args 37 | self.assertEqual(kwargs['input_graph'], tf.get_default_graph()) 38 | self.assertEqual(kwargs['quant_delay'], 10) 39 | mock_summarize_col.assert_called_with('quant_vars') 40 | 41 | def testQuantizationBuilderSetsUpCorrectEvalArguments(self): 42 | with mock.patch.object(tf.contrib.quantize, 43 | 'create_eval_graph') as mock_quant_fn: 44 | with mock.patch.object(tf.contrib.layers, 45 | 'summarize_collection') as mock_summarize_col: 46 | graph_rewriter_proto = graph_rewriter_pb2.GraphRewriter() 47 | graph_rewriter_proto.quantization.delay = 10 48 | graph_rewrite_fn = graph_rewriter_builder.build( 49 | graph_rewriter_proto, is_training=False) 50 | graph_rewrite_fn() 51 | _, kwargs = mock_quant_fn.call_args 52 | self.assertEqual(kwargs['input_graph'], tf.get_default_graph()) 53 | mock_summarize_col.assert_called_with('quant_vars') 54 | 55 | 56 | if __name__ == '__main__': 57 | tf.test.main() 58 | -------------------------------------------------------------------------------- /models/research/object_detection/builders/matcher_builder.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | """A function to build an object detection matcher from configuration.""" 17 | 18 | from object_detection.matchers import argmax_matcher 19 | from object_detection.matchers import bipartite_matcher 20 | from object_detection.protos import matcher_pb2 21 | 22 | 23 | def build(matcher_config): 24 | """Builds a matcher object based on the matcher config. 25 | 26 | Args: 27 | matcher_config: A matcher.proto object containing the config for the desired 28 | Matcher. 29 | 30 | Returns: 31 | Matcher based on the config. 32 | 33 | Raises: 34 | ValueError: On empty matcher proto. 
35 | """ 36 | if not isinstance(matcher_config, matcher_pb2.Matcher): 37 | raise ValueError('matcher_config not of type matcher_pb2.Matcher.') 38 | if matcher_config.WhichOneof('matcher_oneof') == 'argmax_matcher': 39 | matcher = matcher_config.argmax_matcher 40 | matched_threshold = unmatched_threshold = None 41 | if not matcher.ignore_thresholds: 42 | matched_threshold = matcher.matched_threshold 43 | unmatched_threshold = matcher.unmatched_threshold 44 | return argmax_matcher.ArgMaxMatcher( 45 | matched_threshold=matched_threshold, 46 | unmatched_threshold=unmatched_threshold, 47 | negatives_lower_than_unmatched=matcher.negatives_lower_than_unmatched, 48 | force_match_for_each_row=matcher.force_match_for_each_row, 49 | use_matmul_gather=matcher.use_matmul_gather) 50 | if matcher_config.WhichOneof('matcher_oneof') == 'bipartite_matcher': 51 | matcher = matcher_config.bipartite_matcher 52 | return bipartite_matcher.GreedyBipartiteMatcher(matcher.use_matmul_gather) 53 | raise ValueError('Empty matcher.') 54 | -------------------------------------------------------------------------------- /models/research/object_detection/builders/region_similarity_calculator_builder.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | """Builder for region similarity calculators.""" 17 | 18 | from object_detection.core import region_similarity_calculator 19 | from object_detection.protos import region_similarity_calculator_pb2 20 | 21 | 22 | def build(region_similarity_calculator_config): 23 | """Builds region similarity calculator based on the configuration. 24 | 25 | Builds one of [IouSimilarity, IoaSimilarity, NegSqDistSimilarity] objects. See 26 | core/region_similarity_calculator.proto for details. 27 | 28 | Args: 29 | region_similarity_calculator_config: RegionSimilarityCalculator 30 | configuration proto. 31 | 32 | Returns: 33 | region_similarity_calculator: RegionSimilarityCalculator object. 34 | 35 | Raises: 36 | ValueError: On unknown region similarity calculator. 
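Stepping back to matcher_builder.build above: usage follows the same proto-in, object-out pattern as the other builders. A speculative sketch (the threshold values are illustrative, not taken from this repo's config):

from google.protobuf import text_format

from object_detection.builders import matcher_builder
from object_detection.protos import matcher_pb2

matcher_proto = matcher_pb2.Matcher()
text_format.Merge("""
  argmax_matcher {
    matched_threshold: 0.7
    unmatched_threshold: 0.3
  }
""", matcher_proto)

matcher = matcher_builder.build(matcher_proto)  # -> argmax_matcher.ArgMaxMatcher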
37 | """ 38 | 39 | if not isinstance( 40 | region_similarity_calculator_config, 41 | region_similarity_calculator_pb2.RegionSimilarityCalculator): 42 | raise ValueError( 43 | 'region_similarity_calculator_config not of type ' 44 | 'region_similarity_calculator_pb2.RegionSimilarityCalculator') 45 | 46 | similarity_calculator = region_similarity_calculator_config.WhichOneof( 47 | 'region_similarity') 48 | if similarity_calculator == 'iou_similarity': 49 | return region_similarity_calculator.IouSimilarity() 50 | if similarity_calculator == 'ioa_similarity': 51 | return region_similarity_calculator.IoaSimilarity() 52 | if similarity_calculator == 'neg_sq_dist_similarity': 53 | return region_similarity_calculator.NegSqDistSimilarity() 54 | if similarity_calculator == 'thresholded_iou_similarity': 55 | return region_similarity_calculator.ThresholdedIouSimilarity( 56 | region_similarity_calculator_config.thresholded_iou_similarity 57 | .iou_threshold) 58 | 59 | raise ValueError('Unknown region similarity calculator.') 60 | -------------------------------------------------------------------------------- /models/research/object_detection/builders/region_similarity_calculator_builder_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License.
14 | # ============================================================================== 15 | 16 | """Tests for region_similarity_calculator_builder.""" 17 | 18 | import tensorflow as tf 19 | 20 | from google.protobuf import text_format 21 | from object_detection.builders import region_similarity_calculator_builder 22 | from object_detection.core import region_similarity_calculator 23 | from object_detection.protos import region_similarity_calculator_pb2 as sim_calc_pb2 24 | 25 | 26 | class RegionSimilarityCalculatorBuilderTest(tf.test.TestCase): 27 | 28 | def testBuildIoaSimilarityCalculator(self): 29 | similarity_calc_text_proto = """ 30 | ioa_similarity { 31 | } 32 | """ 33 | similarity_calc_proto = sim_calc_pb2.RegionSimilarityCalculator() 34 | text_format.Merge(similarity_calc_text_proto, similarity_calc_proto) 35 | similarity_calc = region_similarity_calculator_builder.build( 36 | similarity_calc_proto) 37 | self.assertTrue(isinstance(similarity_calc, 38 | region_similarity_calculator.IoaSimilarity)) 39 | 40 | def testBuildIouSimilarityCalculator(self): 41 | similarity_calc_text_proto = """ 42 | iou_similarity { 43 | } 44 | """ 45 | similarity_calc_proto = sim_calc_pb2.RegionSimilarityCalculator() 46 | text_format.Merge(similarity_calc_text_proto, similarity_calc_proto) 47 | similarity_calc = region_similarity_calculator_builder.build( 48 | similarity_calc_proto) 49 | self.assertTrue(isinstance(similarity_calc, 50 | region_similarity_calculator.IouSimilarity)) 51 | 52 | def testBuildNegSqDistSimilarityCalculator(self): 53 | similarity_calc_text_proto = """ 54 | neg_sq_dist_similarity { 55 | } 56 | """ 57 | similarity_calc_proto = sim_calc_pb2.RegionSimilarityCalculator() 58 | text_format.Merge(similarity_calc_text_proto, similarity_calc_proto) 59 | similarity_calc = region_similarity_calculator_builder.build( 60 | similarity_calc_proto) 61 | self.assertTrue(isinstance(similarity_calc, 62 | region_similarity_calculator. 63 | NegSqDistSimilarity)) 64 | 65 | 66 | if __name__ == '__main__': 67 | tf.test.main() 68 | -------------------------------------------------------------------------------- /models/research/object_detection/core/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /models/research/object_detection/core/box_coder_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | 16 | """Tests for object_detection.core.box_coder.""" 17 | 18 | import tensorflow as tf 19 | 20 | from object_detection.core import box_coder 21 | from object_detection.core import box_list 22 | 23 | 24 | class MockBoxCoder(box_coder.BoxCoder): 25 | """Test BoxCoder that encodes/decodes using the multiply-by-two function.""" 26 | 27 | def code_size(self): 28 | return 4 29 | 30 | def _encode(self, boxes, anchors): 31 | return 2.0 * boxes.get() 32 | 33 | def _decode(self, rel_codes, anchors): 34 | return box_list.BoxList(rel_codes / 2.0) 35 | 36 | 37 | class BoxCoderTest(tf.test.TestCase): 38 | 39 | def test_batch_decode(self): 40 | mock_anchor_corners = tf.constant( 41 | [[0, 0.1, 0.2, 0.3], [0.2, 0.4, 0.4, 0.6]], tf.float32) 42 | mock_anchors = box_list.BoxList(mock_anchor_corners) 43 | mock_box_coder = MockBoxCoder() 44 | 45 | expected_boxes = [[[0.0, 0.1, 0.5, 0.6], [0.5, 0.6, 0.7, 0.8]], 46 | [[0.1, 0.2, 0.3, 0.4], [0.7, 0.8, 0.9, 1.0]]] 47 | 48 | encoded_boxes_list = [mock_box_coder.encode( 49 | box_list.BoxList(tf.constant(boxes)), mock_anchors) 50 | for boxes in expected_boxes] 51 | encoded_boxes = tf.stack(encoded_boxes_list) 52 | decoded_boxes = box_coder.batch_decode( 53 | encoded_boxes, mock_box_coder, mock_anchors) 54 | 55 | with self.test_session() as sess: 56 | decoded_boxes_result = sess.run(decoded_boxes) 57 | self.assertAllClose(expected_boxes, decoded_boxes_result) 58 | 59 | 60 | if __name__ == '__main__': 61 | tf.test.main() 62 | -------------------------------------------------------------------------------- /models/research/object_detection/core/data_decoder.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | """Interface for data decoders. 17 | 18 | Data decoders decode the input data and return a dictionary of tensors keyed by 19 | the entries in core.reader.Fields. 20 | """ 21 | from abc import ABCMeta 22 | from abc import abstractmethod 23 | 24 | 25 | class DataDecoder(object): 26 | """Interface for data decoders.""" 27 | __metaclass__ = ABCMeta 28 | 29 | @abstractmethod 30 | def decode(self, data): 31 | """Return a single image and associated labels. 32 | 33 | Args: 34 | data: a string tensor holding a serialized protocol buffer corresponding 35 | to data for a single image. 36 | 37 | Returns: 38 | tensor_dict: a dictionary containing tensors. Possible keys are defined in 39 | reader.Fields. 40 | """ 41 | pass 42 | -------------------------------------------------------------------------------- /models/research/object_detection/core/data_parser.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 
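A concrete DataDecoder for the interface above only has to map one serialized record to a dictionary of named tensors. A minimal hypothetical sketch (the class, the 'image' key, and the raw-JPEG assumption are invented for illustration; real decoders key by standard_fields entries):

import tensorflow as tf

from object_detection.core import data_decoder


class RawJpegDecoder(data_decoder.DataDecoder):
  """Hypothetical decoder: treats each record as raw JPEG bytes."""

  def decode(self, data):
    image = tf.image.decode_jpeg(data, channels=3)
    return {'image': image}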
2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Interface for data parsers. 16 | 17 | Data parser parses input data and returns a dictionary of numpy arrays 18 | keyed by the entries in standard_fields.py. Since the parser parses records 19 | to numpy arrays (materialized tensors) directly, it is used to read data for 20 | evaluation/visualization; to parse the data during training, DataDecoder should 21 | be used. 22 | """ 23 | from abc import ABCMeta 24 | from abc import abstractmethod 25 | 26 | 27 | class DataToNumpyParser(object): 28 | __metaclass__ = ABCMeta 29 | 30 | @abstractmethod 31 | def parse(self, input_data): 32 | """Parses input and returns a numpy array or a dictionary of numpy arrays. 33 | 34 | Args: 35 | input_data: an input data 36 | 37 | Returns: 38 | A numpy array or a dictionary of numpy arrays or None, if input 39 | cannot be parsed. 40 | """ 41 | pass 42 | -------------------------------------------------------------------------------- /models/research/object_detection/core/prefetcher.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | """Provides functions to prefetch tensors to feed into models.""" 17 | import tensorflow as tf 18 | 19 | 20 | def prefetch(tensor_dict, capacity): 21 | """Creates a prefetch queue for tensors. 22 | 23 | Creates a FIFO queue to asynchronously enqueue tensor_dicts and returns a 24 | dequeue op that evaluates to a tensor_dict. This function is useful in 25 | prefetching preprocessed tensors so that the data is readily available for 26 | consumers. 27 | 28 | Example input pipeline when you don't need batching: 29 | ---------------------------------------------------- 30 | key, string_tensor = slim.parallel_reader.parallel_read(...) 31 | tensor_dict = decoder.decode(string_tensor) 32 | tensor_dict = preprocessor.preprocess(tensor_dict, ...) 33 | prefetch_queue = prefetcher.prefetch(tensor_dict, capacity=20) 34 | tensor_dict = prefetch_queue.dequeue() 35 | outputs = Model(tensor_dict) 36 | ... 
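In contrast to decoders, a DataToNumpyParser as specified above materializes numpy arrays and returns None when input cannot be parsed. A tiny hypothetical sketch honoring that contract:

import numpy as np

from object_detection.core import data_parser


class FloatListParser(data_parser.DataToNumpyParser):
  """Hypothetical parser: turns a sequence of floats into a numpy array."""

  def parse(self, input_data):
    try:
      return np.asarray(input_data, dtype=np.float32)
    except (TypeError, ValueError):
      return None  # per the interface: None when input cannot be parsed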
37 | ---------------------------------------------------- 38 | 39 | For input pipelines with batching, refer to core/batcher.py 40 | 41 | Args: 42 | tensor_dict: a dictionary of tensors to prefetch. 43 | capacity: the size of the prefetch queue. 44 | 45 | Returns: 46 | a FIFO prefetcher queue 47 | """ 48 | names = list(tensor_dict.keys()) 49 | dtypes = [t.dtype for t in tensor_dict.values()] 50 | shapes = [t.get_shape() for t in tensor_dict.values()] 51 | prefetch_queue = tf.PaddingFIFOQueue(capacity, dtypes=dtypes, 52 | shapes=shapes, 53 | names=names, 54 | name='prefetch_queue') 55 | enqueue_op = prefetch_queue.enqueue(tensor_dict) 56 | tf.train.queue_runner.add_queue_runner(tf.train.queue_runner.QueueRunner( 57 | prefetch_queue, [enqueue_op])) 58 | tf.summary.scalar('queue/%s/fraction_of_%d_full' % (prefetch_queue.name, 59 | capacity), 60 | tf.to_float(prefetch_queue.size()) * (1. / capacity)) 61 | return prefetch_queue 62 | -------------------------------------------------------------------------------- /models/research/object_detection/data_decoders/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/data_decoders/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/dataset_tools/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/dataset_tools/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/dataset_tools/create_pycocotools_package.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # ============================================================================== 16 | 17 | # Script to download pycocotools and make package for CMLE jobs. 18 | # 19 | # usage: 20 | # bash object_detection/dataset_tools/create_pycocotools_package.sh \ 21 | # /tmp/pycocotools 22 | set -e 23 | 24 | if [ -z "$1" ]; then 25 | echo "usage create_pycocotools_package.sh [output dir]" 26 | exit 27 | fi 28 | 29 | # Create the output directory. 
30 | OUTPUT_DIR="${1%/}" 31 | SCRATCH_DIR="${OUTPUT_DIR}/raw" 32 | mkdir -p "${OUTPUT_DIR}" 33 | mkdir -p "${SCRATCH_DIR}" 34 | 35 | cd ${SCRATCH_DIR} 36 | git clone https://github.com/cocodataset/cocoapi.git 37 | cd cocoapi/PythonAPI && mv ../common ./ 38 | 39 | sed "s/\.\.\/common/common/g" setup.py > setup.py.updated 40 | cp -f setup.py.updated setup.py 41 | rm setup.py.updated 42 | 43 | sed "s/\.\.\/common/common/g" pycocotools/_mask.pyx > _mask.pyx.updated 44 | cp -f _mask.pyx.updated pycocotools/_mask.pyx 45 | rm _mask.pyx.updated 46 | 47 | sed "s/import matplotlib\.pyplot as plt/import matplotlib\nmatplotlib\.use\(\'Agg\'\)\nimport matplotlib\.pyplot as plt/g" pycocotools/coco.py > coco.py.updated 48 | cp -f coco.py.updated pycocotools/coco.py 49 | rm coco.py.updated 50 | 51 | cd "${OUTPUT_DIR}" 52 | tar -czf pycocotools-2.0.tar.gz -C "${SCRATCH_DIR}/cocoapi/" PythonAPI/ 53 | rm -rf ${SCRATCH_DIR} 54 | -------------------------------------------------------------------------------- /models/research/object_detection/dataset_tools/tf_record_creation_util.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | r"""Utilities for creating TFRecords of TF examples for the Open Images dataset. 16 | """ 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | 21 | import tensorflow as tf 22 | 23 | 24 | def open_sharded_output_tfrecords(exit_stack, base_path, num_shards): 25 | """Opens all TFRecord shards for writing and adds them to an exit stack. 26 | 27 | Args: 28 | exit_stack: A contextlib2.ExitStack used to automatically close the TFRecords 29 | opened in this function. 30 | base_path: The base path for all shards 31 | num_shards: The number of shards 32 | 33 | Returns: 34 | The list of opened TFRecords. Position k in the list corresponds to shard k. 35 | """ 36 | tf_record_output_filenames = [ 37 | '{}-{:05d}-of-{:05d}'.format(base_path, idx, num_shards) 38 | for idx in range(num_shards) 39 | ] 40 | 41 | tfrecords = [ 42 | exit_stack.enter_context(tf.python_io.TFRecordWriter(file_name)) 43 | for file_name in tf_record_output_filenames 44 | ] 45 | 46 | return tfrecords 47 | -------------------------------------------------------------------------------- /models/research/object_detection/dataset_tools/tf_record_creation_util_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License.
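As the unit test that follows also shows, open_sharded_output_tfrecords above is meant to be driven through a contextlib2.ExitStack, which closes every shard writer on exit. A brief sketch (both .record paths are illustrative):

import contextlib2
import tensorflow as tf

from object_detection.dataset_tools import tf_record_creation_util

with contextlib2.ExitStack() as close_stack:
  writers = tf_record_creation_util.open_sharded_output_tfrecords(
      close_stack, '/tmp/signatures.record', num_shards=10)
  for index, example in enumerate(tf.python_io.tf_record_iterator(
      '/tmp/unsharded.record')):       # illustrative source of serialized examples
    writers[index % 10].write(example)  # spread the examples across the shards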
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Tests for tf_record_creation_util.py.""" 16 | 17 | import os 18 | import contextlib2 19 | import tensorflow as tf 20 | 21 | from object_detection.dataset_tools import tf_record_creation_util 22 | 23 | 24 | class OpenOutputTfrecordsTests(tf.test.TestCase): 25 | 26 | def test_sharded_tfrecord_writes(self): 27 | with contextlib2.ExitStack() as tf_record_close_stack: 28 | output_tfrecords = tf_record_creation_util.open_sharded_output_tfrecords( 29 | tf_record_close_stack, 30 | os.path.join(tf.test.get_temp_dir(), 'test.tfrec'), 10) 31 | for idx in range(10): 32 | output_tfrecords[idx].write('test_{}'.format(idx)) 33 | 34 | for idx in range(10): 35 | tf_record_path = '{}-{:05d}-of-00010'.format( 36 | os.path.join(tf.test.get_temp_dir(), 'test.tfrec'), idx) 37 | records = list(tf.python_io.tf_record_iterator(tf_record_path)) 38 | self.assertAllEqual(records, ['test_{}'.format(idx)]) 39 | 40 | 41 | if __name__ == '__main__': 42 | tf.test.main() 43 | -------------------------------------------------------------------------------- /models/research/object_detection/export_model.sh: -------------------------------------------------------------------------------- 1 | python3 export_inference_graph.py \ 2 | --input_type image_tensor \ 3 | --pipeline_config_path ../../../config/custom_faster_rcnn_inceptionv2.config \ 4 | --trained_checkpoint_prefix ../../../logs/custom_faster_rcnn_inceptionv2/logs_2019-08-28_16:11:29.305430/model.ckpt-696 \ 5 | --output_directory ../../../inference/frozen_inference_graph -------------------------------------------------------------------------------- /models/research/object_detection/inference/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/inference/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/legacy/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/legacy/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/matchers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/matchers/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/meta_architectures/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/meta_architectures/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/meta_architectures/rfcn_meta_arch_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | """Tests for object_detection.meta_architectures.rfcn_meta_arch.""" 17 | 18 | import tensorflow as tf 19 | 20 | from object_detection.meta_architectures import faster_rcnn_meta_arch_test_lib 21 | from object_detection.meta_architectures import rfcn_meta_arch 22 | 23 | 24 | class RFCNMetaArchTest( 25 | faster_rcnn_meta_arch_test_lib.FasterRCNNMetaArchTestBase): 26 | 27 | def _get_second_stage_box_predictor_text_proto(self): 28 | box_predictor_text_proto = """ 29 | rfcn_box_predictor { 30 | conv_hyperparams { 31 | op: CONV 32 | activation: NONE 33 | regularizer { 34 | l2_regularizer { 35 | weight: 0.0005 36 | } 37 | } 38 | initializer { 39 | variance_scaling_initializer { 40 | factor: 1.0 41 | uniform: true 42 | mode: FAN_AVG 43 | } 44 | } 45 | } 46 | } 47 | """ 48 | return box_predictor_text_proto 49 | 50 | def _get_model(self, box_predictor, **common_kwargs): 51 | return rfcn_meta_arch.RFCNMetaArch( 52 | second_stage_rfcn_box_predictor=box_predictor, **common_kwargs) 53 | 54 | def _get_box_classifier_features_shape(self, 55 | image_size, 56 | batch_size, 57 | max_num_proposals, 58 | initial_crop_size, 59 | maxpool_stride, 60 | num_features): 61 | return (batch_size, image_size, image_size, num_features) 62 | 63 | 64 | if __name__ == '__main__': 65 | tf.test.main() 66 | -------------------------------------------------------------------------------- /models/research/object_detection/metrics/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/metrics/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/metrics/io_utils.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Common IO utils used in offline metric computation. 16 | """ 17 | 18 | from __future__ import absolute_import 19 | from __future__ import division 20 | from __future__ import print_function 21 | 22 | import csv 23 | 24 | 25 | def write_csv(fid, metrics): 26 | """Writes metrics key-value pairs to CSV file. 27 | 28 | Args: 29 | fid: File identifier of an opened file. 30 | metrics: A dictionary with metrics to be written. 31 | """ 32 | metrics_writer = csv.writer(fid, delimiter=',') 33 | for metric_name, metric_value in metrics.items(): 34 | metrics_writer.writerow([metric_name, str(metric_value)]) 35 | -------------------------------------------------------------------------------- /models/research/object_detection/metrics/offline_eval_map_corloc_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
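A quick sketch of driving write_csv above (the metric name and value are invented, and an in-memory buffer stands in for a real opened file):

import io

from object_detection.metrics import io_utils

metrics = {'PascalBoxes_Precision/mAP@0.5IOU': 0.87}  # invented value
buffer = io.StringIO()
io_utils.write_csv(buffer, metrics)
print(buffer.getvalue())  # PascalBoxes_Precision/mAP@0.5IOU,0.87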
14 | # ============================================================================== 15 | """Tests for utilities in offline_eval_map_corloc binary.""" 16 | 17 | import tensorflow as tf 18 | 19 | from object_detection.metrics import offline_eval_map_corloc as offline_eval 20 | 21 | 22 | class OfflineEvalMapCorlocTest(tf.test.TestCase): 23 | 24 | def test_generateShardedFilenames(self): 25 | test_filename = '/path/to/file' 26 | result = offline_eval._generate_sharded_filenames(test_filename) 27 | self.assertEqual(result, [test_filename]) 28 | 29 | test_filename = '/path/to/file-00000-of-00050' 30 | result = offline_eval._generate_sharded_filenames(test_filename) 31 | self.assertEqual(result, [test_filename]) 32 | 33 | result = offline_eval._generate_sharded_filenames('/path/to/@3.record') 34 | self.assertEqual(result, [ 35 | '/path/to/-00000-of-00003.record', '/path/to/-00001-of-00003.record', 36 | '/path/to/-00002-of-00003.record' 37 | ]) 38 | 39 | result = offline_eval._generate_sharded_filenames('/path/to/abc@3') 40 | self.assertEqual(result, [ 41 | '/path/to/abc-00000-of-00003', '/path/to/abc-00001-of-00003', 42 | '/path/to/abc-00002-of-00003' 43 | ]) 44 | 45 | result = offline_eval._generate_sharded_filenames('/path/to/@1') 46 | self.assertEqual(result, ['/path/to/-00000-of-00001']) 47 | 48 | def test_generateFilenames(self): 49 | test_filenames = ['/path/to/file', '/path/to/@3.record'] 50 | result = offline_eval._generate_filenames(test_filenames) 51 | self.assertEqual(result, [ 52 | '/path/to/file', '/path/to/-00000-of-00003.record', 53 | '/path/to/-00001-of-00003.record', '/path/to/-00002-of-00003.record' 54 | ]) 55 | 56 | 57 | if __name__ == '__main__': 58 | tf.test.main() 59 | -------------------------------------------------------------------------------- /models/research/object_detection/model_hparams.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Hyperparameters for the object detection model in TF.learn. 16 | 17 | This file consolidates and documents the hyperparameters used by the model. 18 | """ 19 | 20 | from __future__ import absolute_import 21 | from __future__ import division 22 | from __future__ import print_function 23 | 24 | import tensorflow as tf 25 | 26 | 27 | def create_hparams(hparams_overrides=None): 28 | """Returns hyperparameters, including any flag value overrides. 29 | 30 | Args: 31 | hparams_overrides: Optional hparams overrides, represented as a 32 | string containing comma-separated hparam_name=value pairs. 33 | 34 | Returns: 35 | The hyperparameters as a tf.HParams object. 36 | """ 37 | hparams = tf.contrib.training.HParams( 38 | # Whether a fine tuning checkpoint (provided in the pipeline config) 39 | # should be loaded for training. 
40 | load_pretrained=True) 41 | # Override any of the preceding hyperparameter values. 42 | if hparams_overrides: 43 | hparams = hparams.parse(hparams_overrides) 44 | return hparams 45 | -------------------------------------------------------------------------------- /models/research/object_detection/models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/models/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/models/keras_models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/models/keras_models/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/predictors/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/predictors/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/predictors/heads/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/predictors/heads/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/predictors/heads/head.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | """Base head class. 17 | 18 | All the different kinds of prediction heads in different models will inherit 19 | from this class. What is in common between all head classes is that they have a 20 | `predict` function that receives `features` as its first argument. 21 | 22 | How to add a new prediction head to an existing meta architecture? 23 | For example, how can we add a `3d shape` prediction head to Mask RCNN? 24 | 25 | We have to take the following steps to add a new prediction head to an 26 | existing meta arch: 27 | (a) Add a class for predicting the head. This class should inherit from the 28 | `Head` class below and have a `predict` function that receives the features 29 | and predicts the output. The output is always a tf.float32 tensor. 
30 | (b) Add the head to the meta architecture. For example in case of Mask RCNN, 31 | go to box_predictor_builder and put in the logic for adding the new head to the 32 | Mask RCNN box predictor. 33 | (c) Add the logic for computing the loss for the new head. 34 | (d) Add the necessary metrics for the new head. 35 | (e) (optional) Add visualization for the new head. 36 | """ 37 | from abc import abstractmethod 38 | 39 | import tensorflow as tf 40 | 41 | 42 | class Head(object): 43 | """Mask RCNN head base class.""" 44 | 45 | def __init__(self): 46 | """Constructor.""" 47 | pass 48 | 49 | @abstractmethod 50 | def predict(self, features, num_predictions_per_location): 51 | """Returns the head's predictions. 52 | 53 | Args: 54 | features: A float tensor of features. 55 | num_predictions_per_location: Int containing number of predictions per 56 | location. 57 | 58 | Returns: 59 | A tf.float32 tensor. 60 | """ 61 | pass 62 | 63 | 64 | class KerasHead(tf.keras.Model): 65 | """Keras head base class.""" 66 | 67 | def call(self, features): 68 | """The Keras model call will delegate to the `_predict` method.""" 69 | return self._predict(features) 70 | 71 | @abstractmethod 72 | def _predict(self, features): 73 | """Returns the head's predictions. 74 | 75 | Args: 76 | features: A float tensor of features. 77 | 78 | Returns: 79 | A tf.float32 tensor. 80 | """ 81 | pass 82 | -------------------------------------------------------------------------------- /models/research/object_detection/predictors/heads/keypoint_head_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
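Following steps (a)-(e) listed in head.py above, a new head boils down to a single predict method. A deliberately tiny, hypothetical example (the class, the 1x1 conv, and the scope name are arbitrary choices for illustration):

import tensorflow as tf

from object_detection.predictors.heads import head


class TinyScoreHead(head.Head):
  """Hypothetical head: one 1x1 conv emitting a single score per prediction."""

  def predict(self, features, num_predictions_per_location):
    return tf.contrib.slim.conv2d(
        features,
        num_predictions_per_location,  # one score per prediction slot
        kernel_size=[1, 1],
        activation_fn=None,
        scope='TinyScoreHead')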
14 | # ============================================================================== 15 | 16 | """Tests for object_detection.predictors.heads.keypoint_head.""" 17 | import tensorflow as tf 18 | 19 | from google.protobuf import text_format 20 | from object_detection.builders import hyperparams_builder 21 | from object_detection.predictors.heads import keypoint_head 22 | from object_detection.protos import hyperparams_pb2 23 | from object_detection.utils import test_case 24 | 25 | 26 | class MaskRCNNKeypointHeadTest(test_case.TestCase): 27 | 28 | def _build_arg_scope_with_hyperparams(self, 29 | op_type=hyperparams_pb2.Hyperparams.FC): 30 | hyperparams = hyperparams_pb2.Hyperparams() 31 | hyperparams_text_proto = """ 32 | activation: NONE 33 | regularizer { 34 | l2_regularizer { 35 | } 36 | } 37 | initializer { 38 | truncated_normal_initializer { 39 | } 40 | } 41 | """ 42 | text_format.Merge(hyperparams_text_proto, hyperparams) 43 | hyperparams.op = op_type 44 | return hyperparams_builder.build(hyperparams, is_training=True) 45 | 46 | def test_prediction_size(self): 47 | keypoint_prediction_head = keypoint_head.MaskRCNNKeypointHead( 48 | conv_hyperparams_fn=self._build_arg_scope_with_hyperparams()) 49 | roi_pooled_features = tf.random_uniform( 50 | [64, 14, 14, 1024], minval=-2.0, maxval=2.0, dtype=tf.float32) 51 | prediction = keypoint_prediction_head.predict( 52 | features=roi_pooled_features, num_predictions_per_location=1) 53 | self.assertAllEqual([64, 1, 17, 56, 56], prediction.get_shape().as_list()) 54 | 55 | 56 | if __name__ == '__main__': 57 | tf.test.main() 58 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/protos/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/protos/anchor_generator.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | import "object_detection/protos/grid_anchor_generator.proto"; 6 | import "object_detection/protos/ssd_anchor_generator.proto"; 7 | import "object_detection/protos/multiscale_anchor_generator.proto"; 8 | 9 | // Configuration proto for the anchor generator to use in the object detection 10 | // pipeline. See core/anchor_generator.py for details. 11 | message AnchorGenerator { 12 | oneof anchor_generator_oneof { 13 | GridAnchorGenerator grid_anchor_generator = 1; 14 | SsdAnchorGenerator ssd_anchor_generator = 2; 15 | MultiscaleAnchorGenerator multiscale_anchor_generator = 3; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/argmax_matcher.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | // Configuration proto for ArgMaxMatcher. See 6 | // matchers/argmax_matcher.py for details. 7 | message ArgMaxMatcher { 8 | // Threshold for positive matches. 9 | optional float matched_threshold = 1 [default = 0.5]; 10 | 11 | // Threshold for negative matches. 
12 | optional float unmatched_threshold = 2 [default = 0.5]; 13 | 14 | // Whether to construct ArgMaxMatcher without thresholds. 15 | optional bool ignore_thresholds = 3 [default = false]; 16 | 17 | // If True then negative matches are the ones below the unmatched_threshold, 18 | // whereas ignored matches are in between the matched and unmatched 19 | // threshold. If False, then negative matches are in between the matched 20 | // and unmatched threshold, and everything lower than unmatched is ignored. 21 | optional bool negatives_lower_than_unmatched = 4 [default = true]; 22 | 23 | // Whether to ensure each row is matched to at least one column. 24 | optional bool force_match_for_each_row = 5 [default = false]; 25 | 26 | // Force constructed match objects to use matrix multiplication based gather 27 | // instead of standard tf.gather 28 | optional bool use_matmul_gather = 6 [default = false]; 29 | } 30 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/bipartite_matcher.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | // Configuration proto for bipartite matcher. See 6 | // matchers/bipartite_matcher.py for details. 7 | message BipartiteMatcher { 8 | // Force constructed match objects to use matrix multiplication based gather 9 | // instead of standard tf.gather 10 | optional bool use_matmul_gather = 6 [default = false]; 11 | } 12 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/bipartite_matcher_pb2.py: -------------------------------------------------------------------------------- 1 | # Generated by the protocol buffer compiler. DO NOT EDIT!
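The threshold comments in argmax_matcher.proto above carve the IOU range into three bands. A plain-Python paraphrase of the default rule (negatives_lower_than_unmatched = true); this is illustrative, not a copy of matchers/argmax_matcher.py:

def classify_match(iou, matched=0.5, unmatched=0.5):
  """Illustrative: maps a row's best IOU to a match category."""
  if iou >= matched:
    return 'positive'
  if iou < unmatched:
    return 'negative'  # below the unmatched threshold
  return 'ignored'     # in between the two thresholds

# With the 0.5/0.5 defaults the 'ignored' band is empty; with, say,
# matched=0.7 and unmatched=0.3, IOUs in [0.3, 0.7) are ignored.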
2 | # source: object_detection/protos/bipartite_matcher.proto 3 | 4 | import sys 5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import message as _message 8 | from google.protobuf import reflection as _reflection 9 | from google.protobuf import symbol_database as _symbol_database 10 | from google.protobuf import descriptor_pb2 11 | # @@protoc_insertion_point(imports) 12 | 13 | _sym_db = _symbol_database.Default() 14 | 15 | 16 | 17 | 18 | DESCRIPTOR = _descriptor.FileDescriptor( 19 | name='object_detection/protos/bipartite_matcher.proto', 20 | package='object_detection.protos', 21 | syntax='proto2', 22 | serialized_pb=_b('\n/object_detection/protos/bipartite_matcher.proto\x12\x17object_detection.protos\"4\n\x10\x42ipartiteMatcher\x12 \n\x11use_matmul_gather\x18\x06 \x01(\x08:\x05\x66\x61lse') 23 | ) 24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 25 | 26 | 27 | 28 | 29 | _BIPARTITEMATCHER = _descriptor.Descriptor( 30 | name='BipartiteMatcher', 31 | full_name='object_detection.protos.BipartiteMatcher', 32 | filename=None, 33 | file=DESCRIPTOR, 34 | containing_type=None, 35 | fields=[ 36 | _descriptor.FieldDescriptor( 37 | name='use_matmul_gather', full_name='object_detection.protos.BipartiteMatcher.use_matmul_gather', index=0, 38 | number=6, type=8, cpp_type=7, label=1, 39 | has_default_value=True, default_value=False, 40 | message_type=None, enum_type=None, containing_type=None, 41 | is_extension=False, extension_scope=None, 42 | options=None), 43 | ], 44 | extensions=[ 45 | ], 46 | nested_types=[], 47 | enum_types=[ 48 | ], 49 | options=None, 50 | is_extendable=False, 51 | syntax='proto2', 52 | extension_ranges=[], 53 | oneofs=[ 54 | ], 55 | serialized_start=76, 56 | serialized_end=128, 57 | ) 58 | 59 | DESCRIPTOR.message_types_by_name['BipartiteMatcher'] = _BIPARTITEMATCHER 60 | 61 | BipartiteMatcher = _reflection.GeneratedProtocolMessageType('BipartiteMatcher', (_message.Message,), dict( 62 | DESCRIPTOR = _BIPARTITEMATCHER, 63 | __module__ = 'object_detection.protos.bipartite_matcher_pb2' 64 | # @@protoc_insertion_point(class_scope:object_detection.protos.BipartiteMatcher) 65 | )) 66 | _sym_db.RegisterMessage(BipartiteMatcher) 67 | 68 | 69 | # @@protoc_insertion_point(module_scope) 70 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/box_coder.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | import "object_detection/protos/faster_rcnn_box_coder.proto"; 6 | import "object_detection/protos/keypoint_box_coder.proto"; 7 | import "object_detection/protos/mean_stddev_box_coder.proto"; 8 | import "object_detection/protos/square_box_coder.proto"; 9 | 10 | // Configuration proto for the box coder to be used in the object detection 11 | // pipeline. See core/box_coder.py for details. 
12 | message BoxCoder { 13 | oneof box_coder_oneof { 14 | FasterRcnnBoxCoder faster_rcnn_box_coder = 1; 15 | MeanStddevBoxCoder mean_stddev_box_coder = 2; 16 | SquareBoxCoder square_box_coder = 3; 17 | KeypointBoxCoder keypoint_box_coder = 4; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/calibration.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | // Message wrapper for various calibration configurations 6 | message CalibrationConfig { 7 | oneof calibrator { 8 | // Class-agnostic calibration via linear interpolation (usually output from 9 | // isotonic regression) 10 | FunctionApproximation function_approximation = 1; 11 | 12 | // Per-class calibration via linear interpolation 13 | LabelFunctionApproximations label_function_approximations = 2; 14 | 15 | // Class-agnostic sigmoid calibration 16 | SigmoidCalibration sigmoid_calibration = 3; 17 | 18 | // Per-class sigmoid calibration 19 | LabelSigmoidCalibrations label_sigmoid_calibrations = 4; 20 | } 21 | } 22 | 23 | // Message for class-agnostic domain/range mapping for function 24 | // approximations 25 | message FunctionApproximation { 26 | // Message mapping class labels to indices 27 | optional XYPairs x_y_pairs = 1; 28 | } 29 | 30 | // Message for class-specific domain/range mapping for function 31 | // approximations 32 | message LabelFunctionApproximations { 33 | // Message mapping class labels to indices 34 | map label_xy_pairs_map = 1; 35 | // Label map to map label names from to class ids. 36 | optional string label_map_path = 2; 37 | } 38 | 39 | // Message for class-agnostic Sigmoid Calibration 40 | message SigmoidCalibration { 41 | // Message mapping class index to Sigmoid Parameters 42 | optional SigmoidParameters sigmoid_parameters = 1; 43 | } 44 | 45 | // Message for class-specific Sigmoid Calibration 46 | message LabelSigmoidCalibrations { 47 | // Message mapping class index to Sigmoid Parameters 48 | map label_sigmoid_parameters_map = 1; 49 | // Label map to map label names from to class ids. 50 | optional string label_map_path = 2; 51 | } 52 | 53 | // Message to store a domain/range pair for function to be approximated 54 | message XYPairs { 55 | message XYPair { 56 | optional float x = 1; 57 | optional float y = 2; 58 | } 59 | 60 | // Sequence of x/y pairs for function approximation 61 | repeated XYPair x_y_pair = 1; 62 | } 63 | 64 | // Message defining parameters for sigmoid calibration. 65 | message SigmoidParameters { 66 | optional float a = 1 [default = -1.0]; 67 | optional float b = 2 [default = 0.0]; 68 | } 69 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/faster_rcnn_box_coder.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | // Configuration proto for FasterRCNNBoxCoder. See 6 | // box_coders/faster_rcnn_box_coder.py for details. 7 | message FasterRcnnBoxCoder { 8 | // Scale factor for anchor encoded box center. 9 | optional float y_scale = 1 [default = 10.0]; 10 | optional float x_scale = 2 [default = 10.0]; 11 | 12 | // Scale factor for anchor encoded box height. 13 | optional float height_scale = 3 [default = 5.0]; 14 | 15 | // Scale factor for anchor encoded box width. 
16 | optional float width_scale = 4 [default = 5.0]; 17 | } 18 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/graph_rewriter.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | // Message to configure graph rewriter for the tf graph. 6 | message GraphRewriter { 7 | optional Quantization quantization = 1; 8 | } 9 | 10 | // Message for quantization options. See 11 | // tensorflow/contrib/quantize/python/quantize.py for details. 12 | message Quantization { 13 | // Number of steps to delay before quantization takes effect during training. 14 | optional int32 delay = 1 [default = 500000]; 15 | 16 | // Number of bits to use for quantizing weights. 17 | // Only 8 bit is supported for now. 18 | optional int32 weight_bits = 2 [default = 8]; 19 | 20 | // Number of bits to use for quantizing activations. 21 | // Only 8 bit is supported for now. 22 | optional int32 activation_bits = 3 [default = 8]; 23 | } 24 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/grid_anchor_generator.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | // Configuration proto for GridAnchorGenerator. See 6 | // anchor_generators/grid_anchor_generator.py for details. 7 | message GridAnchorGenerator { 8 | // Anchor height in pixels. 9 | optional int32 height = 1 [default = 256]; 10 | 11 | // Anchor width in pixels. 12 | optional int32 width = 2 [default = 256]; 13 | 14 | // Anchor stride in height dimension in pixels. 15 | optional int32 height_stride = 3 [default = 16]; 16 | 17 | // Anchor stride in width dimension in pixels. 18 | optional int32 width_stride = 4 [default = 16]; 19 | 20 | // Anchor height offset in pixels. 21 | optional int32 height_offset = 5 [default = 0]; 22 | 23 | // Anchor width offset in pixels. 24 | optional int32 width_offset = 6 [default = 0]; 25 | 26 | // At any given location, len(scales) * len(aspect_ratios) anchors are 27 | // generated with all possible combinations of scales and aspect ratios. 28 | 29 | // List of scales for the anchors. 30 | repeated float scales = 7; 31 | 32 | // List of aspect ratios for the anchors. 33 | repeated float aspect_ratios = 8; 34 | } 35 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/image_resizer.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | // Configuration proto for image resizing operations. 6 | // See builders/image_resizer_builder.py for details. 7 | message ImageResizer { 8 | oneof image_resizer_oneof { 9 | KeepAspectRatioResizer keep_aspect_ratio_resizer = 1; 10 | FixedShapeResizer fixed_shape_resizer = 2; 11 | IdentityResizer identity_resizer = 3; 12 | } 13 | } 14 | 15 | // Enumeration type for image resizing methods provided in TensorFlow. 
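//
// Illustrative sketch (not part of the upstream .proto): a Faster R-CNN style
// pipeline typically preserves aspect ratio, using only fields defined below:
//
//   image_resizer {
//     keep_aspect_ratio_resizer {
//       min_dimension: 600
//       max_dimension: 1024
//     }
//   }
//
// The ResizeType values that follow name the underlying tf.image.ResizeMethod: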
16 | enum ResizeType { 17 | BILINEAR = 0; // Corresponds to tf.image.ResizeMethod.BILINEAR 18 | NEAREST_NEIGHBOR = 1; // Corresponds to tf.image.ResizeMethod.NEAREST_NEIGHBOR 19 | BICUBIC = 2; // Corresponds to tf.image.ResizeMethod.BICUBIC 20 | AREA = 3; // Corresponds to tf.image.ResizeMethod.AREA 21 | } 22 | 23 | message IdentityResizer { 24 | } 25 | 26 | // Configuration proto for image resizer that keeps aspect ratio. 27 | message KeepAspectRatioResizer { 28 | // Desired size of the smaller image dimension in pixels. 29 | optional int32 min_dimension = 1 [default = 600]; 30 | 31 | // Desired size of the larger image dimension in pixels. 32 | optional int32 max_dimension = 2 [default = 1024]; 33 | 34 | // Desired method when resizing image. 35 | optional ResizeType resize_method = 3 [default = BILINEAR]; 36 | 37 | // Whether to pad the image with zeros so the output spatial size is 38 | // [max_dimension, max_dimension]. Note that the zeros are padded to the 39 | // bottom and the right of the resized image. 40 | optional bool pad_to_max_dimension = 4 [default = false]; 41 | 42 | // Whether to also resize the image channels from 3 to 1 (RGB to grayscale). 43 | optional bool convert_to_grayscale = 5 [default = false]; 44 | 45 | // Per-channel pad value. This is only used when pad_to_max_dimension is True. 46 | // If unspecified, a default pad value of 0 is applied to all channels. 47 | repeated float per_channel_pad_value = 6; 48 | } 49 | 50 | // Configuration proto for image resizer that resizes to a fixed shape. 51 | message FixedShapeResizer { 52 | // Desired height of image in pixels. 53 | optional int32 height = 1 [default = 300]; 54 | 55 | // Desired width of image in pixels. 56 | optional int32 width = 2 [default = 300]; 57 | 58 | // Desired method when resizing image. 59 | optional ResizeType resize_method = 3 [default = BILINEAR]; 60 | 61 | // Whether to also resize the image channels from 3 to 1 (RGB to grayscale). 62 | optional bool convert_to_grayscale = 4 [default = false]; 63 | } 64 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/keypoint_box_coder.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | // Configuration proto for KeypointBoxCoder. See 6 | // box_coders/keypoint_box_coder.py for details. 7 | message KeypointBoxCoder { 8 | optional int32 num_keypoints = 1; 9 | 10 | // Scale factor for anchor encoded box center and keypoints. 11 | optional float y_scale = 2 [default = 10.0]; 12 | optional float x_scale = 3 [default = 10.0]; 13 | 14 | // Scale factor for anchor encoded box height. 15 | optional float height_scale = 4 [default = 5.0]; 16 | 17 | // Scale factor for anchor encoded box width. 18 | optional float width_scale = 5 [default = 5.0]; 19 | } 20 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/matcher.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | import "object_detection/protos/argmax_matcher.proto"; 6 | import "object_detection/protos/bipartite_matcher.proto"; 7 | 8 | // Configuration proto for the matcher to be used in the object detection 9 | // pipeline. See core/matcher.py for details. 
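//
// Illustrative sketch (field names assumed from argmax_matcher.proto, imported
// above — verify against that file): exactly one matcher is set, e.g.
//
//   matcher {
//     argmax_matcher {
//       matched_threshold: 0.7
//       unmatched_threshold: 0.3
//     }
//   }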
10 | message Matcher { 11 | oneof matcher_oneof { 12 | ArgMaxMatcher argmax_matcher = 1; 13 | BipartiteMatcher bipartite_matcher = 2; 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/mean_stddev_box_coder.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | // Configuration proto for MeanStddevBoxCoder. See 6 | // box_coders/mean_stddev_box_coder.py for details. 7 | message MeanStddevBoxCoder { 8 | // The standard deviation used to encode and decode boxes. 9 | optional float stddev = 1 [default=0.01]; 10 | } 11 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/mean_stddev_box_coder_pb2.py: -------------------------------------------------------------------------------- 1 | # Generated by the protocol buffer compiler. DO NOT EDIT! 2 | # source: object_detection/protos/mean_stddev_box_coder.proto 3 | 4 | import sys 5 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import message as _message 8 | from google.protobuf import reflection as _reflection 9 | from google.protobuf import symbol_database as _symbol_database 10 | from google.protobuf import descriptor_pb2 11 | # @@protoc_insertion_point(imports) 12 | 13 | _sym_db = _symbol_database.Default() 14 | 15 | 16 | 17 | 18 | DESCRIPTOR = _descriptor.FileDescriptor( 19 | name='object_detection/protos/mean_stddev_box_coder.proto', 20 | package='object_detection.protos', 21 | syntax='proto2', 22 | serialized_pb=_b('\n3object_detection/protos/mean_stddev_box_coder.proto\x12\x17object_detection.protos\"*\n\x12MeanStddevBoxCoder\x12\x14\n\x06stddev\x18\x01 \x01(\x02:\x04\x30.01') 23 | ) 24 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 25 | 26 | 27 | 28 | 29 | _MEANSTDDEVBOXCODER = _descriptor.Descriptor( 30 | name='MeanStddevBoxCoder', 31 | full_name='object_detection.protos.MeanStddevBoxCoder', 32 | filename=None, 33 | file=DESCRIPTOR, 34 | containing_type=None, 35 | fields=[ 36 | _descriptor.FieldDescriptor( 37 | name='stddev', full_name='object_detection.protos.MeanStddevBoxCoder.stddev', index=0, 38 | number=1, type=2, cpp_type=6, label=1, 39 | has_default_value=True, default_value=float(0.01), 40 | message_type=None, enum_type=None, containing_type=None, 41 | is_extension=False, extension_scope=None, 42 | options=None), 43 | ], 44 | extensions=[ 45 | ], 46 | nested_types=[], 47 | enum_types=[ 48 | ], 49 | options=None, 50 | is_extendable=False, 51 | syntax='proto2', 52 | extension_ranges=[], 53 | oneofs=[ 54 | ], 55 | serialized_start=80, 56 | serialized_end=122, 57 | ) 58 | 59 | DESCRIPTOR.message_types_by_name['MeanStddevBoxCoder'] = _MEANSTDDEVBOXCODER 60 | 61 | MeanStddevBoxCoder = _reflection.GeneratedProtocolMessageType('MeanStddevBoxCoder', (_message.Message,), dict( 62 | DESCRIPTOR = _MEANSTDDEVBOXCODER, 63 | __module__ = 'object_detection.protos.mean_stddev_box_coder_pb2' 64 | # @@protoc_insertion_point(class_scope:object_detection.protos.MeanStddevBoxCoder) 65 | )) 66 | _sym_db.RegisterMessage(MeanStddevBoxCoder) 67 | 68 | 69 | # @@protoc_insertion_point(module_scope) 70 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/model.proto: 
-------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | import "object_detection/protos/faster_rcnn.proto"; 6 | import "object_detection/protos/ssd.proto"; 7 | 8 | // Top level configuration for DetectionModels. 9 | message DetectionModel { 10 | oneof model { 11 | FasterRcnn faster_rcnn = 1; 12 | Ssd ssd = 2; 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/multiscale_anchor_generator.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | // Configuration proto for RetinaNet anchor generator described in 6 | // https://arxiv.org/abs/1708.02002. See 7 | // anchor_generators/multiscale_grid_anchor_generator.py for details. 8 | message MultiscaleAnchorGenerator { 9 | // minimum level in feature pyramid 10 | optional int32 min_level = 1 [default = 3]; 11 | 12 | // maximum level in feature pyramid 13 | optional int32 max_level = 2 [default = 7]; 14 | 15 | // Scale of anchor to feature stride 16 | optional float anchor_scale = 3 [default = 4.0]; 17 | 18 | // Aspect ratios for anchors at each grid point. 19 | repeated float aspect_ratios = 4; 20 | 21 | // Number of intermediate scale each scale octave 22 | optional int32 scales_per_octave = 5 [default = 2]; 23 | 24 | // Whether to produce anchors in normalized coordinates. 25 | optional bool normalize_coordinates = 6 [default = true]; 26 | } 27 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/pipeline.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | import "object_detection/protos/eval.proto"; 6 | import "object_detection/protos/graph_rewriter.proto"; 7 | import "object_detection/protos/input_reader.proto"; 8 | import "object_detection/protos/model.proto"; 9 | import "object_detection/protos/train.proto"; 10 | 11 | // Convenience message for configuring a training and eval pipeline. Allows all 12 | // of the pipeline parameters to be configured from one file. 13 | // Next id: 7 14 | message TrainEvalPipelineConfig { 15 | optional DetectionModel model = 1; 16 | optional TrainConfig train_config = 2; 17 | optional InputReader train_input_reader = 3; 18 | optional EvalConfig eval_config = 4; 19 | repeated InputReader eval_input_reader = 5; 20 | optional GraphRewriter graph_rewriter = 6; 21 | extensions 1000 to max; 22 | } 23 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/post_processing.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | import "object_detection/protos/calibration.proto"; 6 | 7 | // Configuration proto for non-max-suppression operation on a batch of 8 | // detections. 9 | message BatchNonMaxSuppression { 10 | // Scalar threshold for score (low scoring boxes are removed). 11 | optional float score_threshold = 1 [default = 0.0]; 12 | 13 | // Scalar threshold for IOU (boxes that have high IOU overlap 14 | // with previously selected boxes are removed). 15 | optional float iou_threshold = 2 [default = 0.6]; 16 | 17 | // Maximum number of detections to retain per class. 
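// (Illustrative sketch, using only fields defined in this file — a typical
// post_processing block in a pipeline config reads:
//
//   post_processing {
//     batch_non_max_suppression {
//       score_threshold: 0.3
//       iou_threshold: 0.6
//       max_detections_per_class: 100
//       max_total_detections: 100
//     }
//     score_converter: SOFTMAX
//   }
// )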
18 | optional int32 max_detections_per_class = 3 [default = 100]; 19 | 20 | // Maximum number of detections to retain across all classes. 21 | optional int32 max_total_detections = 5 [default = 100]; 22 | 23 | // Whether to use the implementation of NMS that guarantees static shapes. 24 | optional bool use_static_shapes = 6 [default = false]; 25 | } 26 | 27 | // Configuration proto for post-processing predicted boxes and 28 | // scores. 29 | message PostProcessing { 30 | // Non max suppression parameters. 31 | optional BatchNonMaxSuppression batch_non_max_suppression = 1; 32 | 33 | // Enum to specify how to convert the detection scores. 34 | enum ScoreConverter { 35 | // Input scores equals output scores. 36 | IDENTITY = 0; 37 | 38 | // Applies a sigmoid on input scores. 39 | SIGMOID = 1; 40 | 41 | // Applies a softmax on input scores 42 | SOFTMAX = 2; 43 | } 44 | 45 | // Score converter to use. 46 | optional ScoreConverter score_converter = 2 [default = IDENTITY]; 47 | // Scale logit (input) value before conversion in post-processing step. 48 | // Typically used for softmax distillation, though can be used to scale for 49 | // other reasons. 50 | optional float logit_scale = 3 [default = 1.0]; 51 | // Calibrate score outputs. Calibration is applied after score converter 52 | // and before non max suppression. 53 | optional CalibrationConfig calibration_config = 4; 54 | } 55 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/region_similarity_calculator.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | // Configuration proto for region similarity calculators. See 6 | // core/region_similarity_calculator.py for details. 7 | message RegionSimilarityCalculator { 8 | oneof region_similarity { 9 | NegSqDistSimilarity neg_sq_dist_similarity = 1; 10 | IouSimilarity iou_similarity = 2; 11 | IoaSimilarity ioa_similarity = 3; 12 | ThresholdedIouSimilarity thresholded_iou_similarity = 4; 13 | } 14 | } 15 | 16 | // Configuration for negative squared distance similarity calculator. 17 | message NegSqDistSimilarity { 18 | } 19 | 20 | // Configuration for intersection-over-union (IOU) similarity calculator. 21 | message IouSimilarity { 22 | } 23 | 24 | // Configuration for intersection-over-area (IOA) similarity calculator. 25 | message IoaSimilarity { 26 | } 27 | 28 | // Configuration for thresholded-intersection-over-union similarity calculator. 29 | message ThresholdedIouSimilarity { 30 | 31 | // IOU threshold used for filtering scores. 32 | optional float iou_threshold = 1 [default = 0.5]; 33 | } 34 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/square_box_coder.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | // Configuration proto for SquareBoxCoder. See 6 | // box_coders/square_box_coder.py for details. 7 | message SquareBoxCoder { 8 | // Scale factor for anchor encoded box center. 9 | optional float y_scale = 1 [default = 10.0]; 10 | optional float x_scale = 2 [default = 10.0]; 11 | 12 | // Scale factor for anchor encoded box length. 
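// (Here "length" is the side of the square whose area matches the box.
// Paraphrasing box_coders/square_box_coder.py: with la = sqrt(ha * wa) and
// l = sqrt(h * w), boxes are encoded as ty = (y - ya) / la * y_scale,
// tx = (x - xa) / la * x_scale, tl = log(l / la) * length_scale.)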
13 | optional float length_scale = 3 [default = 5.0]; 14 | } 15 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/ssd_anchor_generator.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package object_detection.protos; 4 | 5 | // Configuration proto for SSD anchor generator described in 6 | // https://arxiv.org/abs/1512.02325. See 7 | // anchor_generators/multiple_grid_anchor_generator.py for details. 8 | message SsdAnchorGenerator { 9 | // Number of grid layers to create anchors for. 10 | optional int32 num_layers = 1 [default = 6]; 11 | 12 | // Scale of anchors corresponding to finest resolution. 13 | optional float min_scale = 2 [default = 0.2]; 14 | 15 | // Scale of anchors corresponding to coarsest resolution. 16 | optional float max_scale = 3 [default = 0.95]; 17 | 18 | // Can be used to override min_scale->max_scale, with an explicitly defined 19 | // set of scales. If empty, then min_scale->max_scale is used. 20 | repeated float scales = 12; 21 | 22 | // Aspect ratios for anchors at each grid point. 23 | repeated float aspect_ratios = 4; 24 | 25 | // When this aspect ratio is greater than 0, then an additional 26 | // anchor with an interpolated scale is added with this aspect ratio. 27 | optional float interpolated_scale_aspect_ratio = 13 [default = 1.0]; 28 | 29 | // Whether to use the following aspect ratio and scale combination for the 30 | // layer with the finest resolution: (scale=0.1, aspect_ratio=1.0), 31 | // (scale=min_scale, aspect_ratio=2.0), (scale=min_scale, aspect_ratio=0.5). 32 | optional bool reduce_boxes_in_lowest_layer = 5 [default = true]; 33 | 34 | // The base anchor size in height dimension. 35 | optional float base_anchor_height = 6 [default = 1.0]; 36 | 37 | // The base anchor size in width dimension. 38 | optional float base_anchor_width = 7 [default = 1.0]; 39 | 40 | // Anchor stride in height dimension in pixels for each layer. The length of 41 | // this field is expected to be equal to the value of num_layers. 42 | repeated int32 height_stride = 8; 43 | 44 | // Anchor stride in width dimension in pixels for each layer. The length of 45 | // this field is expected to be equal to the value of num_layers. 46 | repeated int32 width_stride = 9; 47 | 48 | // Anchor height offset in pixels for each layer. The length of this field is 49 | // expected to be equal to the value of num_layers. 50 | repeated int32 height_offset = 10; 51 | 52 | // Anchor width offset in pixels for each layer. The length of this field is 53 | // expected to be equal to the value of num_layers. 54 | repeated int32 width_offset = 11; 55 | } 56 | -------------------------------------------------------------------------------- /models/research/object_detection/protos/string_int_label_map.proto: -------------------------------------------------------------------------------- 1 | // Message to store the mapping from class label strings to class id. Datasets 2 | // use string labels to represent classes while the object detection framework 3 | // works with class ids. This message maps them so they can be converted back 4 | // and forth as needed. 5 | syntax = "proto2"; 6 | 7 | package object_detection.protos; 8 | 9 | message StringIntLabelMapItem { 10 | // String name. The most common practice is to set this to a MID or synsets 11 | // id. 12 | optional string name = 1; 13 | 14 | // Integer id that maps to the string name above.
Label ids should start from 15 | // 1. 16 | optional int32 id = 2; 17 | 18 | // Human readable string label. 19 | optional string display_name = 3; 20 | }; 21 | 22 | message StringIntLabelMap { 23 | repeated StringIntLabelMapItem item = 1; 24 | }; 25 | -------------------------------------------------------------------------------- /models/research/object_detection/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/object_detection/utils/__init__.py -------------------------------------------------------------------------------- /models/research/object_detection/utils/category_util.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | """Functions for importing/exporting Object Detection categories.""" 17 | import csv 18 | 19 | import tensorflow as tf 20 | 21 | 22 | def load_categories_from_csv_file(csv_path): 23 | """Loads categories from a csv file. 24 | 25 | The CSV file should have one comma delimited numeric category id and string 26 | category name pair per line. For example: 27 | 28 | 0,"cat" 29 | 1,"dog" 30 | 2,"bird" 31 | ... 32 | 33 | Args: 34 | csv_path: Path to the csv file to be parsed into categories. 35 | Returns: 36 | categories: A list of dictionaries representing all possible categories. 37 | The categories will contain an integer 'id' field and a string 38 | 'name' field. 39 | Raises: 40 | ValueError: If the csv file is incorrectly formatted. 41 | """ 42 | categories = [] 43 | 44 | with tf.gfile.Open(csv_path, 'r') as csvfile: 45 | reader = csv.reader(csvfile, delimiter=',', quotechar='"') 46 | for row in reader: 47 | if not row: 48 | continue 49 | 50 | if len(row) != 2: 51 | raise ValueError('Expected 2 fields per row in csv: %s' % ','.join(row)) 52 | 53 | category_id = int(row[0]) 54 | category_name = row[1] 55 | categories.append({'id': category_id, 'name': category_name}) 56 | 57 | return categories 58 | 59 | 60 | def save_categories_to_csv_file(categories, csv_path): 61 | """Saves categories to a csv file. 62 | 63 | Args: 64 | categories: A list of dictionaries representing categories to save to file. 65 | Each category must contain an 'id' and 'name' field. 66 | csv_path: Path to the csv file to write the categories to.
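  Example (illustrative):

    save_categories_to_csv_file(
        [{'id': 2, 'name': 'dog'}, {'id': 1, 'name': 'cat'}], '/tmp/test.csv')

  writes the rows "1,cat" and "2,dog" (entries are sorted by 'id' before
  writing).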
67 | """ 68 | categories.sort(key=lambda x: x['id']) 69 | with tf.gfile.Open(csv_path, 'w') as csvfile: 70 | writer = csv.writer(csvfile, delimiter=',', quotechar='"') 71 | for category in categories: 72 | writer.writerow([category['id'], category['name']]) 73 | -------------------------------------------------------------------------------- /models/research/object_detection/utils/category_util_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | """Tests for object_detection.utils.category_util.""" 17 | import os 18 | 19 | import tensorflow as tf 20 | 21 | from object_detection.utils import category_util 22 | 23 | 24 | class EvalUtilTest(tf.test.TestCase): 25 | 26 | def test_load_categories_from_csv_file(self): 27 | csv_data = """ 28 | 0,"cat" 29 | 1,"dog" 30 | 2,"bird" 31 | """.strip(' ') 32 | csv_path = os.path.join(self.get_temp_dir(), 'test.csv') 33 | with tf.gfile.Open(csv_path, 'wb') as f: 34 | f.write(csv_data) 35 | 36 | categories = category_util.load_categories_from_csv_file(csv_path) 37 | self.assertTrue({'id': 0, 'name': 'cat'} in categories) 38 | self.assertTrue({'id': 1, 'name': 'dog'} in categories) 39 | self.assertTrue({'id': 2, 'name': 'bird'} in categories) 40 | 41 | def test_save_categories_to_csv_file(self): 42 | categories = [ 43 | {'id': 0, 'name': 'cat'}, 44 | {'id': 1, 'name': 'dog'}, 45 | {'id': 2, 'name': 'bird'}, 46 | ] 47 | csv_path = os.path.join(self.get_temp_dir(), 'test.csv') 48 | category_util.save_categories_to_csv_file(categories, csv_path) 49 | saved_categories = category_util.load_categories_from_csv_file(csv_path) 50 | self.assertEqual(saved_categories, categories) 51 | 52 | 53 | if __name__ == '__main__': 54 | tf.test.main() 55 | -------------------------------------------------------------------------------- /models/research/object_detection/utils/context_manager.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | """Python context management helper.""" 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | 21 | 22 | class IdentityContextManager(object): 23 | """Returns an identity context manager that does nothing. 24 | 25 | This is helpful in setting up conditional `with` statement as below: 26 | 27 | with slim.arg_scope(x) if use_slim_scope else IdentityContextManager(): 28 | do_stuff() 29 | 30 | """ 31 | 32 | def __enter__(self): 33 | return None 34 | 35 | def __exit__(self, exec_type, exec_value, traceback): 36 | del exec_type 37 | del exec_value 38 | del traceback 39 | return False 40 | 41 | -------------------------------------------------------------------------------- /models/research/object_detection/utils/context_manager_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Tests for tensorflow_models.object_detection.utils.context_manager.""" 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | 21 | import tensorflow as tf 22 | from object_detection.utils import context_manager 23 | 24 | 25 | class ContextManagerTest(tf.test.TestCase): 26 | 27 | def test_identity_context_manager(self): 28 | with context_manager.IdentityContextManager() as identity_context: 29 | self.assertIsNone(identity_context) 30 | 31 | 32 | if __name__ == '__main__': 33 | tf.test.main() 34 | -------------------------------------------------------------------------------- /models/research/object_detection/utils/dataset_util_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | 16 | """Tests for object_detection.utils.dataset_util.""" 17 | 18 | import os 19 | import tensorflow as tf 20 | 21 | from object_detection.utils import dataset_util 22 | 23 | 24 | class DatasetUtilTest(tf.test.TestCase): 25 | 26 | def test_read_examples_list(self): 27 | example_list_data = """example1 1\nexample2 2""" 28 | example_list_path = os.path.join(self.get_temp_dir(), 'examples.txt') 29 | with tf.gfile.Open(example_list_path, 'wb') as f: 30 | f.write(example_list_data) 31 | 32 | examples = dataset_util.read_examples_list(example_list_path) 33 | self.assertListEqual(['example1', 'example2'], examples) 34 | 35 | 36 | if __name__ == '__main__': 37 | tf.test.main() 38 | -------------------------------------------------------------------------------- /models/research/object_detection/utils/np_box_mask_list.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | """Numpy BoxMaskList classes and functions.""" 17 | 18 | import numpy as np 19 | from object_detection.utils import np_box_list 20 | 21 | 22 | class BoxMaskList(np_box_list.BoxList): 23 | """Convenience wrapper for BoxList with masks. 24 | 25 | BoxMaskList extends the np_box_list.BoxList to contain masks as well. 26 | In particular, its constructor receives both boxes and masks. Note that the 27 | masks correspond to the full image. 28 | """ 29 | 30 | def __init__(self, box_data, mask_data): 31 | """Constructs box collection. 32 | 33 | Args: 34 | box_data: a numpy array of shape [N, 4] representing box coordinates 35 | mask_data: a numpy array of shape [N, height, width] representing masks 36 | with values are in {0,1}. The masks correspond to the full 37 | image. The height and the width will be equal to image height and width. 38 | 39 | Raises: 40 | ValueError: if bbox data is not a numpy array 41 | ValueError: if invalid dimensions for bbox data 42 | ValueError: if mask data is not a numpy array 43 | ValueError: if invalid dimension for mask data 44 | """ 45 | super(BoxMaskList, self).__init__(box_data) 46 | if not isinstance(mask_data, np.ndarray): 47 | raise ValueError('Mask data must be a numpy array.') 48 | if len(mask_data.shape) != 3: 49 | raise ValueError('Invalid dimensions for mask data.') 50 | if mask_data.dtype != np.uint8: 51 | raise ValueError('Invalid data type for mask data: uint8 is required.') 52 | if mask_data.shape[0] != box_data.shape[0]: 53 | raise ValueError('There should be the same number of boxes and masks.') 54 | self.data['masks'] = mask_data 55 | 56 | def get_masks(self): 57 | """Convenience function for accessing masks. 
58 | 59 | Returns: 60 | a numpy array of shape [N, height, width] representing masks 61 | """ 62 | return self.get_field('masks') 63 | 64 | -------------------------------------------------------------------------------- /models/research/object_detection/utils/np_box_ops_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | """Tests for object_detection.np_box_ops.""" 17 | 18 | import numpy as np 19 | import tensorflow as tf 20 | 21 | from object_detection.utils import np_box_ops 22 | 23 | 24 | class BoxOpsTests(tf.test.TestCase): 25 | 26 | def setUp(self): 27 | boxes1 = np.array([[4.0, 3.0, 7.0, 5.0], [5.0, 6.0, 10.0, 7.0]], 28 | dtype=float) 29 | boxes2 = np.array([[3.0, 4.0, 6.0, 8.0], [14.0, 14.0, 15.0, 15.0], 30 | [0.0, 0.0, 20.0, 20.0]], 31 | dtype=float) 32 | self.boxes1 = boxes1 33 | self.boxes2 = boxes2 34 | 35 | def testArea(self): 36 | areas = np_box_ops.area(self.boxes1) 37 | expected_areas = np.array([6.0, 5.0], dtype=float) 38 | self.assertAllClose(expected_areas, areas) 39 | 40 | def testIntersection(self): 41 | intersection = np_box_ops.intersection(self.boxes1, self.boxes2) 42 | expected_intersection = np.array([[2.0, 0.0, 6.0], [1.0, 0.0, 5.0]], 43 | dtype=float) 44 | self.assertAllClose(intersection, expected_intersection) 45 | 46 | def testIOU(self): 47 | iou = np_box_ops.iou(self.boxes1, self.boxes2) 48 | expected_iou = np.array([[2.0 / 16.0, 0.0, 6.0 / 400.0], 49 | [1.0 / 16.0, 0.0, 5.0 / 400.0]], 50 | dtype=float) 51 | self.assertAllClose(iou, expected_iou) 52 | 53 | def testIOA(self): 54 | boxes1 = np.array([[0.25, 0.25, 0.75, 0.75], 55 | [0.0, 0.0, 0.5, 0.75]], 56 | dtype=np.float32) 57 | boxes2 = np.array([[0.5, 0.25, 1.0, 1.0], 58 | [0.0, 0.0, 1.0, 1.0]], 59 | dtype=np.float32) 60 | ioa21 = np_box_ops.ioa(boxes2, boxes1) 61 | expected_ioa21 = np.array([[0.5, 0.0], 62 | [1.0, 1.0]], 63 | dtype=np.float32) 64 | self.assertAllClose(ioa21, expected_ioa21) 65 | 66 | 67 | if __name__ == '__main__': 68 | tf.test.main() 69 | -------------------------------------------------------------------------------- /models/research/object_detection/utils/static_shape.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | 16 | """Helper functions to access TensorShape values. 17 | 18 | The rank 4 tensor_shape must be of the form [batch_size, height, width, depth]. 19 | """ 20 | 21 | 22 | def get_batch_size(tensor_shape): 23 | """Returns batch size from the tensor shape. 24 | 25 | Args: 26 | tensor_shape: A rank 4 TensorShape. 27 | 28 | Returns: 29 | An integer representing the batch size of the tensor. 30 | """ 31 | tensor_shape.assert_has_rank(rank=4) 32 | return tensor_shape[0].value 33 | 34 | 35 | def get_height(tensor_shape): 36 | """Returns height from the tensor shape. 37 | 38 | Args: 39 | tensor_shape: A rank 4 TensorShape. 40 | 41 | Returns: 42 | An integer representing the height of the tensor. 43 | """ 44 | tensor_shape.assert_has_rank(rank=4) 45 | return tensor_shape[1].value 46 | 47 | 48 | def get_width(tensor_shape): 49 | """Returns width from the tensor shape. 50 | 51 | Args: 52 | tensor_shape: A rank 4 TensorShape. 53 | 54 | Returns: 55 | An integer representing the width of the tensor. 56 | """ 57 | tensor_shape.assert_has_rank(rank=4) 58 | return tensor_shape[2].value 59 | 60 | 61 | def get_depth(tensor_shape): 62 | """Returns depth from the tensor shape. 63 | 64 | Args: 65 | tensor_shape: A rank 4 TensorShape. 66 | 67 | Returns: 68 | An integer representing the depth of the tensor. 69 | """ 70 | tensor_shape.assert_has_rank(rank=4) 71 | return tensor_shape[3].value 72 | -------------------------------------------------------------------------------- /models/research/object_detection/utils/static_shape_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | 16 | """Tests for object_detection.utils.static_shape.""" 17 | 18 | import tensorflow as tf 19 | 20 | from object_detection.utils import static_shape 21 | 22 | 23 | class StaticShapeTest(tf.test.TestCase): 24 | 25 | def test_return_correct_batchSize(self): 26 | tensor_shape = tf.TensorShape(dims=[32, 299, 384, 3]) 27 | self.assertEqual(32, static_shape.get_batch_size(tensor_shape)) 28 | 29 | def test_return_correct_height(self): 30 | tensor_shape = tf.TensorShape(dims=[32, 299, 384, 3]) 31 | self.assertEqual(299, static_shape.get_height(tensor_shape)) 32 | 33 | def test_return_correct_width(self): 34 | tensor_shape = tf.TensorShape(dims=[32, 299, 384, 3]) 35 | self.assertEqual(384, static_shape.get_width(tensor_shape)) 36 | 37 | def test_return_correct_depth(self): 38 | tensor_shape = tf.TensorShape(dims=[32, 299, 384, 3]) 39 | self.assertEqual(3, static_shape.get_depth(tensor_shape)) 40 | 41 | def test_die_on_tensor_shape_with_rank_three(self): 42 | tensor_shape = tf.TensorShape(dims=[32, 299, 384]) 43 | with self.assertRaises(ValueError): 44 | static_shape.get_batch_size(tensor_shape) 45 | static_shape.get_height(tensor_shape) 46 | static_shape.get_width(tensor_shape) 47 | static_shape.get_depth(tensor_shape) 48 | 49 | if __name__ == '__main__': 50 | tf.test.main() 51 | -------------------------------------------------------------------------------- /models/research/protobuf.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/protobuf.zip -------------------------------------------------------------------------------- /models/research/setup.py: -------------------------------------------------------------------------------- 1 | """Setup script for object_detection.""" 2 | 3 | from setuptools import find_packages 4 | from setuptools import setup 5 | 6 | 7 | REQUIRED_PACKAGES = ['Pillow>=1.0', 'Matplotlib>=2.1', 'Cython>=0.28.1'] 8 | 9 | setup( 10 | name='object_detection', 11 | version='0.1', 12 | install_requires=REQUIRED_PACKAGES, 13 | include_package_data=True, 14 | packages=[p for p in find_packages() if p.startswith('object_detection')], 15 | description='Tensorflow Object Detection Library', 16 | ) 17 | -------------------------------------------------------------------------------- /models/research/slim/WORKSPACE: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/slim/WORKSPACE -------------------------------------------------------------------------------- /models/research/slim/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/slim/__init__.py -------------------------------------------------------------------------------- /models/research/slim/datasets/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /models/research/slim/datasets/dataset_factory.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 
The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """A factory-pattern class which returns classification image/label pairs.""" 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | 21 | from datasets import cifar10 22 | from datasets import flowers 23 | from datasets import imagenet 24 | from datasets import mnist 25 | 26 | datasets_map = { 27 | 'cifar10': cifar10, 28 | 'flowers': flowers, 29 | 'imagenet': imagenet, 30 | 'mnist': mnist, 31 | } 32 | 33 | 34 | def get_dataset(name, split_name, dataset_dir, file_pattern=None, reader=None): 35 | """Given a dataset name and a split_name returns a Dataset. 36 | 37 | Args: 38 | name: String, the name of the dataset. 39 | split_name: A train/test split name. 40 | dataset_dir: The directory where the dataset files are stored. 41 | file_pattern: The file pattern to use for matching the dataset source files. 42 | reader: The subclass of tf.ReaderBase. If left as `None`, then the default 43 | reader defined by each dataset is used. 44 | 45 | Returns: 46 | A `Dataset` class. 47 | 48 | Raises: 49 | ValueError: If the dataset `name` is unknown. 50 | """ 51 | if name not in datasets_map: 52 | raise ValueError('Name of dataset unknown %s' % name) 53 | return datasets_map[name].get_split( 54 | split_name, 55 | dataset_dir, 56 | file_pattern, 57 | reader) 58 | -------------------------------------------------------------------------------- /models/research/slim/deployment/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /models/research/slim/download_and_convert_data.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | r"""Downloads and converts a particular dataset. 
16 | 17 | Usage: 18 | ```shell 19 | 20 | $ python download_and_convert_data.py \ 21 | --dataset_name=mnist \ 22 | --dataset_dir=/tmp/mnist 23 | 24 | $ python download_and_convert_data.py \ 25 | --dataset_name=cifar10 \ 26 | --dataset_dir=/tmp/cifar10 27 | 28 | $ python download_and_convert_data.py \ 29 | --dataset_name=flowers \ 30 | --dataset_dir=/tmp/flowers 31 | ``` 32 | """ 33 | from __future__ import absolute_import 34 | from __future__ import division 35 | from __future__ import print_function 36 | 37 | import tensorflow as tf 38 | 39 | from datasets import download_and_convert_cifar10 40 | from datasets import download_and_convert_flowers 41 | from datasets import download_and_convert_mnist 42 | 43 | FLAGS = tf.app.flags.FLAGS 44 | 45 | tf.app.flags.DEFINE_string( 46 | 'dataset_name', 47 | None, 48 | 'The name of the dataset to convert, one of "cifar10", "flowers", "mnist".') 49 | 50 | tf.app.flags.DEFINE_string( 51 | 'dataset_dir', 52 | None, 53 | 'The directory where the output TFRecords and temporary files are saved.') 54 | 55 | 56 | def main(_): 57 | if not FLAGS.dataset_name: 58 | raise ValueError('You must supply the dataset name with --dataset_name') 59 | if not FLAGS.dataset_dir: 60 | raise ValueError('You must supply the dataset directory with --dataset_dir') 61 | 62 | if FLAGS.dataset_name == 'cifar10': 63 | download_and_convert_cifar10.run(FLAGS.dataset_dir) 64 | elif FLAGS.dataset_name == 'flowers': 65 | download_and_convert_flowers.run(FLAGS.dataset_dir) 66 | elif FLAGS.dataset_name == 'mnist': 67 | download_and_convert_mnist.run(FLAGS.dataset_dir) 68 | else: 69 | raise ValueError( 70 | 'dataset_name [%s] was not recognized.' % FLAGS.dataset_name) 71 | 72 | if __name__ == '__main__': 73 | tf.app.run() 74 | -------------------------------------------------------------------------------- /models/research/slim/export_inference_graph_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | 16 | """Tests for export_inference_graph.""" 17 | 18 | from __future__ import absolute_import 19 | from __future__ import division 20 | from __future__ import print_function 21 | 22 | import os 23 | 24 | 25 | import tensorflow as tf 26 | 27 | from tensorflow.python.platform import gfile 28 | import export_inference_graph 29 | 30 | 31 | class ExportInferenceGraphTest(tf.test.TestCase): 32 | 33 | def testExportInferenceGraph(self): 34 | tmpdir = self.get_temp_dir() 35 | output_file = os.path.join(tmpdir, 'inception_v3.pb') 36 | flags = tf.app.flags.FLAGS 37 | flags.output_file = output_file 38 | flags.model_name = 'inception_v3' 39 | flags.dataset_dir = tmpdir 40 | export_inference_graph.main(None) 41 | self.assertTrue(gfile.Exists(output_file)) 42 | 43 | if __name__ == '__main__': 44 | tf.test.main() 45 | -------------------------------------------------------------------------------- /models/research/slim/nets/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /models/research/slim/nets/inception.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | """Brings all inception models under one namespace.""" 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | 21 | # pylint: disable=unused-import 22 | from nets.inception_resnet_v2 import inception_resnet_v2 23 | from nets.inception_resnet_v2 import inception_resnet_v2_arg_scope 24 | from nets.inception_resnet_v2 import inception_resnet_v2_base 25 | from nets.inception_v1 import inception_v1 26 | from nets.inception_v1 import inception_v1_arg_scope 27 | from nets.inception_v1 import inception_v1_base 28 | from nets.inception_v2 import inception_v2 29 | from nets.inception_v2 import inception_v2_arg_scope 30 | from nets.inception_v2 import inception_v2_base 31 | from nets.inception_v3 import inception_v3 32 | from nets.inception_v3 import inception_v3_arg_scope 33 | from nets.inception_v3 import inception_v3_base 34 | from nets.inception_v4 import inception_v4 35 | from nets.inception_v4 import inception_v4_arg_scope 36 | from nets.inception_v4 import inception_v4_base 37 | # pylint: enable=unused-import 38 | -------------------------------------------------------------------------------- /models/research/slim/nets/mobilenet/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/slim/nets/mobilenet/__init__.py -------------------------------------------------------------------------------- /models/research/slim/nets/mobilenet/madds_top1_accuracy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/slim/nets/mobilenet/madds_top1_accuracy.png -------------------------------------------------------------------------------- /models/research/slim/nets/mobilenet/mnet_v1_vs_v2_pixel1_latency.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/slim/nets/mobilenet/mnet_v1_vs_v2_pixel1_latency.png -------------------------------------------------------------------------------- /models/research/slim/nets/mobilenet_v1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperlex/Signature-detection-Practical-guide/4625ea16da7cec312a6da1bdfe40222ada48625d/models/research/slim/nets/mobilenet_v1.png -------------------------------------------------------------------------------- /models/research/slim/nets/nasnet/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /models/research/slim/nets/nasnet/nasnet_utils_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Tests for slim.nets.nasnet.nasnet_utils.""" 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | 21 | import tensorflow as tf 22 | 23 | from nets.nasnet import nasnet_utils 24 | 25 | 26 | class NasnetUtilsTest(tf.test.TestCase): 27 | 28 | def testCalcReductionLayers(self): 29 | num_cells = 18 30 | num_reduction_layers = 2 31 | reduction_layers = nasnet_utils.calc_reduction_layers( 32 | num_cells, num_reduction_layers) 33 | self.assertEqual(len(reduction_layers), 2) 34 | self.assertEqual(reduction_layers[0], 6) 35 | self.assertEqual(reduction_layers[1], 12) 36 | 37 | def testGetChannelIndex(self): 38 | data_formats = ['NHWC', 'NCHW'] 39 | for data_format in data_formats: 40 | index = nasnet_utils.get_channel_index(data_format) 41 | correct_index = 3 if data_format == 'NHWC' else 1 42 | self.assertEqual(index, correct_index) 43 | 44 | def testGetChannelDim(self): 45 | data_formats = ['NHWC', 'NCHW'] 46 | shape = [10, 20, 30, 40] 47 | for data_format in data_formats: 48 | dim = nasnet_utils.get_channel_dim(shape, data_format) 49 | correct_dim = shape[3] if data_format == 'NHWC' else shape[1] 50 | self.assertEqual(dim, correct_dim) 51 | 52 | def testGlobalAvgPool(self): 53 | data_formats = ['NHWC', 'NCHW'] 54 | inputs = tf.placeholder(tf.float32, (5, 10, 20, 10)) 55 | for data_format in data_formats: 56 | output = nasnet_utils.global_avg_pool( 57 | inputs, data_format) 58 | self.assertEqual(output.shape, [5, 10]) 59 | 60 | 61 | if __name__ == '__main__': 62 | tf.test.main() 63 | -------------------------------------------------------------------------------- /models/research/slim/preprocessing/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /models/research/slim/preprocessing/lenet_preprocessing.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
# ==============================================================================
"""Provides utilities for preprocessing images for LeNet."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf

slim = tf.contrib.slim


def preprocess_image(image, output_height, output_width, is_training):
  """Preprocesses the given image.

  Args:
    image: A `Tensor` representing an image of arbitrary size.
    output_height: The height of the image after preprocessing.
    output_width: The width of the image after preprocessing.
    is_training: `True` if we're preprocessing the image for training and
      `False` otherwise.

  Returns:
    A preprocessed image.
  """
  image = tf.to_float(image)
  # resize_image_with_crop_or_pad takes (image, target_height, target_width);
  # the original call passed width first, which mis-sizes non-square outputs.
  image = tf.image.resize_image_with_crop_or_pad(
      image, output_height, output_width)
  # Shift and scale pixel values from [0, 255] to roughly [-1, 1).
  image = tf.subtract(image, 128.0)
  image = tf.div(image, 128.0)
  return image
--------------------------------------------------------------------------------
/models/research/slim/scripts/train_cifarnet_on_cifar10.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# This script performs the following operations:
# 1. Downloads the Cifar10 dataset
# 2. Trains a CifarNet model on the Cifar10 training set.
# 3. Evaluates the model on the Cifar10 testing set.
#
# Usage:
# cd slim
# ./scripts/train_cifarnet_on_cifar10.sh
set -e

# Where the checkpoint and logs will be saved to.
TRAIN_DIR=/tmp/cifarnet-model

# Where the dataset is saved to.
DATASET_DIR=/tmp/cifar10

# Download the dataset
python download_and_convert_data.py \
  --dataset_name=cifar10 \
  --dataset_dir=${DATASET_DIR}

# Run training.
python train_image_classifier.py \
  --train_dir=${TRAIN_DIR} \
  --dataset_name=cifar10 \
  --dataset_split_name=train \
  --dataset_dir=${DATASET_DIR} \
  --model_name=cifarnet \
  --preprocessing_name=cifarnet \
  --max_number_of_steps=100000 \
  --batch_size=128 \
  --save_interval_secs=120 \
  --save_summaries_secs=120 \
  --log_every_n_steps=100 \
  --optimizer=sgd \
  --learning_rate=0.1 \
  --learning_rate_decay_factor=0.1 \
  --num_epochs_per_decay=200 \
  --weight_decay=0.004

# Run evaluation.
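# Evaluation reuses TRAIN_DIR: eval_image_classifier.py should pick up the
# latest checkpoint saved there and report accuracy on the test split.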
python eval_image_classifier.py \
  --checkpoint_path=${TRAIN_DIR} \
  --eval_dir=${TRAIN_DIR} \
  --dataset_name=cifar10 \
  --dataset_split_name=test \
  --dataset_dir=${DATASET_DIR} \
  --model_name=cifarnet
--------------------------------------------------------------------------------
/models/research/slim/scripts/train_lenet_on_mnist.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# This script performs the following operations:
# 1. Downloads the MNIST dataset
# 2. Trains a LeNet model on the MNIST training set.
# 3. Evaluates the model on the MNIST testing set.
#
# Usage:
# cd slim
# ./scripts/train_lenet_on_mnist.sh
set -e

# Where the checkpoint and logs will be saved to.
TRAIN_DIR=/tmp/lenet-model

# Where the dataset is saved to.
DATASET_DIR=/tmp/mnist

# Download the dataset
python download_and_convert_data.py \
  --dataset_name=mnist \
  --dataset_dir=${DATASET_DIR}

# Run training.
python train_image_classifier.py \
  --train_dir=${TRAIN_DIR} \
  --dataset_name=mnist \
  --dataset_split_name=train \
  --dataset_dir=${DATASET_DIR} \
  --model_name=lenet \
  --preprocessing_name=lenet \
  --max_number_of_steps=20000 \
  --batch_size=50 \
  --learning_rate=0.01 \
  --save_interval_secs=60 \
  --save_summaries_secs=60 \
  --log_every_n_steps=100 \
  --optimizer=sgd \
  --learning_rate_decay_type=fixed \
  --weight_decay=0

# Run evaluation.
python eval_image_classifier.py \
  --checkpoint_path=${TRAIN_DIR} \
  --eval_dir=${TRAIN_DIR} \
  --dataset_name=mnist \
  --dataset_split_name=test \
  --dataset_dir=${DATASET_DIR} \
  --model_name=lenet
--------------------------------------------------------------------------------
/models/research/slim/setup.py:
--------------------------------------------------------------------------------
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Setup script for slim."""

from setuptools import find_packages
from setuptools import setup


setup(
    name='slim',
    version='0.1',
    include_package_data=True,
    packages=find_packages(),
    description='tf-slim',
)
--------------------------------------------------------------------------------
/models/research/train_sign_detect_frcnn.sh:
--------------------------------------------------------------------------------
#!/bin/bash
commandOutput="$(python make_logs.py --model "custom_faster_rcnn_inceptionv2")"
echo "Output was $commandOutput"
# Placeholder: point this at your Faster R-CNN pipeline .config file.
PIPELINE_CONFIG_PATH='PATH TO BE CONFIGURED'
MODEL_DIR="$commandOutput"
NUM_TRAIN_STEPS=2000
SAMPLE_1_OF_N_EVAL_EXAMPLES=1
python object_detection/model_main.py \
  --pipeline_config_path=${PIPELINE_CONFIG_PATH} \
  --model_dir=${MODEL_DIR} \
  --num_train_steps=${NUM_TRAIN_STEPS} \
  --sample_1_of_n_eval_examples=$SAMPLE_1_OF_N_EVAL_EXAMPLES \
  --alsologtostderr
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
tensorflow==1.15.0-rc1
Cython
contextlib2
lxml
jupyter
matplotlib
numpy
Pillow
argparse
--------------------------------------------------------------------------------
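A minimal usage sketch for slim/preprocessing/lenet_preprocessing.py above, assuming the TF 1.x API pinned in requirements.txt and a working directory of models/research/slim; the 28x28 target size is illustrative (the standard LeNet/MNIST input), not a value taken from this repository:

    import tensorflow as tf
    from preprocessing import lenet_preprocessing

    # A grayscale image of arbitrary size; preprocess_image crops/pads it to
    # 28x28 and rescales pixel values from [0, 255] to roughly [-1, 1).
    raw = tf.placeholder(tf.uint8, shape=[None, None, 1])
    processed = lenet_preprocessing.preprocess_image(
        raw, output_height=28, output_width=28, is_training=False)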