├── usr
│   ├── flow.png
│   └── object_detection.png
├── data
│   ├── eval.tfrecords
│   ├── train.tfrecords
│   └── image_label.pbtxt
├── raw
│   └── origindata
│       ├── daisy
│       │   ├── 488202750_c420cbce61.jpg
│       │   ├── 5547758_eea9edfd54_n.jpg
│       │   ├── 5794839_200acd910c_n.jpg
│       │   ├── 134409839_71069a95d1_m.jpg
│       │   ├── 158869618_f1a6704236_n.jpg
│       │   ├── 162362896_99c7d851c8_n.jpg
│       │   ├── 162362897_1d21b70621_m.jpg
│       │   ├── 163978992_8128b49d3e_n.jpg
│       │   ├── 172882635_4cc7b86731_m.jpg
│       │   └── 446484749_4044affcaf_n.jpg
│       ├── roses
│       │   ├── 1645761726_2b1be95472.jpg
│       │   ├── 2059172936_032ffc12aa.jpg
│       │   ├── 466486216_ab13b55763.jpg
│       │   ├── 685724528_6cd5cbe203.jpg
│       │   ├── 1562198683_8cd8cb5876_n.jpg
│       │   ├── 2501297526_cbd66a3f7e_m.jpg
│       │   ├── 2535466393_6556afeb2f_m.jpg
│       │   ├── 488849503_63a290a8c2_m.jpg
│       │   ├── 509239741_28e2cfe492_m.jpg
│       │   └── 873660804_37f5c6a46e_n.jpg
│       ├── tulips
│       │   ├── 142235237_da662d925c.jpg
│       │   ├── 155097272_70feb13184.jpg
│       │   ├── 483880052_19fdb26a9f.jpg
│       │   ├── 510698601_9f61d6f8d8.jpg
│       │   ├── 133692329_c1150ed811_n.jpg
│       │   ├── 2361075034_cf730b8682.jpg
│       │   ├── 2430566689_8543552f9b.jpg
│       │   ├── 2249756775_02e693beda_n.jpg
│       │   ├── 2374855021_21959b40c0_n.jpg
│       │   └── 2535936698_78cc03df3f_n.jpg
│       ├── dandelion
│       │   ├── 15987457_49dc11bf4b.jpg
│       │   ├── 98992760_53ed1d26a9.jpg
│       │   ├── 10443973_aeb97513fc_m.jpg
│       │   ├── 11405573_24a8a838cc_n.jpg
│       │   ├── 138132145_782763b84f_m.jpg
│       │   ├── 13920113_f03e867ea7_m.jpg
│       │   ├── 141652526_2be95f21c3_n.jpg
│       │   ├── 14283011_3e7452c5b2_n.jpg
│       │   ├── 144040769_c5b805f868.jpg
│       │   └── 80846315_d997645bea_n.jpg
│       ├── sunflowers
│       │   ├── 175638423_058c07afb9.jpg
│       │   ├── 44079668_34dfee3da1_n.jpg
│       │   ├── 45045003_30bbd0a142_m.jpg
│       │   ├── 678714585_addc9aaaef.jpg
│       │   ├── 145303599_2627e23815_n.jpg
│       │   ├── 151898652_b5f1c70b98_n.jpg
│       │   ├── 164670455_29d8e02bbd_n.jpg
│       │   ├── 164671753_ab36d9cbb7_n.jpg
│       │   ├── 274848710_5185cf33b1_n.jpg
│       │   └── 287233531_74d4605814_m.jpg
│       ├── dandelion.json
│       ├── sunflowers.json
│       ├── daisy.json
│       ├── roses.json
│       └── tulips.json
├── .gitignore
├── read_tfrecord_multiple.py
├── models
│   └── model
│       └── ssd_mobilenet_v1_coco.config
├── combine_vott_labeling_file_for_tfrecord.py
├── README.md
└── Vott2TFRecordAndTFLabel_multiple.py

/usr/flow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/usr/flow.png
--------------------------------------------------------------------------------
/data/eval.tfrecords:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/data/eval.tfrecords
--------------------------------------------------------------------------------
/data/train.tfrecords:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/data/train.tfrecords
--------------------------------------------------------------------------------
/usr/object_detection.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/usr/object_detection.png
--------------------------------------------------------------------------------
/raw/origindata/daisy/488202750_c420cbce61.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/daisy/488202750_c420cbce61.jpg
-------------------------------------------------------------------------------- /raw/origindata/daisy/5547758_eea9edfd54_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/daisy/5547758_eea9edfd54_n.jpg -------------------------------------------------------------------------------- /raw/origindata/daisy/5794839_200acd910c_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/daisy/5794839_200acd910c_n.jpg -------------------------------------------------------------------------------- /raw/origindata/roses/1645761726_2b1be95472.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/roses/1645761726_2b1be95472.jpg -------------------------------------------------------------------------------- /raw/origindata/roses/2059172936_032ffc12aa.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/roses/2059172936_032ffc12aa.jpg -------------------------------------------------------------------------------- /raw/origindata/roses/466486216_ab13b55763.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/roses/466486216_ab13b55763.jpg -------------------------------------------------------------------------------- /raw/origindata/roses/685724528_6cd5cbe203.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/roses/685724528_6cd5cbe203.jpg -------------------------------------------------------------------------------- /raw/origindata/tulips/142235237_da662d925c.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/tulips/142235237_da662d925c.jpg -------------------------------------------------------------------------------- /raw/origindata/tulips/155097272_70feb13184.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/tulips/155097272_70feb13184.jpg -------------------------------------------------------------------------------- /raw/origindata/tulips/483880052_19fdb26a9f.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/tulips/483880052_19fdb26a9f.jpg -------------------------------------------------------------------------------- /raw/origindata/tulips/510698601_9f61d6f8d8.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/tulips/510698601_9f61d6f8d8.jpg -------------------------------------------------------------------------------- /raw/origindata/daisy/134409839_71069a95d1_m.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/daisy/134409839_71069a95d1_m.jpg -------------------------------------------------------------------------------- /raw/origindata/daisy/158869618_f1a6704236_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/daisy/158869618_f1a6704236_n.jpg -------------------------------------------------------------------------------- /raw/origindata/daisy/162362896_99c7d851c8_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/daisy/162362896_99c7d851c8_n.jpg -------------------------------------------------------------------------------- /raw/origindata/daisy/162362897_1d21b70621_m.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/daisy/162362897_1d21b70621_m.jpg -------------------------------------------------------------------------------- /raw/origindata/daisy/163978992_8128b49d3e_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/daisy/163978992_8128b49d3e_n.jpg -------------------------------------------------------------------------------- /raw/origindata/daisy/172882635_4cc7b86731_m.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/daisy/172882635_4cc7b86731_m.jpg -------------------------------------------------------------------------------- /raw/origindata/daisy/446484749_4044affcaf_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/daisy/446484749_4044affcaf_n.jpg -------------------------------------------------------------------------------- /raw/origindata/dandelion/15987457_49dc11bf4b.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/dandelion/15987457_49dc11bf4b.jpg -------------------------------------------------------------------------------- /raw/origindata/dandelion/98992760_53ed1d26a9.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/dandelion/98992760_53ed1d26a9.jpg -------------------------------------------------------------------------------- /raw/origindata/roses/1562198683_8cd8cb5876_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/roses/1562198683_8cd8cb5876_n.jpg -------------------------------------------------------------------------------- /raw/origindata/roses/2501297526_cbd66a3f7e_m.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/roses/2501297526_cbd66a3f7e_m.jpg 
-------------------------------------------------------------------------------- /raw/origindata/roses/2535466393_6556afeb2f_m.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/roses/2535466393_6556afeb2f_m.jpg -------------------------------------------------------------------------------- /raw/origindata/roses/488849503_63a290a8c2_m.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/roses/488849503_63a290a8c2_m.jpg -------------------------------------------------------------------------------- /raw/origindata/roses/509239741_28e2cfe492_m.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/roses/509239741_28e2cfe492_m.jpg -------------------------------------------------------------------------------- /raw/origindata/roses/873660804_37f5c6a46e_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/roses/873660804_37f5c6a46e_n.jpg -------------------------------------------------------------------------------- /raw/origindata/tulips/133692329_c1150ed811_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/tulips/133692329_c1150ed811_n.jpg -------------------------------------------------------------------------------- /raw/origindata/tulips/2361075034_cf730b8682.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/tulips/2361075034_cf730b8682.jpg -------------------------------------------------------------------------------- /raw/origindata/tulips/2430566689_8543552f9b.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/tulips/2430566689_8543552f9b.jpg -------------------------------------------------------------------------------- /raw/origindata/dandelion/10443973_aeb97513fc_m.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/dandelion/10443973_aeb97513fc_m.jpg -------------------------------------------------------------------------------- /raw/origindata/dandelion/11405573_24a8a838cc_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/dandelion/11405573_24a8a838cc_n.jpg -------------------------------------------------------------------------------- /raw/origindata/dandelion/138132145_782763b84f_m.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/dandelion/138132145_782763b84f_m.jpg -------------------------------------------------------------------------------- /raw/origindata/dandelion/13920113_f03e867ea7_m.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/dandelion/13920113_f03e867ea7_m.jpg -------------------------------------------------------------------------------- /raw/origindata/dandelion/141652526_2be95f21c3_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/dandelion/141652526_2be95f21c3_n.jpg -------------------------------------------------------------------------------- /raw/origindata/dandelion/14283011_3e7452c5b2_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/dandelion/14283011_3e7452c5b2_n.jpg -------------------------------------------------------------------------------- /raw/origindata/dandelion/144040769_c5b805f868.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/dandelion/144040769_c5b805f868.jpg -------------------------------------------------------------------------------- /raw/origindata/dandelion/80846315_d997645bea_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/dandelion/80846315_d997645bea_n.jpg -------------------------------------------------------------------------------- /raw/origindata/sunflowers/175638423_058c07afb9.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/sunflowers/175638423_058c07afb9.jpg -------------------------------------------------------------------------------- /raw/origindata/sunflowers/44079668_34dfee3da1_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/sunflowers/44079668_34dfee3da1_n.jpg -------------------------------------------------------------------------------- /raw/origindata/sunflowers/45045003_30bbd0a142_m.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/sunflowers/45045003_30bbd0a142_m.jpg -------------------------------------------------------------------------------- /raw/origindata/sunflowers/678714585_addc9aaaef.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/sunflowers/678714585_addc9aaaef.jpg -------------------------------------------------------------------------------- /raw/origindata/tulips/2249756775_02e693beda_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/tulips/2249756775_02e693beda_n.jpg -------------------------------------------------------------------------------- /raw/origindata/tulips/2374855021_21959b40c0_n.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/tulips/2374855021_21959b40c0_n.jpg -------------------------------------------------------------------------------- /raw/origindata/tulips/2535936698_78cc03df3f_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/tulips/2535936698_78cc03df3f_n.jpg -------------------------------------------------------------------------------- /raw/origindata/sunflowers/145303599_2627e23815_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/sunflowers/145303599_2627e23815_n.jpg -------------------------------------------------------------------------------- /raw/origindata/sunflowers/151898652_b5f1c70b98_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/sunflowers/151898652_b5f1c70b98_n.jpg -------------------------------------------------------------------------------- /raw/origindata/sunflowers/164670455_29d8e02bbd_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/sunflowers/164670455_29d8e02bbd_n.jpg -------------------------------------------------------------------------------- /raw/origindata/sunflowers/164671753_ab36d9cbb7_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/sunflowers/164671753_ab36d9cbb7_n.jpg -------------------------------------------------------------------------------- /raw/origindata/sunflowers/274848710_5185cf33b1_n.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/sunflowers/274848710_5185cf33b1_n.jpg -------------------------------------------------------------------------------- /raw/origindata/sunflowers/287233531_74d4605814_m.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jiankaiwang/TF_ObjectDetection_Flow/HEAD/raw/origindata/sunflowers/287233531_74d4605814_m.jpg -------------------------------------------------------------------------------- /data/image_label.pbtxt: -------------------------------------------------------------------------------- 1 | item { 2 | id: 1 3 | name: 'daisy' 4 | } 5 | item { 6 | id: 2 7 | name: 'others' 8 | } 9 | item { 10 | id: 3 11 | name: 'dandelion' 12 | } 13 | item { 14 | id: 4 15 | name: 'roses' 16 | } 17 | item { 18 | id: 5 19 | name: 'sunflowers' 20 | } 21 | item { 22 | id: 6 23 | name: 'tulips' 24 | } 25 | -------------------------------------------------------------------------------- /raw/origindata/dandelion.json: -------------------------------------------------------------------------------- 1 | 
{"frames":{"0":[{"x1":2,"y1":26,"x2":317,"y2":393,"id":0,"width":317,"height":423,"type":"Rectangle","tags":["dandelion"],"name":1}],"1":[{"x1":123,"y1":46,"x2":485,"y2":390,"id":1,"width":564,"height":423,"type":"Rectangle","tags":["dandelion"],"name":1}],"2":[{"x1":125,"y1":204,"x2":295,"y2":350,"id":2,"width":317,"height":423,"type":"Rectangle","tags":["dandelion"],"name":1}],"3":[{"x1":19,"y1":7,"x2":407,"y2":412,"id":3,"width":423,"height":423,"type":"Rectangle","tags":["dandelion"],"name":1}],"4":[{"x1":98,"y1":25,"x2":515,"y2":419,"id":4,"width":564,"height":423,"type":"Rectangle","tags":["dandelion"],"name":1}],"5":[{"x1":183,"y1":113,"x2":339,"y2":260,"id":5,"width":564,"height":423,"type":"Rectangle","tags":["dandelion"],"name":1}],"6":[{"x1":122,"y1":24,"x2":482,"y2":370,"id":6,"width":562,"height":423,"type":"Rectangle","tags":["dandelion"],"name":1}],"7":[{"x1":64,"y1":46,"x2":464,"y2":382,"id":7,"width":564,"height":423,"type":"Rectangle","tags":["dandelion"],"name":1}],"8":[{"x1":28,"y1":1,"x2":545,"y2":421,"id":8,"width":564,"height":423,"type":"Rectangle","tags":["dandelion"],"name":1}],"9":[{"x1":29,"y1":95,"x2":339,"y2":371,"id":9,"width":635,"height":423,"type":"Rectangle","tags":["dandelion"],"name":1}]},"framerate":"1","inputTags":"dandelion","suggestiontype":"track","scd":false,"visitedFrames":[0,1,2,3,4,5,6,7,8,9],"visitedFrameNames":["10443973_aeb97513fc_m.jpg","11405573_24a8a838cc_n.jpg","138132145_782763b84f_m.jpg","13920113_f03e867ea7_m.jpg","141652526_2be95f21c3_n.jpg","14283011_3e7452c5b2_n.jpg","144040769_c5b805f868.jpg","15987457_49dc11bf4b.jpg","80846315_d997645bea_n.jpg","98992760_53ed1d26a9.jpg"],"labeler":"jkw"} -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | 106 | # tmp 107 | tmp/ 108 | 109 | # bat, sh 110 | *.bat 111 | *.sh 112 | -------------------------------------------------------------------------------- /raw/origindata/sunflowers.json: -------------------------------------------------------------------------------- 1 | {"frames":{"0":[{"x1":100,"y1":30,"x2":440,"y2":367,"id":0,"width":564,"height":423,"type":"Rectangle","tags":["sunflowers"],"name":1}],"1":[{"x1":137,"y1":39,"x2":492,"y2":385,"id":1,"width":564,"height":423,"type":"Rectangle","tags":["sunflowers"],"name":1}],"2":[{"x1":123,"y1":60,"x2":462,"y2":400,"id":2,"width":593,"height":423,"type":"Rectangle","tags":["sunflowers"],"name":1}],"3":[{"x1":111,"y1":34,"x2":346,"y2":423,"id":3,"width":593,"height":423,"type":"Rectangle","tags":["sunflowers"],"name":1}],"4":[{"x1":65,"y1":50,"x2":406,"y2":408,"id":4,"width":564,"height":423,"type":"Rectangle","tags":["sunflowers"],"name":1},{"x1":372.5,"y1":74,"x2":512.5,"y2":237,"id":5,"width":564,"height":423,"type":"Rectangle","tags":["sunflowers"],"name":2},{"x1":279,"y1":0,"x2":426,"y2":67,"id":6,"width":564,"height":423,"type":"Rectangle","tags":["sunflowers"],"name":3}],"5":[{"x1":36,"y1":0,"x2":377,"y2":361,"id":7,"width":564,"height":423,"type":"Rectangle","tags":["sunflowers"],"name":1}],"6":[{"x1":18,"y1":52,"x2":317,"y2":385,"id":8,"width":437,"height":423,"type":"Rectangle","tags":["sunflowers"],"name":1},{"x1":289,"y1":0,"x2":429,"y2":66,"id":9,"width":437,"height":423,"type":"Rectangle","tags":["sunflowers"],"name":2}],"7":[{"x1":16.5,"y1":0,"x2":490.5,"y2":402,"id":10,"width":518,"height":423,"type":"Rectangle","tags":["sunflowers"],"name":1},{"x1":432.5,"y1":293,"x2":509.5,"y2":423,"id":11,"width":518,"height":423,"type":"Rectangle","tags":["sunflowers"],"name":2}],"8":[{"x1":117,"y1":165,"x2":272,"y2":356,"id":12,"width":317,"height":423,"type":"Rectangle","tags":["sunflowers"],"name":1},{"x1":18,"y1":150,"x2":118,"y2":299,"id":13,"width":317,"height":423,"type":"Rectangle","tags":["sunflowers"],"name":2}]},"framerate":"1","inputTags":"sunflowers","suggestiontype":"track","scd":false,"visitedFrames":[0,1,2,3,4,5,6,7,8],"visitedFrameNames":["145303599_2627e23815_n.jpg","151898652_b5f1c70b98_n.jpg","164670455_29d8e02bbd_n.jpg","164671753_ab36d9cbb7_n.jpg","175638423_058c07afb9.jpg","274848710_5185cf33b1_n.jpg","287233531_74d4605814_m.jpg","44079668_34dfee3da1_n.jpg","45045003_30bbd0a142_m.jpg","678714585_
addc9aaaef.jpg"],"labeler":"jkw"} -------------------------------------------------------------------------------- /raw/origindata/daisy.json: -------------------------------------------------------------------------------- 1 | {"frames":{"0":[{"x1":38,"y1":8,"x2":420,"y2":415,"id":0,"width":457,"height":423,"type":"Rectangle","tags":["daisy"],"name":1}],"1":[{"x1":30.5,"y1":126,"x2":233.5,"y2":368,"id":1,"width":510,"height":423,"type":"Rectangle","tags":["daisy"],"name":1},{"x1":180,"y1":80,"x2":487,"y2":393,"id":2,"width":510,"height":423,"type":"Rectangle","tags":["daisy"],"name":2}],"2":[{"x1":49.5,"y1":64,"x2":464.5,"y2":348,"id":3,"width":528,"height":423,"type":"Rectangle","tags":["daisy"],"name":1},{"x1":18,"y1":277,"x2":112,"y2":350,"id":4,"width":528,"height":423,"type":"Rectangle","tags":["daisy"],"name":2},{"x1":153.5,"y1":13,"x2":263.5,"y2":63,"id":5,"width":528,"height":423,"type":"Rectangle","tags":["daisy"],"name":3}],"3":[{"x1":32,"y1":110,"x2":354,"y2":277,"id":6,"width":389,"height":423,"type":"Rectangle","tags":["daisy"],"name":1},{"x1":192,"y1":39,"x2":291,"y2":105,"id":7,"width":389,"height":423,"type":"Rectangle","tags":["daisy"],"name":2}],"4":[{"x1":187,"y1":87,"x2":468,"y2":384,"id":8,"width":526,"height":423,"type":"Rectangle","tags":["daisy"],"name":1},{"x1":3,"y1":110,"x2":146,"y2":293,"id":9,"width":526,"height":423,"type":"Rectangle","tags":["daisy"],"name":2},{"x1":95,"y1":20,"x2":214,"y2":119,"id":10,"width":526,"height":423,"type":"Rectangle","tags":["daisy"],"name":3},{"x1":240,"y1":5,"x2":374,"y2":92,"id":11,"width":526,"height":423,"type":"Rectangle","tags":["daisy"],"name":4}],"5":[{"x1":46,"y1":35,"x2":433,"y2":395,"id":12,"width":474,"height":423,"type":"Rectangle","tags":["daisy"],"name":1}],"6":[{"x1":319,"y1":20,"x2":568,"y2":256,"id":13,"width":583,"height":423,"type":"Rectangle","tags":["daisy"],"name":1}],"7":[{"x1":70,"y1":16,"x2":510,"y2":400,"id":14,"width":564,"height":423,"type":"Rectangle","tags":["daisy"],"name":1}],"8":[{"x1":2,"y1":92,"x2":440,"y2":423,"id":15,"width":583,"height":423,"type":"Rectangle","tags":["daisy"],"name":1}],"9":[{"x1":307,"y1":168,"x2":497,"y2":364,"id":16,"width":564,"height":423,"type":"Rectangle","tags":["daisy"],"name":1},{"x1":29,"y1":155,"x2":272,"y2":391,"id":17,"width":564,"height":423,"type":"Rectangle","tags":["daisy"],"name":2},{"x1":199,"y1":20,"x2":449,"y2":201,"id":18,"width":564,"height":423,"type":"Rectangle","tags":["daisy"],"name":3}]},"framerate":"1","inputTags":"daisy,others","suggestiontype":"track","scd":false,"visitedFrames":[0,1,2,3,4,5,6,7,8,9],"visitedFrameNames":["134409839_71069a95d1_m.jpg","158869618_f1a6704236_n.jpg","162362896_99c7d851c8_n.jpg","162362897_1d21b70621_m.jpg","163978992_8128b49d3e_n.jpg","172882635_4cc7b86731_m.jpg","446484749_4044affcaf_n.jpg","488202750_c420cbce61.jpg","5547758_eea9edfd54_n.jpg","5794839_200acd910c_n.jpg"],"labeler":"jkw"} -------------------------------------------------------------------------------- /raw/origindata/roses.json: -------------------------------------------------------------------------------- 1 | 
{"frames":{"0":[{"x1":37,"y1":40,"x2":620,"y2":423,"id":0,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":1}],"1":[{"x1":240,"y1":126,"x2":590,"y2":392,"id":1,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":1},{"x1":48,"y1":119,"x2":233,"y2":354,"id":2,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":2},{"x1":144,"y1":51,"x2":386,"y2":167,"id":3,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":3}],"2":[{"x1":12,"y1":2,"x2":473,"y2":395,"id":4,"width":559,"height":423,"type":"Rectangle","tags":["roses"],"name":1}],"3":[{"x1":91,"y1":52,"x2":324,"y2":322,"id":5,"width":423,"height":423,"type":"Rectangle","tags":["roses"],"name":1}],"4":[{"x1":42,"y1":11,"x2":327,"y2":390,"id":6,"width":336,"height":423,"type":"Rectangle","tags":["roses"],"name":1}],"5":[{"x1":49,"y1":101,"x2":132,"y2":205,"id":7,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":1},{"x1":223,"y1":150,"x2":307,"y2":231,"id":8,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":2},{"x1":391,"y1":324,"x2":481,"y2":397,"id":9,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":3},{"x1":121,"y1":239,"x2":201,"y2":330,"id":10,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":4},{"x1":211,"y1":307,"x2":284,"y2":375,"id":11,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":5},{"x1":303,"y1":235,"x2":397,"y2":314,"id":12,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":6},{"x1":143,"y1":155,"x2":215,"y2":234,"id":13,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":7},{"x1":139,"y1":96,"x2":250,"y2":149,"id":14,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":8},{"x1":314,"y1":324,"x2":389,"y2":385,"id":15,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":9},{"x1":207,"y1":230,"x2":279,"y2":298,"id":16,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":10},{"x1":70,"y1":212,"x2":149,"y2":274,"id":17,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":11}],"6":[{"x1":57,"y1":191,"x2":162,"y2":243,"id":18,"width":308,"height":423,"type":"Rectangle","tags":["roses"],"name":1},{"x1":136,"y1":249,"x2":201,"y2":294,"id":19,"width":308,"height":423,"type":"Rectangle","tags":["roses"],"name":2},{"x1":164,"y1":291,"x2":227,"y2":356,"id":20,"width":308,"height":423,"type":"Rectangle","tags":["roses"],"name":3},{"x1":218,"y1":159,"x2":247,"y2":201,"id":21,"width":308,"height":423,"type":"Rectangle","tags":["roses"],"name":4},{"x1":247,"y1":186,"x2":278,"y2":215,"id":22,"width":308,"height":423,"type":"Rectangle","tags":["roses"],"name":5}],"7":[{"x1":32,"y1":38,"x2":284,"y2":320,"id":23,"width":315,"height":423,"type":"Rectangle","tags":["roses"],"name":1}],"8":[{"x1":127,"y1":53,"x2":472,"y2":402,"id":24,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":1},{"x1":479,"y1":0,"x2":635,"y2":230,"id":25,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":2},{"x1":0,"y1":0,"x2":215,"y2":258,"id":26,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":3},{"x1":245,"y1":0,"x2":476,"y2":44,"id":27,"width":635,"height":423,"type":"Rectangle","tags":["roses"],"name":4}],"9":[{"x1":88,"y1":51,"x2":556,"y2":345,"id":28,"width":606,"height":423,"type":"Rectangle","tags":["roses"],"name":1},{"x1":10,"y1":142,"x2":112,"y2":372,"id":29,"width":606,"height":423,"type":"Rectangle","tags":["roses"],"name":2}]},"framerate":"1","inputTags":"roses","sugg
estiontype":"track","scd":false,"visitedFrames":[0,1,2,3,4,5,6,7,8,9],"visitedFrameNames":["1562198683_8cd8cb5876_n.jpg","1645761726_2b1be95472.jpg","2059172936_032ffc12aa.jpg","2501297526_cbd66a3f7e_m.jpg","2535466393_6556afeb2f_m.jpg","466486216_ab13b55763.jpg","488849503_63a290a8c2_m.jpg","509239741_28e2cfe492_m.jpg","685724528_6cd5cbe203.jpg","873660804_37f5c6a46e_n.jpg"],"labeler":"jkw"} -------------------------------------------------------------------------------- /read_tfrecord_multiple.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Wed Apr 25 23:43:33 2018 4 | @author: acer4755g 5 | @reference: 6 | """ 7 | 8 | import tensorflow as tf 9 | import os 10 | import Vott2TFRecordAndTFLabel_multiple 11 | import matplotlib.pyplot as plt 12 | 13 | outputFilePath = Vott2TFRecordAndTFLabel_multiple.outputFilePath 14 | outputLabelFile = Vott2TFRecordAndTFLabel_multiple.outputLabelFile 15 | TRAIN_VALIDARION_RATIO = Vott2TFRecordAndTFLabel_multiple.TRAIN_VALIDARION_RATIO 16 | 17 | # 18 | # desc : read and decode tfrecord 19 | # param@ 20 | # |- filename_queue: input a filename queue 21 | # 22 | def read_and_decode(filename_queue): 23 | # create a tfrecord object 24 | reader = tf.TFRecordReader() 25 | _, serialized_example = reader.read(filename_queue) 26 | 27 | # decode the example 28 | features = tf.parse_single_example(serialized_example,\ 29 | features={'image/encoded':tf.FixedLenFeature([], tf.string)\ 30 | , 'image/object/class/label':tf.VarLenFeature(tf.int64)\ 31 | , 'image/object/bbox/xmax':tf.VarLenFeature(tf.float32)\ 32 | , 'image/height':tf.FixedLenFeature([], tf.int64) \ 33 | , 'image/width':tf.FixedLenFeature([], tf.int64)}) 34 | 35 | label = tf.cast(features['image/object/class/label'], tf.int64) 36 | label = tf.sparse_tensor_to_dense(label) 37 | xmax = tf.cast(features['image/object/bbox/xmax'], tf.float32) 38 | xmax = tf.sparse_tensor_to_dense(xmax) 39 | height = tf.cast(features['image/height'], tf.int64) 40 | width = tf.cast(features['image/width'], tf.int64) 41 | 42 | # it must decode byteslist from string type to uint8 type 43 | image = tf.image.decode_jpeg(features['image/encoded']) 44 | image = tf.image.convert_image_dtype(image, dtype=tf.uint8) 45 | return image, height, width, label, xmax 46 | 47 | # 48 | # desc : input training or validation file (could be shuffle) and return a tuple 49 | # 50 | def inputs(data_set_name, num_epochs=None, outputImage=False): 51 | with tf.name_scope('input'): 52 | # return a QueueRunner object and FIFOQueue object inside in 53 | filename_queue = tf.train.string_input_producer([data_set_name], num_epochs=num_epochs) 54 | 55 | image, height, width, label, xmax = read_and_decode(filename_queue) 56 | 57 | if outputImage: 58 | # output all images 59 | with tf.Session() as sess: 60 | 61 | # because one epoch variable is built inside string_input_produer (image_raw) 62 | # and the variable is belonging to tf.GraphKeys.LOCAL_VARIABLES 63 | # tf.local_variables_initializer() is necessary 64 | init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer()) 65 | sess.run(init_op) 66 | 67 | coord=tf.train.Coordinator() 68 | threads= tf.train.start_queue_runners(coord=coord) 69 | 70 | for i in range(0, 3, 1): 71 | single, heg, wdt, lbl, xmn = sess.run([image, height, width, label, xmax]) 72 | 73 | # show the image 74 | plt.imshow(single) 75 | plt.show() 76 | 77 | # print the label 78 | print("Image height:{}, width:{}, label:{}, 
xmax:{}.".format(heg, wdt, lbl, xmn)) 79 | 80 | coord.request_stop() 81 | coord.join(threads) 82 | 83 | if __name__ == '__main__': 84 | inputs(\ 85 | os.path.join(outputFilePath,'train.tfrecords')\ 86 | , outputImage=True) 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | -------------------------------------------------------------------------------- /raw/origindata/tulips.json: -------------------------------------------------------------------------------- 1 | {"frames":{"0":[{"x1":0,"y1":0,"x2":103,"y2":140,"id":0,"width":564,"height":423,"type":"Rectangle","tags":["tulips"],"name":1},{"x1":129,"y1":208,"x2":265,"y2":358,"id":1,"width":564,"height":423,"type":"Rectangle","tags":["tulips"],"name":2},{"x1":338.5,"y1":8,"x2":555.5,"y2":306,"id":2,"width":564,"height":423,"type":"Rectangle","tags":["tulips"],"name":3}],"1":[{"x1":46,"y1":124,"x2":259,"y2":376,"id":3,"width":564,"height":423,"type":"Rectangle","tags":["tulips"],"name":1},{"x1":284,"y1":104,"x2":463,"y2":363,"id":4,"width":564,"height":423,"type":"Rectangle","tags":["tulips"],"name":2},{"x1":467,"y1":264,"x2":554,"y2":413,"id":5,"width":564,"height":423,"type":"Rectangle","tags":["tulips"],"name":3},{"x1":0,"y1":342,"x2":57,"y2":423,"id":6,"width":564,"height":423,"type":"Rectangle","tags":["tulips"],"name":4},{"x1":386,"y1":312,"x2":483,"y2":422,"id":7,"width":564,"height":423,"type":"Rectangle","tags":["tulips"],"name":5},{"x1":295,"y1":358,"x2":376,"y2":418,"id":8,"width":564,"height":423,"type":"Rectangle","tags":["tulips"],"name":6}],"2":[{"x1":12,"y1":28,"x2":118,"y2":111,"id":9,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":1},{"x1":72,"y1":231,"x2":141,"y2":286,"id":10,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":2},{"x1":102,"y1":267,"x2":188,"y2":367,"id":11,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":3},{"x1":0,"y1":312,"x2":61,"y2":406,"id":12,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":4},{"x1":0,"y1":206,"x2":42,"y2":288,"id":13,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":5},{"x1":23,"y1":128,"x2":130,"y2":211,"id":14,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":6},{"x1":393,"y1":237,"x2":513,"y2":332,"id":15,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":7},{"x1":456,"y1":112,"x2":511,"y2":176,"id":16,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":8},{"x1":344,"y1":105,"x2":409,"y2":153,"id":17,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":9},{"x1":293,"y1":136,"x2":353,"y2":207,"id":18,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":10},{"x1":247,"y1":83,"x2":282,"y2":121,"id":19,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":11},{"x1":209,"y1":28,"x2":261,"y2":74,"id":20,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":12},{"x1":157,"y1":69,"x2":243,"y2":127,"id":21,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":13},{"x1":139,"y1":173,"x2":186,"y2":236,"id":22,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":14},{"x1":180,"y1":324,"x2":218,"y2":366,"id":23,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":15},{"x1":76,"y1":344,"x2":149,"y2":403,"id":24,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":16},{"x1":103,"y1":99,"x2":163,"y2":159,"id":25,"width":567,"height":423,"type":"Rectangle","tags":["tulips"],"name":17}],"
3":[{"x1":0,"y1":0,"x2":278,"y2":350,"id":26,"width":601,"height":423,"type":"Rectangle","tags":["tulips"],"name":1},{"x1":269,"y1":63,"x2":591,"y2":414,"id":27,"width":601,"height":423,"type":"Rectangle","tags":["tulips"],"name":2}],"4":[{"x1":100,"y1":223,"x2":294,"y2":418,"id":28,"width":635,"height":423,"type":"Rectangle","tags":["tulips"],"name":1},{"x1":359,"y1":84,"x2":626,"y2":361,"id":29,"width":635,"height":423,"type":"Rectangle","tags":["tulips"],"name":2},{"x1":278,"y1":244,"x2":427,"y2":420,"id":30,"width":635,"height":423,"type":"Rectangle","tags":["tulips"],"name":3}],"5":[{"x1":203,"y1":168,"x2":325,"y2":292,"id":31,"width":593,"height":423,"type":"Rectangle","tags":["tulips"],"name":1}],"6":[{"x1":174,"y1":95,"x2":281,"y2":285,"id":32,"width":675,"height":423,"type":"Rectangle","tags":["tulips"],"name":1},{"x1":289,"y1":117,"x2":370,"y2":284,"id":33,"width":675,"height":423,"type":"Rectangle","tags":["tulips"],"name":2},{"x1":350,"y1":157,"x2":450,"y2":323,"id":34,"width":675,"height":423,"type":"Rectangle","tags":["tulips"],"name":3},{"x1":457,"y1":106,"x2":584,"y2":282,"id":35,"width":675,"height":423,"type":"Rectangle","tags":["tulips"],"name":4}],"7":[{"x1":492,"y1":161,"x2":607,"y2":279,"id":36,"width":635,"height":423,"type":"Rectangle","tags":["tulips"],"name":1},{"x1":320,"y1":82,"x2":494,"y2":249,"id":37,"width":635,"height":423,"type":"Rectangle","tags":["tulips"],"name":2},{"x1":155,"y1":159,"x2":297,"y2":312,"id":38,"width":635,"height":423,"type":"Rectangle","tags":["tulips"],"name":3},{"x1":33,"y1":117,"x2":199,"y2":271,"id":39,"width":635,"height":423,"type":"Rectangle","tags":["tulips"],"name":4}],"8":[{"x1":226,"y1":9,"x2":536,"y2":372,"id":40,"width":564,"height":423,"type":"Rectangle","tags":["tulips"],"name":1},{"x1":45,"y1":58,"x2":257,"y2":397,"id":41,"width":564,"height":423,"type":"Rectangle","tags":["tulips"],"name":2},{"x1":469.5,"y1":376,"x2":555.5,"y2":423,"id":42,"width":564,"height":423,"type":"Rectangle","tags":["tulips"],"name":3}],"9":[{"x1":119,"y1":9,"x2":498,"y2":408,"id":43,"width":635,"height":423,"type":"Rectangle","tags":["tulips"],"name":1},{"x1":447,"y1":174,"x2":607,"y2":364,"id":44,"width":635,"height":423,"type":"Rectangle","tags":["tulips"],"name":2}]},"framerate":"1","inputTags":"tulips","suggestiontype":"track","scd":false,"visitedFrames":[0,1,2,3,4,5,6,7,8,9],"visitedFrameNames":["133692329_c1150ed811_n.jpg","142235237_da662d925c.jpg","155097272_70feb13184.jpg","2249756775_02e693beda_n.jpg","2361075034_cf730b8682.jpg","2374855021_21959b40c0_n.jpg","2430566689_8543552f9b.jpg","2535936698_78cc03df3f_n.jpg","483880052_19fdb26a9f.jpg","510698601_9f61d6f8d8.jpg"],"labeler":"jkw"} -------------------------------------------------------------------------------- /models/model/ssd_mobilenet_v1_coco.config: -------------------------------------------------------------------------------- 1 | # SSD with Mobilenet v1 configuration for MSCOCO Dataset. 2 | # Users should configure the fine_tune_checkpoint field in the train config as 3 | # well as the label_map_path and input_path fields in the train_input_reader and 4 | # eval_input_reader. Search for "PATH_TO_BE_CONFIGURED" to find the fields that 5 | # should be configured. 
6 | 7 | model { 8 | ssd { 9 | num_classes: 6 10 | box_coder { 11 | faster_rcnn_box_coder { 12 | y_scale: 10.0 13 | x_scale: 10.0 14 | height_scale: 5.0 15 | width_scale: 5.0 16 | } 17 | } 18 | matcher { 19 | argmax_matcher { 20 | matched_threshold: 0.5 21 | unmatched_threshold: 0.5 22 | ignore_thresholds: false 23 | negatives_lower_than_unmatched: true 24 | force_match_for_each_row: true 25 | } 26 | } 27 | similarity_calculator { 28 | iou_similarity { 29 | } 30 | } 31 | anchor_generator { 32 | ssd_anchor_generator { 33 | num_layers: 6 34 | min_scale: 0.2 35 | max_scale: 0.95 36 | aspect_ratios: 1.0 37 | aspect_ratios: 2.0 38 | aspect_ratios: 0.5 39 | aspect_ratios: 3.0 40 | aspect_ratios: 0.3333 41 | } 42 | } 43 | image_resizer { 44 | fixed_shape_resizer { 45 | height: 300 46 | width: 300 47 | } 48 | } 49 | box_predictor { 50 | convolutional_box_predictor { 51 | min_depth: 0 52 | max_depth: 0 53 | num_layers_before_predictor: 0 54 | use_dropout: false 55 | dropout_keep_probability: 0.8 56 | kernel_size: 1 57 | box_code_size: 4 58 | apply_sigmoid_to_scores: false 59 | conv_hyperparams { 60 | activation: RELU_6, 61 | regularizer { 62 | l2_regularizer { 63 | weight: 0.00004 64 | } 65 | } 66 | initializer { 67 | truncated_normal_initializer { 68 | stddev: 0.03 69 | mean: 0.0 70 | } 71 | } 72 | batch_norm { 73 | train: true, 74 | scale: true, 75 | center: true, 76 | decay: 0.9997, 77 | epsilon: 0.001, 78 | } 79 | } 80 | } 81 | } 82 | feature_extractor { 83 | type: 'ssd_mobilenet_v1' 84 | min_depth: 16 85 | depth_multiplier: 1.0 86 | conv_hyperparams { 87 | activation: RELU_6, 88 | regularizer { 89 | l2_regularizer { 90 | weight: 0.00004 91 | } 92 | } 93 | initializer { 94 | truncated_normal_initializer { 95 | stddev: 0.03 96 | mean: 0.0 97 | } 98 | } 99 | batch_norm { 100 | train: true, 101 | scale: true, 102 | center: true, 103 | decay: 0.9997, 104 | epsilon: 0.001, 105 | } 106 | } 107 | } 108 | loss { 109 | classification_loss { 110 | weighted_sigmoid { 111 | } 112 | } 113 | localization_loss { 114 | weighted_smooth_l1 { 115 | } 116 | } 117 | hard_example_miner { 118 | num_hard_examples: 3000 119 | iou_threshold: 0.99 120 | loss_type: CLASSIFICATION 121 | max_negatives_per_positive: 3 122 | min_negatives_per_image: 0 123 | } 124 | classification_weight: 1.0 125 | localization_weight: 1.0 126 | } 127 | normalize_loss_by_num_matches: true 128 | post_processing { 129 | batch_non_max_suppression { 130 | score_threshold: 1e-8 131 | iou_threshold: 0.6 132 | max_detections_per_class: 100 133 | max_total_detections: 100 134 | } 135 | score_converter: SIGMOID 136 | } 137 | } 138 | } 139 | 140 | train_config: { 141 | batch_size: 6 142 | optimizer { 143 | rms_prop_optimizer: { 144 | learning_rate: { 145 | exponential_decay_learning_rate { 146 | initial_learning_rate: 0.004 147 | decay_steps: 800720 148 | decay_factor: 0.95 149 | } 150 | } 151 | momentum_optimizer_value: 0.9 152 | decay: 0.9 153 | epsilon: 1.0 154 | } 155 | } 156 | fine_tune_checkpoint: "" 157 | from_detection_checkpoint: true 158 | # Note: The below line limits the training process to 200K steps, which we 159 | # empirically found to be sufficient enough to train the pets dataset. This 160 | # effectively bypasses the learning rate schedule (the learning rate will 161 | # never decay). Remove the below line to train indefinitely. 
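# (In this repository's copy the value below is set to 120 steps, a quick demo
# setting rather than the 200K mentioned above; raise it substantially when
# training on a real dataset.)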
162 | num_steps: 120 163 | data_augmentation_options { 164 | random_horizontal_flip { 165 | } 166 | } 167 | data_augmentation_options { 168 | ssd_random_crop { 169 | } 170 | } 171 | } 172 | 173 | train_input_reader: { 174 | tf_record_input_reader { 175 | input_path: "TF_ObjectDetection_Flow/data/train.tfrecords" 176 | } 177 | label_map_path: "TF_ObjectDetection_Flow/data/image_label.pbtxt" 178 | } 179 | 180 | eval_config: { 181 | num_examples: 10 182 | # Note: The below line limits the evaluation process to 10 evaluations. 183 | # Remove the below line to evaluate indefinitely. 184 | max_evals: 10 185 | } 186 | 187 | eval_input_reader: { 188 | tf_record_input_reader { 189 | input_path: "TF_ObjectDetection_Flow/data/eval.tfrecords" 190 | } 191 | label_map_path: "TF_ObjectDetection_Flow/data/image_label.pbtxt" 192 | shuffle: false 193 | num_readers: 1 194 | } 195 | -------------------------------------------------------------------------------- /combine_vott_labeling_file_for_tfrecord.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | # author: jiankaiwang (https://jiankaiwang.no-ip.biz/) 3 | 4 | from __future__ import absolute_import 5 | from __future__ import division 6 | from __future__ import print_function 7 | 8 | import argparse 9 | import json 10 | import os 11 | import codecs 12 | import sys 13 | import math 14 | from PIL import Image 15 | from shutil import copyfile 16 | 17 | # global variables 18 | id_increament = 0 19 | output_labels_index = 0 20 | output_labels = {\ 21 | "frames": {} \ 22 | , "inputTags": "" \ 23 | , "visitedFrames": [] \ 24 | , "visitedFrameNames": [] \ 25 | , "framerate": "1"\ 26 | , "suggestiontype": "track"\ 27 | , "scd": "false" \ 28 | } 29 | 30 | labeldir = os.path.join('.','tmp') 31 | labelimgdir = os.path.join('.','tmp','input') 32 | outputdir = os.path.join('.','tmp') 33 | outputimgdir = os.path.join('.','tmp','output') 34 | COMP_WIDTH = "auto" 35 | COMP_HEIGHT = "auto" 36 | 37 | # compressed=True: the max width is image width, not scaling 38 | def scaleImageSize(set_width, set_height, img_width, img_height, compressed=True): 39 | if str(set_width) != "auto" and str(set_height) != "auto": 40 | return int(set_width), int(set_height) 41 | elif str(set_width) != "auto" and str(set_height) == "auto": 42 | # set_width is not auto 43 | set_height = math.ceil(float(set_width)*float(img_height) / float(img_width)) 44 | if set_height > img_height and compressed: 45 | set_height = img_height 46 | elif str(set_width) == "auto" and str(set_height) != "auto": 47 | set_width = math.ceil(float(set_height)*float(img_width) / float(img_height)) 48 | if set_width > img_width and compressed: 49 | set_width = img_width 50 | else: 51 | set_width = img_width 52 | set_height = img_height 53 | return int(set_width), int(set_height) 54 | 55 | def comporessImage(fromFile, toFile, width, height): 56 | try: 57 | # cv2 might encounter orientation issue 58 | # pillow does not 59 | im = Image.open(fromFile) 60 | (img_width, img_height) = im.size 61 | new_width, new_height = scaleImageSize(width, height, img_width, img_height) 62 | (im.resize((new_width, new_height), Image.BILINEAR)).save(toFile) 63 | return True, float(new_height/img_height), img_width, img_height 64 | except Exception as e: 65 | print("Error: Parsing image {} is error.".format(fromFile)) 66 | print("Error: Message is {}.".format(e)) 67 | # directly copy the image with the same size 68 | copyfile(fromFile, toFile) 69 | return False, 1.0, 1.0, 1.0 70 | 71 | def 
read_label_file(file): 72 | tmpContent = "" 73 | with codecs.open(file, 'r', 'utf-8') as fin: 74 | for line in fin: 75 | tmpContent = tmpContent + line.strip() 76 | return json.loads(tmpContent) 77 | 78 | def find_all_label_files(labeldir): 79 | return next(os.walk(labeldir))[2] 80 | 81 | def find_all_image_dirs(labelimgdir): 82 | return next(os.walk(labelimgdir))[1] 83 | 84 | def __addFrames(jsonData): 85 | global output_labels_index, id_increament, order_header, COMP_WIDTH, COMP_HEIGHT 86 | global labelimgdir, outputimgdir 87 | tmpMaxId = 0 88 | allVisitedFrames = jsonData["visitedFrames"] 89 | allFrameNames = jsonData["visitedFrameNames"] 90 | for frameIndex in allVisitedFrames: 91 | if str(frameIndex) not in jsonData["frames"].keys(): 92 | print("Warning: (no labeling) lost frames index {} on image {}.".\ 93 | format(str(frameIndex), jsonData["visitedFrameNames"][frameIndex])) 94 | continue 95 | output_labels["frames"][str(output_labels_index)] = jsonData["frames"][str(frameIndex)] 96 | 97 | allLabels = output_labels["frames"][str(output_labels_index)] 98 | for idx in range(0, len(allLabels), 1): 99 | if allLabels[idx]["id"] > tmpMaxId: 100 | tmpMaxId = allLabels[idx]["id"] 101 | output_labels["frames"][str(output_labels_index)][idx]["id"] += id_increament 102 | 103 | output_labels["visitedFrames"].append(output_labels_index) 104 | 105 | # rename the file with a index on the header of its filename 106 | to_dir_img = "{}_{}".format(order_header, allFrameNames[frameIndex]) 107 | if os.path.isfile(os.path.join(labelimgdir, allFrameNames[frameIndex])): 108 | status, scaled_ratio, img_width, img_height = comporessImage(\ 109 | os.path.join(labelimgdir, allFrameNames[frameIndex]) \ 110 | , os.path.join(outputimgdir, to_dir_img) \ 111 | , COMP_WIDTH, COMP_HEIGHT) 112 | if not status: 113 | print("Error: Can not compress the image {}."\ 114 | .format(allFrameNames[frameIndex])) 115 | else: 116 | print("Error: Lose image {}".format(os.path.join(labelimgdir, allFrameNames[frameIndex]))) 117 | output_labels["visitedFrameNames"].append(to_dir_img) 118 | output_labels_index = output_labels_index + 1 119 | 120 | # get max id and add one, 121 | # it is the beginning index of the next labelling files 122 | id_increament += tmpMaxId + 1 123 | 124 | def __addTags(jsonData): 125 | global output_labels 126 | if len(output_labels['inputTags']) > 1: 127 | currentTags = output_labels['inputTags'].strip().split(',') 128 | else: 129 | currentTags = [] 130 | inputTags = jsonData['inputTags'].strip().split(',') 131 | for tag in inputTags: 132 | if tag not in currentTags: 133 | currentTags.append(tag) 134 | output_labels['inputTags'] = ','.join(currentTags) 135 | 136 | def add_label_file(file): 137 | labelData = read_label_file(file) 138 | __addFrames(labelData) 139 | __addTags(labelData) 140 | 141 | def output_all_labels(outputdir): 142 | global output_labels 143 | outputPath = os.path.join(outputdir, 'output.json') 144 | with codecs.open(outputPath, 'w', 'utf-8') as fout: 145 | fout.write(json.dumps(output_labels, ensure_ascii=True)) 146 | 147 | def __check_input_output(): 148 | global labeldir, labelimgdir, outputdir, outputimgdir 149 | 150 | # check image pooling directory 151 | if (not os.path.isdir(labeldir)) or (not os.path.isdir(labelimgdir)): 152 | print("Error: no labelling folder {} or no image input folder {}"\ 153 | .format(labeldir, labelimgdir)) 154 | return False 155 | 156 | # check output file path 157 | if not os.path.isdir(outputdir): 158 | try: 159 | os.mkdir(outputdir) 160 | except: 161 | 
print("Error: No such labelling output directory and can not create it.") 162 | return False 163 | 164 | if not os.path.isdir(outputimgdir): 165 | try: 166 | os.mkdir(outputimgdir) 167 | except: 168 | print("Error: No such image output directory and can not create it.") 169 | return False 170 | 171 | return True 172 | 173 | if __name__ == "__main__": 174 | parser = argparse.ArgumentParser() 175 | parser.add_argument("--labeldir" \ 176 | , type=str \ 177 | , default=os.path.join('.','tmp') \ 178 | , help="dir conserves label files") 179 | parser.add_argument("--labelImgdir" \ 180 | , type=str \ 181 | , default=os.path.join('.','tmp','input') \ 182 | , help="dir conserves images mapping to labelling files") 183 | parser.add_argument("--outputdir" \ 184 | , type=str \ 185 | , default=os.path.join('.','tmp') \ 186 | , help="combined label file") 187 | parser.add_argument("--outputImgdir" \ 188 | , type=str \ 189 | , default=os.path.join('.','tmp','output') \ 190 | , help="combined label file") 191 | parser.add_argument("--compressedwidth" \ 192 | , type=str \ 193 | , default="auto" \ 194 | , help="compress the image by width") 195 | parser.add_argument("--compressedheight" \ 196 | , type=str \ 197 | , default="auto" \ 198 | , help="compress the image by height") 199 | args = parser.parse_args() 200 | 201 | is_available = False 202 | 203 | # get args value 204 | labeldir = args.labeldir 205 | labelimgdir = args.labelImgdir 206 | outputdir = args.outputdir 207 | outputimgdir = args.outputImgdir 208 | COMP_WIDTH = args.compressedwidth 209 | COMP_HEIGHT = args.compressedheight 210 | 211 | # others variable 212 | # due to vott using the order related to the directory 213 | # you have to rename the file to avoid the misordering 214 | # while combining all vott labelling files 215 | order_header = 0 216 | 217 | if not __check_input_output(): 218 | print("Error: Can not validate input {}, {} or output {} {}."\ 219 | .format(labeldir, labelimgdir, outputdir, outputimgdir)) 220 | sys.exit(1) 221 | 222 | # parse directory 223 | allLabelFiles = find_all_label_files(labeldir) 224 | for files in allLabelFiles: 225 | order_header += 1 226 | print('Parse file {}.'.format(files)) 227 | add_label_file(os.path.join(labeldir, files)) 228 | output_all_labels(outputdir) 229 | 230 | 231 | 232 | 233 | 234 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Tensorflow Object Detection Flow 2 | 3 | 4 | 5 | The tutorial is the workflow from labeling objects (using vott), building a object detection model in tensorflow, training it locally, and then embedding it on cloud API server or mobile device. 6 | 7 | 8 | 9 | The flowchart is like below. 10 | 11 | ![](./usr/flow.png) 12 | 13 | 14 | 15 | Reference 16 | 17 | * Vott labeling tool: https://github.com/jiankaiwang/vott 18 | 19 | * object detection: https://github.com/tensorflow/models/tree/master/research/object_detection 20 | * cloud api server (light): https://github.com/jiankaiwang/scai 21 | * mobile device 22 | * development board 23 | 24 | 25 | 26 | ## Model Generation 27 | 28 | 29 | 30 | ### Environment Preparation 31 | 32 | Follow the webpage (https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/installation.md) to install necessary packages or setup environment variables. 
### Prepare inputs

In TF_ObjectDetection_Flow, the directory `raw/origindata` contains the example dataset, which consists of 5 types of flowers. The dataset is downloaded from tensorflow.org (https://www.tensorflow.org/tutorials/image_retraining) and is a good resource for both the image classification task and the object detection task.

We then use the VoTT tool (https://github.com/jiankaiwang/vott) to label the flower dataset and further use the labeling data to train the model.

The default directory structure is shown below. After labeling one type of images, VoTT outputs a single labelling file (by default the folder name with a json extension, for example, sunflowers gives sunflowers.json). We can put all of them into one directory as below.

```text
+ raw
  + origindata
    + daisy
    + dandelion
    + roses
    + sunflowers
    + tulips
    - daisy.json
    - dandelion.json
    - roses.json
    - sunflowers.json
    - tulips.json
```

[**Optional**] If you have multiple labelling files, the script `combine_vott_labeling_file_for_tfrecord.py` helps you combine them. The default directory structure is shown below.

```text
+ tmp
  + input
  - label1.json
  - label2.json
  - label3.json
```

**The script combines all of the json files regardless of their names.** The folder `input` contains all image files that are already labelled. The following command runs the script.

```shell
cd ~/TF_ObjectDetection_Flow
python combine_vott_labeling_file_for_tfrecord.py \
  --labeldir=./tmp \
  --labelImgdir=./tmp/input \
  --outputdir=./tmp \
  --outputImgdir=./tmp/output \
  --compressedwidth=64 \
  --compressedheight=auto
```

The script `Vott2TFRecordAndTFLabel_multiple.py` transforms VoTT labeling data into the `tfrecords` format. You can also read the tfrecords data back by running the script `read_tfrecord_multiple.py`.

```shell
cd ~/TF_ObjectDetection_Flow
python Vott2TFRecordAndTFLabel_multiple.py \
  --labeldir=./raw/origindata \
  --outputfilepath=./data \
  --trainevalratio=0.8
```

Reference: [PASCAL VOC/ Oxford-IIIT Pet](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/preparing_inputs.md), [Your-Own Dataset](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/using_your_own_dataset.md)
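If you just want to peek at the generated records without the full `read_tfrecord_multiple.py` script, the sketch below prints a summary of the first example (an illustrative snippet using the TF 1.x API; the feature keys are the ones written by `Vott2TFRecordAndTFLabel_multiple.py`):

```python
import tensorflow as tf

# Print a summary of the first example stored in train.tfrecords.
for record in tf.python_io.tf_record_iterator("./data/train.tfrecords"):
    example = tf.train.Example()
    example.ParseFromString(record)
    feats = example.features.feature
    print("filename:", feats["image/filename"].bytes_list.value[0])
    print("width x height:",
          feats["image/width"].int64_list.value[0], "x",
          feats["image/height"].int64_list.value[0])
    print("classes:", feats["image/object/class/text"].bytes_list.value)
    break  # only inspect the first record
```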
### Configure an object detection training pipeline

A basic object detection training configuration looks like the following.

```ini
model {
  (... Add model config here...)
}

train_config: {
  (... Add train_config here...)
}

train_input_reader: {
  (... Add train_input configuration here...)
}

eval_config: {
}

eval_input_reader: {
  (... Add eval_input configuration here...)
}
```

You can copy a sample configuration file from the path `models/research/object_detection/samples/configs` to quickly establish your own configuration.

Here, we use `ssd_mobilenet_v1_coco.config` as the example.

```shell
# make sure to git clone https://github.com/tensorflow/models.git
cd ~/models/research
mkdir ./TF_ObjectDetection_Flow
cp object_detection/samples/configs/ssd_mobilenet_v1_coco.config ./TF_ObjectDetection_Flow

# change parameters to your own settings
vim ./TF_ObjectDetection_Flow/ssd_mobilenet_v1_coco.config
```

Be aware of changing **num_classes** in the configuration file, and make sure to edit the sections with the **"PATH_TO_BE_CONFIGURED"** tag.

```ini
model {
  ssd {
    num_classes: (change to your dataset)
    # ...
    # as the default
    # ...
  }
}

train_config: {
  # ...
  # as the default
  # ...
  fine_tune_checkpoint: "(you can keep this empty if there is no fine tune model)"
  from_detection_checkpoint: true
  # Note: The below line limits the training process to 200K steps, which we
  # empirically found to be sufficient to train the pets dataset. This
  # effectively bypasses the learning rate schedule (the learning rate will
  # never decay). Remove the below line to train indefinitely.
  num_steps: (you can change to fit your dataset)
  data_augmentation_options {
    random_horizontal_flip {
    }
  }
  data_augmentation_options {
    ssd_random_crop {
    }
  }
}

train_input_reader: {
  tf_record_input_reader {
    # PATH_TO_BE_CONFIGURED
    input_path: "TF_ObjectDetection_Flow/data/train.tfrecords"
  }
  # PATH_TO_BE_CONFIGURED
  label_map_path: "TF_ObjectDetection_Flow/data/image_label.pbtxt"
}

eval_config: {
  # as the default
}

eval_input_reader: {
  tf_record_input_reader {
    # PATH_TO_BE_CONFIGURED
    input_path: "TF_ObjectDetection_Flow/data/eval.tfrecords"
  }
  # PATH_TO_BE_CONFIGURED
  label_map_path: "TF_ObjectDetection_Flow/data/image_label.pbtxt"
  shuffle: false
  num_readers: 1
}
```

Reference: https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/configuring_jobs.md
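Since `num_classes` must match the number of `item` entries in `data/image_label.pbtxt`, a tiny helper sketch like the one below can count them (a hypothetical snippet, relying on the simple `item { ... }` layout that `writePbtxt` in `Vott2TFRecordAndTFLabel_multiple.py` emits):

```python
import codecs

# Count the classes in the generated label map: one "item {" per class.
with codecs.open("./data/image_label.pbtxt", "r", "utf-8") as fin:
    num_classes = fin.read().count("item {")
print("num_classes:", num_classes)  # e.g. 5 for the flower dataset
```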
### Running Locally

The recommended directory structure is shown below.

```text
+ raw (see the above "Prepare inputs" section)
+ tmp (see the above "Prepare inputs" section)
+ data (conserves the tfrecord files and the label map file)
  - label_map file
  - train TFRecord file
  - eval TFRecord file
+ models
  + model
    - pipeline config file
    + train
    + eval
```

#### Running the Training Job

```shell
# From the tensorflow/models/research/ directory
python object_detection/train.py \
    --logtostderr \
    --pipeline_config_path=${PATH_TO_YOUR_PIPELINE_CONFIG} \
    --train_dir=${PATH_TO_TRAIN_DIR}
```

For example,

```shell
cd ~/models/research
python object_detection/train.py \
    --logtostderr \
    --pipeline_config_path=~/TF_ObjectDetection_Flow/models/model/ssd_mobilenet_v1_coco.config \
    --train_dir=~/TF_ObjectDetection_Flow/models/model/train
```

The training results are saved under the path `~/TF_ObjectDetection_Flow/models/model/train`.

#### Running the Evaluation Job

```shell
# From the tensorflow/models/research/ directory
python object_detection/eval.py \
    --logtostderr \
    --pipeline_config_path=${PATH_TO_YOUR_PIPELINE_CONFIG} \
    --checkpoint_dir=${PATH_TO_TRAIN_DIR} \
    --eval_dir=${PATH_TO_EVAL_DIR}
```

For example,

```shell
cd ~/models/research
python object_detection/eval.py \
    --logtostderr \
    --pipeline_config_path=~/TF_ObjectDetection_Flow/models/model/ssd_mobilenet_v1_coco.config \
    --checkpoint_dir=~/TF_ObjectDetection_Flow/models/model/train \
    --eval_dir=~/TF_ObjectDetection_Flow/models/model/eval
```

The evaluation results are saved under the path `~/TF_ObjectDetection_Flow/models/model/eval`.

Reference: https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/running_locally.md

### Generate a frozen_inference_graph

You have to replace the **ckpt index** with your own, for example, replace `model.ckpt-120` with the latest checkpoint index in your train directory.

```shell
# From tensorflow/models/research/
python object_detection/export_inference_graph.py \
    --input_type image_tensor \
    --pipeline_config_path ${PIPELINE_CONFIG_PATH} \
    --trained_checkpoint_prefix ${TRAIN_PATH} \
    --output_directory ${OUTPUT_DIR}
```

For example,

```shell
cd ~/models/research
python object_detection/export_inference_graph.py \
    --input_type image_tensor \
    --pipeline_config_path ~/TF_ObjectDetection_Flow/models/model/train/pipeline.config \
    --trained_checkpoint_prefix ~/TF_ObjectDetection_Flow/models/model/train/model.ckpt-120 \
    --output_directory ~/TF_ObjectDetection_Flow/models/model/res
```

The frozen graph is saved under the path `~/TF_ObjectDetection_Flow/models/model/res`.

Reference: https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/exporting_models.md
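Before wiring the exported model into a server or an app, you can confirm that the frozen graph file actually loads (a minimal check sketch using the TF 1.x API; the path assumes the `--output_directory` used above):

```python
import os
import tensorflow as tf

# Load the exported frozen graph and count its operations.
path = os.path.expanduser(
    "~/TF_ObjectDetection_Flow/models/model/res/frozen_inference_graph.pb")
graph_def = tf.GraphDef()
with tf.gfile.GFile(path, "rb") as fin:
    graph_def.ParseFromString(fin.read())
tf.import_graph_def(graph_def, name="")
print("loaded ops:", len(tf.get_default_graph().get_operations()))
```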
## Model Usage

After you generate a frozen model (`frozen_inference_graph.pb`), you can simply edit and run the notebook https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb to test and apply the model.

You can also view the notebook ([ipynb](object_detection_demo.ipynb)) to do inference on an image.

![](./usr/object_detection.png)

You can further deploy the model to the cloud AI API server (https://github.com/jiankaiwang/scai), to a mobile device, or to a development board.

--------------------------------------------------------------------------------
/Vott2TFRecordAndTFLabel_multiple.py:
--------------------------------------------------------------------------------
# coding: utf-8
# author: jiankaiwang (https://jiankaiwang.no-ip.biz/)

import json
import os
import tensorflow as tf
import cv2
from object_detection.utils import dataset_util
import codecs
import math
import argparse

# global variables
labelDir = os.path.join('.', 'raw', 'origindata')
outputFilePath = os.path.join('.', 'data')
outputLabelFile = os.path.join(outputFilePath, 'image_label.pbtxt')
outputTrainTFRecordFile = os.path.join(outputFilePath, 'train.tfrecords')
outputEvalTFRecordFile = os.path.join(outputFilePath, 'eval.tfrecords')
TRAIN_VALIDATION_RATIO = 0.8  # TRAIN:VALIDATION = 8:2

# private variables
__allTags = []
__trainLabels = []
__evalLabels = []

class EXAMPLE:
    # a plain container for the fields of one labeled image
    def __init__(self):
        self.height = 0
        self.width = 0
        self.filename = ""
        self.image_format = b'jpg'
        self.xmins = []
        self.xmaxs = []
        self.ymins = []
        self.ymaxs = []
        self.classes_text = []
        self.classes = []

def create_tf_example(example, imgInByte):
    height = example.height  # Image height
    width = example.width  # Image width
    filename = example.filename  # Filename of the image. Empty if image is not from file
    encoded_image_data = imgInByte  # Encoded image bytes
    image_format = example.image_format  # b'jpeg' or b'png'

    xmins = example.xmins  # List of normalized left x coordinates in bounding box (1 per box)
    xmaxs = example.xmaxs  # List of normalized right x coordinates in bounding box (1 per box)
    ymins = example.ymins  # List of normalized top y coordinates in bounding box (1 per box)
    ymaxs = example.ymaxs  # List of normalized bottom y coordinates in bounding box (1 per box)
    classes_text = example.classes_text  # List of string class name of bounding box (1 per box)
    classes = example.classes  # List of integer class id of bounding box (1 per box)

    tf_example = tf.train.Example(features=tf.train.Features(feature={
        'image/height': dataset_util.int64_feature(height),
        'image/width': dataset_util.int64_feature(width),
        'image/filename': dataset_util.bytes_feature(filename),
        'image/source_id': dataset_util.bytes_feature(filename),
        'image/encoded': dataset_util.bytes_feature(encoded_image_data),
        'image/format': dataset_util.bytes_feature(image_format),
        'image/object/bbox/xmin': dataset_util.float_list_feature(xmins),
        'image/object/bbox/xmax': dataset_util.float_list_feature(xmaxs),
        'image/object/bbox/ymin': dataset_util.float_list_feature(ymins),
        'image/object/bbox/ymax': dataset_util.float_list_feature(ymaxs),
        'image/object/class/text': dataset_util.bytes_list_feature(classes_text),
        'image/object/class/label': dataset_util.int64_list_feature(classes),
    }))
    return tf_example

def getAllFileList(getVottPath):
    # list the VoTT label (.json) files directly under the given path
    filenames = next(os.walk(getVottPath))[2]
    return filenames
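# Assumed layout under labelDir (inferred from how getJsonFile and
# sepTrainValidateData build their paths; see also the README):
#
#   raw/origindata/
#     daisy/           <- image folder, one per class
#     daisy.json       <- VoTT label file named after the folder
#     ...
#
# Each <name>.json is matched to its image folder <name> via
# example.split('.')[0] below.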
getJsonFile(getFileName):
    # read a VoTT label file under labelDir and parse it as JSON
    tmpContent = ""
    with codecs.open(os.path.join(labelDir, getFileName), 'r', 'utf-8') as fin:
        for line in fin:
            tmpContent += line.strip()
    return json.loads(tmpContent)

def conserveTagName(getTagList):
    for tags in getTagList:
        if tags not in __allTags:
            __allTags.append(tags)

def writePbtxt(outputLabelFile):
    # write the label map; the class id is the 1-based index of the tag
    with codecs.open(outputLabelFile, 'w', 'utf-8') as fout:
        for item in __allTags:
            fout.write("item {\r\n id: " + str(__allTags.index(item) + 1)
                       + '\r\n name: \'' + item + "\'\r\n}\r\n")

def parseExampleObject(img_height, img_width, filename, fileformat,
                       xmins, xmaxs, ymins, ymaxs, classes_text, classes):
    egObj = EXAMPLE()
    egObj.height = img_height
    egObj.width = img_width
    egObj.filename = filename
    egObj.image_format = fileformat
    egObj.xmins = xmins
    egObj.xmaxs = xmaxs
    egObj.ymins = ymins
    egObj.ymaxs = ymaxs
    egObj.classes_text = classes_text
    egObj.classes = classes
    return egObj

def getImgEncode(getPath):
    # you have to read the raw bytes with tf.gfile.FastGFile,
    # otherwise training fails with the error message:
    # [Unable to decode bytes as JPEG, PNG, GIF, or BMP]
    image_data = tf.gfile.FastGFile(getPath, 'rb').read()
    return image_data

def indexTrainValidate(ttlFileCount):
    # the first (ratio * total) frames are for training,
    # the remaining frames are for evaluation
    global TRAIN_VALIDATION_RATIO
    evalTtlCount = math.ceil(ttlFileCount * (1.0 - TRAIN_VALIDATION_RATIO))
    return 0, ttlFileCount - evalTtlCount

def sepTrainValidateData(getJsonContent, example, isTrain=True):
    visitedFrameNames = getJsonContent['visitedFrameNames']

    # get the index range for the training or the evaluation split
    train_start, eval_start = indexTrainValidate(len(visitedFrameNames))
    if not isTrain:
        train_start, eval_start = eval_start, len(visitedFrameNames)

    for frames_index in range(train_start, eval_start, 1):
        # skip frames that have no labeling data
        if str(frames_index) not in getJsonContent['frames'].keys():
            print("Error: Frame {} has no key {} in file {}.".format(
                visitedFrameNames[frames_index], frames_index, example))
            continue

        # get the original image height, width and channels
        crtFileName = os.path.join(
            labelDir,
            example.split('.')[0],
            visitedFrameNames[frames_index])

        img = cv2.imread(crtFileName)
        if img is None:
            print("Error: Failed to parse image {}.".format(crtFileName))
            continue
        img_height, img_width, img_channel = img.shape

        # start to parse the labeling data
        labelData = getJsonContent['frames'][str(frames_index)]
        xmins = []
        xmaxs = []
        ymins = []
        ymaxs = []
        classes_text = []
        classes = []
        for eachLabeling in range(0, len(labelData), 1):
            labelInfo = labelData[eachLabeling]
            normalized_width = float(labelInfo["width"])
            normalized_height = float(labelInfo["height"])
            # normalize the box coordinates into [0, 1]
            x1 = float(labelInfo["x1"]) / normalized_width
            x2 = float(labelInfo["x2"]) / normalized_width
            y1 = float(labelInfo["y1"]) / normalized_height
            y2 = float(labelInfo["y2"]) / normalized_height
            xmins.append(x1)
            xmaxs.append(x2)
            ymins.append(y1)
            ymaxs.append(y2)
            classes_text.append(str.encode(labelInfo["tags"][0]))
            classes.append(__allTags.index(labelInfo["tags"][0]) + 1)
        filename = str.encode(visitedFrameNames[frames_index])
        fileformat = str.encode(visitedFrameNames[frames_index].split('.')[-1])

        if isTrain:
            __trainLabels.append(create_tf_example(
                parseExampleObject(img_height, img_width,
                                   filename, fileformat,
                                   xmins, xmaxs, ymins, ymaxs,
                                   classes_text, classes),
                getImgEncode(crtFileName)))
        else:
            __evalLabels.append(create_tf_example(
                parseExampleObject(img_height, img_width,
                                   filename, fileformat,
                                   xmins, xmaxs, ymins, ymaxs,
                                   classes_text, classes),
                getImgEncode(crtFileName)))
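# Worked example of the split (an illustrative note): with 10 visited
# frames and TRAIN_VALIDATION_RATIO = 0.8, indexTrainValidate(10) yields
# evalTtlCount = ceil(10 * 0.2) = 2, so frames 0..7 are written to
# train.tfrecords and frames 8..9 to eval.tfrecords.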
def prepareExampleList():
    allJsonFiles = getAllFileList(labelDir)

    # collect all tags first so that the class ids are stable
    for example in allJsonFiles:
        conserveTagName(getJsonFile(example)['inputTags'].split(','))

    # write out the pbtxt label map
    writePbtxt(outputLabelFile)

    # collect the example information per label file
    for example in allJsonFiles:
        getJsonContent = getJsonFile(example)
        sepTrainValidateData(getJsonContent, example, isTrain=True)

        if not math.isclose(1.0, TRAIN_VALIDATION_RATIO, rel_tol=1e-5):
            sepTrainValidateData(getJsonContent, example, isTrain=False)

def checkOutputPath():
    global outputFilePath
    if not os.path.isdir(outputFilePath):
        try:
            os.mkdir(outputFilePath)
            return 0
        except OSError:
            return 1
    else:
        return 0

def main(_):
    prepareExampleList()

    # write the training tfrecords
    with tf.python_io.TFRecordWriter(outputTrainTFRecordFile) as writer:
        for i in range(0, len(__trainLabels), 1):
            writer.write(__trainLabels[i].SerializeToString())

    # write the evaluation tfrecords (skipped if the ratio is 1.0)
    if not math.isclose(1.0, TRAIN_VALIDATION_RATIO, rel_tol=1e-5):
        with tf.python_io.TFRecordWriter(outputEvalTFRecordFile) as writer:
            for i in range(0, len(__evalLabels), 1):
                writer.write(__evalLabels[i].SerializeToString())

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--labeldir',
        type=str,
        default=os.path.join('.', 'raw', 'origindata'),
        help='labelling directory')
    parser.add_argument(
        '--outputfilepath',
        type=str,
        default=os.path.join('.', 'data'),
        help='output path for the label map and the tfrecords')
    parser.add_argument(
        '--trainevalratio',
        type=float,
        default=0.8,
        help='the ratio of training to evaluation data, e.g. 0.8 for 8:2')
    FLAGS, unparsed = parser.parse_known_args()

    labelDir = FLAGS.labeldir
    outputFilePath = FLAGS.outputfilepath
    outputLabelFile = os.path.join(outputFilePath, 'image_label.pbtxt')
    outputTrainTFRecordFile = os.path.join(outputFilePath, 'train.tfrecords')
    outputEvalTFRecordFile = os.path.join(outputFilePath, 'eval.tfrecords')
    TRAIN_VALIDATION_RATIO = FLAGS.trainevalratio

    if checkOutputPath() == 0:
        tf.app.run()
    else:
        print("Error: Can not find or create the folder {}.".format(outputFilePath))

--------------------------------------------------------------------------------