├── LICENSE.txt ├── README.md ├── data ├── README.md ├── correct_coordinates.csv ├── correct_train.csv └── train.csv ├── dataset.py ├── inference.py ├── models ├── __init__.py ├── model_cnet.py └── model_countception.py ├── mytransforms.py ├── scripts ├── other │ ├── README.md │ ├── __notebook__.ipynb │ └── script.py └── preprocess.py ├── setup.py ├── train.py ├── utils.py └── utils_cython.pyx /LICENSE.txt: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 
30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 
62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. 
We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Pytorch Counting Models for Kaggle Sea Lion Count Challenge 2 | 3 | ## Overview 4 | With less than two weeks remaining, I decided to jump into the Kaggle Sea Lion count competition (https://www.kaggle.com/c/noaa-fisheries-steller-sea-lion-population-count) and see if I could get results implementing a few CNN based counting models I'd been reading about. 5 | 6 | Basically an excuse to try Pytorch and experiment with some new models. Most of my other NN hacking has been in Tensorflow, Torch, or Theano. 7 | 8 | As far as the competition is concerned, these models were a fail. I'm not convinced they couldn't work but I didn't have time to find appropriate hyper parameters, tweak the models, or fix issues in my code to produce anything reasonable. 
9 | 10 | 11 | I implemented two models 12 | * Count-ception -- From (https://arxiv.org/abs/1703.08710) 13 | * Count-net -- My own mashup of (https://arxiv.org/pdf/1705.10118.pdf) and (https://www.robots.ox.ac.uk/~vgg/publications/2015/Xie15/weidi15.pdf) 14 | 15 | I wanted to give the FCRN described in https://www.robots.ox.ac.uk/~vgg/publications/2015/Xie15/weidi15.pdf a shot, but the layer description in the paper between conv4 and FC was vague and I ran out of time. 16 | 17 | What's working: 18 | * Kaggle Sealion patch based data processing pipeline with augmentation 19 | * Density or redundant count-ception target generation 20 | * Model training (loss curve looks reasonable) 21 | * Inference (submission generation) 22 | 23 | What's not: 24 | * Good results. Both models train but the appropriate features for sea lion counting and category discrimination do not appear to be learned. Counts are way off. Doing regression across multiple categories of similar looking objects is likely making this a very challenging objective. 
25 | * Validation 26 | 27 | 28 | ## Examples 29 | 30 | Train: 31 | 32 | python train.py /data/sealion/Train-processed/ --batch-size 8 --num-processes 4 --num-gpu 2 --lr 0.001 --opt adadelta --model cc --loss l1 33 | 34 | Inference: 35 | 36 | python inference.py /data/x/sealion/Test/ --batch-size 8 --num-processes 4 --restore-checkpoint output/train/20170625-200215/checkpoint-1.pth.tar 37 | 38 | 39 | Build 'utils_cython' module for overlapping patch merge: 40 | 41 | setup.py build_ext --inplace 42 | 43 | -------------------------------------------------------------------------------- /data/README.md: -------------------------------------------------------------------------------- 1 | Corrected coordinates and sealion counts (correct_coordinates.csv and correct_train.csv) sourced from: 2 | * https://github.com/LivingProgram/kaggle-sea-lion-data 3 | * https://www.kaggle.com/c/noaa-fisheries-steller-sea-lion-population-count/discussion/32857 4 | 5 | Thanks! -------------------------------------------------------------------------------- /data/correct_train.csv: -------------------------------------------------------------------------------- 1 | train_id,adult_males,subadult_males,adult_females,juveniles,pups 2 | 0,62,12,486,42,344 3 | 1,2,20,0,12,0 4 | 2,2,0,37,19,0 5 | 4,6,9,2,0,0 6 | 5,6,4,14,4,19 7 | 6,2,5,20,18,0 8 | 8,9,5,76,4,51 9 | 10,5,4,7,5,0 10 | 11,3,5,36,13,0 11 | 12,8,9,13,1,7 12 | 13,1,5,20,13,0 13 | 14,1,1,0,27,0 14 | 15,2,3,33,56,0 15 | 16,1,1,32,35,0 16 | 17,4,4,60,18,0 17 | 18,2,3,0,0,0 18 | 19,7,3,34,24,1 19 | 20,7,21,31,4,1 20 | 22,1,1,0,5,0 21 | 23,1,0,1,6,0 22 | 24,4,2,0,0,0 23 | 25,1,2,0,0,0 24 | 26,0,8,0,4,0 25 | 27,1,4,0,8,0 26 | 28,2,0,0,0,0 27 | 29,4,4,44,15,0 28 | 31,3,2,20,76,1 29 | 32,18,6,159,18,111 30 | 33,0,1,18,8,0 31 | 35,4,1,0,0,0 32 | 36,8,17,0,0,0 33 | 37,1,2,5,0,0 34 | 38,3,0,33,0,0 35 | 39,10,14,62,10,15 36 | 40,2,2,62,7,0 37 | 41,15,0,85,18,59 38 | 42,7,4,10,1,0 39 | 43,28,4,338,47,189 40 | 44,3,2,25,15,0 41 | 45,4,7,100,27,0 42 
| 46,1,4,0,0,0 43 | 47,13,14,48,3,33 44 | 48,5,10,66,24,0 45 | 49,0,0,4,15,0 46 | 50,1,0,0,0,0 47 | 51,9,13,10,35,0 48 | 52,2,3,20,23,0 49 | 53,2,1,15,6,0 50 | 54,2,5,22,26,0 51 | 55,6,2,59,74,0 52 | 56,9,3,91,5,93 53 | 57,2,1,0,0,0 54 | 58,38,17,143,71,145 55 | 59,6,4,16,55,1 56 | 60,5,4,0,0,0 57 | 61,0,0,1,0,0 58 | 62,6,2,221,28,1 59 | 63,7,22,58,15,59 60 | 64,5,5,6,0,0 61 | 65,29,8,267,14,270 62 | 66,9,5,23,17,2 63 | 67,9,3,30,4,25 64 | 68,4,3,3,0,0 65 | 69,4,2,46,31,14 66 | 70,9,11,15,636,0 67 | 72,13,3,56,37,20 68 | 73,3,0,20,6,7 69 | 74,1,2,0,4,0 70 | 75,3,1,0,34,0 71 | 76,5,3,0,0,0 72 | 77,19,11,136,37,83 73 | 78,2,2,6,0,5 74 | 79,11,5,37,0,26 75 | 80,0,12,0,19,0 76 | 82,0,0,37,17,0 77 | 83,5,2,45,41,0 78 | 84,1,1,0,12,0 79 | 85,0,1,0,0,0 80 | 86,2,2,4,2,2 81 | 87,7,1,43,16,0 82 | 88,1,2,1,24,0 83 | 90,1,0,17,12,0 84 | 91,17,0,229,1,156 85 | 92,3,5,1,1,1 86 | 93,23,21,144,100,62 87 | 94,4,4,17,0,0 88 | 95,5,0,19,58,0 89 | 96,9,3,167,5,0 90 | 98,0,6,0,0,0 91 | 99,3,1,11,68,0 92 | 100,12,3,50,4,27 93 | 101,10,0,85,56,0 94 | 102,13,0,92,3,73 95 | 103,3,0,0,0,0 96 | 104,3,3,38,43,0 97 | 105,2,0,32,13,3 98 | 106,0,4,0,5,0 99 | 107,7,11,3,1,2 100 | 108,3,4,0,3,0 101 | 109,4,2,35,4,27 102 | 110,16,8,95,13,79 103 | 111,23,9,123,40,80 104 | 112,11,20,387,190,0 105 | 113,0,9,10,3,0 106 | 114,5,7,12,0,9 107 | 115,0,13,0,4,0 108 | 116,0,0,4,9,0 109 | 117,0,1,1,0,0 110 | 118,1,1,10,43,0 111 | 119,7,7,19,2,11 112 | 120,2,0,5,1,3 113 | 121,6,1,90,7,67 114 | 122,11,43,37,14,0 115 | 123,0,0,1,0,0 116 | 124,19,8,104,84,46 117 | 125,6,1,36,29,8 118 | 126,35,14,127,7,117 119 | 127,1,9,15,64,0 120 | 128,11,22,53,43,0 121 | 129,0,3,0,36,0 122 | 130,13,30,43,17,0 123 | 131,5,11,25,73,2 124 | 132,2,2,0,0,0 125 | 133,1,1,2,2,0 126 | 134,3,2,14,6,0 127 | 135,1,0,10,4,0 128 | 136,1,9,3,21,0 129 | 137,6,3,0,0,0 130 | 138,1,0,25,12,0 131 | 139,2,4,9,0,8 132 | 140,1,2,0,0,0 133 | 141,5,28,0,16,0 134 | 142,1,1,3,5,0 135 | 143,0,1,0,0,0 136 | 144,8,0,18,0,20 137 | 145,4,9,60,70,0 138 | 
146,4,1,24,23,5 139 | 147,6,4,44,4,26 140 | 148,0,3,0,5,0 141 | 149,2,5,0,2,0 142 | 150,0,2,0,0,0 143 | 152,5,4,0,0,0 144 | 153,12,27,0,0,0 145 | 154,6,13,62,25,0 146 | 155,23,11,114,5,64 147 | 156,2,1,11,0,0 148 | 157,1,1,15,0,0 149 | 158,20,0,90,3,87 150 | 159,0,6,0,0,0 151 | 160,12,5,199,65,30 152 | 161,2,5,5,0,0 153 | 162,4,1,58,37,0 154 | 163,11,8,89,15,72 155 | 164,32,15,248,14,169 156 | 165,2,5,4,3,0 157 | 166,5,5,56,25,0 158 | 167,1,0,0,0,0 159 | 168,6,4,5,101,0 160 | 169,4,2,35,14,11 161 | 170,10,0,82,4,67 162 | 171,0,23,3,37,0 163 | 172,2,0,6,23,0 164 | 173,4,0,6,6,0 165 | 174,4,0,35,0,29 166 | 175,2,0,4,17,0 167 | 176,17,9,92,6,56 168 | 177,2,10,0,8,0 169 | 178,2,0,1,0,0 170 | 179,0,5,0,1,0 171 | 180,3,1,7,4,0 172 | 181,6,6,48,37,2 173 | 182,1,2,13,10,0 174 | 183,6,7,31,28,0 175 | 185,8,1,13,0,8 176 | 186,3,4,112,146,0 177 | 187,54,14,592,8,434 178 | 188,1,3,10,19,0 179 | 189,64,40,382,23,135 180 | 190,1,0,2,6,0 181 | 191,1,5,0,0,0 182 | 192,1,0,0,0,0 183 | 193,2,1,29,15,0 184 | 194,1,0,0,0,0 185 | 195,4,0,63,24,0 186 | 196,12,8,106,11,64 187 | 197,7,3,80,6,62 188 | 198,3,2,0,0,0 189 | 199,1,0,0,0,0 190 | 200,36,3,56,2,5 191 | 201,9,2,44,13,33 192 | 202,3,0,11,23,0 193 | 203,8,8,43,59,2 194 | 204,4,8,1,6,1 195 | 205,14,1,135,10,68 196 | 206,0,5,1,19,1 197 | 207,0,2,0,2,0 198 | 208,4,3,0,0,0 199 | 209,4,9,0,1,0 200 | 210,2,9,1,15,0 201 | 211,19,4,185,20,243 202 | 212,4,6,16,0,0 203 | 213,11,12,26,44,6 204 | 214,4,0,18,5,5 205 | 216,5,0,28,0,33 206 | 217,16,4,140,12,142 207 | 218,0,2,0,1,0 208 | 219,11,7,37,39,0 209 | 220,0,1,0,0,0 210 | 221,6,1,26,9,2 211 | 222,2,0,0,0,0 212 | 223,0,0,1,0,0 213 | 224,12,1,58,3,68 214 | 225,0,1,22,6,0 215 | 226,17,8,207,3,144 216 | 227,5,1,52,111,0 217 | 228,1,9,61,38,0 218 | 229,1,1,0,1,0 219 | 230,0,0,16,5,0 220 | 231,7,4,7,1,1 221 | 232,9,2,21,1,16 222 | 233,1,0,0,0,0 223 | 235,29,3,277,99,135 224 | 236,3,0,29,44,0 225 | 237,0,1,29,38,0 226 | 238,3,3,8,1,3 227 | 239,4,10,8,17,0 228 | 240,25,8,161,23,114 229 | 
241,3,0,101,54,0 230 | 243,4,0,0,0,0 231 | 244,1,1,3,13,0 232 | 245,7,2,74,29,8 233 | 246,5,3,24,31,1 234 | 247,3,1,34,18,0 235 | 248,4,0,64,34,0 236 | 249,4,8,13,38,0 237 | 250,2,4,2,0,0 238 | 251,1,0,5,31,0 239 | 252,21,2,105,15,67 240 | 253,2,0,0,0,0 241 | 254,2,9,7,1,6 242 | 255,0,3,0,0,0 243 | 256,15,0,92,53,59 244 | 257,1,11,23,53,9 245 | 258,19,11,350,265,1 246 | 259,3,3,32,20,0 247 | 260,3,1,8,51,0 248 | 261,3,4,29,9,2 249 | 262,3,0,34,31,5 250 | 263,12,2,57,32,23 251 | 264,0,0,3,3,0 252 | 265,1,5,0,0,0 253 | 266,4,0,39,35,0 254 | 267,14,8,62,14,44 255 | 269,3,2,0,0,0 256 | 270,3,0,21,29,0 257 | 271,30,7,288,34,217 258 | 272,0,2,0,4,0 259 | 273,14,3,296,7,250 260 | 274,1,0,0,0,0 261 | 275,0,1,25,40,0 262 | 276,6,3,37,0,0 263 | 277,5,6,68,102,0 264 | 278,5,0,18,19,2 265 | 279,1,0,8,4,0 266 | 280,12,5,19,3,29 267 | 281,3,3,51,20,0 268 | 282,0,3,3,0,0 269 | 283,4,7,26,12,0 270 | 284,2,0,16,15,0 271 | 285,0,2,0,0,0 272 | 286,2,2,0,0,0 273 | 287,10,8,1,0,0 274 | 288,4,0,12,15,1 275 | 289,0,2,38,15,0 276 | 291,39,20,275,23,237 277 | 292,5,5,49,42,1 278 | 293,6,4,38,2,31 279 | 294,1,1,2,5,0 280 | 295,16,6,51,29,26 281 | 296,1,6,0,2,0 282 | 297,3,36,24,39,0 283 | 298,22,10,172,18,183 284 | 299,27,9,209,32,55 285 | 300,6,3,7,11,0 286 | 301,6,3,67,5,35 287 | 302,1,0,0,34,0 288 | 303,1,2,0,25,0 289 | 304,2,3,0,82,0 290 | 305,1,2,3,0,0 291 | 306,14,4,71,45,17 292 | 307,8,0,17,6,17 293 | 308,0,2,4,2,0 294 | 309,5,0,61,23,8 295 | 310,4,8,95,30,0 296 | 312,1,1,21,14,0 297 | 313,5,3,83,30,0 298 | 314,1,2,0,0,0 299 | 315,1,2,0,0,0 300 | 316,5,0,0,1,0 301 | 317,1,2,34,16,0 302 | 318,2,9,33,42,1 303 | 319,17,12,145,68,49 304 | 320,16,4,227,39,11 305 | 321,2,3,0,1,0 306 | 322,8,3,63,22,10 307 | 323,3,0,1,0,1 308 | 324,2,3,12,33,0 309 | 325,0,2,2,79,0 310 | 326,1,3,69,80,0 311 | 327,0,2,4,0,0 312 | 328,6,4,18,4,13 313 | 329,18,3,72,6,49 314 | 330,1,0,25,66,0 315 | 332,10,4,57,27,21 316 | 333,0,15,0,74,0 317 | 334,9,14,21,39,9 318 | 335,6,36,18,12,0 319 | 336,10,2,77,18,27 320 | 
337,0,3,4,86,0 321 | 338,37,7,275,41,248 322 | 339,2,2,5,0,0 323 | 340,21,4,181,23,145 324 | 341,0,1,0,1,0 325 | 342,3,0,21,16,0 326 | 343,2,1,4,2,3 327 | 345,1,0,16,2,0 328 | 346,7,1,46,39,0 329 | 347,10,55,2,23,0 330 | 348,0,2,0,17,0 331 | 349,24,29,21,11,0 332 | 350,4,13,0,0,0 333 | 351,1,0,0,0,0 334 | 352,2,1,0,0,0 335 | 353,5,4,78,29,0 336 | 354,2,1,31,4,0 337 | 355,0,4,0,1,0 338 | 356,0,0,1,0,0 339 | 357,4,1,0,0,0 340 | 358,0,3,0,38,0 341 | 359,10,4,51,12,46 342 | 360,2,4,23,77,0 343 | 361,27,1,146,38,54 344 | 362,18,1,127,4,97 345 | 363,1,7,4,53,1 346 | 364,6,0,24,0,45 347 | 365,3,6,0,0,0 348 | 366,1,1,2,2,0 349 | 367,15,4,99,4,55 350 | 368,6,0,37,3,34 351 | 369,8,41,0,24,0 352 | 370,9,4,43,7,16 353 | 371,15,2,62,5,49 354 | 372,29,6,136,33,82 355 | 373,12,6,69,54,6 356 | 374,0,0,36,43,0 357 | 375,3,0,0,0,0 358 | 376,2,3,24,4,0 359 | 377,8,5,27,2,29 360 | 378,5,1,43,1,0 361 | 379,4,7,23,45,5 362 | 381,1,0,0,0,0 363 | 382,1,0,12,22,0 364 | 383,8,8,70,124,0 365 | 385,2,2,0,1,0 366 | 386,4,1,15,8,10 367 | 387,2,0,0,0,0 368 | 388,1,0,12,24,0 369 | 389,0,1,42,14,0 370 | 390,4,5,4,2,3 371 | 391,7,3,8,0,8 372 | 392,2,0,15,19,0 373 | 393,7,6,33,2,21 374 | 394,1,2,0,0,0 375 | 395,2,0,25,15,0 376 | 396,1,11,0,2,0 377 | 397,2,1,2,0,0 378 | 398,0,4,0,11,0 379 | 399,6,13,30,70,0 380 | 400,3,1,7,0,0 381 | 401,8,8,33,32,16 382 | 402,6,11,300,71,0 383 | 403,1,17,22,195,2 384 | 404,2,0,12,1,6 385 | 405,1,2,4,7,0 386 | 407,5,2,4,0,0 387 | 408,9,5,45,2,34 388 | 409,4,0,13,6,1 389 | 410,1,2,0,0,0 390 | 411,17,0,329,345,0 391 | 412,5,7,104,61,0 392 | 413,8,2,0,0,0 393 | 414,3,7,10,13,0 394 | 415,4,0,20,2,0 395 | 416,2,34,7,340,0 396 | 417,6,2,4,0,0 397 | 418,10,6,140,150,0 398 | 419,3,1,0,0,0 399 | 420,8,6,13,4,16 400 | 422,10,0,93,19,90 401 | 423,0,4,0,4,0 402 | 424,1,0,34,18,0 403 | 425,0,1,14,16,0 404 | 426,2,6,11,42,5 405 | 427,1,3,4,28,0 406 | 428,1,0,1,1,0 407 | 429,3,18,19,25,0 408 | 430,4,0,10,6,1 409 | 431,0,2,5,8,0 410 | 432,4,8,23,23,0 411 | 433,0,3,5,43,0 412 | 
434,7,3,21,101,0 413 | 435,0,4,1,19,0 414 | 436,2,1,0,0,0 415 | 437,30,1,218,4,192 416 | 438,15,6,169,0,123 417 | 439,2,3,21,16,0 418 | 440,2,13,4,7,0 419 | 441,1,0,8,15,0 420 | 442,1,2,5,23,0 421 | 443,4,1,12,35,0 422 | 444,1,4,0,0,0 423 | 445,1,0,11,16,0 424 | 446,9,1,62,19,32 425 | 447,0,4,0,2,0 426 | 448,1,1,2,1,1 427 | 449,1,6,0,0,0 428 | 450,1,0,0,0,0 429 | 451,2,3,44,29,0 430 | 452,35,7,207,9,100 431 | 453,2,1,9,20,0 432 | 454,2,5,22,35,1 433 | 455,3,0,49,17,2 434 | 456,3,3,5,0,0 435 | 457,2,1,5,13,0 436 | 458,1,9,7,6,0 437 | 459,15,7,58,3,30 438 | 460,1,1,22,20,0 439 | 461,1,2,17,40,0 440 | 462,14,6,60,6,58 441 | 463,6,0,57,7,7 442 | 464,0,1,0,16,0 443 | 465,10,2,94,171,0 444 | 466,3,1,9,3,7 445 | 467,1,4,0,0,0 446 | 468,8,14,39,10,0 447 | 470,0,0,1,1,0 448 | 471,4,2,7,4,0 449 | 472,6,4,45,15,0 450 | 473,2,0,19,60,0 451 | 474,1,2,7,26,0 452 | 476,9,0,42,39,0 453 | 477,9,5,50,16,14 454 | 478,8,9,49,13,44 455 | 479,5,4,0,0,0 456 | 480,3,2,30,9,1 457 | 481,3,2,4,45,0 458 | 482,7,1,44,6,33 459 | 483,1,2,3,30,0 460 | 484,6,40,0,5,0 461 | 485,10,6,45,35,0 462 | 486,3,2,0,1,1 463 | 487,3,0,0,0,0 464 | 488,6,11,145,78,0 465 | 489,2,0,2,16,0 466 | 491,0,1,9,4,0 467 | 492,2,1,9,21,1 468 | 493,0,21,0,65,0 469 | 494,24,44,72,19,53 470 | 495,2,0,20,2,20 471 | 496,0,2,5,0,4 472 | 497,1,2,0,0,0 473 | 498,0,1,1,19,0 474 | 500,2,6,0,1,0 475 | 501,2,0,2,3,0 476 | 502,6,3,59,32,0 477 | 503,0,5,6,31,0 478 | 504,2,0,0,3,0 479 | 505,1,8,0,41,0 480 | 506,0,0,9,21,0 481 | 508,0,0,4,1,0 482 | 509,23,10,86,63,43 483 | 510,5,1,0,0,0 484 | 511,3,4,16,27,0 485 | 512,1,4,0,0,0 486 | 513,7,0,8,2,7 487 | 514,6,3,260,109,0 488 | 515,0,8,2,1,0 489 | 516,13,3,92,14,61 490 | 517,2,1,4,0,0 491 | 518,4,7,59,30,0 492 | 519,6,3,44,17,13 493 | 520,6,2,21,0,0 494 | 521,1,0,7,5,0 495 | 522,5,3,67,43,5 496 | 523,16,4,64,13,16 497 | 524,4,2,1,1,0 498 | 525,3,0,9,35,0 499 | 526,4,10,20,19,13 500 | 527,2,0,29,6,1 501 | 528,8,0,30,4,24 502 | 529,5,2,15,13,0 503 | 532,2,0,0,0,0 504 | 533,2,2,0,2,0 505 | 
534,0,1,0,0,0 506 | 535,7,0,81,36,7 507 | 536,15,1,52,4,44 508 | 537,1,4,2,8,0 509 | 538,10,2,163,9,115 510 | 539,1,5,0,0,0 511 | 540,22,21,40,11,1 512 | 541,4,1,12,2,0 513 | 542,3,2,0,0,0 514 | 543,3,2,0,6,0 515 | 544,4,6,8,2,0 516 | 545,3,14,36,21,0 517 | 546,3,1,16,47,0 518 | 547,2,2,18,34,0 519 | 548,3,0,5,4,0 520 | 549,1,4,31,0,0 521 | 550,2,0,11,14,0 522 | 551,1,0,13,13,0 523 | 552,2,3,52,46,0 524 | 553,5,3,3,1,0 525 | 554,1,8,9,1,1 526 | 555,3,3,20,28,0 527 | 556,13,17,133,32,102 528 | 557,0,3,0,3,0 529 | 558,14,3,87,21,39 530 | 559,2,2,29,41,0 531 | 560,1,2,0,38,0 532 | 561,3,0,33,42,0 533 | 562,3,4,31,68,0 534 | 563,6,5,68,6,88 535 | 564,1,0,0,0,0 536 | 565,0,0,6,8,0 537 | 566,2,2,0,0,0 538 | 567,5,3,66,30,1 539 | 568,4,0,9,17,2 540 | 569,19,9,113,24,105 541 | 570,2,3,0,0,0 542 | 571,0,0,30,13,0 543 | 572,0,0,11,9,0 544 | 573,2,5,0,0,0 545 | 574,4,2,21,9,7 546 | 575,2,1,19,3,0 547 | 576,3,12,24,24,0 548 | 577,3,1,116,97,0 549 | 578,6,3,26,0,16 550 | 579,0,0,24,33,0 551 | 580,3,5,0,0,0 552 | 581,21,10,130,15,16 553 | 582,1,4,0,0,0 554 | 583,2,2,3,1,0 555 | 584,7,0,4,0,3 556 | 585,2,1,4,16,0 557 | 586,3,0,1,5,1 558 | 587,31,13,200,75,268 559 | 588,5,31,13,11,0 560 | 589,1,1,21,30,0 561 | 590,47,16,322,41,300 562 | 591,22,14,134,116,43 563 | 592,1,1,0,0,0 564 | 593,1,2,32,58,0 565 | 594,0,1,1,0,0 566 | 595,2,0,28,14,0 567 | 596,5,4,30,47,0 568 | 597,0,0,1,0,0 569 | 598,2,1,32,15,0 570 | 599,3,8,1,5,0 571 | 600,0,1,19,27,0 572 | 601,9,4,38,92,0 573 | 602,3,17,0,0,0 574 | 603,2,3,1,0,0 575 | 604,12,4,105,204,2 576 | 606,15,5,288,11,211 577 | 608,2,0,12,24,0 578 | 609,1,5,0,0,0 579 | 610,5,5,17,12,0 580 | 611,2,1,32,13,0 581 | 612,3,0,1,0,0 582 | 613,4,6,15,2,1 583 | 615,2,1,35,22,0 584 | 616,8,18,79,25,0 585 | 617,14,12,88,47,38 586 | 618,5,23,98,25,0 587 | 619,6,2,27,5,12 588 | 620,5,6,20,1,21 589 | 622,6,2,32,1,38 590 | 623,6,0,0,0,0 591 | 624,7,6,38,1,20 592 | 625,28,5,149,11,129 593 | 626,1,0,0,0,0 594 | 627,1,1,0,0,0 595 | 628,11,4,18,1,0 596 | 
629,10,9,75,1,57 597 | 630,2,0,0,0,0 598 | 631,2,2,0,0,0 599 | 632,4,3,1,6,0 600 | 633,5,1,39,15,23 601 | 634,1,0,0,0,0 602 | 635,3,1,20,7,0 603 | 636,2,2,34,2,0 604 | 637,6,18,85,17,40 605 | 639,4,12,31,81,2 606 | 640,8,3,33,8,0 607 | 641,1,2,70,102,0 608 | 642,1,9,0,0,0 609 | 643,1,5,0,21,0 610 | 645,10,7,73,50,67 611 | 646,1,1,11,3,0 612 | 647,1,2,0,1,0 613 | 648,5,0,56,0,41 614 | 649,2,0,19,49,0 615 | 650,2,25,0,13,0 616 | 651,1,7,11,26,0 617 | 652,0,3,0,0,0 618 | 653,2,3,0,18,0 619 | 654,1,3,0,8,0 620 | 655,5,3,21,1,17 621 | 656,4,6,14,7,9 622 | 657,0,1,0,0,0 623 | 658,5,8,0,0,0 624 | 659,1,1,21,25,0 625 | 660,1,0,1,4,0 626 | 661,2,3,17,49,0 627 | 662,8,6,245,20,0 628 | 663,1,4,9,11,0 629 | 664,18,15,129,67,71 630 | 665,0,9,0,2,0 631 | 666,0,4,4,14,0 632 | 667,4,1,1,0,1 633 | 668,0,3,4,32,0 634 | 669,5,4,16,0,0 635 | 670,0,4,21,19,0 636 | 671,1,1,11,58,0 637 | 672,5,25,0,10,0 638 | 673,1,0,0,16,0 639 | 674,20,3,112,23,118 640 | 675,4,1,5,0,3 641 | 676,16,10,43,6,21 642 | 677,7,6,6,4,2 643 | 678,3,7,19,45,0 644 | 679,15,1,94,16,83 645 | 680,17,5,160,8,162 646 | 681,2,1,24,9,0 647 | 682,1,2,6,9,0 648 | 683,1,2,3,2,0 649 | 684,17,4,89,45,54 650 | 685,6,12,0,0,0 651 | 686,3,34,0,59,0 652 | 688,0,8,0,13,0 653 | 689,1,0,1,0,0 654 | 690,5,5,34,7,28 655 | 691,1,1,0,0,0 656 | 692,0,0,15,45,0 657 | 693,3,10,7,2,0 658 | 694,3,1,5,1,0 659 | 695,1,0,2,0,2 660 | 696,2,4,0,2,0 661 | 697,1,1,2,0,0 662 | 698,1,0,0,14,0 663 | 699,18,13,39,2,14 664 | 700,9,4,81,7,23 665 | 701,12,10,91,19,158 666 | 702,8,4,25,10,10 667 | 703,7,0,38,52,0 668 | 704,26,6,167,25,105 669 | 705,3,0,19,38,2 670 | 706,2,4,39,18,0 671 | 707,4,21,1,7,0 672 | 708,3,7,0,3,0 673 | 709,1,2,19,50,0 674 | 710,6,3,0,0,0 675 | 711,4,7,5,3,0 676 | 713,2,3,10,5,0 677 | 714,13,2,85,7,21 678 | 715,9,1,57,7,35 679 | 716,5,5,53,30,0 680 | 717,1,4,16,2,1 681 | 718,13,3,39,1,42 682 | 719,1,3,15,10,0 683 | 720,8,1,51,7,24 684 | 722,19,6,164,7,109 685 | 723,8,0,41,5,45 686 | 724,6,39,4,85,0 687 | 725,6,2,68,100,0 688 | 
726,3,4,3,19,0 689 | 727,13,6,103,18,66 690 | 728,13,4,104,11,77 691 | 729,1,4,3,28,0 692 | 730,1,3,0,0,0 693 | 731,4,0,12,0,8 694 | 732,4,6,0,0,0 695 | 733,1,0,0,0,0 696 | 734,1,1,0,0,0 697 | 735,0,3,9,0,0 698 | 736,9,36,0,2,0 699 | 737,2,3,0,0,0 700 | 738,1,1,0,8,0 701 | 739,4,3,1,0,1 702 | 740,9,4,20,8,2 703 | 741,33,5,228,38,176 704 | 742,6,1,23,6,14 705 | 743,12,5,113,13,91 706 | 744,6,3,51,497,0 707 | 745,32,10,269,30,183 708 | 746,1,0,11,19,0 709 | 747,17,6,251,17,225 710 | 748,0,5,0,0,0 711 | 749,6,0,24,8,2 712 | 750,3,1,9,5,0 713 | 751,0,4,0,0,0 714 | 752,6,1,16,6,12 715 | 753,8,1,35,19,18 716 | 754,2,0,19,29,0 717 | 755,5,17,51,139,0 718 | 756,0,0,20,34,0 719 | 757,1,0,0,0,0 720 | 758,3,0,12,85,0 721 | 759,18,2,144,13,119 722 | 760,3,15,0,7,0 723 | 761,9,19,53,58,0 724 | 762,2,2,2,0,0 725 | 763,0,1,0,72,0 726 | 764,4,4,26,2,6 727 | 765,0,1,20,19,0 728 | 766,1,1,1,0,0 729 | 768,2,1,0,0,0 730 | 769,12,0,161,15,130 731 | 770,1,4,24,5,0 732 | 771,7,1,113,28,0 733 | 772,0,1,0,1,0 734 | 773,28,8,213,5,138 735 | 774,20,0,139,12,114 736 | 775,3,4,51,4,49 737 | 776,8,2,25,2,29 738 | 777,2,3,0,0,0 739 | 778,2,3,0,1,0 740 | 780,0,0,3,3,0 741 | 782,0,3,95,22,0 742 | 783,17,8,88,18,10 743 | 784,11,5,211,211,0 744 | 785,41,8,288,22,230 745 | 786,1,2,0,0,0 746 | 787,3,4,0,10,0 747 | 788,3,18,12,5,0 748 | 789,3,2,23,1,18 749 | 790,7,2,40,4,36 750 | 791,4,4,37,3,26 751 | 792,10,0,49,1,42 752 | 793,11,5,8,0,0 753 | 795,3,0,7,0,0 754 | 796,1,0,1,8,0 755 | 797,1,0,0,0,0 756 | 798,2,1,0,0,0 757 | 799,4,12,13,74,3 758 | 801,9,3,56,30,57 759 | 802,11,6,48,54,12 760 | 803,21,10,259,40,151 761 | 804,6,1,56,52,0 762 | 805,12,1,60,14,31 763 | 806,5,11,14,3,10 764 | 807,7,0,57,7,56 765 | 808,1,0,0,0,0 766 | 809,3,0,48,43,1 767 | 810,15,4,68,28,20 768 | 812,3,0,16,3,0 769 | 813,1,3,6,20,0 770 | 814,1,6,0,5,0 771 | 815,5,3,2,4,0 772 | 816,3,0,35,33,0 773 | 817,4,5,26,39,0 774 | 818,1,0,4,7,0 775 | 819,2,0,0,0,0 776 | 820,4,3,44,4,41 777 | 821,1,0,3,2,0 778 | 822,30,10,305,11,210 779 | 
823,2,7,29,12,0 780 | 824,2,4,0,0,0 781 | 825,3,1,0,1,0 782 | 826,0,2,3,0,0 783 | 827,4,7,4,0,0 784 | 828,15,9,48,4,35 785 | 829,4,3,15,9,8 786 | 830,2,10,4,32,0 787 | 831,3,2,6,1,0 788 | 832,0,4,0,18,0 789 | 833,22,3,226,43,215 790 | 834,0,7,0,1,0 791 | 835,4,6,1,2,0 792 | 836,2,3,3,1,0 793 | 837,1,0,0,0,0 794 | 838,11,28,2,5,0 795 | 841,3,6,0,26,0 796 | 842,2,4,6,4,0 797 | 843,3,0,6,4,0 798 | 844,9,7,5,4,0 799 | 845,8,1,9,17,0 800 | 846,1,1,13,3,0 801 | 847,27,3,227,46,151 802 | 848,1,1,5,23,0 803 | 849,6,1,24,5,10 804 | 850,6,13,13,0,0 805 | 851,1,2,10,8,0 806 | 852,6,27,3,10,3 807 | 853,0,4,0,0,0 808 | 854,1,2,0,1,0 809 | 855,3,2,24,78,0 810 | 856,8,4,14,5,0 811 | 857,0,0,0,0,0 812 | 858,4,12,20,67,0 813 | 859,3,6,8,33,0 814 | 860,3,2,20,11,7 815 | 861,4,6,110,0,0 816 | 862,4,2,18,99,1 817 | 863,1,1,0,0,0 818 | 864,14,2,207,20,172 819 | 865,0,0,0,1,0 820 | 866,2,0,4,14,0 821 | 867,0,2,5,11,0 822 | 868,5,11,62,94,0 823 | 870,2,0,7,0,0 824 | 871,23,10,193,43,172 825 | 872,7,11,132,36,0 826 | 873,1,0,30,76,0 827 | 874,18,7,118,47,117 828 | 875,4,8,60,14,0 829 | 876,1,0,0,0,0 830 | 877,1,3,7,3,0 831 | 878,1,0,16,36,0 832 | 879,0,0,28,75,0 833 | 880,2,0,9,23,0 834 | 881,44,16,173,58,117 835 | 883,1,3,0,0,0 836 | 884,3,0,59,13,7 837 | 885,3,0,21,12,0 838 | 886,2,0,30,0,15 839 | 887,1,2,0,0,0 840 | 888,1,0,5,4,0 841 | 889,8,0,32,4,14 842 | 890,3,0,10,3,0 843 | 891,7,0,27,5,22 844 | 892,16,6,144,3,124 845 | 893,1,3,30,35,8 846 | 894,2,2,41,39,0 847 | 895,3,2,52,15,7 848 | 896,4,0,0,0,0 849 | 897,0,4,21,11,0 850 | 898,10,4,79,6,45 851 | 899,2,2,3,3,0 852 | 900,42,5,681,26,529 853 | 902,0,2,0,0,0 854 | 904,1,1,0,0,0 855 | 906,4,1,18,7,2 856 | 907,3,3,32,55,0 857 | 908,4,2,0,0,0 858 | 910,3,16,0,1,0 859 | 911,4,5,149,50,0 860 | 912,30,2,248,13,205 861 | 914,3,23,15,115,0 862 | 915,0,0,2,5,0 863 | 916,1,3,12,13,0 864 | 917,10,8,31,109,0 865 | 918,4,2,22,7,3 866 | 919,5,3,41,64,3 867 | 920,11,2,280,212,2 868 | 921,1,0,25,52,0 869 | 922,4,4,7,6,0 870 | 923,8,2,106,11,86 871 
| 924,23,3,131,8,124 872 | 925,3,2,18,59,0 873 | 926,2,0,3,31,0 874 | 928,1,0,0,22,0 875 | 929,3,0,23,50,0 876 | 930,8,0,36,8,21 877 | 931,3,2,20,17,0 878 | 932,5,8,21,67,0 879 | 933,6,7,68,69,21 880 | 934,4,2,36,16,0 881 | 935,0,1,0,0,0 882 | 936,1,0,1,2,0 883 | 937,4,9,26,10,0 884 | 938,0,0,4,0,0 885 | 939,16,3,100,8,66 886 | 940,2,4,3,74,0 887 | 941,6,2,46,1,37 888 | 942,1,6,0,37,0 889 | 943,1,1,24,26,0 890 | 944,1,4,1,0,0 891 | 945,6,0,79,6,45 892 | 947,6,4,29,6,24 893 | -------------------------------------------------------------------------------- /data/train.csv: -------------------------------------------------------------------------------- 1 | train_id,adult_males,subadult_males,adult_females,juveniles,pups 2 | 0,62,12,486,42,344 3 | 1,2,20,0,12,0 4 | 2,2,0,38,20,0 5 | 3,8,5,41,7,38 6 | 4,6,9,2,0,0 7 | 5,6,4,14,4,19 8 | 6,2,5,20,18,0 9 | 7,6,2,33,16,3 10 | 8,9,5,76,4,51 11 | 9,4,1,64,36,11 12 | 10,5,4,7,5,0 13 | 11,3,7,36,13,0 14 | 12,8,9,13,1,7 15 | 13,2,5,20,13,0 16 | 14,1,1,0,27,0 17 | 15,2,3,35,61,0 18 | 16,1,1,32,35,0 19 | 17,4,4,60,18,0 20 | 18,2,4,0,0,0 21 | 19,7,3,34,24,1 22 | 20,7,21,31,4,1 23 | 21,6,1,25,9,0 24 | 22,1,1,0,5,0 25 | 23,1,0,1,6,0 26 | 24,4,2,0,0,0 27 | 25,1,2,0,0,0 28 | 26,0,8,0,4,0 29 | 27,1,4,0,8,0 30 | 28,2,0,0,0,0 31 | 29,4,4,44,15,0 32 | 30,2,0,1,3,0 33 | 31,3,2,20,76,1 34 | 32,18,6,159,18,111 35 | 33,0,1,18,8,0 36 | 34,4,1,127,237,0 37 | 35,4,1,0,0,0 38 | 36,9,19,0,0,0 39 | 37,1,2,5,0,0 40 | 38,3,0,36,2,0 41 | 39,10,14,62,10,15 42 | 40,2,2,63,8,0 43 | 41,15,0,85,18,59 44 | 42,7,4,10,1,0 45 | 43,28,4,338,47,189 46 | 44,3,2,25,15,0 47 | 45,4,7,100,27,0 48 | 46,1,4,0,0,0 49 | 47,13,16,48,3,33 50 | 48,5,10,66,24,0 51 | 49,0,0,4,15,0 52 | 50,1,0,0,0,0 53 | 51,9,13,10,35,0 54 | 52,3,3,20,23,0 55 | 53,2,1,15,6,0 56 | 54,2,5,22,26,0 57 | 55,6,2,59,74,0 58 | 56,9,3,91,5,93 59 | 57,2,1,0,0,0 60 | 58,36,17,143,71,145 61 | 59,6,4,16,55,1 62 | 60,5,4,0,0,0 63 | 61,0,0,1,0,0 64 | 62,6,2,222,28,1 65 | 63,7,22,58,15,60 66 | 64,5,5,6,0,0 67 
| 65,29,8,267,14,270 68 | 66,0,0,0,0,0 69 | 67,10,7,30,4,26 70 | 68,4,3,3,0,0 71 | 69,4,2,46,31,14 72 | 70,9,11,15,636,0 73 | 71,3,2,21,18,0 74 | 72,13,3,56,37,20 75 | 73,3,0,20,6,8 76 | 74,1,2,0,4,0 77 | 75,3,1,0,34,0 78 | 76,5,3,0,0,0 79 | 77,20,11,136,37,84 80 | 78,2,2,6,0,5 81 | 79,11,5,37,0,26 82 | 80,0,12,0,19,0 83 | 81,1,3,15,0,0 84 | 82,0,0,37,17,0 85 | 83,5,2,44,44,0 86 | 84,1,1,0,12,0 87 | 85,0,1,0,0,0 88 | 86,2,2,4,2,2 89 | 87,7,1,43,16,0 90 | 88,1,2,1,24,0 91 | 89,1,1,1,0,0 92 | 90,1,0,17,12,0 93 | 91,17,0,229,1,157 94 | 92,3,5,1,1,1 95 | 93,24,21,144,99,62 96 | 94,4,4,17,0,0 97 | 95,5,0,19,58,0 98 | 96,9,3,167,5,0 99 | 97,0,0,19,11,0 100 | 98,0,6,0,0,0 101 | 99,3,1,13,68,0 102 | 100,12,3,50,4,27 103 | 101,10,0,85,56,0 104 | 102,13,0,92,3,73 105 | 103,3,0,0,0,0 106 | 104,3,3,38,43,0 107 | 105,2,0,33,17,3 108 | 106,0,4,0,5,0 109 | 107,7,11,3,1,2 110 | 108,4,4,0,3,0 111 | 109,4,2,35,4,27 112 | 110,16,8,95,13,80 113 | 111,23,9,123,40,80 114 | 112,11,20,387,190,0 115 | 113,0,9,10,3,0 116 | 114,5,7,12,0,9 117 | 115,0,13,0,4,0 118 | 116,0,0,4,9,0 119 | 117,0,1,1,0,0 120 | 118,1,1,10,43,0 121 | 119,7,7,19,2,11 122 | 120,2,0,5,1,3 123 | 121,6,1,90,7,67 124 | 122,11,43,40,15,0 125 | 123,0,0,1,0,0 126 | 124,19,8,104,84,46 127 | 125,6,1,36,29,8 128 | 126,35,14,127,7,117 129 | 127,1,10,15,65,0 130 | 128,11,22,53,43,0 131 | 129,0,3,0,43,0 132 | 130,13,30,43,17,0 133 | 131,5,11,25,73,2 134 | 132,2,2,0,0,0 135 | 133,1,1,2,2,0 136 | 134,3,2,15,7,0 137 | 135,1,0,10,4,0 138 | 136,1,9,3,22,0 139 | 137,6,3,0,0,0 140 | 138,1,0,25,12,0 141 | 139,2,4,9,0,8 142 | 140,1,2,0,0,0 143 | 141,5,28,0,16,0 144 | 142,1,1,3,5,0 145 | 143,0,1,0,0,0 146 | 144,8,0,18,0,20 147 | 145,4,9,60,70,0 148 | 146,4,1,25,23,5 149 | 147,6,4,44,4,26 150 | 148,0,3,0,6,0 151 | 149,2,5,0,2,0 152 | 150,0,2,0,0,0 153 | 151,6,4,21,0,16 154 | 152,5,4,0,0,0 155 | 153,12,27,0,0,0 156 | 154,6,13,62,25,0 157 | 155,23,11,114,5,65 158 | 156,2,1,11,0,0 159 | 157,1,1,15,0,0 160 | 158,20,0,90,3,87 161 | 159,0,6,0,0,0 
162 | 160,12,5,199,65,30 163 | 161,2,5,5,0,0 164 | 162,4,1,58,37,0 165 | 163,11,8,89,15,72 166 | 164,32,15,248,14,169 167 | 165,2,5,4,3,0 168 | 166,5,5,56,25,0 169 | 167,1,0,0,0,0 170 | 168,6,4,5,101,0 171 | 169,4,2,35,14,11 172 | 170,10,0,82,4,68 173 | 171,0,23,3,37,0 174 | 172,2,0,6,23,0 175 | 173,4,0,6,6,0 176 | 174,4,0,35,0,29 177 | 175,2,1,4,18,0 178 | 176,17,9,92,6,56 179 | 177,2,10,0,8,0 180 | 178,2,0,2,2,0 181 | 179,2,5,0,1,0 182 | 180,3,1,7,4,0 183 | 181,6,6,49,38,2 184 | 182,1,2,13,10,0 185 | 183,6,7,31,28,0 186 | 184,6,16,19,7,0 187 | 185,8,1,13,0,8 188 | 186,3,5,112,146,0 189 | 187,56,14,592,8,434 190 | 188,1,3,10,19,0 191 | 189,64,40,382,23,135 192 | 190,1,0,2,6,0 193 | 191,1,5,0,0,0 194 | 192,1,0,0,0,0 195 | 193,2,1,29,15,0 196 | 194,1,0,0,0,0 197 | 195,4,0,63,24,0 198 | 196,12,8,106,11,64 199 | 197,7,3,79,6,62 200 | 198,3,2,0,0,0 201 | 199,1,0,0,0,0 202 | 200,36,3,56,2,5 203 | 201,9,2,44,13,33 204 | 202,3,0,11,23,0 205 | 203,8,8,43,59,2 206 | 204,4,8,1,6,1 207 | 205,14,1,135,10,68 208 | 206,0,5,1,19,1 209 | 207,1,1,3,4,0 210 | 208,4,3,0,0,0 211 | 209,4,9,0,1,0 212 | 210,2,9,1,15,0 213 | 211,19,5,185,20,243 214 | 212,4,6,16,0,0 215 | 213,11,12,26,44,6 216 | 214,6,0,18,5,5 217 | 215,0,4,0,0,0 218 | 216,6,0,29,0,33 219 | 217,16,4,140,12,142 220 | 218,0,2,1,1,1 221 | 219,11,7,37,39,0 222 | 220,0,1,0,0,0 223 | 221,5,1,26,9,2 224 | 222,2,0,0,0,0 225 | 223,0,0,1,0,0 226 | 224,12,1,58,3,68 227 | 225,0,1,22,6,0 228 | 226,17,8,207,3,144 229 | 227,5,1,52,111,0 230 | 228,1,9,61,38,0 231 | 229,1,1,0,1,0 232 | 230,0,0,16,5,0 233 | 231,7,4,7,1,1 234 | 232,9,2,21,1,16 235 | 233,1,0,0,0,0 236 | 234,0,2,46,25,0 237 | 235,31,3,278,99,135 238 | 236,3,0,29,44,0 239 | 237,0,1,29,38,0 240 | 238,3,3,8,1,3 241 | 239,4,10,8,17,0 242 | 240,25,11,160,23,114 243 | 241,3,0,101,54,0 244 | 242,1,0,0,0,0 245 | 243,4,0,0,0,0 246 | 244,1,1,3,13,0 247 | 245,7,2,74,29,8 248 | 246,5,3,24,31,1 249 | 247,3,1,34,18,0 250 | 248,4,0,64,34,0 251 | 249,4,8,13,38,0 252 | 250,2,4,2,0,0 253 | 
251,1,0,5,31,0 254 | 252,23,4,105,15,67 255 | 253,2,0,0,0,0 256 | 254,2,9,7,1,6 257 | 255,0,3,0,0,0 258 | 256,15,0,92,54,59 259 | 257,1,11,23,53,9 260 | 258,19,11,350,266,1 261 | 259,3,3,32,20,0 262 | 260,3,1,8,51,0 263 | 261,3,4,29,9,2 264 | 262,3,0,34,31,5 265 | 263,12,2,57,32,23 266 | 264,0,0,3,3,0 267 | 265,1,8,0,2,0 268 | 266,4,0,39,35,0 269 | 267,14,8,62,14,44 270 | 268,3,8,10,1,0 271 | 269,3,2,0,0,0 272 | 270,3,0,21,29,0 273 | 271,30,7,289,34,217 274 | 272,0,2,0,4,0 275 | 273,14,3,296,7,250 276 | 274,1,0,0,0,0 277 | 275,0,1,25,40,0 278 | 276,6,3,37,0,0 279 | 277,5,6,68,102,0 280 | 278,5,0,18,19,2 281 | 279,1,0,8,4,0 282 | 280,12,5,19,3,29 283 | 281,3,3,51,20,0 284 | 282,0,3,3,0,0 285 | 283,4,7,26,12,0 286 | 284,2,0,16,15,0 287 | 285,0,2,0,0,0 288 | 286,2,2,0,0,0 289 | 287,10,8,1,0,0 290 | 288,4,0,12,15,1 291 | 289,0,2,38,15,0 292 | 290,2,0,0,0,0 293 | 291,39,20,275,23,237 294 | 292,2,3,23,15,1 295 | 293,6,4,38,2,33 296 | 294,1,1,2,5,0 297 | 295,16,6,51,29,26 298 | 296,1,6,0,2,0 299 | 297,3,40,24,39,0 300 | 298,22,10,172,18,184 301 | 299,27,10,209,32,55 302 | 300,6,3,7,11,0 303 | 301,6,3,67,5,35 304 | 302,1,0,0,34,0 305 | 303,1,2,0,24,0 306 | 304,2,3,0,82,0 307 | 305,1,2,3,0,0 308 | 306,14,4,71,45,17 309 | 307,8,0,17,6,17 310 | 308,0,2,4,2,0 311 | 309,5,0,61,23,9 312 | 310,4,8,95,30,0 313 | 311,2,1,14,9,3 314 | 312,1,1,22,14,0 315 | 313,5,3,83,30,0 316 | 314,1,2,0,0,0 317 | 315,1,2,0,0,0 318 | 316,5,0,0,1,0 319 | 317,1,2,34,16,0 320 | 318,2,9,33,42,1 321 | 319,17,12,145,68,49 322 | 320,16,4,227,39,11 323 | 321,2,3,0,1,0 324 | 322,8,3,63,22,10 325 | 323,4,0,1,0,1 326 | 324,2,3,12,33,0 327 | 325,0,2,2,80,0 328 | 326,1,3,69,80,0 329 | 327,0,2,4,0,0 330 | 328,5,4,18,5,13 331 | 329,18,3,72,6,49 332 | 330,1,0,25,67,0 333 | 331,6,18,10,0,0 334 | 332,10,4,57,27,21 335 | 333,0,15,0,74,0 336 | 334,9,14,21,39,9 337 | 335,6,36,19,12,0 338 | 336,10,2,77,18,27 339 | 337,0,3,4,86,0 340 | 338,36,7,275,41,249 341 | 339,2,2,5,0,0 342 | 340,21,4,181,23,145 343 | 341,0,1,0,1,0 
344 | 342,3,0,24,16,0 345 | 343,2,1,4,2,3 346 | 344,2,0,18,10,0 347 | 345,1,0,16,2,0 348 | 346,7,1,46,39,0 349 | 347,10,55,2,23,0 350 | 348,0,2,0,17,0 351 | 349,24,29,21,11,0 352 | 350,4,13,0,0,0 353 | 351,2,0,0,0,0 354 | 352,2,1,0,0,0 355 | 353,5,4,78,29,0 356 | 354,2,1,31,4,0 357 | 355,0,4,0,1,0 358 | 356,0,0,1,0,0 359 | 357,4,1,0,0,0 360 | 358,0,3,0,38,0 361 | 359,10,4,54,12,46 362 | 360,2,4,23,77,0 363 | 361,27,1,146,38,54 364 | 362,18,1,127,5,99 365 | 363,1,7,4,53,1 366 | 364,6,0,24,0,45 367 | 365,4,7,0,0,0 368 | 366,1,1,2,2,0 369 | 367,15,4,99,4,55 370 | 368,6,0,37,3,34 371 | 369,8,43,0,24,0 372 | 370,9,4,43,7,16 373 | 371,15,2,62,5,49 374 | 372,29,6,136,33,82 375 | 373,12,6,69,54,6 376 | 374,0,0,36,43,0 377 | 375,4,0,0,0,0 378 | 376,2,3,24,4,0 379 | 377,8,5,27,2,29 380 | 378,5,1,43,1,0 381 | 379,4,7,23,45,5 382 | 380,2,0,83,31,0 383 | 381,1,0,0,0,0 384 | 382,1,0,12,22,0 385 | 383,8,10,70,124,0 386 | 384,7,5,61,19,0 387 | 385,2,2,0,1,0 388 | 386,4,3,15,8,10 389 | 387,2,0,0,0,0 390 | 388,2,0,12,24,0 391 | 389,0,1,42,14,0 392 | 390,4,5,4,2,3 393 | 391,7,3,8,0,8 394 | 392,2,0,15,19,0 395 | 393,7,6,33,2,21 396 | 394,1,6,0,0,0 397 | 395,2,1,25,16,0 398 | 396,1,11,0,2,0 399 | 397,2,1,2,0,0 400 | 398,0,7,0,11,0 401 | 399,6,13,30,70,0 402 | 400,3,1,7,0,0 403 | 401,8,8,33,32,16 404 | 402,6,11,300,71,0 405 | 403,1,17,21,195,2 406 | 404,2,0,12,1,6 407 | 405,1,2,4,10,0 408 | 406,10,12,141,47,3 409 | 407,5,2,4,0,0 410 | 408,9,5,45,2,34 411 | 409,4,0,15,7,1 412 | 410,1,3,0,0,0 413 | 411,17,0,329,345,0 414 | 412,5,13,105,61,0 415 | 413,8,2,0,0,0 416 | 414,3,7,10,13,0 417 | 415,4,0,20,2,0 418 | 416,2,34,7,341,0 419 | 417,6,2,4,0,0 420 | 418,11,6,139,150,0 421 | 419,3,1,0,0,0 422 | 420,8,6,13,4,16 423 | 421,1,3,0,0,0 424 | 422,10,0,93,19,90 425 | 423,0,4,0,4,0 426 | 424,1,0,34,18,0 427 | 425,0,1,14,16,0 428 | 426,2,5,6,32,5 429 | 427,1,3,4,28,0 430 | 428,1,0,1,1,0 431 | 429,3,18,19,25,0 432 | 430,4,0,10,6,1 433 | 431,0,2,5,8,0 434 | 432,4,8,23,23,0 435 | 433,0,3,5,41,0 436 | 
434,7,3,21,101,0 437 | 435,0,4,1,19,0 438 | 436,2,1,0,0,0 439 | 437,36,1,215,4,192 440 | 438,15,6,169,0,123 441 | 439,2,3,21,16,0 442 | 440,2,13,4,7,0 443 | 441,1,0,8,16,0 444 | 442,1,2,5,23,0 445 | 443,4,1,12,35,0 446 | 444,1,4,0,0,0 447 | 445,1,0,11,16,0 448 | 446,9,1,62,19,32 449 | 447,0,4,0,2,0 450 | 448,1,1,2,1,1 451 | 449,1,6,0,0,0 452 | 450,1,0,0,0,0 453 | 451,2,3,44,29,0 454 | 452,35,7,207,9,100 455 | 453,2,1,9,20,0 456 | 454,2,5,22,35,1 457 | 455,3,0,49,17,2 458 | 456,3,3,5,0,0 459 | 457,2,1,5,13,0 460 | 458,1,9,7,6,0 461 | 459,15,7,58,3,30 462 | 460,1,1,22,21,0 463 | 461,1,2,17,40,0 464 | 462,15,6,59,6,59 465 | 463,6,0,57,7,7 466 | 464,0,1,0,16,0 467 | 465,10,2,94,172,0 468 | 466,3,1,9,3,7 469 | 467,1,4,0,2,0 470 | 468,8,14,39,10,0 471 | 469,0,2,8,7,0 472 | 470,0,0,1,1,0 473 | 471,4,2,7,4,0 474 | 472,6,4,45,15,0 475 | 473,2,0,19,57,0 476 | 474,1,2,7,26,0 477 | 475,3,6,61,85,0 478 | 476,9,0,46,39,0 479 | 477,9,5,50,16,14 480 | 478,8,9,49,13,44 481 | 479,5,5,0,0,0 482 | 480,3,2,30,9,1 483 | 481,3,2,4,45,0 484 | 482,7,1,44,7,33 485 | 483,1,3,3,31,0 486 | 484,6,40,0,5,0 487 | 485,10,6,45,35,0 488 | 486,3,2,0,1,1 489 | 487,3,1,0,0,0 490 | 488,6,11,145,78,0 491 | 489,2,0,2,16,0 492 | 490,6,5,14,4,0 493 | 491,0,1,9,4,0 494 | 492,2,2,9,21,1 495 | 493,0,21,0,65,0 496 | 494,24,44,72,19,53 497 | 495,5,0,20,2,20 498 | 496,0,2,5,0,4 499 | 497,1,2,0,0,0 500 | 498,0,1,1,19,0 501 | 499,3,3,2,0,0 502 | 500,4,5,0,2,0 503 | 501,2,0,2,3,0 504 | 502,6,3,59,32,0 505 | 503,0,5,6,31,0 506 | 504,2,0,0,3,0 507 | 505,1,9,0,42,0 508 | 506,0,0,9,21,0 509 | 507,8,6,27,47,0 510 | 508,0,0,4,1,0 511 | 509,23,11,87,63,44 512 | 510,2,1,0,0,0 513 | 511,3,4,16,27,0 514 | 512,1,4,0,0,0 515 | 513,7,0,8,2,7 516 | 514,6,3,260,109,0 517 | 515,0,8,2,1,0 518 | 516,17,6,93,14,61 519 | 517,2,1,4,0,0 520 | 518,4,7,59,31,0 521 | 519,6,3,44,17,13 522 | 520,6,2,21,0,0 523 | 521,1,0,7,5,0 524 | 522,5,3,67,43,5 525 | 523,16,4,64,13,16 526 | 524,5,4,1,1,0 527 | 525,3,0,9,35,0 528 | 526,4,10,20,19,13 529 | 
527,2,0,29,6,1 530 | 528,8,0,30,4,24 531 | 529,5,3,16,31,0 532 | 530,3,1,24,16,0 533 | 531,0,9,5,6,0 534 | 532,2,0,0,0,0 535 | 533,2,2,0,2,0 536 | 534,0,1,0,0,0 537 | 535,7,0,81,36,7 538 | 536,15,1,52,4,44 539 | 537,1,4,2,8,0 540 | 538,10,3,162,9,115 541 | 539,2,5,0,0,0 542 | 540,22,21,40,11,1 543 | 541,4,1,12,2,0 544 | 542,3,2,0,0,0 545 | 543,4,2,0,6,0 546 | 544,4,5,8,2,0 547 | 545,3,14,36,21,0 548 | 546,3,1,16,47,0 549 | 547,2,2,18,34,0 550 | 548,3,0,5,4,0 551 | 549,1,4,31,0,0 552 | 550,2,0,11,14,0 553 | 551,1,0,13,13,0 554 | 552,2,3,52,45,0 555 | 553,6,3,3,1,0 556 | 554,2,9,9,1,1 557 | 555,5,3,20,28,0 558 | 556,13,17,133,32,102 559 | 557,0,3,0,3,0 560 | 558,14,3,87,21,39 561 | 559,2,2,29,41,0 562 | 560,1,2,0,38,0 563 | 561,3,0,33,42,0 564 | 562,3,4,31,68,0 565 | 563,6,5,68,6,88 566 | 564,1,0,0,0,0 567 | 565,0,0,6,8,0 568 | 566,2,2,0,0,0 569 | 567,5,3,66,30,1 570 | 568,4,0,9,19,2 571 | 569,19,9,113,24,105 572 | 570,2,3,0,0,0 573 | 571,3,0,30,13,0 574 | 572,0,0,11,9,0 575 | 573,2,5,0,0,0 576 | 574,4,4,22,9,7 577 | 575,2,1,19,3,0 578 | 576,3,12,24,24,0 579 | 577,3,1,117,97,0 580 | 578,6,3,26,0,16 581 | 579,0,0,24,33,0 582 | 580,3,5,0,0,0 583 | 581,21,10,130,15,16 584 | 582,1,4,0,0,0 585 | 583,2,2,3,1,0 586 | 584,7,0,4,0,3 587 | 585,2,4,5,20,0 588 | 586,3,0,1,5,1 589 | 587,31,14,200,77,274 590 | 588,5,31,13,11,0 591 | 589,1,1,21,30,0 592 | 590,47,16,322,41,300 593 | 591,22,14,134,116,43 594 | 592,1,1,0,0,0 595 | 593,1,2,32,5,0 596 | 594,0,1,1,0,0 597 | 595,2,0,30,15,0 598 | 596,5,4,30,47,0 599 | 597,0,0,1,0,0 600 | 598,4,5,32,15,0 601 | 599,3,8,1,5,0 602 | 600,0,1,19,27,0 603 | 601,9,4,38,92,0 604 | 602,3,17,0,0,0 605 | 603,2,3,1,0,0 606 | 604,12,5,105,204,2 607 | 605,4,6,2,5,0 608 | 606,16,5,288,11,211 609 | 607,2,3,15,4,0 610 | 608,2,0,12,24,0 611 | 609,1,5,0,0,0 612 | 610,5,5,17,12,0 613 | 611,2,1,32,13,0 614 | 612,3,0,1,0,0 615 | 613,4,6,15,2,1 616 | 614,1,0,5,10,0 617 | 615,2,1,35,22,0 618 | 616,8,18,79,25,0 619 | 617,14,12,88,47,38 620 | 618,5,23,98,25,0 621 | 
619,6,2,27,6,12 622 | 620,5,6,20,1,21 623 | 621,15,12,53,36,30 624 | 622,6,2,32,1,38 625 | 623,6,0,0,0,0 626 | 624,7,6,38,1,20 627 | 625,28,5,149,11,129 628 | 626,1,0,0,0,0 629 | 627,1,1,0,0,0 630 | 628,11,4,18,1,0 631 | 629,13,12,75,1,57 632 | 630,2,0,0,0,0 633 | 631,2,2,0,0,0 634 | 632,5,3,1,6,0 635 | 633,5,1,39,15,25 636 | 634,1,0,0,0,0 637 | 635,3,1,20,7,0 638 | 636,2,2,34,2,0 639 | 637,6,18,85,17,40 640 | 638,2,2,0,0,0 641 | 639,4,12,31,81,2 642 | 640,8,3,33,8,0 643 | 641,1,2,70,102,0 644 | 642,1,9,0,0,0 645 | 643,1,6,0,29,0 646 | 644,2,2,22,9,0 647 | 645,10,7,73,51,67 648 | 646,1,1,11,3,0 649 | 647,1,2,0,1,0 650 | 648,5,0,56,0,41 651 | 649,2,0,19,49,0 652 | 650,2,25,0,13,0 653 | 651,1,7,11,26,0 654 | 652,0,3,0,0,0 655 | 653,2,3,0,18,0 656 | 654,1,3,0,8,0 657 | 655,5,3,22,1,17 658 | 656,4,6,14,7,9 659 | 657,0,1,0,0,0 660 | 658,5,8,0,0,0 661 | 659,1,1,21,25,0 662 | 660,1,0,1,4,0 663 | 661,2,3,17,49,0 664 | 662,10,6,245,20,0 665 | 663,1,4,9,11,0 666 | 664,18,15,129,67,71 667 | 665,0,9,0,2,0 668 | 666,0,4,4,14,0 669 | 667,4,1,1,0,1 670 | 668,0,3,4,32,0 671 | 669,5,4,16,0,0 672 | 670,0,4,21,19,0 673 | 671,1,1,11,58,0 674 | 672,5,25,0,10,0 675 | 673,1,0,0,16,0 676 | 674,20,3,112,23,118 677 | 675,4,1,5,0,5 678 | 676,16,10,43,6,22 679 | 677,7,6,6,4,2 680 | 678,3,7,19,45,0 681 | 679,15,3,94,16,83 682 | 680,17,5,160,8,162 683 | 681,2,1,24,9,0 684 | 682,1,2,6,9,0 685 | 683,1,2,3,2,0 686 | 684,17,4,88,45,54 687 | 685,6,12,0,0,0 688 | 686,3,36,0,59,0 689 | 687,6,4,72,9,3 690 | 688,0,8,0,13,0 691 | 689,1,0,1,0,0 692 | 690,5,5,34,7,28 693 | 691,1,1,0,0,0 694 | 692,0,0,15,45,0 695 | 693,3,10,7,2,0 696 | 694,3,1,5,1,0 697 | 695,1,0,2,0,2 698 | 696,2,4,0,2,0 699 | 697,1,1,2,0,0 700 | 698,1,1,0,14,0 701 | 699,20,13,39,2,14 702 | 700,9,4,82,9,23 703 | 701,12,10,91,19,158 704 | 702,8,4,25,10,10 705 | 703,7,0,38,52,0 706 | 704,26,6,168,25,105 707 | 705,3,0,19,38,2 708 | 706,2,5,39,18,0 709 | 707,4,22,1,7,0 710 | 708,3,7,0,3,0 711 | 709,1,2,19,50,0 712 | 710,7,3,0,0,0 713 | 
711,4,7,5,3,0 714 | 712,3,0,52,64,0 715 | 713,2,3,10,5,0 716 | 714,13,2,85,7,21 717 | 715,9,1,57,7,35 718 | 716,5,5,53,30,0 719 | 717,1,4,16,2,1 720 | 718,13,3,39,1,42 721 | 719,1,3,15,10,0 722 | 720,8,1,51,7,24 723 | 721,6,1,47,13,0 724 | 722,19,6,164,7,109 725 | 723,8,0,41,5,45 726 | 724,6,40,4,87,0 727 | 725,6,2,68,100,0 728 | 726,3,4,3,19,0 729 | 727,13,6,103,18,66 730 | 728,13,4,104,11,77 731 | 729,1,4,3,29,0 732 | 730,1,3,0,0,0 733 | 731,4,0,12,0,8 734 | 732,4,8,0,0,0 735 | 733,1,0,0,0,0 736 | 734,1,1,0,0,0 737 | 735,0,3,9,0,0 738 | 736,9,36,0,2,0 739 | 737,2,3,0,0,0 740 | 738,1,1,0,8,0 741 | 739,4,6,1,0,1 742 | 740,9,4,20,8,2 743 | 741,32,5,228,38,176 744 | 742,6,1,23,6,14 745 | 743,12,5,113,13,91 746 | 744,6,3,51,498,0 747 | 745,32,10,270,30,183 748 | 746,1,0,11,19,0 749 | 747,17,6,251,17,225 750 | 748,0,6,0,0,0 751 | 749,6,0,24,8,2 752 | 750,5,1,9,7,0 753 | 751,0,7,0,2,0 754 | 752,6,1,16,6,12 755 | 753,8,1,35,19,18 756 | 754,2,0,19,31,0 757 | 755,5,17,51,139,0 758 | 756,0,0,20,34,0 759 | 757,1,0,0,0,0 760 | 758,3,0,12,85,0 761 | 759,19,5,143,13,119 762 | 760,3,15,0,7,0 763 | 761,9,19,53,61,0 764 | 762,2,2,2,0,0 765 | 763,0,1,0,74,0 766 | 764,4,4,26,2,7 767 | 765,0,1,20,19,0 768 | 766,1,1,1,0,0 769 | 767,6,7,53,14,0 770 | 768,2,1,0,0,0 771 | 769,12,0,161,15,130 772 | 770,1,4,24,5,0 773 | 771,7,1,113,28,0 774 | 772,0,1,0,1,0 775 | 773,28,8,213,5,138 776 | 774,20,0,139,12,114 777 | 775,3,4,51,4,49 778 | 776,3,2,25,2,29 779 | 777,2,3,0,0,0 780 | 778,2,3,0,1,0 781 | 779,1,0,0,0,0 782 | 780,0,0,3,3,0 783 | 781,3,1,43,7,0 784 | 782,0,3,95,22,0 785 | 783,17,8,88,18,10 786 | 784,11,5,211,211,0 787 | 785,41,8,288,22,230 788 | 786,1,2,0,0,0 789 | 787,3,4,0,10,0 790 | 788,4,19,12,5,0 791 | 789,3,2,23,1,18 792 | 790,7,2,41,4,36 793 | 791,4,4,37,3,26 794 | 792,10,0,49,1,42 795 | 793,11,5,8,0,0 796 | 794,4,3,89,106,0 797 | 795,3,0,8,0,1 798 | 796,1,0,1,8,0 799 | 797,1,0,0,0,0 800 | 798,2,1,0,1,0 801 | 799,4,12,13,74,3 802 | 800,1,1,0,0,0 803 | 801,9,3,56,30,57 804 | 
802,11,6,48,54,12 805 | 803,21,10,258,40,151 806 | 804,6,1,56,52,0 807 | 805,12,1,60,14,32 808 | 806,5,12,14,4,10 809 | 807,7,0,57,7,56 810 | 808,1,0,0,0,0 811 | 809,3,0,48,43,1 812 | 810,15,4,68,28,20 813 | 811,7,5,12,0,0 814 | 812,3,0,16,3,0 815 | 813,1,3,6,21,0 816 | 814,1,6,0,6,0 817 | 815,5,3,2,4,0 818 | 816,3,0,35,33,0 819 | 817,4,5,26,39,0 820 | 818,1,0,4,7,0 821 | 819,2,0,0,0,0 822 | 820,4,3,44,4,41 823 | 821,1,0,3,2,0 824 | 822,30,10,305,12,210 825 | 823,2,10,29,12,0 826 | 824,2,4,0,0,0 827 | 825,3,1,0,1,0 828 | 826,0,2,3,0,0 829 | 827,8,7,4,0,0 830 | 828,15,9,49,4,35 831 | 829,4,3,15,9,8 832 | 830,2,10,4,32,0 833 | 831,3,2,6,1,0 834 | 832,0,4,0,18,0 835 | 833,22,3,226,43,215 836 | 834,0,7,0,1,0 837 | 835,4,6,1,2,0 838 | 836,2,3,3,1,0 839 | 837,1,0,3,4,0 840 | 838,11,28,2,5,0 841 | 839,4,1,1,1,0 842 | 840,2,3,47,14,0 843 | 841,3,6,0,26,0 844 | 842,2,4,6,4,0 845 | 843,3,0,6,4,0 846 | 844,9,7,5,4,0 847 | 845,8,1,9,16,0 848 | 846,1,1,13,3,0 849 | 847,27,3,227,46,151 850 | 848,1,1,5,23,0 851 | 849,6,1,24,5,10 852 | 850,6,13,13,0,0 853 | 851,1,2,10,8,0 854 | 852,6,27,3,10,3 855 | 853,0,4,0,0,0 856 | 854,1,2,0,1,0 857 | 855,3,2,24,78,0 858 | 856,8,4,14,5,0 859 | 857,1,10,0,3,0 860 | 858,6,12,20,67,0 861 | 859,3,6,8,33,0 862 | 860,3,2,20,11,7 863 | 861,4,6,110,0,0 864 | 862,4,2,18,99,1 865 | 863,1,1,0,0,0 866 | 864,14,2,207,20,172 867 | 865,1,0,0,1,1 868 | 866,2,0,4,14,0 869 | 867,0,2,5,11,0 870 | 868,5,11,62,94,0 871 | 869,2,3,0,0,0 872 | 870,2,0,7,0,0 873 | 871,23,10,196,43,176 874 | 872,7,11,132,36,0 875 | 873,1,0,30,77,0 876 | 874,18,7,118,47,117 877 | 875,4,8,60,14,0 878 | 876,1,0,0,0,0 879 | 877,1,3,7,3,0 880 | 878,1,1,16,36,0 881 | 879,0,0,28,75,0 882 | 880,2,0,9,23,0 883 | 881,43,16,169,59,117 884 | 882,2,4,6,16,0 885 | 883,1,3,0,0,0 886 | 884,3,0,59,13,7 887 | 885,3,0,21,12,0 888 | 886,2,0,30,0,15 889 | 887,1,2,0,0,0 890 | 888,1,0,5,4,0 891 | 889,11,1,32,4,15 892 | 890,3,0,10,3,0 893 | 891,7,0,27,5,22 894 | 892,16,6,144,3,124 895 | 893,1,3,30,35,8 896 | 
894,2,2,41,39,0 897 | 895,3,2,52,15,7 898 | 896,4,0,0,0,0 899 | 897,0,4,21,11,0 900 | 898,10,4,79,6,45 901 | 899,0,3,0,2,0 902 | 900,42,5,682,26,529 903 | 901,3,11,123,31,0 904 | 902,0,2,0,0,0 905 | 903,4,5,30,14,1 906 | 904,1,1,0,0,0 907 | 905,3,2,6,0,0 908 | 906,4,2,20,7,2 909 | 907,3,3,32,55,0 910 | 908,4,2,0,0,0 911 | 909,5,4,30,9,0 912 | 910,3,16,0,5,0 913 | 911,4,5,149,50,0 914 | 912,30,2,244,13,205 915 | 913,1,0,20,33,0 916 | 914,3,23,15,115,0 917 | 915,0,0,2,5,0 918 | 916,1,3,12,13,0 919 | 917,10,8,32,110,0 920 | 918,4,2,25,7,0 921 | 919,5,3,41,64,3 922 | 920,11,2,280,212,2 923 | 921,1,0,25,51,0 924 | 922,4,4,7,6,0 925 | 923,8,2,106,11,86 926 | 924,23,3,131,8,125 927 | 925,3,3,18,62,0 928 | 926,2,0,3,32,0 929 | 927,1,0,17,6,0 930 | 928,1,0,0,22,0 931 | 929,3,0,23,50,0 932 | 930,8,0,36,8,21 933 | 931,3,2,20,17,0 934 | 932,5,8,21,67,0 935 | 933,6,7,68,70,21 936 | 934,4,2,36,17,0 937 | 935,0,1,0,0,0 938 | 936,1,0,1,2,0 939 | 937,4,10,26,10,0 940 | 938,0,0,4,0,0 941 | 939,16,3,98,8,66 942 | 940,2,4,3,74,0 943 | 941,6,2,47,2,39 944 | 942,1,6,0,37,0 945 | 943,1,1,24,26,0 946 | 944,1,4,1,0,0 947 | 945,6,0,79,6,45 948 | 946,3,1,34,33,0 949 | 947,6,4,29,6,24 950 | -------------------------------------------------------------------------------- /dataset.py: -------------------------------------------------------------------------------- 1 | """ Kaggle Sealion Pytorch Dataset 2 | Pytorch Dataset code for patched based training and prediction of the 3 | NOAA Fishes Sea Lion counting Kaggle data. 4 | 5 | Dataset code generates or loads targets for density and counception 6 | based counting models. 
7 | """ 8 | from collections import defaultdict 9 | import cv2 10 | import torch 11 | import torch.utils.data as data 12 | from torch.utils.data.sampler import Sampler 13 | from torchvision import datasets, transforms 14 | from PIL import Image 15 | import random 16 | import pandas as pd 17 | import numpy as np 18 | import os 19 | import functools 20 | import time 21 | import mytransforms 22 | import utils 23 | 24 | IMG_EXTENSIONS = ['.jpg', '.jpeg', '.png'] 25 | CATEGORIES = ["adult_males", "subadult_males", "adult_females", "juveniles", "pups"] 26 | CATEGORY_MAP = {"adult_males": 0, "subadult_males": 1, "adult_females": 2, "juveniles": 3, "pups": 4} 27 | TARGET_TYPES = ['density', 'countception'] 28 | 29 | 30 | def to_tensor(arr): 31 | assert(isinstance(arr, np.ndarray)) 32 | t = torch.from_numpy(arr.transpose((2, 0, 1))) 33 | if isinstance(t, torch.ByteTensor): 34 | return t.float().div(255) 35 | return t 36 | 37 | 38 | def find_inputs(folder, types=IMG_EXTENSIONS, extract_extra=False): 39 | inputs = [] 40 | for root, _, files in os.walk(folder, topdown=False): 41 | for rel_filename in files: 42 | base, ext = os.path.splitext(rel_filename) 43 | if ext.lower() in types: 44 | abs_filename = os.path.join(root, rel_filename) 45 | if extract_extra: 46 | img = Image.open(abs_filename) 47 | if not img: 48 | continue 49 | w, h = img.size 50 | info = dict(filename=abs_filename, width=w, height=h, xmin=0, ymin=0, xmax=w, ymax=h) 51 | else: 52 | info = dict(filename=abs_filename) 53 | inputs.append((int(base), info)) 54 | if inputs: 55 | return zip(*sorted(inputs, key=lambda k: k[0])) 56 | else: 57 | return [], [] 58 | 59 | 60 | def find_targets(folder, input_ids, types=IMG_EXTENSIONS): 61 | inputs_set = set(input_ids) 62 | targets = defaultdict(dict) 63 | for root, _, files in os.walk(folder, topdown=False): 64 | for rel_filename in files: 65 | base, ext = os.path.splitext(rel_filename) 66 | if ext.lower() in types: 67 | split = base.split('-') 68 | fid = int(split[0]) 69 
| if fid in inputs_set: 70 | abs_filename = os.path.join(root, rel_filename) 71 | if len(split) > 2: 72 | targets[fid][int(split[2])] = abs_filename 73 | else: 74 | targets[fid] = abs_filename 75 | return targets 76 | 77 | 78 | def gen_target_gauss(coords, size, sigma=5, kernel_size=(21, 21), factor=1024.): 79 | w, h = size 80 | num_outputs = len(CATEGORIES) 81 | target_img = np.zeros(shape=(h, w, num_outputs), dtype=np.float32) 82 | for cat_idx, cat_name in enumerate(CATEGORIES): 83 | xy = coords[coords[:, 2] == cat_idx][:, :2] 84 | for x, y in xy: 85 | target_img[y, x, cat_idx] += factor 86 | target_img = cv2.GaussianBlur(target_img, kernel_size, sigma, borderType=cv2.BORDER_CONSTANT) 87 | return target_img 88 | 89 | 90 | def gen_target_countception(coords, size, subpatch_size=32, stride=1, max_count=0, dtype=np.float32): 91 | w, h = size 92 | pad = (subpatch_size - 1) // 2 93 | w = (w + 2 * pad) // stride 94 | h = (h + 2 * pad) // stride 95 | #print(size, w, h) 96 | num_outputs = len(CATEGORIES) 97 | coords_pad = coords.copy() 98 | coords_pad[:, :2] = coords[:, :2] + [subpatch_size, subpatch_size] 99 | target_img = np.zeros(shape=(h, w, num_outputs), dtype=dtype) 100 | for x in range(w): 101 | for y in range(h): 102 | subpatch_points = utils.crop_points(coords, x * stride, y * stride, subpatch_size, subpatch_size) 103 | for p in subpatch_points: 104 | target_img[y][x][p[2]] += dtype(1) 105 | #print(target_img.sum(axis=(0, 1))/(subpatch_size**2), target_img.max()) 106 | if max_count > 0: 107 | target_img = np.clip(target_img, 0, max_count) 108 | return target_img 109 | 110 | 111 | def gen_mask(input_img, dotted_file): 112 | img_dotted = cv2.imread(dotted_file) 113 | mask = cv2.cvtColor(img_dotted, cv2.COLOR_BGR2GRAY) 114 | _, mask = cv2.threshold(mask, 10, 255, cv2.THRESH_BINARY) 115 | img_masked = cv2.bitwise_and(input_img, input_img, mask=mask) 116 | return img_masked, mask 117 | 118 | 119 | class ImagePatchIndex: 120 | def __init__(self, image_index, 
patch_index=0): 121 | self.image_index = image_index 122 | self.patch_index = patch_index 123 | 124 | 125 | class IndexedPatchSampler(Sampler): 126 | """Samples patches across images sequentially by index in raster order. 127 | """ 128 | 129 | def __init__(self, data_source): 130 | self.num_images = len(data_source) 131 | if data_source.patch_count: 132 | self.num_patches = data_source.patch_count 133 | self.patch_index = data_source.patch_index 134 | else: 135 | # fallback to indexing whole images from dataset 136 | print('Warning: Data source has no patch information, falling back to whole image indexing.') 137 | self.num_patches = 0 138 | self.patch_index = [] 139 | 140 | def __iter__(self): 141 | if self.num_patches: 142 | for i in range(self.num_images): 143 | for j in self.patch_index[i]: 144 | yield ImagePatchIndex(i, j) 145 | else: 146 | for i in range(self.num_images): 147 | yield i 148 | 149 | def __len__(self): 150 | return self.num_patches if self.num_patches else self.num_images 151 | 152 | 153 | class RandomPatchSampler(Sampler): 154 | """Oversamples random patches from images in random order. 155 | Repeats the same image index multiple times in a row to sample 'repeat' times 156 | from the same image for big read efficiency gains. 157 | """ 158 | def __init__(self, data_source, oversample=32, repeat=1): 159 | self.oversample = oversample//repeat * repeat 160 | self.repeat = repeat 161 | self.num_samples = len(data_source) 162 | 163 | def __iter__(self): 164 | # There are simpler/more compact ways of doing this, but why not have a somewhat 165 | # meaningful fake patch index? 
166 | for to in range(self.oversample//self.repeat): 167 | samples = torch.randperm(self.num_samples).long() 168 | for image_index in samples: 169 | for ti in range(self.repeat): 170 | fake_patch_index = to * self.repeat + ti 171 | yield ImagePatchIndex(image_index, fake_patch_index) 172 | 173 | def __len__(self): 174 | return self.num_samples * self.oversample 175 | 176 | 177 | class SealionDataset(data.Dataset): 178 | def __init__( 179 | self, 180 | input_root, 181 | target_root='', 182 | counts_file='', 183 | coords_file='', 184 | processing_file='', 185 | train=True, 186 | patch_size=(256, 256), 187 | patch_stride=128, 188 | prescale=0.0, 189 | generate_target=True, 190 | target_type='density', 191 | per_image_norm=False, 192 | num_logits=0, 193 | transform=None, 194 | target_transform=None): 195 | 196 | extract_extra = False if os.path.exists(processing_file) else True 197 | input_ids, input_infos = find_inputs( 198 | input_root, types=['.jpg'], extract_extra=extract_extra) 199 | if len(input_ids) == 0: 200 | raise(RuntimeError("Found 0 images in : " + input_root)) 201 | self.input_index = input_ids 202 | 203 | self.patch_index = [[]] * len(input_ids) 204 | self.patch_count = 0 205 | self.patch_size = patch_size 206 | self.patch_stride = patch_stride 207 | self.prescale = prescale if prescale != 1.0 else 0.0 208 | assert target_type in TARGET_TYPES 209 | self.target_type = target_type 210 | self.num_logits = num_logits 211 | if train and num_logits: 212 | assert target_type == 'countception' 213 | self.generate_target = generate_target # generate on the fly instead of loading 214 | 215 | self.data_by_id = dict() 216 | for index, (k, v) in enumerate(zip(input_ids, input_infos)): 217 | if 'width' in v: 218 | if self.prescale: 219 | v = self._apply_prescale(v, self.prescale) 220 | patch_info = self._calc_patch_info(v) 221 | num_patches = patch_info['num'] 222 | self.patch_index[index] = list(range(num_patches)) 223 | self.patch_count += num_patches 224 | 
v['patches'] = patch_info 225 | v['index'] = index 226 | self.data_by_id[k] = v 227 | 228 | self.has_targets = False 229 | if os.path.exists(target_root): 230 | targets = find_targets(target_root, input_ids, types=['.npz']) 231 | if len(targets): 232 | for k, v in targets.items(): 233 | self.data_by_id[k]['target'] = v 234 | self.has_targets = True 235 | else: 236 | raise (RuntimeError("Found 0 targets in : " + target_root)) 237 | 238 | if train: 239 | assert self.has_targets 240 | self.train = train 241 | 242 | if counts_file: 243 | counts_df = pd.read_csv(counts_file).rename(columns=CATEGORY_MAP) 244 | counts_df.drop(['train_id'], 1, inplace=True) 245 | for k, v in counts_df.to_dict(orient='index').items(): 246 | if k in self.data_by_id: 247 | d = self.data_by_id[k] 248 | d['counts_by_cat'] = v 249 | d['count'] = sum(v.values()) 250 | 251 | if processing_file: 252 | process_df = pd.read_csv(processing_file, index_col=False) 253 | cols = ['xmin', 'ymin', 'xmax', 'ymax', 'width', 'height'] 254 | process_df[cols] = process_df[cols].astype(int) 255 | process_df['train_id'] = process_df.filename.map(lambda x: int(os.path.splitext(x)[0])) 256 | process_df.set_index(['train_id'], inplace=True) 257 | for k, v in process_df[cols].to_dict(orient='index').items(): 258 | if k in self.data_by_id: 259 | d = self.data_by_id[k] 260 | if self.prescale: 261 | v = self._apply_prescale(v, self.prescale) 262 | patch_info = self._calc_patch_info(v) 263 | num_patches = patch_info['num'] 264 | self.patch_index[d['index']] = list(range(num_patches)) 265 | self.patch_count += num_patches 266 | v['patches'] = patch_info 267 | d.update(v) 268 | #print(d, self.patch_count) 269 | 270 | if coords_file: 271 | coords_df = pd.read_csv(coords_file, index_col=False) 272 | coords_df.x_coord = coords_df.x_coord.astype('int') 273 | coords_df.y_coord = coords_df.y_coord.astype('int') 274 | coords_df.category = coords_df.category.replace(CATEGORY_MAP) 275 | groupby_file = coords_df.groupby(['filename']) 
276 | for file in groupby_file.indices: 277 | coords = groupby_file.get_group(file) 278 | coords = coords[['x_coord', 'y_coord', 'category']].as_matrix() 279 | coords = coords[coords[:, 0].argsort()] 280 | fid = int(os.path.splitext(file)[0]) 281 | if fid in self.data_by_id: 282 | d = self.data_by_id[fid] 283 | if self.prescale: 284 | coords[:, :2] = np.rint(coords[:, :2] * self.prescale) 285 | xy_offset = np.array([d['xmin'], d['ymin']]) 286 | coords[:, :2] = coords[:, :2] + xy_offset 287 | d['coords'] = coords 288 | 289 | self.dataset_mean = [0.43632373, 0.46022959, 0.4618598] 290 | self.dataset_std = [0.17749958, 0.16631233, 0.16272708] 291 | if transform is None: 292 | tfs = [] 293 | if per_image_norm: 294 | tfs.append(mytransforms.NormalizeImg()) 295 | tfs.append(mytransforms.ToTensor()) 296 | if self.train: 297 | tfs.append(mytransforms.ColorJitter()) 298 | if not per_image_norm: 299 | tfs.append(transforms.Normalize(self.dataset_mean, self.dataset_std)) 300 | self.transform = transforms.Compose(tfs) 301 | self.target_transform = target_transform 302 | self.ttime = utils.AverageMeter() 303 | 304 | def _apply_prescale(self, input_info, scale): 305 | for k in ['xmin', 'xmax', 'ymin', 'ymax', 'width', 'height']: 306 | input_info[k] = np.rint(input_info[k] * scale).astype(np.int) 307 | return input_info 308 | 309 | def _calc_patch_info(self, input_info): 310 | x_min = input_info['xmin'] 311 | x_max = input_info['xmax'] 312 | y_min = input_info['ymin'] 313 | y_max = input_info['ymax'] 314 | assert y_max > y_min and x_max > x_min 315 | buffer_w = input_info['width'] 316 | buffer_h = input_info['height'] 317 | box_w = x_max - x_min 318 | box_h = y_max - y_min 319 | # FIXME switch to use bbox constraints 320 | num_patches, patch_cols, patch_rows = utils.calc_num_patches( 321 | buffer_w, buffer_h, self.patch_size, self.patch_stride) 322 | patch_origin_x = 0 323 | patch_origin_y = 0 324 | # if we have a bounding box border, see if we can squeeze an extra box in each 
dimension 325 | # if x_min != 0 or x_max != buffer_w: 326 | # new_w = patch_cols * stride + patch_size[0] 327 | # print(new_w, buffer_w) 328 | # if new_w <= buffer_w: 329 | # patch_cols += 1 330 | # patch_origin_x = x_min - (new_w - box_w) // 2 331 | # if y_min != 0 or y_max != buffer_h: 332 | # new_h = patch_rows * stride + patch_size[1] 333 | # if new_h <= buffer_h: 334 | # patch_rows += 1 335 | # patch_origin_y = y_min - (new_h - box_h) // 2 336 | num_patches = patch_cols * patch_rows 337 | patch_info = dict( 338 | num=num_patches, cols=patch_cols, rows=patch_rows, origin_x=patch_origin_x, origin_y=patch_origin_y) 339 | return patch_info 340 | 341 | @functools.lru_cache(4) 342 | def _load_input(self, input_id): 343 | path = self.data_by_id[input_id]['filename'] 344 | print("Loading %s" % path) 345 | img = cv2.imread(path) 346 | if self.prescale: 347 | dsize = (self.data_by_id[input_id]['width'], self.data_by_id[input_id]['height']) 348 | img = cv2.resize(img, dsize) 349 | img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) 350 | #h, w = img.shape[:2] 351 | #bh = ((h - 1) // self.patch_size[1] + 1) * self.patch_size[1] - h 352 | #bw = ((w - 1) // self.patch_size[0] + 1) * self.patch_size[0] - w 353 | #if bh or bw: 354 | # bwl = bw // 2 355 | # bhl = bh // 2 356 | # print("Adding border...", bh, bw) 357 | # img = cv2.copyMakeBorder(img, bhl, bh-bhl, bwl, bw-bwl, cv2.BORDER_CONSTANT, (0, 0, 0)) 358 | # print('%d -> %d x %d -> %d' % (w, img.shape[1], h, img.shape[0])) 359 | return img 360 | 361 | @functools.lru_cache(4) 362 | def _load_target(self, input_id): 363 | d = self.data_by_id[input_id] 364 | if isinstance(d['target'], dict): 365 | tp = [cv2.imread(d['target'][x], -1) for x in range(5)] 366 | target = np.dstack(tp) 367 | target = target / np.iinfo(np.uint16).max 368 | target = target.astype(np.float32, copy=False) 369 | else: 370 | target = np.load(d['target'])['arr_0'] 371 | target = target.astype(np.float32, copy=False) 372 | return target 373 | 374 | def 
_indexed_patch_center(self, input_id, patch_index): 375 | d = self.data_by_id[input_id] 376 | patch_info = d['patches'] 377 | pc, pr = utils.index_to_rc(patch_index, patch_info['cols']) 378 | cx = pc * self.patch_stride + self.patch_size[0] // 2 379 | cy = pr * self.patch_stride + self.patch_size[1] // 2 380 | return cx, cy 381 | 382 | def _random_patch_center(self, input_id, w, h): 383 | d = self.data_by_id[input_id] 384 | if len(d['coords']) and random.random() < 0.5: 385 | # 50% of the time, randomly pick a point around an actual sealion 386 | cx, cy, _ = d['coords'][random.randint(0, len(d['coords']) - 1)] 387 | cx = cx + random.randint(-self.patch_size[0] // 4, self.patch_size[0] // 4) 388 | cy = cy + random.randint(-self.patch_size[1] // 4, self.patch_size[1] // 4) 389 | else: 390 | # return random center coords for specified patch size within a specified (x, y, w, h) bounding box 391 | pw, ph = self.patch_size[0] // 2, self.patch_size[1] // 2 392 | if 'xmin' in d: 393 | x_min = d['xmin'] 394 | x_max = d['xmax'] 395 | y_min = d['ymin'] 396 | y_max = d['ymax'] 397 | assert x_max <= w and x_max - x_min > 0 398 | assert y_max <= h and y_max - y_min > 0 399 | else: 400 | x_min = 0 401 | x_max = w 402 | y_min = 0 403 | y_max = h 404 | x_min += pw 405 | x_max -= pw 406 | y_min += ph 407 | y_max -= ph 408 | assert x_max - x_min > 0 and y_max - y_min > 0 409 | cx = random.randint(x_min, x_max) 410 | cy = random.randint(y_min, y_max) 411 | return cx, cy 412 | 413 | def _crop_and_transform(self, cx, cy, input_img, target_arr, randomize=False): 414 | target_tile = None 415 | transform_target = False if target_arr is None else True 416 | target_is_coords = True if transform_target and target_arr.shape[1] == 3 else False 417 | 418 | if randomize: 419 | angle = 0. 
420 | hflip = random.random() < 0.5 421 | vflip = random.random() < 0.5 422 | do_rotate = random.random() < 0.25 if not hflip and not vflip else False 423 | if do_rotate: 424 | angle = random.random() * 360 425 | scale = random.uniform(0.5, 1.125) 426 | #print('hflip: %d, vflip: %d, angle: %f, scale: %f' % (hflip, vflip, angle, scale)) 427 | else: 428 | angle = 0. 429 | scale = 1. 430 | hflip = False 431 | vflip = False 432 | 433 | crop_w, crop_h = utils.calc_crop_size(self.patch_size[0], self.patch_size[1], angle, scale) 434 | input_tile = utils.crop_center(input_img, cx, cy, crop_w, crop_h) 435 | if transform_target: 436 | if target_is_coords: 437 | target_points = target_arr.copy() 438 | target_points = utils.crop_points_center(target_points, cx, cy, crop_w, crop_h) 439 | #print(cx, cy, crop_w, crop_h, angle, scale, hflip, vflip) 440 | #print(target_points) 441 | target_points[:, :2] = target_points[:, :2] - [cx, cy] 442 | else: 443 | target_tile = utils.crop_center(target_arr, cx, cy, crop_w, crop_h) 444 | 445 | # Perform tile geometry transforms if needed 446 | if angle or scale != 1. 
or hflip or vflip: 447 | Mtrans = np.identity(3) 448 | Mtrans[0, 2] = (self.patch_size[0] - crop_w) // 2 449 | Mtrans[1, 2] = (self.patch_size[1] - crop_h) // 2 450 | if hflip: 451 | Mtrans[0, 0] *= -1 452 | Mtrans[0, 2] = self.patch_size[0] - Mtrans[0, 2] 453 | if vflip: 454 | Mtrans[1, 1] *= -1 455 | Mtrans[1, 2] = self.patch_size[1] - Mtrans[1, 2] 456 | 457 | if angle or scale != 1.: 458 | Mrot = cv2.getRotationMatrix2D((crop_w//2, crop_h//2), angle, scale) 459 | Mfinal = np.dot(Mtrans, np.vstack([Mrot, [0, 0, 1]])) 460 | else: 461 | Mfinal = Mtrans 462 | 463 | input_tile = cv2.warpAffine(input_tile, Mfinal[:2, :], tuple(self.patch_size)) 464 | if transform_target: 465 | if target_is_coords: 466 | if len(target_points): 467 | target_cats = target_points[:, 2].copy() 468 | target_points[:, 2] = np.ones(len(target_points)) 469 | target_points = np.dot(target_points, Mfinal) 470 | #print(target_points) 471 | target_points[:, 2] = target_cats 472 | else: 473 | tt64 = target_tile.astype(np.float64) 474 | tt64 = cv2.warpAffine(tt64, Mfinal[:2, :], tuple(self.patch_size)) 475 | if scale != 1.: 476 | tt64 /= scale**2 477 | target_tile = tt64.astype(np.float32) 478 | 479 | if target_is_coords: 480 | target_points = np.rint(target_points).astype(np.int) 481 | target_points[:, :2] = target_points[:, :2] + [self.patch_size[0] // 2, self.patch_size[1] // 2] 482 | target_points = utils.crop_points(target_points, 0, 0, self.patch_size[0], self.patch_size[1]) 483 | #print(target_points) 484 | if self.target_type == 'countception': 485 | dtype = np.uint8 if self.num_logits else np.float32 486 | max_count = self.num_logits - 1 if self.num_logits else 0 487 | target_tile = gen_target_countception( 488 | target_points, self.patch_size, max_count=max_count, dtype=dtype) 489 | else: 490 | target_tile = gen_target_gauss(target_points, self.patch_size, factor=1024.) 
491 | 492 | return input_tile, target_tile 493 | 494 | def __getitem__(self, index): 495 | if isinstance(index, ImagePatchIndex): 496 | patch_index = index.patch_index 497 | index = index.image_index 498 | else: 499 | patch_index = 0 #FIXME sort this out 500 | 501 | input_id = self.input_index[index % len(self)] 502 | input_img = self._load_input(input_id) 503 | #print(input_id, index, patch_index) 504 | h, w = input_img.shape[:2] 505 | if self.train: 506 | if self.generate_target: 507 | target_arr = self.data_by_id[input_id]['coords'] 508 | else: 509 | target_arr = self._load_target(input_id) 510 | #print(target_arr.shape) 511 | 512 | attempts = 2 513 | for i in range(attempts): 514 | pw, ph = self.patch_size 515 | cx, cy = self._random_patch_center(input_id, w, h) 516 | input_patch, target_patch = self._crop_and_transform(cx, cy, input_img, target_arr, randomize=True) 517 | # check centre of chosen patch_index for valid pixels 518 | if np.any(utils.crop_center(input_patch, pw//2, ph//2, pw//4, ph//4)): 519 | break 520 | 521 | input_tile_tensor = self.transform(input_patch) 522 | target_tile_tensor = to_tensor(target_patch) 523 | else: 524 | target_arr = None 525 | if self.has_targets: 526 | if self.generate_target: 527 | target_arr = self.data_by_id[input_id]['coords'] 528 | else: 529 | target_arr = self._load_target(input_id) 530 | 531 | cx, cy = self._indexed_patch_center(input_id, patch_index) 532 | input_patch, target_patch = self._crop_and_transform(cx, cy, input_img, target_arr, randomize=False) 533 | input_tile_tensor = self.transform(input_patch) 534 | if target_patch is None: 535 | target_tile_tensor = torch.zeros(1) 536 | else: 537 | target_tile_tensor = to_tensor(target_patch) 538 | #print(input_tile_tensor.size(), target_tile_tensor) 539 | 540 | #cv2.imwrite('test-scaled-input-%d.png' % index, input_patch) 541 | #cv2.imwrite('test-scaled-target-%d.png' % index, 4096*target_tile[:, :, :3]) 542 | 543 | index_tensor = torch.LongTensor([input_id, index, 
patch_index]) 544 | 545 | return input_tile_tensor, target_tile_tensor, index_tensor 546 | 547 | def __len__(self): 548 | return len(self.input_index) 549 | 550 | def get_num_patches(self, input_id=None): 551 | if input_id is None: 552 | return self.patch_count 553 | else: 554 | if input_id in self.data_by_id: 555 | return self.data_by_id[input_id]['patches']['num'] 556 | else: 557 | return 0 558 | 559 | def get_input_size(self, input_id): 560 | if input_id in self.data_by_id: 561 | d = self.data_by_id[input_id] 562 | return d['width'], d['height'] 563 | else: 564 | return 0, 0 565 | 566 | def get_patch_cols(self, input_id): 567 | if input_id in self.data_by_id: 568 | return self.data_by_id[input_id]['patches']['cols'] 569 | else: 570 | return 0 -------------------------------------------------------------------------------- /inference.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import os 3 | import time 4 | import cv2 5 | import numpy as np 6 | import pandas as pd 7 | from dataset import SealionDataset, IndexedPatchSampler 8 | from models import ModelCnet, ModelCountception 9 | from utils import AverageMeter 10 | from utils_cython import merge_patches_float32 11 | import torch 12 | import torch.autograd as autograd 13 | import torch.utils.data as data 14 | 15 | 16 | parser = argparse.ArgumentParser(description='PyTorch Sealion count inference') 17 | parser.add_argument('data', metavar='DIR', 18 | help='path to dataset') 19 | parser.add_argument('--model', default='countception', type=str, metavar='MODEL', 20 | help='Name of model to train (default: "countception"') 21 | parser.add_argument('--use-logits', action='store_true', default=False, 22 | help='Enable use of logits for model output') 23 | parser.add_argument('--patch-size', type=int, default=256, metavar='N', 24 | help='Image patch size (default: 256)') 25 | parser.add_argument('--batch-size', type=int, default=16, metavar='N', 26 | 
help='input batch size for training (default: 16)') 27 | parser.add_argument('--seed', type=int, default=1, metavar='S', 28 | help='random seed (default: 1)') 29 | parser.add_argument('--log-interval', type=int, default=100, metavar='N', 30 | help='how many batches to wait before logging training status') 31 | parser.add_argument('--num-processes', type=int, default=2, metavar='N', 32 | help='how many training processes to use (default: 2)') 33 | parser.add_argument('-r', '--restore-checkpoint', default=None, 34 | help='path to restore checkpoint, e.g. ./checkpoint-1.tar') 35 | parser.add_argument('--no-cuda', action='store_true', default=False, 36 | help='disables CUDA training') 37 | parser.add_argument('--num-gpu', type=int, default=1, 38 | help='Number of GPUS to use') 39 | 40 | COLS = ['test_id', 'adult_males', 'subadult_males', 'adult_females', 'juveniles', 'pups'] 41 | 42 | 43 | def main(): 44 | args = parser.parse_args() 45 | 46 | processed_file = os.path.join(args.data, 'processed.csv') 47 | 48 | batch_size = args.batch_size 49 | patch_size = (args.patch_size, args.patch_size) 50 | num_outputs = 5 51 | count_factor = 1024. 
52 | overlapped_patches = False 53 | debug_image = False 54 | debug_model = False 55 | use_logits = args.use_logits 56 | num_logits = 12 if use_logits else 0 57 | dataset = SealionDataset( 58 | args.data, 59 | processing_file=processed_file, 60 | train=False, 61 | patch_size=patch_size, 62 | patch_stride=patch_size[0] // 2 if overlapped_patches else patch_size[0], 63 | prescale=0.5, 64 | per_image_norm=True, 65 | num_logits=num_logits) 66 | sampler = IndexedPatchSampler(dataset) 67 | loader = data.DataLoader( 68 | dataset, 69 | batch_size=batch_size, 70 | shuffle=False, 71 | num_workers=args.num_processes, 72 | sampler=sampler) 73 | 74 | if args.model == 'cnet': 75 | model = ModelCnet( 76 | outplanes=num_outputs, target_size=patch_size, debug=debug_model) 77 | elif args.model == 'countception' or args.model == 'cc': 78 | model = ModelCountception( 79 | outplanes=num_outputs, use_logits=use_logits, logits_per_output=num_logits, debug=debug_model) 80 | else: 81 | assert False and "Invalid model" 82 | 83 | if not args.no_cuda: 84 | if args.num_gpu > 1: 85 | model = torch.nn.DataParallel(model, device_ids=list(range(args.num_gpu))).cuda() 86 | else: 87 | model.cuda() 88 | 89 | if args.restore_checkpoint is not None: 90 | assert os.path.isfile(args.restore_checkpoint), '%s not found' % args.restore_checkpoint 91 | checkpoint = torch.load(args.restore_checkpoint) 92 | model.load_state_dict(checkpoint['state_dict']) 93 | print('Model restored from file: %s' % args.restore_checkpoint) 94 | 95 | model.eval() 96 | 97 | batch_time_m = AverageMeter() 98 | data_time_m = AverageMeter() 99 | current_id = -1 100 | patches = [] 101 | results = [] 102 | try: 103 | end = time.time() 104 | for batch_idx, (input, target, index) in enumerate(loader): 105 | data_time_m.update(time.time() - end) 106 | if not args.no_cuda: 107 | input_var, target_var = autograd.Variable(input.cuda()), autograd.Variable(target.cuda()) 108 | else: 109 | input_var, target_var = autograd.Variable(input), 
autograd.Variable(target) 110 | output = model(input_var) 111 | 112 | if use_logits: 113 | output = torch.cat([o.max(dim=1)[1] for o in output], dim=1).float() 114 | output = output.permute(0, 2, 3, 1) / count_factor 115 | if not overlapped_patches: 116 | output = torch.squeeze(output.sum(dim=1)) 117 | output = torch.squeeze(output.sum(dim=1)) 118 | output = output.cpu().data.numpy() 119 | 120 | for result_index, o in zip(index, output): 121 | input_id, index, patch_index = result_index 122 | #print('input_id, index, patch_index: ', input_id, index, patch_index) 123 | 124 | if current_id == -1: 125 | current_id = input_id 126 | elif current_id != input_id: 127 | if overlapped_patches: 128 | # reconstruct output image from overlapping patches 129 | w, h = dataset.get_input_size(current_id) 130 | cols = dataset.get_patch_cols(current_id) 131 | output_arr = np.zeros((h, w, num_outputs), dtype=np.float32) 132 | patches_arr = np.stack(patches) 133 | # FIXME there are some bounds issues that need to be debuged with merge and certain image 134 | # w/h and patch/stride alignments 135 | merge_patches_float32(output_arr, patches_arr, cols, dataset.patch_size, dataset.patch_stride) 136 | counts = list(np.sum(output_arr, axis=(0, 1))) 137 | if debug_image: 138 | write_debug_img(output_arr, current_id) 139 | else: 140 | #print(len(patches)) 141 | counts = list(np.sum(patches, axis=0)) 142 | print(counts) 143 | results.append([current_id] + counts) 144 | patches = [] 145 | current_id = input_id 146 | 147 | patches.append(o) 148 | # end iterating through batch 149 | 150 | batch_time_m.update(time.time() - end) 151 | if batch_idx % args.log_interval == 0: 152 | print('Inference: [{}/{} ({:.0f}%)] ' 153 | 'Time: {batch_time.val:.3f}s, {rate:.3f}/s ' 154 | '({batch_time.avg:.3f}s, {rate_avg:.3f}/s) ' 155 | 'Data: {data_time.val:.3f} ({data_time.avg:.3f})'.format( 156 | batch_idx * len(input), len(loader.sampler), 157 | 100. 
* batch_idx / len(loader), 158 | batch_time=batch_time_m, 159 | rate=input_var.size(0) / batch_time_m.val, 160 | rate_avg=input_var.size(0) / batch_time_m.avg, 161 | data_time=data_time_m)) 162 | 163 | end = time.time() 164 | #end iterating through dataset 165 | except KeyboardInterrupt: 166 | pass 167 | results_df = pd.DataFrame(results, columns=COLS) 168 | results_df.to_csv('submission.csv', index=False) 169 | 170 | 171 | def write_debug_img(img, current_id): 172 | dimg = img.astype(np.float64) 173 | dimg = (dimg[:, :, 0] + 2**8 * dimg[:, :, 1] + 2**16 * dimg[:, :, 2] 174 | + 2**24 * dimg[:, :, 3] + 2**32 * dimg[:, :, 4]) 175 | dimg = cv2.normalize( 176 | dimg, None, 0, 255, norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8UC1) 177 | dimg = cv2.applyColorMap(dimg, colormap=cv2.COLORMAP_JET) 178 | cv2.imwrite('output-%d.png' % current_id, dimg) 179 | 180 | 181 | if __name__ == '__main__': 182 | main() 183 | -------------------------------------------------------------------------------- /models/__init__.py: -------------------------------------------------------------------------------- 1 | from .model_cnet import ModelCnet 2 | from .model_countception import ModelCountception 3 | -------------------------------------------------------------------------------- /models/model_cnet.py: -------------------------------------------------------------------------------- 1 | """ C-Net Model (Count-Net) 2 | A Pytorch model (inspired by U-net architecture) for object counting. 
# (module docstring of model_cnet.py, continued from the previous line)
# Inspired by: https://arxiv.org/abs/1505.04597
# along with density counting ideas from:
#   https://www.robots.ox.ac.uk/~vgg/publications/2015/Xie15/weidi15.pdf
#   https://arxiv.org/pdf/1705.10118.pdf
import torch
import torch.nn as nn
import torch.nn.functional as F
import math


def conv_block(
        in_chan, out_chan,
        ksize=3, stride=1, pad=0,
        activation=nn.ReLU(), use_bn=False, dropout=0.):
    """Two stacked Conv2d layers, each followed by optional BatchNorm, activation
    and optional Dropout -- the double-conv unit of the U-net-style encoder/decoder.
    """
    layers = []
    layers += [nn.Conv2d(in_chan, out_chan, kernel_size=ksize, stride=stride, padding=pad)]
    if use_bn:
        layers += [nn.BatchNorm2d(out_chan)]
    layers += [activation]
    if dropout:
        layers += [nn.Dropout(p=dropout)]
    layers += [nn.Conv2d(out_chan, out_chan, kernel_size=ksize, stride=stride, padding=pad)]
    if use_bn:
        layers += [nn.BatchNorm2d(out_chan)]
    layers += [activation]
    if dropout:
        layers += [nn.Dropout(p=dropout)]
    return nn.Sequential(*layers)


def pool_layer():
    """2x2 max-pool downsampling step."""
    return nn.Sequential(nn.MaxPool2d(2))


def upsample_layer(in_chan, out_chan):
    """2x learned upsampling via transposed convolution."""
    return nn.Sequential(
        nn.ConvTranspose2d(in_chan, out_chan, kernel_size=2, stride=2))


class ModelCnet(nn.Module):
    """U-net style encoder/decoder producing an `outplanes`-channel density map.

    With use_padding=True all convs are padded so output size equals input size;
    otherwise the valid-conv shrinkage is compensated by a final bilinear
    upsampling to *target_size* (and skip connections are centre-cropped).
    """

    def __init__(
            self,
            inplanes=3,
            outplanes=1,
            use_batch_norm=False,
            use_padding=False,
            target_size=(256, 256),
            debug=False):

        super(ModelCnet, self).__init__()
        self.inplanes = inplanes
        self.outplanes = outplanes
        self.activation = nn.LeakyReLU(0.1)
        self.use_batch_norm = use_batch_norm
        self.use_padding = use_padding
        self.debug = debug

        # FIX: removed a stray no-op `torch.LongTensor()` statement here.

        pad = 1 if self.use_padding else 0

        self.enc1 = conv_block(inplanes, 64, 3, pad=pad, activation=self.activation, use_bn=self.use_batch_norm)
        self.enc2 = conv_block(64, 128, 3, pad=pad, activation=self.activation, use_bn=self.use_batch_norm)
        self.enc3 = conv_block(128, 256, 3, pad=pad, activation=self.activation, use_bn=self.use_batch_norm)
        self.enc4 = conv_block(256, 512, 3, pad=pad, activation=self.activation, use_bn=self.use_batch_norm)
        self.enc5 = conv_block(512, 1024, 3, pad=pad, activation=self.activation, use_bn=self.use_batch_norm)

        self.pool1 = pool_layer()
        self.pool2 = pool_layer()
        self.pool3 = pool_layer()
        self.pool4 = pool_layer()

        self.dec4 = conv_block(1024, 512, 3, pad=pad, activation=self.activation, use_bn=self.use_batch_norm)
        self.dec3 = conv_block(512, 256, 3, pad=pad, activation=self.activation, use_bn=self.use_batch_norm)
        self.dec2 = conv_block(256, 128, 3, pad=pad, activation=self.activation, use_bn=self.use_batch_norm)
        self.dec1 = conv_block(128, 64, 3, pad=pad, activation=self.activation, use_bn=self.use_batch_norm)

        self.upsample4 = upsample_layer(1024, 512)
        self.upsample3 = upsample_layer(512, 256)
        self.upsample2 = upsample_layer(256, 128)
        self.upsample1 = upsample_layer(128, 64)

        if self.use_padding:
            self.conv_final = nn.Sequential(
                nn.Conv2d(64, self.outplanes, kernel_size=1, stride=1),
                nn.ReLU())
        else:
            if not isinstance(target_size, tuple):
                target_size = tuple(target_size)
            # NOTE: nn.UpsamplingBilinear2d is deprecated in favour of nn.Upsample;
            # kept for checkpoint/state-dict compatibility.
            self.conv_final = nn.Sequential(
                nn.Conv2d(64, self.outplanes, kernel_size=1, stride=1),
                nn.ReLU(),
                nn.UpsamplingBilinear2d(size=target_size))

        # Weight initialization (He/Kaiming-style normal for convs).
        for m in self.modules():
            if isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    def _print(self, x, tag=[]):
        # Debug-only shape tracer.
        if isinstance(tag, str):
            tag = [tag]
        if self.debug:
            if tag:
                print('%s: %s' % (' '.join(filter(None, tag)), x.size()))
            else:
                print(x.size())

    def _crop_and_concat(self, upsampled, bypass, crop=False, tag=''):
        # Centre-crop the skip connection to the upsampled size (valid-conv mode)
        # and concatenate along channels.
        self._print(bypass, [tag, 'bypass'])
        self._print(upsampled, [tag, 'upsampled'])
        if crop:
            c = (bypass.size()[2] - upsampled.size()[2]) // 2
            bypass = F.pad(bypass, (-c, -c, -c, -c))
        return torch.cat((upsampled, bypass), 1)

    def forward(self, x):

        enc1_out = self.enc1(x)  # 64
        self._print(enc1_out, 'enc1')
        enc2_out = self.enc2(self.pool1(enc1_out))  # 128
        self._print(enc2_out, 'enc2')
        enc3_out = self.enc3(self.pool2(enc2_out))  # 256
        self._print(enc3_out, 'enc3')
        enc4_out = self.enc4(self.pool3(enc3_out))  # 512
        self._print(enc4_out, 'enc4')
        enc5_out = self.enc5(self.pool4(enc4_out))  # 1024
        self._print(enc5_out, 'enc5')

        crop = False if self.use_padding else True
        dec4_out = self.dec4(self._crop_and_concat(self.upsample4(enc5_out), enc4_out, crop, 'dec4'))
        dec3_out = self.dec3(self._crop_and_concat(self.upsample3(dec4_out), enc3_out, crop, 'dec3'))
        dec2_out = self.dec2(self._crop_and_concat(self.upsample2(dec3_out), enc2_out, crop, 'dec2'))
        dec1_out = self.dec1(self._crop_and_concat(self.upsample1(dec2_out), enc1_out, crop, 'dec1'))
        conv_final_out = self.conv_final(dec1_out)
        self._print(conv_final_out, 'final')

        return conv_final_out

    def name(self):
        return 'cnet'
# ---- /models/model_countception.py ----
# """ Counception Model
# A
# (module docstring of model_countception.py, continued from the previous line)
# A Pytorch implementation of Count-ception
# Inspired by: https://arxiv.org/abs/1703.08710
import torch
import torch.nn as nn
import torch.nn.init as init


class ConvBlock(nn.Module):
    """Conv2d -> BatchNorm -> activation."""

    def __init__(self, in_chan, out_chan, ksize=3, stride=1, pad=0, activation=nn.LeakyReLU()):
        super(ConvBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_chan, out_chan, kernel_size=ksize, stride=stride, padding=pad)
        self.activation = activation
        self.batch_norm = nn.BatchNorm2d(out_chan)

    def forward(self, x):
        return self.activation(self.batch_norm(self.conv1(x)))


class SimpleBlock(nn.Module):
    """Inception-style block: parallel 1x1 and 3x3 convs concatenated on channels.

    Both branches preserve spatial size (3x3 uses pad=1), so the output has
    out_chan_1x1 + out_chan_3x3 channels at the input resolution.
    """

    def __init__(self, in_chan, out_chan_1x1, out_chan_3x3, activation=nn.LeakyReLU()):
        super(SimpleBlock, self).__init__()
        self.conv1 = ConvBlock(in_chan, out_chan_1x1, ksize=1, pad=0, activation=activation)
        self.conv2 = ConvBlock(in_chan, out_chan_3x3, ksize=3, pad=1, activation=activation)

    def forward(self, x):
        conv1_out = self.conv1(x)
        conv2_out = self.conv2(x)
        output = torch.cat([conv1_out, conv2_out], 1)
        return output


class ModelCountception(nn.Module):
    """Count-ception network producing redundant-count maps.

    With use_logits=True the final layer is one logit head per output plane
    (forward returns a list of tensors); otherwise a single ConvBlock emits
    `outplanes` channels.
    """

    def __init__(self, inplanes=3, outplanes=1, use_logits=False, logits_per_output=12, debug=False):
        super(ModelCountception, self).__init__()
        # params
        self.inplanes = inplanes
        self.outplanes = outplanes
        self.activation = nn.LeakyReLU(0.01)
        self.final_activation = nn.LeakyReLU(0.01)
        self.patch_size = 32  # used as conv1 padding so border objects are counted
        self.use_logits = use_logits
        self.logits_per_output = logits_per_output
        self.debug = debug

        # FIX: removed a stray no-op `torch.LongTensor()` statement here.

        self.conv1 = ConvBlock(self.inplanes, 64, ksize=3, pad=self.patch_size, activation=self.activation)
        self.simple1 = SimpleBlock(64, 16, 16, activation=self.activation)
        self.simple2 = SimpleBlock(32, 16, 32, activation=self.activation)
        self.conv2 = ConvBlock(48, 16, ksize=14, activation=self.activation)
        self.simple3 = SimpleBlock(16, 112, 48, activation=self.activation)
        self.simple4 = SimpleBlock(160, 64, 32, activation=self.activation)
        self.simple5 = SimpleBlock(96, 40, 40, activation=self.activation)
        self.simple6 = SimpleBlock(80, 32, 96, activation=self.activation)
        self.conv3 = ConvBlock(128, 32, ksize=20, activation=self.activation)
        self.conv4 = ConvBlock(32, 64, ksize=1, activation=self.activation)
        self.conv5 = ConvBlock(64, 64, ksize=1, activation=self.activation)
        if use_logits:
            self.conv6 = nn.ModuleList([ConvBlock(
                64, logits_per_output, ksize=1, activation=self.final_activation) for _ in range(outplanes)])
        else:
            self.conv6 = ConvBlock(64, self.outplanes, ksize=1, activation=self.final_activation)

        # Weight initialization
        for m in self.modules():
            if isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d):
                # FIX: init.xavier_uniform is the deprecated alias; the in-place
                # xavier_uniform_ is the supported spelling.
                init.xavier_uniform_(m.weight, gain=init.calculate_gain('leaky_relu', param=0.01))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    def _print(self, x):
        # Debug-only shape tracer.
        if self.debug:
            print(x.size())

    def forward(self, x):
        net = self.conv1(x)  # 32
        self._print(net)
        net = self.simple1(net)
        self._print(net)
        net = self.simple2(net)
        self._print(net)
        net = self.conv2(net)
        self._print(net)
        net = self.simple3(net)
        self._print(net)
        net = self.simple4(net)
        self._print(net)
        net = self.simple5(net)
        self._print(net)
        net = self.simple6(net)
        self._print(net)
        net = self.conv3(net)
        self._print(net)
        net = self.conv4(net)
        self._print(net)
        net = self.conv5(net)
        self._print(net)
        if self.use_logits:
            net = [c(net) for c in self.conv6]
            [self._print(n) for n in net]
        else:
            net = self.conv6(net)
            self._print(net)
        return net

    def name(self):
        return 'countception'
# ---- /mytransforms.py ----
""" A few fb.resnet.torch like transforms
Most taken from https://github.com/pytorch/vision/pull/27
"""
import torch
import random
import numpy as np


class Grayscale(object):
    """Replace all 3 channels of a CHW tensor with its ITU-R 601 luma."""

    def __call__(self, img):
        gs = img.clone()
        # FIX: the add_(Number, Tensor) overload was removed from torch;
        # `alpha` is keyword-only now. Same math: 0.299R + 0.587G + 0.114B.
        gs[0].mul_(0.299).add_(gs[1], alpha=0.587).add_(gs[2], alpha=0.114)
        gs[1].copy_(gs[0])
        gs[2].copy_(gs[0])
        return gs


class Saturation(object):
    """Blend the image toward its grayscale version by a random factor in [0, var]."""

    def __init__(self, var):
        self.var = var

    def __call__(self, img):
        gs = Grayscale()(img)
        alpha = random.uniform(0, self.var)
        return img.lerp(gs, alpha)


class Brightness(object):
    """Blend the image toward black by a random factor in [0, var]."""

    def __init__(self, var):
        self.var = var

    def __call__(self, img):
        gs = img.new().resize_as_(img).zero_()
        alpha = random.uniform(0, self.var)
        return img.lerp(gs, alpha)


class Contrast(object):
    """Blend the image toward its mean gray level by a random factor in [0, var]."""

    def __init__(self, var):
        self.var = var

    def __call__(self, img):
        gs = Grayscale()(img)
        gs.fill_(gs.mean())
        alpha = random.uniform(0, self.var)
        return img.lerp(gs, alpha)


class RandomOrder(object):
    """ Composes several transforms together in random order.
    """

    def __init__(self, transforms):
        self.transforms = transforms

    def __call__(self, img):
        if self.transforms is None:
            return img
        order = torch.randperm(len(self.transforms))
        for i in order:
            img = self.transforms[i](img)
        return img


class ColorJitter(RandomOrder):
    """Apply Brightness/Contrast/Saturation jitter in random order; a factor of 0 disables that jitter."""

    def __init__(self, brightness=0.4, contrast=0.4, saturation=0.4):
        self.transforms = []
        if brightness != 0:
            self.transforms.append(Brightness(brightness))
        if contrast != 0:
            self.transforms.append(Contrast(contrast))
        if saturation != 0:
            self.transforms.append(Saturation(saturation))


class NormalizeImg:
    """Normalize each image or patch by its own mean/std
    """

    def __init__(self, std_epsilon=.0001):
        self.std_epsilon = std_epsilon

    def __call__(self, img):
        # Lazy import: OpenCV is only needed when this transform is actually
        # applied, keeping the module importable without cv2 installed.
        import cv2
        # This should still be a H x W x C Numpy/OpenCv compat image, not a Torch Tensor
        assert isinstance(img, np.ndarray)
        mean, std = cv2.meanStdDev(img)
        mean, std = mean.astype(np.float32), std.astype(np.float32)
        img = img.astype(np.float32)
        img = (img - np.squeeze(mean)) / (np.squeeze(std) + self.std_epsilon)
        return img


class ToTensor:
    """Convert an HWC ndarray to a CHW tensor; uint8 images are scaled to [0, 1]."""

    def __call__(self, img):
        assert isinstance(img, np.ndarray)
        # handle numpy array
        img = torch.from_numpy(img.transpose((2, 0, 1)))
        if isinstance(img, torch.ByteTensor):
            return img.float().div(255)
        else:
            return img
# ---- /scripts/other/README.md ----
# The scripts in this folder are work by:
# * notebook -- https://www.kaggle.com/radustoicescu
# * script.py -- https://www.kaggle.com/threeplusone
# ---- /scripts/other/__notebook__.ipynb ----
-------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "_cell_guid": "7daa5a37-ea6b-2274-4bc1-fadb0a2fc41a" 7 | }, 8 | "source": [ 9 | "### Get dot coordinates using blob_log from skimage library" 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": 1, 15 | "metadata": { 16 | "_cell_guid": "35cabbd8-8e7b-5be7-bd00-61b1addc2d75", 17 | "collapsed": true 18 | }, 19 | "outputs": [], 20 | "source": [ 21 | "import numpy as np\n", 22 | "import pandas as pd\n", 23 | "import os\n", 24 | "import cv2\n", 25 | "import matplotlib.pyplot as plt\n", 26 | "import skimage.feature\n", 27 | "%matplotlib inline" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": 2, 33 | "metadata": { 34 | "_cell_guid": "3a6329c7-aa3b-b576-b6e1-d675e6e8fbf2", 35 | "collapsed": false 36 | }, 37 | "outputs": [], 38 | "source": [ 39 | "classes = [\"adult_males\", \"subadult_males\", \"adult_females\", \"juveniles\", \"pups\", \"error\"]\n", 40 | "coords_cols = [\"filename\", \"x\", \"y\", \"category\"]\n", 41 | "\n", 42 | "file_names = os.listdir(\"/data/x/sealion/Train\")\n", 43 | "file_names = sorted(file_names, key=lambda \n", 44 | " item: (int(item.partition('.')[0]) if item[0].isdigit() else float('inf'), item)) \n", 45 | "\n", 46 | "indices = [531, 946, 34, 30, 290, 406, 380, 913, 621, 811, 7, 421, 292, 66, 593, 490, 909, 800, 215, \n", 47 | " 426, 475, 614, 184, 905, 97, 882, 776, 899, 344, 473, 510, 234, 291, 331, 433, 712, 741, 767, 912]\n", 48 | "#indices = [290, 291]\n", 49 | "# select a subset of files to run on\n", 50 | "file_names = [file_names[i] for i in indices]" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": 17, 56 | "metadata": { 57 | "_cell_guid": "7755c681-04df-368a-aca2-f099dd9ce805", 58 | "collapsed": false 59 | }, 60 | "outputs": [ 61 | { 62 | "name": "stdout", 63 | "output_type": "stream", 64 | "text": [ 65 | 
"531.jpg\n" 66 | ] 67 | }, 68 | { 69 | "name": "stdout", 70 | "output_type": "stream", 71 | "text": [ 72 | "163.097349682\nWarning: Bad data for 531.jpg\n946.jpg\n" 73 | ] 74 | }, 75 | { 76 | "name": "stdout", 77 | "output_type": "stream", 78 | "text": [ 79 | "113.417159824\nWarning: Bad data for 946.jpg\n34.jpg\n" 80 | ] 81 | }, 82 | { 83 | "name": "stdout", 84 | "output_type": "stream", 85 | "text": [ 86 | "154.315978785\nWarning: Bad data for 34.jpg\n30.jpg\n" 87 | ] 88 | }, 89 | { 90 | "name": "stdout", 91 | "output_type": "stream", 92 | "text": [ 93 | "31.2134717832\n" 94 | ] 95 | }, 96 | { 97 | "name": "stdout", 98 | "output_type": "stream", 99 | "text": [ 100 | "290.jpg\n" 101 | ] 102 | }, 103 | { 104 | "name": "stdout", 105 | "output_type": "stream", 106 | "text": [ 107 | "173.808650511\nWarning: Bad data for 290.jpg\n406.jpg\n" 108 | ] 109 | }, 110 | { 111 | "name": "stdout", 112 | "output_type": "stream", 113 | "text": [ 114 | "67.2546158589\nWarning: Bad data for 406.jpg\n380.jpg\n" 115 | ] 116 | }, 117 | { 118 | "name": "stdout", 119 | "output_type": "stream", 120 | "text": [ 121 | "33.9865956805\n" 122 | ] 123 | }, 124 | { 125 | "name": "stdout", 126 | "output_type": "stream", 127 | "text": [ 128 | "913.jpg\n" 129 | ] 130 | }, 131 | { 132 | "name": "stdout", 133 | "output_type": "stream", 134 | "text": [ 135 | "25.8726279355\n" 136 | ] 137 | }, 138 | { 139 | "name": "stdout", 140 | "output_type": "stream", 141 | "text": [ 142 | "621.jpg\n" 143 | ] 144 | }, 145 | { 146 | "name": "stdout", 147 | "output_type": "stream", 148 | "text": [ 149 | "141.90937851\nWarning: Bad data for 621.jpg\n811.jpg\n" 150 | ] 151 | }, 152 | { 153 | "name": "stdout", 154 | "output_type": "stream", 155 | "text": [ 156 | "98.1498576208\nWarning: Bad data for 811.jpg\n7.jpg\n" 157 | ] 158 | }, 159 | { 160 | "name": "stdout", 161 | "output_type": "stream", 162 | "text": [ 163 | "102.454059496\nWarning: Bad data for 7.jpg\n421.jpg\n" 164 | ] 165 | }, 166 | { 167 | "name": 
"stdout", 168 | "output_type": "stream", 169 | "text": [ 170 | "115.55434783\nWarning: Bad data for 421.jpg\n292.jpg\n" 171 | ] 172 | }, 173 | { 174 | "name": "stdout", 175 | "output_type": "stream", 176 | "text": [ 177 | "11.0090252666\n" 178 | ] 179 | }, 180 | { 181 | "name": "stdout", 182 | "output_type": "stream", 183 | "text": [ 184 | "66.jpg\n" 185 | ] 186 | }, 187 | { 188 | "name": "stdout", 189 | "output_type": "stream", 190 | "text": [ 191 | "10.0156795507\n" 192 | ] 193 | }, 194 | { 195 | "name": "stdout", 196 | "output_type": "stream", 197 | "text": [ 198 | "593.jpg\n" 199 | ] 200 | }, 201 | { 202 | "name": "stdout", 203 | "output_type": "stream", 204 | "text": [ 205 | "6.46708742401\n" 206 | ] 207 | }, 208 | { 209 | "name": "stdout", 210 | "output_type": "stream", 211 | "text": [ 212 | "490.jpg\n" 213 | ] 214 | }, 215 | { 216 | "name": "stdout", 217 | "output_type": "stream", 218 | "text": [ 219 | "156.886961145\nWarning: Bad data for 490.jpg\n909.jpg\n" 220 | ] 221 | }, 222 | { 223 | "name": "stdout", 224 | "output_type": "stream", 225 | "text": [ 226 | "93.3790219399\nWarning: Bad data for 909.jpg\n800.jpg\n" 227 | ] 228 | }, 229 | { 230 | "name": "stdout", 231 | "output_type": "stream", 232 | "text": [ 233 | "156.377165604\nWarning: Bad data for 800.jpg\n215.jpg\n" 234 | ] 235 | }, 236 | { 237 | "name": "stdout", 238 | "output_type": "stream", 239 | "text": [ 240 | "120.281212143\nWarning: Bad data for 215.jpg\n426.jpg\n" 241 | ] 242 | }, 243 | { 244 | "name": "stdout", 245 | "output_type": "stream", 246 | "text": [ 247 | "4.59449292657\n" 248 | ] 249 | }, 250 | { 251 | "name": "stdout", 252 | "output_type": "stream", 253 | "text": [ 254 | "475.jpg\n" 255 | ] 256 | }, 257 | { 258 | "name": "stdout", 259 | "output_type": "stream", 260 | "text": [ 261 | "27.8335969079\n" 262 | ] 263 | }, 264 | { 265 | "name": "stdout", 266 | "output_type": "stream", 267 | "text": [ 268 | "614.jpg\n" 269 | ] 270 | }, 271 | { 272 | "name": "stdout", 273 | "output_type": 
"stream", 274 | "text": [ 275 | "86.4739226162\nWarning: Bad data for 614.jpg\n184.jpg\n" 276 | ] 277 | }, 278 | { 279 | "name": "stdout", 280 | "output_type": "stream", 281 | "text": [ 282 | "92.231638618\nWarning: Bad data for 184.jpg\n905.jpg\n" 283 | ] 284 | }, 285 | { 286 | "name": "stdout", 287 | "output_type": "stream", 288 | "text": [ 289 | "146.463161792\nWarning: Bad data for 905.jpg\n97.jpg\n" 290 | ] 291 | }, 292 | { 293 | "name": "stdout", 294 | "output_type": "stream", 295 | "text": [ 296 | "7.23316007535\n" 297 | ] 298 | }, 299 | { 300 | "name": "stdout", 301 | "output_type": "stream", 302 | "text": [ 303 | "882.jpg\n" 304 | ] 305 | }, 306 | { 307 | "name": "stdout", 308 | "output_type": "stream", 309 | "text": [ 310 | "62.3129591392\nWarning: Bad data for 882.jpg\n776.jpg\n" 311 | ] 312 | }, 313 | { 314 | "name": "stdout", 315 | "output_type": "stream", 316 | "text": [ 317 | "4.32146405759\n" 318 | ] 319 | }, 320 | { 321 | "name": "stdout", 322 | "output_type": "stream", 323 | "text": [ 324 | "899.jpg\n" 325 | ] 326 | }, 327 | { 328 | "name": "stdout", 329 | "output_type": "stream", 330 | "text": [ 331 | "6.21177445166\n" 332 | ] 333 | }, 334 | { 335 | "name": "stdout", 336 | "output_type": "stream", 337 | "text": [ 338 | "344.jpg\n" 339 | ] 340 | }, 341 | { 342 | "name": "stdout", 343 | "output_type": "stream", 344 | "text": [ 345 | "65.4390775954\nWarning: Bad data for 344.jpg\n473.jpg\n" 346 | ] 347 | }, 348 | { 349 | "name": "stdout", 350 | "output_type": "stream", 351 | "text": [ 352 | "13.8186259674\n" 353 | ] 354 | }, 355 | { 356 | "name": "stdout", 357 | "output_type": "stream", 358 | "text": [ 359 | "510.jpg\n" 360 | ] 361 | }, 362 | { 363 | "name": "stdout", 364 | "output_type": "stream", 365 | "text": [ 366 | "8.1044958258\n" 367 | ] 368 | }, 369 | { 370 | "name": "stdout", 371 | "output_type": "stream", 372 | "text": [ 373 | "234.jpg\n" 374 | ] 375 | }, 376 | { 377 | "name": "stdout", 378 | "output_type": "stream", 379 | "text": [ 380 | 
"123.899784005\nWarning: Bad data for 234.jpg\n291.jpg\n" 381 | ] 382 | }, 383 | { 384 | "name": "stdout", 385 | "output_type": "stream", 386 | "text": [ 387 | "8.37217530004\n" 388 | ] 389 | }, 390 | { 391 | "name": "stdout", 392 | "output_type": "stream", 393 | "text": [ 394 | "331.jpg\n" 395 | ] 396 | }, 397 | { 398 | "name": "stdout", 399 | "output_type": "stream", 400 | "text": [ 401 | "4.38011716182\n" 402 | ] 403 | }, 404 | { 405 | "name": "stdout", 406 | "output_type": "stream", 407 | "text": [ 408 | "433.jpg\n" 409 | ] 410 | }, 411 | { 412 | "name": "stdout", 413 | "output_type": "stream", 414 | "text": [ 415 | "8.13684525821\n" 416 | ] 417 | }, 418 | { 419 | "name": "stdout", 420 | "output_type": "stream", 421 | "text": [ 422 | "712.jpg\n" 423 | ] 424 | }, 425 | { 426 | "name": "stdout", 427 | "output_type": "stream", 428 | "text": [ 429 | "121.709471527\nWarning: Bad data for 712.jpg\n741.jpg\n" 430 | ] 431 | }, 432 | { 433 | "name": "stdout", 434 | "output_type": "stream", 435 | "text": [ 436 | "6.15084790713\n" 437 | ] 438 | }, 439 | { 440 | "name": "stdout", 441 | "output_type": "stream", 442 | "text": [ 443 | "767.jpg\n" 444 | ] 445 | }, 446 | { 447 | "name": "stdout", 448 | "output_type": "stream", 449 | "text": [ 450 | "163.374341254\nWarning: Bad data for 767.jpg\n912.jpg\n" 451 | ] 452 | }, 453 | { 454 | "name": "stdout", 455 | "output_type": "stream", 456 | "text": [ 457 | "5.23696682721\n" 458 | ] 459 | } 460 | ], 461 | "source": [ 462 | "# dataframe to store results in\n", 463 | "count_df = pd.DataFrame(index=file_names, columns=classes).fillna(0)\n", 464 | "coords = []\n", 465 | "\n", 466 | "for filename in file_names:\n", 467 | " print(filename)\n", 468 | " \n", 469 | " # read the Train and Train Dotted images\n", 470 | " image_1 = cv2.imread(\"/data/x/sealion/TrainDotted/\" + filename)\n", 471 | " image_2 = cv2.imread(\"/data/x/sealion/Train/\" + filename)\n", 472 | " \n", 473 | " # absolute difference between Train and Train Dotted\n", 474 
| " image_3 = cv2.absdiff(image_1, image_2)\n", 475 | " \n", 476 | " # mask out blackened regions from Train Dotted\n", 477 | " mask_1 = cv2.cvtColor(image_1, cv2.COLOR_BGR2GRAY)\n", 478 | " mask_1[mask_1 < 10] = 0\n", 479 | " mask_1[mask_1 > 0] = 255\n", 480 | " \n", 481 | " mask_2 = cv2.cvtColor(image_2, cv2.COLOR_BGR2GRAY)\n", 482 | " mask_2[mask_2 < 10] = 0\n", 483 | " mask_2[mask_2 > 0] = 255\n", 484 | " \n", 485 | " image_4 = cv2.bitwise_or(image_3, image_3, mask=mask_1)\n", 486 | " \n", 487 | " # Detect bad data. If train and dotted images are very different then somethings wrong.\n", 488 | " avg_diff = image_4.sum() / (image_4.shape[0] * image_4.shape[1])\n", 489 | " print(avg_diff)\n", 490 | " if avg_diff > 60:\n", 491 | " print('Warning: Bad data for %s' % filename)\n", 492 | " continue \n", 493 | " \n", 494 | " image_5 = cv2.bitwise_or(image_4, image_4, mask=mask_2) \n", 495 | " \n", 496 | " # convert to grayscale to be accepted by skimage.feature.blob_log\n", 497 | " image_6 = cv2.cvtColor(image_5, cv2.COLOR_BGR2GRAY)\n", 498 | " \n", 499 | " # detect blobs\n", 500 | " blobs = skimage.feature.blob_log(image_6, min_sigma=3, max_sigma=4, num_sigma=2, threshold=0.02)\n", 501 | " \n", 502 | " # prepare the image to plot the results on\n", 503 | " image_7 = cv2.cvtColor(image_6, cv2.COLOR_GRAY2BGR)\n", 504 | " \n", 505 | " sizes = [list()] * 6\n", 506 | " for blob in blobs:\n", 507 | " # get the coordinates for each blob\n", 508 | " y, x, s = blob\n", 509 | " # get the color of the pixel from Train Dotted in the center of the blob\n", 510 | " b, g, r = image_1[int(y)][int(x)][:]\n", 511 | " \n", 512 | " # decision tree to pick the class of the blob by looking at the color in Train Dotted\n", 513 | " class_idx = -1\n", 514 | " if r > 200 and b < 50 and g < 50: # RED\n", 515 | " class_idx = 0\n", 516 | " cv2.circle(image_7, (int(x), int(y)), 8, (0, 0, 255), 2) \n", 517 | " elif r > 200 and b > 200 and g < 50: # MAGENTA\n", 518 | " class_idx = 1\n", 519 | " 
cv2.circle(image_7, (int(x), int(y)), 8, (250, 10, 250), 2) \n", 520 | "        elif r < 100 and b < 100 and 150 < g < 200: # GREEN\n", 521 | "            class_idx = 4\n", 522 | "            cv2.circle(image_7, (int(x), int(y)), 8, (20, 180, 35), 2) \n", 523 | "        elif r < 100 and 100 < b and g < 100: # BLUE\n", 524 | "            class_idx = 3\n", 525 | "            cv2.circle(image_7, (int(x), int(y)), 8, (180, 60, 30), 2)\n", 526 | "        elif r < 150 and b < 50 and g < 100: # BROWN\n", 527 | "            class_idx = 2\n", 528 | "            cv2.circle(image_7, (int(x), int(y)), 8, (0, 42, 84), 2) \n", 529 | "        else:\n", 530 | "            class_idx = 5\n", 531 | "            cv2.circle(image_7, (int(x), int(y)), 8, (255, 255, 155), 2)\n", 532 | "        \n", 533 | "        sizes[class_idx].append(s)\n", 534 | "        class_name = classes[class_idx]\n", 535 | "        count_df[class_name][filename] += 1\n", 536 | "        record = dict(filename=filename, x=x, y=y, category=class_name, )\n", 537 | "        if -1 < class_idx < 5:\n", 538 | "            coords.append(record)\n", 539 | "    coords_df = pd.DataFrame.from_records(coords, columns=coords_cols)\n", 540 | "    coords_df.x = coords_df.x.astype('int')\n", 541 | "    coords_df.y = coords_df.y.astype('int')\n", 542 | "    # output the results\n", 543 | "    \n", 544 | "#     f, ax = plt.subplots(3, 2, figsize=(10,16))\n", 545 | "#     (ax1, ax2, ax3, ax4, ax5, ax6) = ax.flatten()\n", 546 | "#     plt.title('%s'%filename)\n", 547 | "    \n", 548 | "#     ax1.imshow(cv2.cvtColor(image_2[:,:,:], cv2.COLOR_BGR2RGB))\n", 549 | "#     ax1.set_title('Train')\n", 550 | "#     ax2.imshow(cv2.cvtColor(image_1[:,:,:], cv2.COLOR_BGR2RGB))\n", 551 | "#     ax2.set_title('Train Dotted')\n", 552 | "#     ax3.imshow(cv2.cvtColor(image_3[:,:,:], cv2.COLOR_BGR2RGB))\n", 553 | "#     ax3.set_title('Train Dotted - Train')\n", 554 | "#     ax4.imshow(cv2.cvtColor(image_5[:,:,:], cv2.COLOR_BGR2RGB))\n", 555 | "#     ax4.set_title('Mask blackened areas of Train Dotted')\n", 556 | "#     ax5.imshow(image_6[:,:], cmap='gray')\n", 557 | "#     ax5.set_title('Grayscale for input to blob_log')\n", 558 | "#     ax6.imshow(cv2.cvtColor(image_7[:,:,:],
cv2.COLOR_BGR2RGB))\n", 559 | "# ax6.set_title('Result')\n", 560 | "\n", 561 | "# plt.show()" 562 | ] 563 | }, 564 | { 565 | "cell_type": "markdown", 566 | "metadata": { 567 | "_cell_guid": "d03424c1-b12b-ae53-2fed-1dff86398164" 568 | }, 569 | "source": [ 570 | "### Check count results" 571 | ] 572 | }, 573 | { 574 | "cell_type": "code", 575 | "execution_count": 18, 576 | "metadata": { 577 | "_cell_guid": "c9549615-3a64-2ef2-2be0-1946e07c2ee2", 578 | "collapsed": false 579 | }, 580 | "outputs": [ 581 | { 582 | "data": { 583 | "text/html": [ 584 | "
\n", 585 | "\n", 586 | " \n", 587 | " \n", 588 | " \n", 589 | " \n", 590 | " \n", 591 | " \n", 592 | " \n", 593 | " \n", 594 | " \n", 595 | " \n", 596 | " \n", 597 | " \n", 598 | " \n", 599 | " \n", 600 | " \n", 601 | " \n", 602 | " \n", 603 | " \n", 604 | " \n", 605 | " \n", 606 | " \n", 607 | " \n", 608 | " \n", 609 | " \n", 610 | " \n", 611 | " \n", 612 | " \n", 613 | " \n", 614 | " \n", 615 | " \n", 616 | " \n", 617 | " \n", 618 | " \n", 619 | " \n", 620 | " \n", 621 | " \n", 622 | " \n", 623 | " \n", 624 | " \n", 625 | " \n", 626 | " \n", 627 | " \n", 628 | " \n", 629 | " \n", 630 | " \n", 631 | " \n", 632 | " \n", 633 | " \n", 634 | " \n", 635 | " \n", 636 | " \n", 637 | " \n", 638 | " \n", 639 | " \n", 640 | " \n", 641 | " \n", 642 | " \n", 643 | " \n", 644 | " \n", 645 | " \n", 646 | " \n", 647 | " \n", 648 | " \n", 649 | " \n", 650 | " \n", 651 | " \n", 652 | " \n", 653 | " \n", 654 | " \n", 655 | " \n", 656 | " \n", 657 | " \n", 658 | " \n", 659 | " \n", 660 | " \n", 661 | " \n", 662 | " \n", 663 | " \n", 664 | " \n", 665 | " \n", 666 | " \n", 667 | " \n", 668 | " \n", 669 | " \n", 670 | " \n", 671 | " \n", 672 | " \n", 673 | " \n", 674 | " \n", 675 | " \n", 676 | " \n", 677 | " \n", 678 | " \n", 679 | " \n", 680 | " \n", 681 | " \n", 682 | " \n", 683 | " \n", 684 | " \n", 685 | " \n", 686 | " \n", 687 | " \n", 688 | " \n", 689 | " \n", 690 | " \n", 691 | " \n", 692 | " \n", 693 | " \n", 694 | " \n", 695 | " \n", 696 | " \n", 697 | " \n", 698 | " \n", 699 | " \n", 700 | " \n", 701 | " \n", 702 | " \n", 703 | " \n", 704 | " \n", 705 | " \n", 706 | " \n", 707 | " \n", 708 | " \n", 709 | " \n", 710 | " \n", 711 | " \n", 712 | " \n", 713 | " \n", 714 | " \n", 715 | " \n", 716 | " \n", 717 | " \n", 718 | " \n", 719 | " \n", 720 | " \n", 721 | " \n", 722 | " \n", 723 | " \n", 724 | " \n", 725 | " \n", 726 | " \n", 727 | " \n", 728 | " \n", 729 | " \n", 730 | " \n", 731 | " \n", 732 | " \n", 733 | " \n", 734 | " \n", 735 | " \n", 736 | " \n", 737 | " \n", 738 | 
" \n", 739 | " \n", 740 | " \n", 741 | " \n", 742 | " \n", 743 | " \n", 744 | " \n", 745 | " \n", 746 | " \n", 747 | " \n", 748 | " \n", 749 | " \n", 750 | " \n", 751 | " \n", 752 | " \n", 753 | " \n", 754 | " \n", 755 | " \n", 756 | " \n", 757 | " \n", 758 | " \n", 759 | " \n", 760 | " \n", 761 | " \n", 762 | " \n", 763 | " \n", 764 | " \n", 765 | " \n", 766 | " \n", 767 | " \n", 768 | " \n", 769 | " \n", 770 | " \n", 771 | " \n", 772 | " \n", 773 | " \n", 774 | " \n", 775 | " \n", 776 | " \n", 777 | " \n", 778 | " \n", 779 | " \n", 780 | " \n", 781 | " \n", 782 | " \n", 783 | " \n", 784 | " \n", 785 | " \n", 786 | " \n", 787 | " \n", 788 | " \n", 789 | " \n", 790 | " \n", 791 | " \n", 792 | " \n", 793 | " \n", 794 | " \n", 795 | " \n", 796 | " \n", 797 | " \n", 798 | " \n", 799 | " \n", 800 | " \n", 801 | " \n", 802 | " \n", 803 | " \n", 804 | " \n", 805 | " \n", 806 | " \n", 807 | " \n", 808 | " \n", 809 | " \n", 810 | " \n", 811 | " \n", 812 | " \n", 813 | " \n", 814 | " \n", 815 | " \n", 816 | " \n", 817 | " \n", 818 | " \n", 819 | " \n", 820 | " \n", 821 | " \n", 822 | " \n", 823 | " \n", 824 | " \n", 825 | " \n", 826 | " \n", 827 | " \n", 828 | " \n", 829 | " \n", 830 | " \n", 831 | " \n", 832 | " \n", 833 | " \n", 834 | " \n", 835 | " \n", 836 | " \n", 837 | " \n", 838 | " \n", 839 | " \n", 840 | " \n", 841 | " \n", 842 | " \n", 843 | " \n", 844 | " \n", 845 | " \n", 846 | " \n", 847 | " \n", 848 | " \n", 849 | " \n", 850 | " \n", 851 | " \n", 852 | " \n", 853 | " \n", 854 | " \n", 855 | " \n", 856 | " \n", 857 | " \n", 858 | " \n", 859 | " \n", 860 | " \n", 861 | " \n", 862 | " \n", 863 | " \n", 864 | " \n", 865 | " \n", 866 | " \n", 867 | " \n", 868 | " \n", 869 | " \n", 870 | " \n", 871 | " \n", 872 | " \n", 873 | " \n", 874 | " \n", 875 | " \n", 876 | " \n", 877 | " \n", 878 | " \n", 879 | " \n", 880 | " \n", 881 | " \n", 882 | " \n", 883 | " \n", 884 | " \n", 885 | " \n", 886 | " \n", 887 | " \n", 888 | " \n", 889 | " \n", 890 | " \n", 891 | " \n", 892 
| " \n", 893 | " \n", 894 | " \n", 895 | " \n", 896 | " \n", 897 | " \n", 898 | " \n", 899 | " \n", 900 | " \n", 901 | " \n", 902 | " \n", 903 | " \n", 904 | " \n", 905 | " \n", 906 | " \n", 907 | " \n", 908 | " \n", 909 | " \n", 910 | " \n", 911 | " \n", 912 | " \n", 913 | " \n", 914 | " \n", 915 | " \n", 916 | " \n", 917 | " \n", 918 | " \n", 919 | " \n", 920 | " \n", 921 | " \n", 922 | " \n", 923 | " \n", 924 | " \n", 925 | " \n", 926 | " \n", 927 | " \n", 928 | " \n", 929 | " \n", 930 | " \n", 931 | " \n", 932 | " \n", 933 | " \n", 934 | " \n", 935 | " \n", 936 | " \n", 937 | " \n", 938 | " \n", 939 | " \n", 940 | " \n", 941 | " \n", 942 | " \n", 943 | " \n", 944 | " \n", 945 | " \n", 946 | " \n", 947 | " \n", 948 | " \n", 949 | " \n", 950 | "
adult_malessubadult_malesadult_femalesjuvenilespupserror
531.jpg000000
946.jpg000000
34.jpg000000
30.jpg203301
290.jpg000000
406.jpg000000
380.jpg20111460150
913.jpg1010933047
621.jpg000000
811.jpg000000
7.jpg000000
421.jpg000000
292.jpg557043117
66.jpg856117232
593.jpg12325803
490.jpg000000
909.jpg000000
800.jpg000000
215.jpg000000
426.jpg26344251
475.jpg36618505
614.jpg000000
184.jpg000000
905.jpg000000
97.jpg00271100
882.jpg000000
776.jpg82512296
899.jpg2213300
344.jpg000000
473.jpg20256001
510.jpg516000
234.jpg000000
291.jpg43202992423711
331.jpg61867005
433.jpg0394303
712.jpg000000
741.jpg345231381761
767.jpg000000
912.jpg30225145205432
\n", 951 | "
" 952 | ], 953 | "text/plain": [ 954 | "
\n", 955 | "\n", 956 | " \n", 957 | " \n", 958 | " \n", 959 | " \n", 960 | " \n", 961 | " \n", 962 | " \n", 963 | " \n", 964 | " \n", 965 | " \n", 966 | " \n", 967 | " \n", 968 | " \n", 969 | " \n", 970 | " \n", 971 | " \n", 972 | " \n", 973 | " \n", 974 | " \n", 975 | " \n", 976 | " \n", 977 | " \n", 978 | " \n", 979 | " \n", 980 | " \n", 981 | " \n", 982 | " \n", 983 | " \n", 984 | " \n", 985 | " \n", 986 | " \n", 987 | " \n", 988 | " \n", 989 | " \n", 990 | " \n", 991 | " \n", 992 | " \n", 993 | " \n", 994 | " \n", 995 | " \n", 996 | " \n", 997 | " \n", 998 | " \n", 999 | " \n", 1000 | " \n", 1001 | " \n", 1002 | " \n", 1003 | " \n", 1004 | " \n", 1005 | " \n", 1006 | " \n", 1007 | " \n", 1008 | " \n", 1009 | " \n", 1010 | " \n", 1011 | " \n", 1012 | " \n", 1013 | " \n", 1014 | " \n", 1015 | " \n", 1016 | " \n", 1017 | " \n", 1018 | " \n", 1019 | " \n", 1020 | " \n", 1021 | " \n", 1022 | " \n", 1023 | " \n", 1024 | " \n", 1025 | " \n", 1026 | " \n", 1027 | " \n", 1028 | " \n", 1029 | " \n", 1030 | " \n", 1031 | " \n", 1032 | " \n", 1033 | " \n", 1034 | " \n", 1035 | " \n", 1036 | " \n", 1037 | " \n", 1038 | " \n", 1039 | " \n", 1040 | " \n", 1041 | " \n", 1042 | " \n", 1043 | " \n", 1044 | " \n", 1045 | " \n", 1046 | " \n", 1047 | " \n", 1048 | " \n", 1049 | " \n", 1050 | " \n", 1051 | " \n", 1052 | " \n", 1053 | " \n", 1054 | " \n", 1055 | " \n", 1056 | " \n", 1057 | " \n", 1058 | " \n", 1059 | " \n", 1060 | " \n", 1061 | " \n", 1062 | " \n", 1063 | " \n", 1064 | " \n", 1065 | " \n", 1066 | " \n", 1067 | " \n", 1068 | " \n", 1069 | " \n", 1070 | " \n", 1071 | " \n", 1072 | " \n", 1073 | " \n", 1074 | " \n", 1075 | " \n", 1076 | " \n", 1077 | " \n", 1078 | " \n", 1079 | " \n", 1080 | " \n", 1081 | " \n", 1082 | " \n", 1083 | " \n", 1084 | " \n", 1085 | " \n", 1086 | " \n", 1087 | " \n", 1088 | " \n", 1089 | " \n", 1090 | " \n", 1091 | " \n", 1092 | " \n", 1093 | " \n", 1094 | " \n", 1095 | " \n", 1096 | " \n", 1097 | " \n", 1098 | " \n", 1099 | " \n", 1100 | " 
\n", 1101 | " \n", 1102 | " \n", 1103 | " \n", 1104 | " \n", 1105 | " \n", 1106 | " \n", 1107 | " \n", 1108 | " \n", 1109 | " \n", 1110 | " \n", 1111 | " \n", 1112 | " \n", 1113 | " \n", 1114 | " \n", 1115 | " \n", 1116 | " \n", 1117 | " \n", 1118 | " \n", 1119 | " \n", 1120 | " \n", 1121 | " \n", 1122 | " \n", 1123 | " \n", 1124 | " \n", 1125 | " \n", 1126 | " \n", 1127 | " \n", 1128 | " \n", 1129 | " \n", 1130 | " \n", 1131 | " \n", 1132 | " \n", 1133 | " \n", 1134 | " \n", 1135 | " \n", 1136 | " \n", 1137 | " \n", 1138 | " \n", 1139 | " \n", 1140 | " \n", 1141 | " \n", 1142 | " \n", 1143 | " \n", 1144 | " \n", 1145 | " \n", 1146 | " \n", 1147 | " \n", 1148 | " \n", 1149 | " \n", 1150 | " \n", 1151 | " \n", 1152 | " \n", 1153 | " \n", 1154 | " \n", 1155 | " \n", 1156 | " \n", 1157 | " \n", 1158 | " \n", 1159 | " \n", 1160 | " \n", 1161 | " \n", 1162 | " \n", 1163 | " \n", 1164 | " \n", 1165 | " \n", 1166 | " \n", 1167 | " \n", 1168 | " \n", 1169 | " \n", 1170 | " \n", 1171 | " \n", 1172 | " \n", 1173 | " \n", 1174 | " \n", 1175 | " \n", 1176 | " \n", 1177 | " \n", 1178 | " \n", 1179 | " \n", 1180 | " \n", 1181 | " \n", 1182 | " \n", 1183 | " \n", 1184 | " \n", 1185 | " \n", 1186 | " \n", 1187 | " \n", 1188 | " \n", 1189 | " \n", 1190 | " \n", 1191 | " \n", 1192 | " \n", 1193 | " \n", 1194 | " \n", 1195 | " \n", 1196 | " \n", 1197 | " \n", 1198 | " \n", 1199 | " \n", 1200 | " \n", 1201 | " \n", 1202 | " \n", 1203 | " \n", 1204 | " \n", 1205 | " \n", 1206 | " \n", 1207 | " \n", 1208 | " \n", 1209 | " \n", 1210 | " \n", 1211 | " \n", 1212 | " \n", 1213 | " \n", 1214 | " \n", 1215 | " \n", 1216 | " \n", 1217 | " \n", 1218 | " \n", 1219 | " \n", 1220 | " \n", 1221 | " \n", 1222 | " \n", 1223 | " \n", 1224 | " \n", 1225 | " \n", 1226 | " \n", 1227 | " \n", 1228 | " \n", 1229 | " \n", 1230 | " \n", 1231 | " \n", 1232 | " \n", 1233 | " \n", 1234 | " \n", 1235 | " \n", 1236 | " \n", 1237 | " \n", 1238 | " \n", 1239 | " \n", 1240 | " \n", 1241 | " \n", 1242 | " \n", 1243 | 
" \n", 1244 | " \n", 1245 | " \n", 1246 | " \n", 1247 | " \n", 1248 | " \n", 1249 | " \n", 1250 | " \n", 1251 | " \n", 1252 | " \n", 1253 | " \n", 1254 | " \n", 1255 | " \n", 1256 | " \n", 1257 | " \n", 1258 | " \n", 1259 | " \n", 1260 | " \n", 1261 | " \n", 1262 | " \n", 1263 | " \n", 1264 | " \n", 1265 | " \n", 1266 | " \n", 1267 | " \n", 1268 | " \n", 1269 | " \n", 1270 | " \n", 1271 | " \n", 1272 | " \n", 1273 | " \n", 1274 | " \n", 1275 | " \n", 1276 | " \n", 1277 | " \n", 1278 | " \n", 1279 | " \n", 1280 | " \n", 1281 | " \n", 1282 | " \n", 1283 | " \n", 1284 | " \n", 1285 | " \n", 1286 | " \n", 1287 | " \n", 1288 | " \n", 1289 | " \n", 1290 | " \n", 1291 | " \n", 1292 | " \n", 1293 | " \n", 1294 | " \n", 1295 | " \n", 1296 | " \n", 1297 | " \n", 1298 | " \n", 1299 | " \n", 1300 | " \n", 1301 | " \n", 1302 | " \n", 1303 | " \n", 1304 | " \n", 1305 | " \n", 1306 | " \n", 1307 | " \n", 1308 | " \n", 1309 | " \n", 1310 | " \n", 1311 | " \n", 1312 | " \n", 1313 | " \n", 1314 | " \n", 1315 | " \n", 1316 | " \n", 1317 | " \n", 1318 | " \n", 1319 | " \n", 1320 | "
adult_malessubadult_malesadult_femalesjuvenilespupserror
531.jpg000000
946.jpg000000
34.jpg000000
30.jpg203301
290.jpg000000
406.jpg000000
380.jpg20111460150
913.jpg1010933047
621.jpg000000
811.jpg000000
7.jpg000000
421.jpg000000
292.jpg557043117
66.jpg856117232
593.jpg12325803
490.jpg000000
909.jpg000000
800.jpg000000
215.jpg000000
426.jpg26344251
475.jpg36618505
614.jpg000000
184.jpg000000
905.jpg000000
97.jpg00271100
882.jpg000000
776.jpg82512296
899.jpg2213300
344.jpg000000
473.jpg20256001
510.jpg516000
234.jpg000000
291.jpg43202992423711
331.jpg61867005
433.jpg0394303
712.jpg000000
741.jpg345231381761
767.jpg000000
912.jpg30225145205432
\n", 1321 | "
" 1322 | ] 1323 | }, 1324 | "execution_count": 18, 1325 | "metadata": {}, 1326 | "output_type": "execute_result" 1327 | } 1328 | ], 1329 | "source": [ 1330 | "count_df" 1331 | ] 1332 | }, 1333 | { 1334 | "cell_type": "markdown", 1335 | "metadata": { 1336 | "_cell_guid": "36b150cc-ffbb-ebb5-049f-58c48e5bde00" 1337 | }, 1338 | "source": [ 1339 | "### Reference counts" 1340 | ] 1341 | }, 1342 | { 1343 | "cell_type": "code", 1344 | "execution_count": 16, 1345 | "metadata": { 1346 | "_cell_guid": "cf4ecf01-de99-b59e-d1d6-067e8f4478fe", 1347 | "collapsed": false 1348 | }, 1349 | "outputs": [ 1350 | { 1351 | "data": { 1352 | "text/html": [ 1353 | "
\n", 1354 | "\n", 1355 | " \n", 1356 | " \n", 1357 | " \n", 1358 | " \n", 1359 | " \n", 1360 | " \n", 1361 | " \n", 1362 | " \n", 1363 | " \n", 1364 | " \n", 1365 | " \n", 1366 | " \n", 1367 | " \n", 1368 | " \n", 1369 | " \n", 1370 | " \n", 1371 | " \n", 1372 | " \n", 1373 | " \n", 1374 | " \n", 1375 | " \n", 1376 | " \n", 1377 | " \n", 1378 | " \n", 1379 | " \n", 1380 | " \n", 1381 | " \n", 1382 | " \n", 1383 | " \n", 1384 | " \n", 1385 | " \n", 1386 | " \n", 1387 | " \n", 1388 | " \n", 1389 | " \n", 1390 | " \n", 1391 | " \n", 1392 | " \n", 1393 | " \n", 1394 | " \n", 1395 | " \n", 1396 | " \n", 1397 | " \n", 1398 | " \n", 1399 | " \n", 1400 | " \n", 1401 | " \n", 1402 | " \n", 1403 | " \n", 1404 | " \n", 1405 | " \n", 1406 | " \n", 1407 | " \n", 1408 | " \n", 1409 | " \n", 1410 | " \n", 1411 | " \n", 1412 | " \n", 1413 | " \n", 1414 | " \n", 1415 | " \n", 1416 | " \n", 1417 | " \n", 1418 | " \n", 1419 | " \n", 1420 | " \n", 1421 | " \n", 1422 | " \n", 1423 | " \n", 1424 | " \n", 1425 | " \n", 1426 | " \n", 1427 | " \n", 1428 | " \n", 1429 | " \n", 1430 | " \n", 1431 | " \n", 1432 | " \n", 1433 | " \n", 1434 | " \n", 1435 | " \n", 1436 | " \n", 1437 | " \n", 1438 | " \n", 1439 | " \n", 1440 | " \n", 1441 | " \n", 1442 | " \n", 1443 | " \n", 1444 | " \n", 1445 | " \n", 1446 | " \n", 1447 | " \n", 1448 | " \n", 1449 | " \n", 1450 | " \n", 1451 | " \n", 1452 | " \n", 1453 | " \n", 1454 | " \n", 1455 | " \n", 1456 | " \n", 1457 | " \n", 1458 | " \n", 1459 | " \n", 1460 | " \n", 1461 | " \n", 1462 | " \n", 1463 | " \n", 1464 | " \n", 1465 | " \n", 1466 | " \n", 1467 | " \n", 1468 | " \n", 1469 | " \n", 1470 | " \n", 1471 | " \n", 1472 | " \n", 1473 | " \n", 1474 | " \n", 1475 | " \n", 1476 | " \n", 1477 | " \n", 1478 | " \n", 1479 | " \n", 1480 | " \n", 1481 | " \n", 1482 | " \n", 1483 | " \n", 1484 | " \n", 1485 | " \n", 1486 | " \n", 1487 | " \n", 1488 | " \n", 1489 | " \n", 1490 | " \n", 1491 | " \n", 1492 | " \n", 1493 | " \n", 1494 | " \n", 1495 | " \n", 1496 | 
" \n", 1497 | " \n", 1498 | " \n", 1499 | " \n", 1500 | " \n", 1501 | " \n", 1502 | " \n", 1503 | " \n", 1504 | " \n", 1505 | " \n", 1506 | " \n", 1507 | " \n", 1508 | " \n", 1509 | " \n", 1510 | " \n", 1511 | " \n", 1512 | " \n", 1513 | " \n", 1514 | " \n", 1515 | " \n", 1516 | " \n", 1517 | " \n", 1518 | " \n", 1519 | " \n", 1520 | " \n", 1521 | " \n", 1522 | " \n", 1523 | " \n", 1524 | " \n", 1525 | " \n", 1526 | " \n", 1527 | " \n", 1528 | " \n", 1529 | " \n", 1530 | " \n", 1531 | " \n", 1532 | " \n", 1533 | " \n", 1534 | " \n", 1535 | " \n", 1536 | " \n", 1537 | " \n", 1538 | " \n", 1539 | " \n", 1540 | " \n", 1541 | " \n", 1542 | " \n", 1543 | " \n", 1544 | " \n", 1545 | " \n", 1546 | " \n", 1547 | " \n", 1548 | " \n", 1549 | " \n", 1550 | " \n", 1551 | " \n", 1552 | " \n", 1553 | " \n", 1554 | " \n", 1555 | " \n", 1556 | " \n", 1557 | " \n", 1558 | " \n", 1559 | " \n", 1560 | " \n", 1561 | " \n", 1562 | " \n", 1563 | " \n", 1564 | " \n", 1565 | " \n", 1566 | " \n", 1567 | " \n", 1568 | " \n", 1569 | " \n", 1570 | " \n", 1571 | " \n", 1572 | " \n", 1573 | " \n", 1574 | " \n", 1575 | " \n", 1576 | " \n", 1577 | " \n", 1578 | " \n", 1579 | " \n", 1580 | " \n", 1581 | " \n", 1582 | " \n", 1583 | " \n", 1584 | " \n", 1585 | " \n", 1586 | " \n", 1587 | " \n", 1588 | " \n", 1589 | " \n", 1590 | " \n", 1591 | " \n", 1592 | " \n", 1593 | " \n", 1594 | " \n", 1595 | " \n", 1596 | " \n", 1597 | " \n", 1598 | " \n", 1599 | " \n", 1600 | " \n", 1601 | " \n", 1602 | " \n", 1603 | " \n", 1604 | " \n", 1605 | " \n", 1606 | " \n", 1607 | " \n", 1608 | " \n", 1609 | " \n", 1610 | " \n", 1611 | " \n", 1612 | " \n", 1613 | " \n", 1614 | " \n", 1615 | " \n", 1616 | " \n", 1617 | " \n", 1618 | " \n", 1619 | " \n", 1620 | " \n", 1621 | " \n", 1622 | " \n", 1623 | " \n", 1624 | " \n", 1625 | " \n", 1626 | " \n", 1627 | " \n", 1628 | " \n", 1629 | " \n", 1630 | " \n", 1631 | " \n", 1632 | " \n", 1633 | " \n", 1634 | " \n", 1635 | " \n", 1636 | " \n", 1637 | " \n", 1638 | " \n", 1639 
| " \n", 1640 | " \n", 1641 | " \n", 1642 | " \n", 1643 | " \n", 1644 | " \n", 1645 | " \n", 1646 | " \n", 1647 | " \n", 1648 | " \n", 1649 | " \n", 1650 | " \n", 1651 | " \n", 1652 | " \n", 1653 | " \n", 1654 | " \n", 1655 | " \n", 1656 | " \n", 1657 | " \n", 1658 | " \n", 1659 | " \n", 1660 | " \n", 1661 | " \n", 1662 | " \n", 1663 | " \n", 1664 | " \n", 1665 | " \n", 1666 | " \n", 1667 | " \n", 1668 | " \n", 1669 | " \n", 1670 | " \n", 1671 | " \n", 1672 | " \n", 1673 | " \n", 1674 | " \n", 1675 | " \n", 1676 | " \n", 1677 | " \n", 1678 | " \n", 1679 | " \n", 1680 | " \n", 1681 | " \n", 1682 | " \n", 1683 | " \n", 1684 | " \n", 1685 | " \n", 1686 | " \n", 1687 | " \n", 1688 | " \n", 1689 | " \n", 1690 | " \n", 1691 | " \n", 1692 | " \n", 1693 | " \n", 1694 | " \n", 1695 | " \n", 1696 | " \n", 1697 | " \n", 1698 | " \n", 1699 | " \n", 1700 | " \n", 1701 | " \n", 1702 | " \n", 1703 | " \n", 1704 | " \n", 1705 | " \n", 1706 | " \n", 1707 | " \n", 1708 | " \n", 1709 | " \n", 1710 | " \n", 1711 | " \n", 1712 | " \n", 1713 | " \n", 1714 | " \n", 1715 | " \n", 1716 | " \n", 1717 | " \n", 1718 | " \n", 1719 | "
train_idadult_malessubadult_malesadult_femalesjuvenilespups
53153109560
9469463134330
3434411272370
303020130
29029020000
4064061012141473
3803802083310
9139131020330
6216211512533630
811811751200
776233163
42142113000
2922922323151
666600000
593593123250
490490651440
909909543090
80080011000
21521504000
426426256325
4754753661850
614614105100
1841846161970
90590532600
97970019110
882882246160
7767763225229
89989903020
3443442018100
4734732019570
51051021000
2342340246250
291291392027523237
3313316181000
433433035410
7127123052640
74174132522838176
7677676753140
91291230224413205
\n", 1720 | "
" 1721 | ], 1722 | "text/plain": [ 1723 | "
\n", 1724 | "\n", 1725 | " \n", 1726 | " \n", 1727 | " \n", 1728 | " \n", 1729 | " \n", 1730 | " \n", 1731 | " \n", 1732 | " \n", 1733 | " \n", 1734 | " \n", 1735 | " \n", 1736 | " \n", 1737 | " \n", 1738 | " \n", 1739 | " \n", 1740 | " \n", 1741 | " \n", 1742 | " \n", 1743 | " \n", 1744 | " \n", 1745 | " \n", 1746 | " \n", 1747 | " \n", 1748 | " \n", 1749 | " \n", 1750 | " \n", 1751 | " \n", 1752 | " \n", 1753 | " \n", 1754 | " \n", 1755 | " \n", 1756 | " \n", 1757 | " \n", 1758 | " \n", 1759 | " \n", 1760 | " \n", 1761 | " \n", 1762 | " \n", 1763 | " \n", 1764 | " \n", 1765 | " \n", 1766 | " \n", 1767 | " \n", 1768 | " \n", 1769 | " \n", 1770 | " \n", 1771 | " \n", 1772 | " \n", 1773 | " \n", 1774 | " \n", 1775 | " \n", 1776 | " \n", 1777 | " \n", 1778 | " \n", 1779 | " \n", 1780 | " \n", 1781 | " \n", 1782 | " \n", 1783 | " \n", 1784 | " \n", 1785 | " \n", 1786 | " \n", 1787 | " \n", 1788 | " \n", 1789 | " \n", 1790 | " \n", 1791 | " \n", 1792 | " \n", 1793 | " \n", 1794 | " \n", 1795 | " \n", 1796 | " \n", 1797 | " \n", 1798 | " \n", 1799 | " \n", 1800 | " \n", 1801 | " \n", 1802 | " \n", 1803 | " \n", 1804 | " \n", 1805 | " \n", 1806 | " \n", 1807 | " \n", 1808 | " \n", 1809 | " \n", 1810 | " \n", 1811 | " \n", 1812 | " \n", 1813 | " \n", 1814 | " \n", 1815 | " \n", 1816 | " \n", 1817 | " \n", 1818 | " \n", 1819 | " \n", 1820 | " \n", 1821 | " \n", 1822 | " \n", 1823 | " \n", 1824 | " \n", 1825 | " \n", 1826 | " \n", 1827 | " \n", 1828 | " \n", 1829 | " \n", 1830 | " \n", 1831 | " \n", 1832 | " \n", 1833 | " \n", 1834 | " \n", 1835 | " \n", 1836 | " \n", 1837 | " \n", 1838 | " \n", 1839 | " \n", 1840 | " \n", 1841 | " \n", 1842 | " \n", 1843 | " \n", 1844 | " \n", 1845 | " \n", 1846 | " \n", 1847 | " \n", 1848 | " \n", 1849 | " \n", 1850 | " \n", 1851 | " \n", 1852 | " \n", 1853 | " \n", 1854 | " \n", 1855 | " \n", 1856 | " \n", 1857 | " \n", 1858 | " \n", 1859 | " \n", 1860 | " \n", 1861 | " \n", 1862 | " \n", 1863 | " \n", 1864 | " \n", 1865 | " \n", 1866 | 
" \n", 1867 | " \n", 1868 | " \n", 1869 | " \n", 1870 | " \n", 1871 | " \n", 1872 | " \n", 1873 | " \n", 1874 | " \n", 1875 | " \n", 1876 | " \n", 1877 | " \n", 1878 | " \n", 1879 | " \n", 1880 | " \n", 1881 | " \n", 1882 | " \n", 1883 | " \n", 1884 | " \n", 1885 | " \n", 1886 | " \n", 1887 | " \n", 1888 | " \n", 1889 | " \n", 1890 | " \n", 1891 | " \n", 1892 | " \n", 1893 | " \n", 1894 | " \n", 1895 | " \n", 1896 | " \n", 1897 | " \n", 1898 | " \n", 1899 | " \n", 1900 | " \n", 1901 | " \n", 1902 | " \n", 1903 | " \n", 1904 | " \n", 1905 | " \n", 1906 | " \n", 1907 | " \n", 1908 | " \n", 1909 | " \n", 1910 | " \n", 1911 | " \n", 1912 | " \n", 1913 | " \n", 1914 | " \n", 1915 | " \n", 1916 | " \n", 1917 | " \n", 1918 | " \n", 1919 | " \n", 1920 | " \n", 1921 | " \n", 1922 | " \n", 1923 | " \n", 1924 | " \n", 1925 | " \n", 1926 | " \n", 1927 | " \n", 1928 | " \n", 1929 | " \n", 1930 | " \n", 1931 | " \n", 1932 | " \n", 1933 | " \n", 1934 | " \n", 1935 | " \n", 1936 | " \n", 1937 | " \n", 1938 | " \n", 1939 | " \n", 1940 | " \n", 1941 | " \n", 1942 | " \n", 1943 | " \n", 1944 | " \n", 1945 | " \n", 1946 | " \n", 1947 | " \n", 1948 | " \n", 1949 | " \n", 1950 | " \n", 1951 | " \n", 1952 | " \n", 1953 | " \n", 1954 | " \n", 1955 | " \n", 1956 | " \n", 1957 | " \n", 1958 | " \n", 1959 | " \n", 1960 | " \n", 1961 | " \n", 1962 | " \n", 1963 | " \n", 1964 | " \n", 1965 | " \n", 1966 | " \n", 1967 | " \n", 1968 | " \n", 1969 | " \n", 1970 | " \n", 1971 | " \n", 1972 | " \n", 1973 | " \n", 1974 | " \n", 1975 | " \n", 1976 | " \n", 1977 | " \n", 1978 | " \n", 1979 | " \n", 1980 | " \n", 1981 | " \n", 1982 | " \n", 1983 | " \n", 1984 | " \n", 1985 | " \n", 1986 | " \n", 1987 | " \n", 1988 | " \n", 1989 | " \n", 1990 | " \n", 1991 | " \n", 1992 | " \n", 1993 | " \n", 1994 | " \n", 1995 | " \n", 1996 | " \n", 1997 | " \n", 1998 | " \n", 1999 | " \n", 2000 | " \n", 2001 | " \n", 2002 | " \n", 2003 | " \n", 2004 | " \n", 2005 | " \n", 2006 | " \n", 2007 | " \n", 2008 | " \n", 2009 
| " \n", 2010 | " \n", 2011 | " \n", 2012 | " \n", 2013 | " \n", 2014 | " \n", 2015 | " \n", 2016 | " \n", 2017 | " \n", 2018 | " \n", 2019 | " \n", 2020 | " \n", 2021 | " \n", 2022 | " \n", 2023 | " \n", 2024 | " \n", 2025 | " \n", 2026 | " \n", 2027 | " \n", 2028 | " \n", 2029 | " \n", 2030 | " \n", 2031 | " \n", 2032 | " \n", 2033 | " \n", 2034 | " \n", 2035 | " \n", 2036 | " \n", 2037 | " \n", 2038 | " \n", 2039 | " \n", 2040 | " \n", 2041 | " \n", 2042 | " \n", 2043 | " \n", 2044 | " \n", 2045 | " \n", 2046 | " \n", 2047 | " \n", 2048 | " \n", 2049 | " \n", 2050 | " \n", 2051 | " \n", 2052 | " \n", 2053 | " \n", 2054 | " \n", 2055 | " \n", 2056 | " \n", 2057 | " \n", 2058 | " \n", 2059 | " \n", 2060 | " \n", 2061 | " \n", 2062 | " \n", 2063 | " \n", 2064 | " \n", 2065 | " \n", 2066 | " \n", 2067 | " \n", 2068 | " \n", 2069 | " \n", 2070 | " \n", 2071 | " \n", 2072 | " \n", 2073 | " \n", 2074 | " \n", 2075 | " \n", 2076 | " \n", 2077 | " \n", 2078 | " \n", 2079 | " \n", 2080 | " \n", 2081 | " \n", 2082 | " \n", 2083 | " \n", 2084 | " \n", 2085 | " \n", 2086 | " \n", 2087 | " \n", 2088 | " \n", 2089 | "
train_idadult_malessubadult_malesadult_femalesjuvenilespups
53153109560
9469463134330
3434411272370
303020130
29029020000
4064061012141473
3803802083310
9139131020330
6216211512533630
811811751200
776233163
42142113000
2922922323151
666600000
593593123250
490490651440
909909543090
80080011000
21521504000
426426256325
4754753661850
614614105100
1841846161970
90590532600
97970019110
882882246160
7767763225229
89989903020
3443442018100
4734732019570
51051021000
2342340246250
291291392027523237
3313316181000
433433035410
7127123052640
74174132522838176
7677676753140
91291230224413205
\n", 2090 | "
" 2091 | ] 2092 | }, 2093 | "execution_count": 16, 2094 | "metadata": {}, 2095 | "output_type": "execute_result" 2096 | } 2097 | ], 2098 | "source": [ 2099 | "reference = pd.read_csv('/data/x/sealion/Train/train.csv')\n", 2100 | "reference.ix[indices]" 2101 | ] 2102 | }, 2103 | { 2104 | "cell_type": "code", 2105 | "execution_count": 16, 2106 | "metadata": { 2107 | "collapsed": false 2108 | }, 2109 | "outputs": [], 2110 | "source": [ 2111 | "coords_df.to_csv('coords_notebook-bad2.csv', index=False)\n", 2112 | "count_df.to_csv('counts-bad2.csv', index=True)" 2113 | ] 2114 | }, 2115 | { 2116 | "cell_type": "code", 2117 | "execution_count": null, 2118 | "metadata": { 2119 | "collapsed": true 2120 | }, 2121 | "outputs": [], 2122 | "source": [ 2123 | "" 2124 | ] 2125 | }, 2126 | { 2127 | "cell_type": "code", 2128 | "execution_count": null, 2129 | "metadata": { 2130 | "collapsed": true 2131 | }, 2132 | "outputs": [], 2133 | "source": [ 2134 | "" 2135 | ] 2136 | } 2137 | ], 2138 | "metadata": { 2139 | "_change_revision": 2.0, 2140 | "_is_fork": false, 2141 | "kernelspec": { 2142 | "display_name": "Python 3", 2143 | "language": "python", 2144 | "name": "python3" 2145 | }, 2146 | "language_info": { 2147 | "codemirror_mode": { 2148 | "name": "ipython", 2149 | "version": 3.0 2150 | }, 2151 | "file_extension": ".py", 2152 | "mimetype": "text/x-python", 2153 | "name": "python", 2154 | "nbconvert_exporter": "python", 2155 | "pygments_lexer": "ipython3", 2156 | "version": "3.5.1+" 2157 | } 2158 | }, 2159 | "nbformat": 4, 2160 | "nbformat_minor": 0 2161 | } -------------------------------------------------------------------------------- /scripts/other/script.py: -------------------------------------------------------------------------------- 1 | """Sea Lion Prognostication Engine 2 | 3 | https://www.kaggle.com/c/noaa-fisheries-steller-sea-lion-population-count 4 | """ 5 | 6 | import sys 7 | import os 8 | from collections import namedtuple 9 | import operator 10 | import glob 11 | 
import csv
from math import sqrt

import numpy as np

import PIL
from PIL import Image, ImageDraw, ImageFilter

import skimage
import skimage.io
import skimage.measure

import shapely
import shapely.geometry
from shapely.geometry import Polygon

# Notes
# cls -- sea lion class
# tid -- train, train dotted, or test image id
# _nb -- short for number
# x, y -- don't forget image arrays organized row, col, channels
#
# With contributions from @bitsofbits ...
#


# ================ Meta ====================
__description__ = 'Sea Lion Prognostication Engine'
__version__ = '0.1.0'
__license__ = 'MIT'
__author__ = 'Gavin Crooks (@threeplusone)'
__status__ = "Prototype"
__copyright__ = "Copyright 2017"


# python -c 'import sealiondata; sealiondata.package_versions()'
def package_versions():
    """Print the versions of this engine and its key dependencies.

    Intended for bug reports / reproducibility; prints one ``name \\t version``
    line per package, in a fixed order.
    """
    versions = (
        ('sealionengine', __version__),
        ('python', sys.version[0:5]),
        ('numpy', np.__version__),
        ('skimage', skimage.__version__),
        ('pillow (PIL)', PIL.__version__),
        ('shapely', shapely.__version__),
    )
    for name, version in versions:
        # Matches the original layout exactly: "<name> \t <version>"
        print(name + ' \t', version)


# Root of the Kaggle competition data (train/dotted/test images + CSVs).
SOURCEDIR = '/data/x/sealion/'

# Where derived artifacts (coords.csv, stats.csv) are written.
DATADIR = '.'
58 | 59 | VERBOSITY = namedtuple('VERBOSITY', ['QUITE', 'NORMAL', 'VERBOSE', 'DEBUG'])(0,1,2,3) 60 | 61 | 62 | SeaLionCoord = namedtuple('SeaLionCoord', ['tid', 'cls', 'x', 'y']) 63 | Stats = namedtuple('Stats', [ 64 | 'tid', 65 | 'true_adult_males', 'true_subadult_males', 'true_adult_females', 'true_juveniles', 'true_pups', 'true_total', 66 | 'count_adult_males', 'count_subadult_males', 'count_adult_females', 'count_juveniles', 'count_pups', 'count_total', 67 | 'diff_adult_males', 'diff_subadult_males', 'diff_adult_females', 'diff_juveniles', 'diff_pups', 'diff_total', 68 | ]) 69 | 70 | 71 | class SeaLionData(object): 72 | 73 | def __init__(self, sourcedir=SOURCEDIR, datadir=DATADIR, verbosity=VERBOSITY.NORMAL): 74 | self.sourcedir = sourcedir 75 | self.datadir = datadir 76 | self.verbosity = verbosity 77 | 78 | self.cls_nb = 5 79 | 80 | self.cls_names = ( 81 | 'adult_males', 82 | 'subadult_males', 83 | 'adult_females', 84 | 'juveniles', 85 | 'pups', 86 | 'NOT_A_SEA_LION') 87 | 88 | self.cls = namedtuple('ClassIndex', self.cls_names)(*range(0,6)) 89 | 90 | # backported from @bitsofbits. Average actual color of dot centers. 
91 | self.cls_colors = ( 92 | (243, 8, 5), # red 93 | (244, 8, 242), # magenta 94 | (87, 46, 10), # brown 95 | (25, 56, 176), # blue 96 | (38, 174, 21), # green 97 | ) 98 | 99 | 100 | self.dot_radius = 3 101 | 102 | self.train_nb = 947 103 | 104 | self.test_nb = 18636 105 | 106 | self.paths = { 107 | # Source paths 108 | 'sample' : os.path.join(sourcedir, 'sample_submission.csv'), 109 | 'counts' : os.path.join(sourcedir, 'Train', 'train.csv'), 110 | 'train' : os.path.join(sourcedir, 'Train', '{tid}.jpg'), 111 | 'dotted' : os.path.join(sourcedir, 'TrainDotted', '{tid}.jpg'), 112 | 'test' : os.path.join(sourcedir, 'Test', '{tid}.jpg'), 113 | # Data paths 114 | 'coords' : os.path.join(datadir, 'coords.csv'), 115 | 'stats': os.path.join(datadir, 'stats.csv'), 116 | } 117 | 118 | # From MismatchedTrainImages.txt 119 | self.bad_train_ids = () 120 | # 3, 7, 9, 21, 30, 34, 71, 81, 89, 97, 151, 184, 215, 234, 242, 121 | # 268, 290, 311, 331, 344, 380, 384, 406, 421, 469, 475, 490, 499, 122 | # 507, 530, 531, 605, 607, 614, 621, 638, 644, 687, 712, 721, 767, 123 | # 779, 781, 794, 800, 811, 839, 840, 869, 882, 901, 903, 905, 909, 124 | # 913, 927, 946) 125 | 126 | self._counts = None 127 | 128 | 129 | @property 130 | def trainshort_ids(self): 131 | return (0,1,2,4,5,6,8,10) # Trainshort1 132 | #return range(41,51) # Trainshort2 133 | 134 | @property 135 | def train_ids(self): 136 | """List of all valid train ids""" 137 | tids = range(0, self.train_nb) 138 | tids = list(set(tids) - set(self.bad_train_ids)) # Remove bad ids 139 | tids.sort() 140 | return tids 141 | 142 | @property 143 | def test_ids(self): 144 | return range(0, self.test_nb) 145 | 146 | def path(self, name, **kwargs): 147 | """Return path to various source files""" 148 | path = self.paths[name].format(**kwargs) 149 | return path 150 | 151 | @property 152 | def counts(self) : 153 | """A map from train_id to list of sea lion class counts""" 154 | if self._counts is None : 155 | counts = {} 156 | fn = 
self.path('counts') 157 | with open(fn) as f: 158 | f.readline() 159 | for line in f: 160 | tid_counts = list(map(int, line.split(','))) 161 | counts[tid_counts[0]] = tid_counts[1:] 162 | self._counts = counts 163 | return self._counts 164 | 165 | def rmse(self, tid_counts) : 166 | true_counts = self.counts 167 | 168 | error = np.zeros(shape=[5] ) 169 | 170 | for tid in tid_counts: 171 | true_counts = self.counts[tid] 172 | obs_counts = tid_counts[tid] 173 | diff = np.asarray(true_counts) - np.asarray(obs_counts) 174 | error += diff*diff 175 | #print(error) 176 | error /= len(tid_counts) 177 | rmse = np.sqrt(error).sum() / 5 178 | return rmse 179 | 180 | def load_train_image(self, train_id, border=0, mask=False): 181 | """Return image as numpy array 182 | 183 | border -- add a black border of this width around image 184 | mask -- If true mask out masked areas from corresponding dotted image 185 | """ 186 | img = self._load_image('train', train_id, border) 187 | if mask : 188 | # The masked areas are not uniformly black, presumable due to 189 | # jpeg compression artifacts 190 | dot_img = self._load_image('dotted', train_id, border).astype(np.uint16).sum(axis=-1) 191 | img = np.copy(img) 192 | img[dot_img < 32] = 0 193 | return img 194 | 195 | def load_dotted_image(self, train_id, border=0): 196 | return self._load_image('dotted', train_id, border) 197 | 198 | def load_test_image(self, test_id, border=0): 199 | return self._load_image('test', test_id, border) 200 | 201 | def _load_image(self, itype, tid, border=0) : 202 | fn = self.path(itype, tid=tid) 203 | img = np.asarray(Image.open(fn)) 204 | if border: 205 | height, width, channels = img.shape 206 | bimg = np.zeros(shape=(height+border*2, width+border*2, channels), dtype=np.uint8) 207 | bimg[border:-border, border:-border, :] = img 208 | img = bimg 209 | return img 210 | 211 | def coords(self, train_id): 212 | """Extract coordinates of dotted sealions and return list of SeaLionCoord objects)""" 213 | 214 | # 
Empirical constants 215 | MIN_DIFFERENCE = 16 216 | MIN_AREA = 9 217 | MAX_AREA = 100 218 | MAX_AVG_DIFF = 50 219 | MAX_COLOR_DIFF = 32 220 | 221 | src_img = np.asarray(self.load_train_image(train_id, mask=True), dtype=np.float) 222 | dot_img = np.asarray(self.load_dotted_image(train_id), dtype=np.float) 223 | 224 | img_diff = np.abs(src_img-dot_img) 225 | 226 | # Detect bad data. If train and dotted images are very different then somethings wrong. 227 | avg_diff = img_diff.sum() / (img_diff.shape[0] * img_diff.shape[1]) 228 | if avg_diff > MAX_AVG_DIFF: 229 | print('Warning: Bad data for %d' % train_id) 230 | 231 | img_diff = np.max(img_diff, axis=-1) 232 | 233 | img_diff[img_diff < MIN_DIFFERENCE] = 0 234 | img_diff[img_diff >= MIN_DIFFERENCE] = 255 235 | 236 | sealions = [] 237 | 238 | for cls, color in enumerate(self.cls_colors): 239 | # color search backported from @bitsofbits. 240 | color_array = np.array(color)[None, None, :] 241 | has_color = np.sqrt(np.sum(np.square( 242 | dot_img * (img_diff > 0)[:, :, None] - color_array), axis=-1)) < MAX_COLOR_DIFF 243 | contours = skimage.measure.find_contours(has_color.astype(float), 0.5) 244 | 245 | if self.verbosity == VERBOSITY.DEBUG : 246 | print() 247 | fn = 'diff_{}_{}.png'.format(train_id,cls) 248 | print('Saving train/dotted difference: {}'.format(fn)) 249 | Image.fromarray((has_color*255).astype(np.uint8)).save(fn) 250 | 251 | for cnt in contours : 252 | p = Polygon(shell=cnt) 253 | area = p.area 254 | if area > MIN_AREA and area < MAX_AREA: 255 | y, x = p.centroid.coords[0] # DANGER : skimage and cv2 coordinates transposed? 
256 | x = int(round(x)) 257 | y = int(round(y)) 258 | sealions.append(SeaLionCoord(train_id, cls, x, y) ) 259 | 260 | counts = [0, 0, 0, 0, 0] 261 | for c in sealions: 262 | counts[c.cls] += 1 263 | true_counts = self.counts[train_id] 264 | diff_counts = np.array(true_counts) - np.array(counts) 265 | stats = Stats( 266 | train_id, 267 | true_counts[0], true_counts[1], true_counts[2], true_counts[3], true_counts[4], np.sum(true_counts), 268 | counts[0], counts[1], counts[2], counts[3], counts[4], np.sum(counts), 269 | diff_counts[0], diff_counts[1], diff_counts[2], diff_counts[3], diff_counts[4], np.sum(diff_counts) 270 | ) 271 | if self.verbosity >= VERBOSITY.VERBOSE: 272 | print() 273 | print('train_id', 'true_counts', 'counts', 'difference', sep='\t') 274 | print(train_id, true_counts, counts, diff_counts, sep='\t' ) 275 | 276 | if self.verbosity == VERBOSITY.DEBUG : 277 | img = np.copy(sld.load_dotted_image(train_id)) 278 | r = self.dot_radius 279 | dy, dx, c = img.shape 280 | for tid, cls, cx, cy in sealions: 281 | for x in range(cx-r, cx+r+1): img[cy, x, :] = 255 282 | for y in range(cy-r, cy+r+1): img[y, cx, :] = 255 283 | fn = 'cross_{}.png'.format(train_id) 284 | print('Saving crossed dots: {}'.format(fn)) 285 | Image.fromarray(img).save(fn) 286 | 287 | return sealions, stats 288 | 289 | def save_coords(self, train_ids=None): 290 | if train_ids is None: train_ids = self.train_ids 291 | coord_fn = self.path('coords') 292 | stats_fn = self.path('stats') 293 | self._progress('Saving sealion coordinates to {}'.format(coord_fn)) 294 | with open(coord_fn, 'w') as coord_csv, open(stats_fn, 'w') as stat_csv: 295 | writer_coord = csv.writer(coord_csv) 296 | writer_stat = csv.writer(stat_csv) 297 | writer_coord.writerow(SeaLionCoord._fields) 298 | writer_stat.writerow(Stats._fields) 299 | for tid in train_ids: 300 | self._progress() 301 | coords, stats = self.coords(tid) 302 | for coord in coords: 303 | writer_coord.writerow(coord) 304 | writer_stat.writerow(stats) 
305 | self._progress('done') 306 | 307 | def load_coords(self): 308 | fn = self.path('coords') 309 | self._progress('Loading sea lion coordinates from {}'.format(fn)) 310 | with open(fn) as f: 311 | f.readline() 312 | return [SeaLionCoord(*[int(n) for n in line.split(',')]) for line in f] 313 | 314 | def save_sea_lion_chunks(self, coords, chunksize=128): 315 | self._progress('Saving image chunks...') 316 | self._progress('\n', verbosity=VERBOSITY.VERBOSE) 317 | 318 | last_tid = -1 319 | 320 | for tid, cls, x, y in coords: 321 | if tid != last_tid: 322 | img = self.load_train_image(tid, border=chunksize//2, mask=True) 323 | last_tid = tid 324 | 325 | fn = 'chunk_{tid}_{cls}_{x}_{y}_{size}.png'.format(size=chunksize, tid=tid, cls=cls, x=x, y=y) 326 | self._progress(' Saving '+fn, end='\n', verbosity=VERBOSITY.VERBOSE) 327 | Image.fromarray(img[y:y+chunksize, x:x+chunksize, :]).save(fn) 328 | self._progress() 329 | self._progress('done') 330 | 331 | 332 | def _progress(self, string=None, end=' ', verbosity=VERBOSITY.NORMAL): 333 | if self.verbosity < verbosity: return 334 | if not string: 335 | print('.', end='') 336 | elif string == 'done': 337 | print(' done') 338 | else: 339 | print(string, end=end) 340 | sys.stdout.flush() 341 | 342 | # end SeaLionData 343 | 344 | 345 | # Count sea lion dots and compare to truth from train.csv 346 | sld = SeaLionData() 347 | sld.verbosity = VERBOSITY.VERBOSE 348 | #for tid in sld.trainshort_ids: 349 | # coord = sld.coords(tid) 350 | sld.save_coords(sld.train_ids) 351 | -------------------------------------------------------------------------------- /scripts/preprocess.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import numpy as np 3 | import pandas as pd 4 | import argparse 5 | import os 6 | import sys 7 | import math 8 | import copyreg 9 | import types 10 | import itertools 11 | from pathos import multiprocessing 12 | 13 | from collections import Counter 14 | from 
scipy.stats import kde 15 | from scipy.ndimage import gaussian_filter 16 | 17 | 18 | COLS = ['filename', 'width', 'height', 'xmin', 'ymin', 'xmax', 'ymax', 'mean', 'std'] 19 | CATEGORIES = ["adult_males", "subadult_males", "adult_females", "juveniles", "pups"] 20 | CATEGORY_MAP = {"adult_males": 0, "subadult_males": 1, "adult_females": 2, "juveniles": 3, "pups": 4} 21 | 22 | 23 | def _pickle_method(m): 24 | if m.im_self is None: 25 | return getattr, (m.im_class, m.im_func.func_name) 26 | else: 27 | return getattr, (m.im_self, m.im_func.func_name) 28 | 29 | copyreg.pickle(types.MethodType, _pickle_method) 30 | 31 | 32 | def get_outdir(parent_dir, child_dir=''): 33 | outdir = os.path.join(parent_dir, child_dir) 34 | if not os.path.exists(outdir): 35 | os.makedirs(outdir) 36 | return outdir 37 | 38 | 39 | def find_images(folder, types=('.jpg', '.jpeg')): 40 | results = [] 41 | for root, _, files in os.walk(folder, topdown=False): 42 | for rel_filename in files: 43 | if os.path.splitext(rel_filename)[1].lower() in types: 44 | abs_filename = os.path.join(root, rel_filename) 45 | results.append((rel_filename, abs_filename)) 46 | return results 47 | 48 | 49 | class Process(object): 50 | def __init__( 51 | self, 52 | root_path, 53 | src_folder='Train', 54 | metadata_folder='Train', 55 | dst_folder='Train-processed', 56 | padding_size=256, 57 | calc_stats=True): 58 | 59 | self.write_inputs = True if dst_folder else False 60 | self.generate_targets = False 61 | self.input_path = os.path.join(root_path, src_folder) 62 | 63 | self.dotted_path = os.path.join(root_path, src_folder + 'Dotted') 64 | if not os.path.exists(self.dotted_path): 65 | self.dotted_path = '' 66 | print("No dotted annotation for specified input source path") 67 | 68 | counts_path = os.path.join(root_path, metadata_folder, 'train.csv') 69 | if os.path.isfile(counts_path): 70 | self.counts_df = pd.read_csv(counts_path, index_col=0) 71 | else: 72 | self.counts_df = pd.DataFrame() 73 | print("No counts metadata 
available at %s" % counts_path) 74 | 75 | coords_path = os.path.join(root_path, metadata_folder, 'correct_coords.csv') 76 | if os.path.isfile(coords_path): 77 | self.coords_df = pd.read_csv(coords_path, index_col=False) 78 | self.coords_df.x_coord = self.coords_df.x_coord.astype('int') 79 | self.coords_df.y_coord = self.coords_df.y_coord.astype('int') 80 | self.coords_df.category = self.coords_df.category.replace(CATEGORY_MAP) 81 | self.coords_by_file = self.coords_df.groupby('filename') 82 | else: 83 | self.coords_df = pd.DataFrame() 84 | print("No coordinates metadata available at %s, not generating targets" % coords_path) 85 | self.generate_targets = False 86 | 87 | if self.write_inputs: 88 | if self.generate_targets: 89 | self.output_path_inputs = get_outdir(os.path.join(root_path, dst_folder, 'inputs')) 90 | self.output_path_targets = get_outdir(os.path.join(root_path, dst_folder, 'targets')) 91 | else: 92 | self.output_path_inputs = get_outdir(os.path.join(root_path, dst_folder)) 93 | self.output_path_targets = '' 94 | else: 95 | self.output_path_inputs = '' 96 | self.output_path_targets = '' 97 | 98 | self.padding_size = padding_size 99 | self.border_reflect = False 100 | self.calc_stats = calc_stats 101 | self.verify_targets = False 102 | self.write_scaled_pngs = False 103 | 104 | def _process_file(self, frel, fabs, results, stats=None): 105 | print('Processing %s...' % frel) 106 | basename = os.path.splitext(frel)[0] 107 | fid = int(basename) 108 | 109 | if len(self.coords_df) and frel not in self.coords_by_file.groups: 110 | print('Frame %s counts/coords not found, skipping.' 
% frel) 111 | return 112 | 113 | img = cv2.imread(fabs) 114 | h, w = img.shape[:2] 115 | if self.padding_size: 116 | wb = int(math.ceil((w + self.padding_size) / self.padding_size) * self.padding_size) 117 | hb = int(math.ceil((h + self.padding_size) / self.padding_size) * self.padding_size) 118 | else: 119 | wb = w 120 | hb = h 121 | x_diff = wb - w 122 | y_diff = hb - h 123 | x_min = x_diff // 2 124 | y_min = y_diff // 2 125 | x_max = x_min + w 126 | y_max = y_min + h 127 | 128 | if self.dotted_path: 129 | dotted_file = os.path.join(self.dotted_path, frel) 130 | if os.path.exists(dotted_file): 131 | img_dotted = cv2.imread(dotted_file) 132 | if img_dotted.shape[:2] != img.shape[:2]: 133 | print("Dotted image size doesn't match train for %s, skipping..." % frel) 134 | return 135 | mask = cv2.cvtColor(img_dotted, cv2.COLOR_BGR2GRAY) 136 | _, mask = cv2.threshold(mask, 15, 255, cv2.THRESH_BINARY) 137 | img = cv2.bitwise_and(img, img, mask=mask) 138 | # scale up the mask for targets 139 | mask = cv2.copyMakeBorder( 140 | mask, y_min, y_diff-y_min, x_min, x_diff-x_min, cv2.BORDER_CONSTANT, (0, 0, 0)) 141 | else: 142 | print("No matching dotted file exists for %s, skipping..." 
% frel) 143 | return 144 | else: 145 | mask = None 146 | 147 | result = dict() 148 | result['id'] = fid 149 | result['filename'] = frel 150 | result['height'] = hb 151 | result['width'] = wb 152 | result['xmin'] = x_min 153 | result['ymin'] = y_min 154 | result['xmax'] = x_max 155 | result['ymax'] = y_max 156 | 157 | if self.calc_stats: 158 | mean, std = cv2.meanStdDev(img, mask=mask) 159 | mean = mean[::-1].squeeze() / 255 160 | std = std[::-1].squeeze() / 255 161 | print('Mean, std: ', mean, std) 162 | result['mean'] = list(mean) 163 | result['std'] = list(std) 164 | if stats is not None: 165 | stats.append(np.array([mean, std])) 166 | if len(stats) % 10 == 0: 167 | print("Current avg mean, std:") 168 | statss = np.array(stats) 169 | print(np.mean(statss, axis=0)) 170 | 171 | if self.write_inputs: 172 | if self.padding_size: 173 | if self.border_reflect: 174 | border = cv2.BORDER_REFLECT_101 175 | value = None 176 | else: 177 | border = cv2.BORDER_CONSTANT 178 | value = (0, 0, 0) 179 | img = cv2.copyMakeBorder(img, y_min, y_diff-y_min, x_min, x_diff-x_min, border, value) 180 | cv2.imwrite(os.path.join(self.output_path_inputs, frel), img) 181 | 182 | if self.generate_targets: 183 | self._generate_target(fid, frel, y_min, x_min, wb, hb, mask) 184 | 185 | results.append(result) 186 | 187 | def _generate_target(self, fid, frel, y_min, x_min, width, height, mask): 188 | print(self.counts_df.ix[fid]) 189 | yxc = self.coords_by_file.get_group(frel).as_matrix(columns=['y_coord', 'x_coord', 'category']) 190 | targets = [] 191 | for cat_idx, cat_name in enumerate(CATEGORIES): 192 | yx = yxc[yxc[:, 2] == cat_idx][:, :2] 193 | yx += [y_min, x_min] 194 | 195 | gauss_img = np.zeros([height, width]) 196 | for y, x in yx: 197 | gauss_img[y, x] += 1024. 
198 | 199 | # OpenCV gaussian blur 200 | target_img = cv2.GaussianBlur(gauss_img, (19, 19), 3, borderType=cv2.BORDER_REFLECT_101) 201 | target_img = cv2.bitwise_and(target_img, target_img, mask=mask) 202 | print("Min/max: ", np.min(target_img), np.max(target_img)) 203 | 204 | # Scipy, scipy.ndimage.filters.gaussian_filter 205 | #gauss_img = gaussian_filter(gauss_img, 3) 206 | #if mask_used: 207 | # blah2 = cv2.bitwise_and(blah2, blah2, mask=mask) 208 | #gauss_img = np.sum(blah2)/1024 209 | #gauss_img = gauss_img * 255 210 | 211 | targets.append(target_img.astype(np.float32)) 212 | 213 | # Verification 214 | if self.verify_targets: 215 | # Note sometimes masks cut out parts of density map that contribute to counts and this fails 216 | test_sum = np.sum(target_img) / 1024 217 | print('Counts for class %d:' % cat_idx, test_sum, len(yx)) 218 | assert np.isclose(test_sum, float(len(yx)), atol=.001) 219 | 220 | if self.write_scaled_pngs: 221 | INT_SCALE = np.iinfo(np.uint16).max / 32 222 | target_img_uint16 = target_img * INT_SCALE 223 | target_img_uint16 = target_img_uint16.astype('uint16') 224 | target_path = os.path.join(self.output_path_targets, '%d-target-%d.png' % (fid, cat_idx)) 225 | cv2.imwrite(target_path, target_img_uint16) 226 | 227 | target_stacked = np.dstack(targets) 228 | target_path = os.path.join(self.output_path_targets, '%d-target.npz' % fid) 229 | np.savez_compressed(target_path, target_stacked) 230 | 231 | def _process_files(self, inputs): 232 | results = [] 233 | stats = [] 234 | for frel, fabs in inputs: 235 | self._process_file(frel, fabs, results, stats) 236 | return results, stats 237 | 238 | def __call__(self, num_processes=1): 239 | if not os.path.isdir(self.input_path): 240 | print('Error: Folder %s does not exist.' % self.input_path) 241 | return [] 242 | inputs = find_images(self.input_path) 243 | if not inputs: 244 | print('Error: No inputs found at %s.' 
% self.input_path) 245 | return [] 246 | results = [] 247 | stats = [] 248 | if num_processes > 1: 249 | input_slices = [x.tolist() for x in np.array_split(inputs, num_processes)] 250 | pool = multiprocessing.Pool(num_processes) 251 | for m in pool.map(self._process_files, input_slices): 252 | results += m[0] 253 | stats += m[1] 254 | results.sort(key=lambda k: k['id']) 255 | else: 256 | results, stats = self._process_files(inputs) 257 | stats = np.array(stats) 258 | print('Dataset mean, std: ', np.mean(stats, axis=0)) 259 | return results 260 | 261 | 262 | def main(): 263 | parser = argparse.ArgumentParser() 264 | parser.add_argument('data', metavar='DIR', help='path to dataset') 265 | args = parser.parse_args() 266 | 267 | root_path = args.data 268 | src_folder = 'Test' 269 | if 'Test' not in src_folder: 270 | dst_folder = src_folder + '-processed' 271 | padding_size = 256 272 | else: 273 | dst_folder = '' 274 | padding_size = 0 275 | metadata_folder = src_folder 276 | process = Process( 277 | root_path, 278 | src_folder=src_folder, 279 | metadata_folder=metadata_folder, 280 | dst_folder=dst_folder, 281 | padding_size=padding_size, 282 | calc_stats=True) 283 | results = process(4) 284 | 285 | df = pd.DataFrame.from_records(results, columns=COLS) 286 | df.to_csv( 287 | os.path.join(root_path, dst_folder if dst_folder else src_folder, 'processed.csv'), 288 | index=False) 289 | 290 | if __name__ == '__main__': 291 | main() -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from distutils.core import setup 2 | from Cython.Build import cythonize 3 | import numpy 4 | 5 | setup( 6 | ext_modules=cythonize("utils_cython.pyx"), 7 | include_dirs=[numpy.get_include()] 8 | ) 9 | -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | 
import argparse
import os
import time
import shutil
from datetime import datetime
from dataset import SealionDataset, RandomPatchSampler
from models import ModelCnet, ModelCountception
from utils import AverageMeter, get_outdir

import torch
import torch.autograd as autograd
import torch.utils.data as data
import torch.optim as optim
import torchvision.utils

# Command-line interface for training.
# FIX: help strings below previously had unbalanced parentheses around the
# quoted defaults, and --epochs claimed "(default: 2)" while defaulting to 10.
parser = argparse.ArgumentParser(description='PyTorch Sealion count training')
parser.add_argument('data', metavar='DIR',
                    help='path to dataset')
parser.add_argument('--model', default='countception', type=str, metavar='MODEL',
                    help='Name of model to train (default: "countception")')
parser.add_argument('--opt', default='sgd', type=str, metavar='OPTIMIZER',
                    help='Optimizer (default: "sgd")')
parser.add_argument('--loss', default='l1', type=str, metavar='LOSS',
                    help='Loss function (default: "l1")')
parser.add_argument('--use-logits', action='store_true', default=False,
                    help='Enable use of logits for model output')
parser.add_argument('--patch-size', type=int, default=256, metavar='N',
                    help='Image patch size (default: 256)')
parser.add_argument('--batch-size', type=int, default=16, metavar='N',
                    help='input batch size for training (default: 16)')
parser.add_argument('--test-batch-size', type=int, default=1000, metavar='N',
                    help='input batch size for testing (default: 1000)')
parser.add_argument('--epochs', type=int, default=10, metavar='N',
                    help='number of epochs to train (default: 10)')
parser.add_argument('--lr', type=float, default=0.01, metavar='LR',
                    help='learning rate (default: 0.01)')
parser.add_argument('--momentum', type=float, default=0.5, metavar='M',
                    help='SGD momentum (default: 0.5)')
parser.add_argument('--weight-decay', type=float, default=0.0001, metavar='M',
                    help='weight decay (default: 0.0001)')
parser.add_argument('--seed', type=int, default=1, metavar='S',
                    help='random seed (default: 1)')
parser.add_argument('--log-interval', type=int, default=10, metavar='N',
                    help='how many batches to wait before logging training status')
parser.add_argument('--num-processes', type=int, default=1, metavar='N',
                    help='how many training processes to use (default: 1)')
parser.add_argument('--no-cuda', action='store_true', default=False,
                    help='disables CUDA training')
parser.add_argument('--num-gpu', type=int, default=1,
                    help='Number of GPUS to use')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
                    help='path to latest checkpoint (default: none)')
parser.add_argument('--save-batches', action='store_true', default=False,
                    help='save images of batch inputs and targets every log interval for debugging/verification')


def main():
    """Parse CLI args, build dataset/model/optimizer/loss, then train,
    checkpointing after every epoch."""
    args = parser.parse_args()

    train_input_root = os.path.join(args.data, 'inputs')
    train_target_root = os.path.join(args.data, 'targets')
    train_process_file = os.path.join(args.data, 'processed.csv')
    train_counts_file = './data/correct_train.csv'
    train_coords_file = './data/correct_coordinates.csv'
    output_dir = get_outdir('./output', 'train', datetime.now().strftime("%Y%m%d-%H%M%S"))

    batch_size = args.batch_size
    # FIX: honor the --epochs CLI argument; it was previously ignored and
    # the epoch count hard-coded to 1000.
    num_epochs = args.epochs
    patch_size = (args.patch_size, args.patch_size)
    num_outputs = 5  # one output plane per sealion class
    target_type = 'countception' if args.model in ['countception', 'cc'] else 'density'
    debug_model = False
    use_logits = args.use_logits
    num_logits = 12 if use_logits else 0

    torch.manual_seed(args.seed)

    dataset = SealionDataset(
        train_input_root,
        train_target_root,
        train_counts_file,
        train_coords_file,
        train_process_file,
        train=True,
        patch_size=patch_size,
        target_type=target_type,
        generate_target=True,
        per_image_norm=True,
        num_logits=num_logits,
    )

    sampler = RandomPatchSampler(dataset, oversample=32, repeat=16)

    # FIX: shuffle and sampler are mutually exclusive in DataLoader (passing
    # both raises ValueError); the sampler already randomizes patch order.
    loader = data.DataLoader(
        dataset,
        batch_size=batch_size, num_workers=args.num_processes, sampler=sampler)

    if args.model == 'cnet':
        model = ModelCnet(
            outplanes=num_outputs,
            target_size=patch_size,
            debug=debug_model)
    elif args.model in ['countception', 'cc']:
        model = ModelCountception(
            outplanes=num_outputs,
            use_logits=use_logits,
            logits_per_output=num_logits,
            debug=debug_model)
    else:
        # FIX: was `assert False and "Invalid model"`, which dropped the message.
        raise ValueError('Invalid model: %s' % args.model)

    if not args.no_cuda:
        if args.num_gpu > 1:
            model = torch.nn.DataParallel(model, device_ids=list(range(args.num_gpu))).cuda()
        else:
            model.cuda()

    if args.opt.lower() == 'sgd':
        optimizer = optim.SGD(
            model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay)
    elif args.opt.lower() == 'adam':
        optimizer = optim.Adam(
            model.parameters(), lr=args.lr, weight_decay=args.weight_decay)
    elif args.opt.lower() == 'adadelta':
        optimizer = optim.Adadelta(
            model.parameters(), lr=args.lr, weight_decay=args.weight_decay)
    else:
        raise ValueError('Invalid optimizer: %s' % args.opt)

    if args.loss.lower() == 'l1':
        loss_fn = torch.nn.L1Loss()
    elif args.loss.lower() == 'smoothl1':
        loss_fn = torch.nn.SmoothL1Loss()
    elif args.loss.lower() == 'mse':
        loss_fn = torch.nn.MSELoss()
    elif args.loss.lower() in ['crossentropy', 'nll']:
        # FIX: was `assert use_logits and "..."`, which passed/failed without
        # ever showing the message; validate before constructing the loss.
        if not use_logits:
            raise ValueError('Cross entropy is only a valid loss if logits are being used')
        loss_fn = torch.nn.CrossEntropyLoss()
    else:
        raise ValueError('Invalid loss function: %s' % args.loss)

    # Optionally resume from a checkpoint.
    start_epoch = 1
    if args.resume:
        if os.path.isfile(args.resume):
            print("=> loading checkpoint '{}'".format(args.resume))
            checkpoint = torch.load(args.resume)
            model.load_state_dict(checkpoint['state_dict'])
            optimizer.load_state_dict(checkpoint['optimizer'])
            # FIX: dropped the redundant `args.start_epoch` assignment; the
            # local `start_epoch` is what the training loop actually uses.
            start_epoch = checkpoint['epoch']
            print("=> loaded checkpoint '{}' (epoch {})"
                  .format(args.resume, checkpoint['epoch']))
        else:
            print("=> no checkpoint found at '{}'".format(args.resume))

    for epoch in range(start_epoch, num_epochs + 1):
        adjust_learning_rate(optimizer, epoch, initial_lr=args.lr, decay_epochs=3)
        train_epoch(epoch, model, loader, optimizer, loss_fn, args, output_dir, use_logits=use_logits)
        # FIX: DataParallel does not forward .name(); reach through to the
        # wrapped module when present.
        arch = model.module.name() if isinstance(model, torch.nn.DataParallel) else model.name()
        save_checkpoint({
            'epoch': epoch + 1,
            'arch': arch,
            'state_dict': model.state_dict(),
            'optimizer': optimizer.state_dict(),
        },
            is_best=False,
            filename='checkpoint-%d.pth.tar' % epoch,
            output_dir=output_dir)


def train_epoch(epoch, model, loader, optimizer, loss_fn, args, output_dir='', use_logits=False):
    """Run one training epoch, logging loss/throughput every
    `args.log_interval` batches and optionally dumping batch images."""
    batch_time_m = AverageMeter()
    data_time_m = AverageMeter()
    losses_m = AverageMeter()

    model.train()

    end = time.time()
    for batch_idx, (input, target, index) in enumerate(loader):
        data_time_m.update(time.time() - end)
        if args.no_cuda:
            input_var, target_var = autograd.Variable(input), autograd.Variable(target)
        else:
            input_var, target_var = autograd.Variable(input.cuda()), autograd.Variable(target.cuda())

        output = model(input_var)
        if use_logits:
            # One CrossEntropy term per output plane; targets are rounded
            # counts used as class indices.
            target_var = target_var.permute(1, 0, 2, 3).round().long()
            loss = sum([loss_fn(x, t) for x, t in zip(output, target_var)])
        else:
            loss = loss_fn(output, target_var)
        # NOTE: loss.data[0] is the legacy (pre-0.4) PyTorch scalar access,
        # consistent with the autograd.Variable usage above.
        losses_m.update(loss.data[0], input_var.size(0))

        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        batch_time_m.update(time.time() - end)
        if batch_idx % args.log_interval == 0:
            print('Train Epoch: {} [{}/{} ({:.0f}%)] '
                  'Loss: {loss.val:.6f} ({loss.avg:.4f}) '
                  'Time: {batch_time.val:.3f}s, {rate:.3f}/s '
                  '({batch_time.avg:.3f}s, {rate_avg:.3f}/s) '
                  'Data: {data_time.val:.3f} ({data_time.avg:.3f})'.format(
                      epoch,
                      batch_idx * len(input), len(loader.sampler),
                      100. * batch_idx / len(loader),
                      loss=losses_m,
                      batch_time=batch_time_m,
                      rate=input_var.size(0) / batch_time_m.val,
                      rate_avg=input_var.size(0) / batch_time_m.avg,
                      data_time=data_time_m))

            if args.save_batches:
                torchvision.utils.save_image(
                    input,
                    os.path.join(output_dir, 'input-batch-%d.jpg' % batch_idx),
                    normalize=True)
                torchvision.utils.save_image(
                    torch.sum(target, dim=1),
                    os.path.join(output_dir, 'target-batch-%d.jpg' % batch_idx),
                    normalize=True)
        end = time.time()


def adjust_learning_rate(optimizer, epoch, initial_lr, decay_epochs=5):
    """Decay the learning rate by 10x every `decay_epochs` epochs."""
    lr = initial_lr * (0.1 ** (epoch // decay_epochs))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr


def save_checkpoint(state, is_best, filename='checkpoint.pth.tar', output_dir=''):
    """Serialize training state to output_dir; copy to model_best.pth.tar
    when is_best."""
    save_path = os.path.join(output_dir, filename)
    torch.save(state, save_path)
    if is_best:
        shutil.copyfile(save_path, os.path.join(output_dir, 'model_best.pth.tar'))


if __name__ == '__main__':
    main()
class AverageMeter:
    """Computes and stores the average and current value."""

    def __init__(self):
        self.reset()

    def reset(self):
        self.val = 0    # most recent value
        self.avg = 0    # running (weighted) average
        self.sum = 0    # weighted sum of values
        self.count = 0  # total weight

    def update(self, val, n=1):
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count


@contextmanager
def measure_time(title='unknown'):
    """Context manager that prints wall-clock seconds spent inside the block."""
    # FIX: `time` was never imported at module level (NameError on first use)
    # and time.clock() was removed in Python 3.8; use perf_counter instead.
    import time
    t1 = time.perf_counter()
    yield
    t2 = time.perf_counter()
    print('%s: %0.2f seconds elapsed' % (title, t2 - t1))


def calc_crop_size(target_w, target_h, angle, scale):
    """Return (w, h) of the axis-aligned crop needed so that rotating by
    `angle` degrees and scaling by `scale` still fully covers a
    target_w x target_h region."""
    crop_w = target_w
    crop_h = target_h
    if angle:
        # Bounding box of the rotated target rectangle's corners.
        corners = np.array(
            [[target_w/2, -target_w/2, -target_w/2, target_w/2],
             [target_h/2, target_h/2, -target_h/2, -target_h/2]])
        s = np.sin(angle * np.pi/180)
        c = np.cos(angle * np.pi/180)
        M = np.array([[c, -s], [s, c]])
        rotated_corners = np.dot(M, corners)
        crop_w = 2 * np.max(np.abs(rotated_corners[0, :]))
        crop_h = 2 * np.max(np.abs(rotated_corners[1, :]))
    crop_w = int(np.ceil(crop_w / scale))
    crop_h = int(np.ceil(crop_h / scale))
    return crop_w, crop_h


def crop_center(img, cx, cy, crop_w, crop_h):
    """Crop a crop_w x crop_h window centered at (cx, cy).

    Regions falling outside the image are zero-padded so the result always
    has shape (crop_h, crop_w, channels).
    """
    img_h, img_w = img.shape[:2]
    trunc_top = trunc_bottom = trunc_left = trunc_right = 0
    left = cx - crop_w//2
    if left < 0:
        trunc_left = 0 - left
        left = 0
    right = left - trunc_left + crop_w
    if right > img_w:
        trunc_right = right - img_w
        right = img_w
    top = cy - crop_h//2
    if top < 0:
        trunc_top = 0 - top
        top = 0
    bottom = top - trunc_top + crop_h
    if bottom > img_h:
        trunc_bottom = bottom - img_h
        bottom = img_h
    if trunc_left or trunc_right or trunc_top or trunc_bottom:
        # Window extends past an edge: paste the valid region into a
        # zero-filled output at the appropriate offset.
        img_new = np.zeros((crop_h, crop_w, img.shape[2]), dtype=img.dtype)
        trunc_bottom = crop_h - trunc_bottom
        trunc_right = crop_w - trunc_right
        img_new[trunc_top:trunc_bottom, trunc_left:trunc_right] = img[top:bottom, left:right]
        return img_new
    else:
        return img[top:bottom, left:right]


def crop_points_center(points, cx, cy, crop_w, crop_h):
    """Return the subset of (x, y) points inside a window centered at (cx, cy)."""
    xl = cx - crop_w // 2
    xu = xl + crop_w
    yl = cy - crop_h // 2
    yu = yl + crop_h
    mask = (points[:, 0] >= xl) & (points[:, 0] < xu) & (points[:, 1] >= yl) & (points[:, 1] < yu)
    return points[mask]


def crop_points(points, x, y, crop_w, crop_h):
    """Return the subset of (x, y) points inside the window with top-left (x, y)."""
    xu = x + crop_w
    yu = y + crop_h
    mask = (points[:, 0] >= x) & (points[:, 0] < xu) & (points[:, 1] >= y) & (points[:, 1] < yu)
    return points[mask]


def calc_num_patches(img_w, img_h, patch_size, stride):
    """Return (total, cols, rows) of patches tiling an img_w x img_h image."""
    if isinstance(patch_size, numbers.Number):
        pw = ph = patch_size
    else:
        pw, ph = patch_size
    patches_rows = (img_h - ph) // stride + 1
    patches_cols = (img_w - pw) // stride + 1
    return patches_cols * patches_rows, patches_cols, patches_rows


def index_to_rc(index, ncols):
    """Convert a flat patch index to grid coordinates.

    NOTE: despite the name, this returns (col, row) — callers rely on that
    ordering.
    """
    row = index // ncols
    col = index - ncols * row
    return col, row


def rc_to_index(row, col, ncols):
    """Convert (row, col) grid coordinates to a flat patch index."""
    return row * ncols + col


def merge_patches(output_img, patches, patches_cols, patch_size, stride, agg_fn='mean'):
    """Reverse of patch extraction: write the per-pixel integer mean of all
    patches covering each pixel into `output_img`.

    `patches` is a flat array laid out row-major with `patches_cols` patches
    per row. Only 'mean' aggregation is implemented; `agg_fn` is kept for
    interface compatibility. Pure-Python reference implementation — the
    Cython version in utils_cython is much faster.
    """
    oh, ow = output_img.shape[:2]
    if isinstance(patch_size, numbers.Number):
        # FIX: was `pw = ph = patch_size, patch_size`, which bound the tuple
        # (patch_size, patch_size) to both names and crashed on the
        # arithmetic below for scalar patch sizes.
        pw = ph = patch_size
    else:
        pw, ph = patch_size
    # Restrict to the region covered by whole patches.
    oh = (oh - ph) // stride * stride + ph
    ow = (ow - pw) // stride * stride + pw
    patches_rows = patches.shape[0] // patches_cols
    for y in range(0, oh):
        # Range of patch rows covering output row y.
        pjl = max((y - ph) // stride + 1, 0)
        pju = min(y // stride + 1, patches_rows)
        for x in range(0, ow):
            # Range of patch columns covering output column x.
            pil = max((x - pw) // stride + 1, 0)
            piu = min(x // stride + 1, patches_cols)
            agg = np.zeros(output_img.shape[-1], dtype=np.uint32)
            agg_count = 0
            for pj in range(pjl, pju):
                for pi in range(pil, piu):
                    px = x - pi * stride
                    py = y - pj * stride
                    agg += patches[pi + pj * patches_cols][py, px, :]
                    agg_count += 1
            pa = agg // agg_count
            output_img[y, x, :] = pa.astype(output_img.dtype)


def patch_view(input_img, patch_size, stride, flatten=True):
    """Return a (possibly flattened) view of image patches plus the
    (rows, cols) patch-grid shape. Wraps sklearn's extract_patches."""
    num_chan = input_img.shape[-1]
    if isinstance(patch_size, numbers.Number):
        patch_shape = (patch_size, patch_size, num_chan)
    else:
        patch_shape = (patch_size[1], patch_size[0], num_chan)
    # shape should be (h, w, c)
    assert patch_shape[-1] == input_img.shape[-1]
    patches = extract_patches(input_img, patch_shape, stride)
    patch_rowcol = patches.shape[:2]
    if flatten:
        # Note, this causes data in view to be copied to a new array
        patches = patches.reshape([-1] + list(patch_shape))
    return patches, patch_rowcol


def get_outdir(path, *paths):
    """Join path components and create the directory if it doesn't exist."""
    outdir = os.path.join(path, *paths)
    # exist_ok avoids the exists()/makedirs() race of the original.
    os.makedirs(outdir, exist_ok=True)
    return outdir
"""
Cython patch merge
Some code I hacked together to (more quickly) merge overlapping patches
into an image.

Goal is to reverse sklearn.feature_extraction.image.extract_patches

Works for the most part but has some issues for certain image size vs patch/stride
sizes. Need to spend more time verifying correctness of bounds.
"""
import numpy as np
cimport numpy as cnp
import cython
from libc.string cimport memset
from cpython cimport array
import array
import numbers

cdef inline int int_max(int a, int b): return a if a >= b else b
cdef inline int int_min(int a, int b): return a if a <= b else b


@cython.overflowcheck(False)  # turn off integer overflow checking for entire function
@cython.boundscheck(False)    # turn off bounds-checking for entire function
@cython.wraparound(False)     # turn off negative index wrapping for entire function
@cython.cdivision(True)       # C truncating division (same as floor for the non-negative operands used here)
def merge_patches_uint8(
        cnp.uint8_t[:, :, :] out_img,
        cnp.uint8_t[:, :, :, :] patches,
        int patches_cols, patch_size, int stride):
    """Merge overlapping uint8 patches into out_img as a per-pixel integer mean.

    patches is a flat grid laid out row-major with `patches_cols` patches per
    row. See module docstring: bounds for some image/patch/stride
    combinations are not fully verified.
    """
    cdef int oh = out_img.shape[0]
    cdef int ow = out_img.shape[1]
    cdef int oc = out_img.shape[2]
    cdef int pw
    cdef int ph
    if isinstance(patch_size, numbers.Number):
        pw = ph = patch_size
    else:
        pw = patch_size[0]
        ph = patch_size[1]
    # Restrict output to the region covered by whole patches.
    oh = (oh - ph) / stride * stride + ph
    ow = (ow - pw) / stride * stride + pw
    cdef int patches_rows = patches.shape[0] / patches_cols
    cdef int y, x
    cdef int pi, pj
    cdef int py, px
    cdef int pjl, pju
    cdef int pil, piu
    # Per-channel accumulator ('i' => C int, zeroed via memset below).
    cdef int[:] agg = array.array('i', [0] * oc)
    cdef int agg_count
    cdef int c
    for y in range(0, oh):
        # Range of patch rows that cover output row y.
        pjl = int_max((y - ph) / stride + 1, 0)
        pju = int_min(y / stride + 1, patches_rows)
        for x in range(0, ow):
            # Range of patch columns that cover output column x.
            pil = int_max((x - pw) / stride + 1, 0)
            piu = int_min(x / stride + 1, patches_cols)
            memset(&agg[0], 0, oc * sizeof(cnp.int32_t))
            agg_count = 0
            for pj in range(pjl, pju):
                for pi in range(pil, piu):
                    px = x - pi * stride
                    py = y - pj * stride
                    for c in range(oc):
                        agg[c] = agg[c] + patches[pi + pj * patches_cols][py, px, c]
                    agg_count += 1
            for c in range(oc):
                out_img[y, x, c] = (agg[c] / agg_count)


@cython.overflowcheck(False)  # turn off integer overflow checking for entire function
@cython.boundscheck(False)    # turn off bounds-checking for entire function
@cython.wraparound(False)     # turn off negative index wrapping for entire function
@cython.cdivision(True)       # C truncating division (same as floor for the non-negative operands used here)
def merge_patches_float32(
        cnp.float32_t[:, :, :] out_img,
        cnp.float32_t[:, :, :, :] patches,
        int patches_cols, patch_size, int stride):
    """Merge overlapping float32 patches into out_img as a per-pixel mean.

    Same layout assumptions as merge_patches_uint8; accumulation is done in
    double precision.
    """
    cdef int oh = out_img.shape[0]
    cdef int ow = out_img.shape[1]
    cdef int oc = out_img.shape[2]
    cdef int pw
    cdef int ph
    if isinstance(patch_size, numbers.Number):
        pw = ph = patch_size
    else:
        pw = patch_size[0]
        ph = patch_size[1]
    # Restrict output to the region covered by whole patches.
    oh = (oh - ph) / stride * stride + ph
    ow = (ow - pw) / stride * stride + pw
    cdef int patches_rows = patches.shape[0] / patches_cols
    cdef int y, x
    cdef int pi, pj
    cdef int py, px
    cdef int pjl, pju
    cdef int pil, piu
    cdef double[:] agg = array.array('d', [0] * oc)
    cdef int agg_count
    cdef int c
    for y in range(0, oh):
        # Range of patch rows that cover output row y.
        pjl = int_max((y - ph) / stride + 1, 0)
        pju = int_min(y / stride + 1, patches_rows)
        for x in range(0, ow):
            # Range of patch columns that cover output column x.
            pil = int_max((x - pw) / stride + 1, 0)
            piu = int_min(x / stride + 1, patches_cols)
            # Zero the accumulator with an explicit loop (the memset variant
            # was left disabled in the original experiments).
            for c in range(oc):
                agg[c] = 0.0
            agg_count = 0
            for pj in range(pjl, pju):
                for pi in range(pil, piu):
                    px = x - pi * stride
                    py = y - pj * stride
                    for c in range(oc):
                        agg[c] = agg[c] + patches[pi + pj * patches_cols][py, px, c]
                    agg_count += 1
            for c in range(oc):
                out_img[y, x, c] = (agg[c] / agg_count)