├── .ipynb_checkpoints
│   └── DeepSegmentation-checkpoint.ipynb
├── Data
│   ├── CLASSES.pkl
│   ├── classid_to_wnid.pkl
│   ├── validation_1.pkl
│   ├── validation_10.pkl
│   ├── validation_11.pkl
│   ├── validation_12.pkl
│   ├── validation_13.pkl
│   ├── validation_2.pkl
│   ├── validation_3.pkl
│   ├── validation_4.pkl
│   ├── validation_5.pkl
│   ├── validation_6.pkl
│   ├── validation_7.pkl
│   ├── validation_8.pkl
│   ├── validation_9.pkl
│   ├── validation_bottle.pkl
│   ├── validation_cat.pkl
│   └── validation_cycles.pkl
├── Images
│   ├── apple.jpg
│   ├── beach.jpg
│   ├── beetroot.jpg
│   ├── cat.jpg
│   ├── dog.jpg
│   ├── dolph.jpg
│   ├── einstein.jpg
│   ├── kitten.jpg
│   ├── orange.jpg
│   ├── pig.html
│   ├── pig.jpg
│   ├── pig_with_dog.jpg
│   ├── rose.jpg
│   └── sky.jpg
├── Localization.ipynb
├── Poster.pdf
├── README.md
├── Validation.ipynb
├── deconv_utils.py
├── frameworkpython
├── library
│   ├── __init__.py
│   ├── classifiers
│   │   ├── __init__.py
│   │   └── pretrained_vgg16.py
│   ├── data_utils.py
│   ├── fast_layers.py
│   ├── frameworkpython
│   ├── im2col.py
│   ├── im2col_cython.c
│   ├── im2col_cython.pyx
│   ├── image_utils.py
│   ├── layer_utils.py
│   ├── layers.py
│   ├── localization.py
│   └── setup.py
└── validation_script.py
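
Note on the Data/ pickles: as the dumps below show, both CLASSES.pkl and classid_to_wnid.pkl are protocol-0 (ASCII) pickles of plain Python lists of strings. A minimal loading sketch, assuming the repository root as the working directory (the variable names here are illustrative, not taken from the notebooks):

```python
import pickle

# Protocol-0 pickles of plain string lists; the standard library is enough.
# Entries are ASCII, so the default string decoding works under Python 3 as well.
with open('Data/CLASSES.pkl', 'rb') as f:
    CLASSES = pickle.load(f)           # human-readable ImageNet class names
with open('Data/classid_to_wnid.pkl', 'rb') as f:
    classid_to_wnid = pickle.load(f)   # WordNet IDs, presumably in the same order

print(CLASSES[0])            # 'tench, Tinca tinca'
print(classid_to_wnid[0])    # 'n01440764'
```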
/Data/CLASSES.pkl:
--------------------------------------------------------------------------------
1 | (lp0
2 | S'tench, Tinca tinca'
3 | p1
4 | aS'goldfish, Carassius auratus'
5 | p2
6 | aS'great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias'
7 | p3
8 | aS'tiger shark, Galeocerdo cuvieri'
9 | p4
10 | aS'hammerhead, hammerhead shark'
11 | p5
12 | aS'electric ray, crampfish, numbfish, torpedo'
13 | p6
14 | aS'stingray'
15 | p7
16 | aS'cock'
17 | p8
18 | aS'hen'
19 | p9
20 | aS'ostrich, Struthio camelus'
21 | p10
22 | aS'brambling, Fringilla montifringilla'
23 | p11
24 | aS'goldfinch, Carduelis carduelis'
25 | p12
26 | aS'house finch, linnet, Carpodacus mexicanus'
27 | p13
28 | aS'junco, snowbird'
29 | p14
30 | aS'indigo bunting, indigo finch, indigo bird, Passerina cyanea'
31 | p15
32 | aS'robin, American robin, Turdus migratorius'
33 | p16
34 | aS'bulbul'
35 | p17
36 | aS'jay'
37 | p18
38 | aS'magpie'
39 | p19
40 | aS'chickadee'
41 | p20
42 | aS'water ouzel, dipper'
43 | p21
44 | aS'kite'
45 | p22
46 | aS'bald eagle, American eagle, Haliaeetus leucocephalus'
47 | p23
48 | aS'vulture'
49 | p24
50 | aS'great grey owl, great gray owl, Strix nebulosa'
51 | p25
52 | aS'European fire salamander, Salamandra salamandra'
53 | p26
54 | aS'common newt, Triturus vulgaris'
55 | p27
56 | aS'eft'
57 | p28
58 | aS'spotted salamander, Ambystoma maculatum'
59 | p29
60 | aS'axolotl, mud puppy, Ambystoma mexicanum'
61 | p30
62 | aS'bullfrog, Rana catesbeiana'
63 | p31
64 | aS'tree frog, tree-frog'
65 | p32
66 | aS'tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui'
67 | p33
68 | aS'loggerhead, loggerhead turtle, Caretta caretta'
69 | p34
70 | aS'leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea'
71 | p35
72 | aS'mud turtle'
73 | p36
74 | aS'terrapin'
75 | p37
76 | aS'box turtle, box tortoise'
77 | p38
78 | aS'banded gecko'
79 | p39
80 | aS'common iguana, iguana, Iguana iguana'
81 | p40
82 | aS'American chameleon, anole, Anolis carolinensis'
83 | p41
84 | aS'whiptail, whiptail lizard'
85 | p42
86 | aS'agama'
87 | p43
88 | aS'frilled lizard, Chlamydosaurus kingi'
89 | p44
90 | aS'alligator lizard'
91 | p45
92 | aS'Gila monster, Heloderma suspectum'
93 | p46
94 | aS'green lizard, Lacerta viridis'
95 | p47
96 | aS'African chameleon, Chamaeleo chamaeleon'
97 | p48
98 | aS'Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis'
99 | p49
100 | aS'African crocodile, Nile crocodile, Crocodylus niloticus'
101 | p50
102 | aS'American alligator, Alligator mississipiensis'
103 | p51
104 | aS'triceratops'
105 | p52
106 | aS'thunder snake, worm snake, Carphophis amoenus'
107 | p53
108 | aS'ringneck snake, ring-necked snake, ring snake'
109 | p54
110 | aS'hognose snake, puff adder, sand viper'
111 | p55
112 | aS'green snake, grass snake'
113 | p56
114 | aS'king snake, kingsnake'
115 | p57
116 | aS'garter snake, grass snake'
117 | p58
118 | aS'water snake'
119 | p59
120 | aS'vine snake'
121 | p60
122 | aS'night snake, Hypsiglena torquata'
123 | p61
124 | aS'boa constrictor, Constrictor constrictor'
125 | p62
126 | aS'rock python, rock snake, Python sebae'
127 | p63
128 | aS'Indian cobra, Naja naja'
129 | p64
130 | aS'green mamba'
131 | p65
132 | aS'sea snake'
133 | p66
134 | aS'horned viper, cerastes, sand viper, horned asp, Cerastes cornutus'
135 | p67
136 | aS'diamondback, diamondback rattlesnake, Crotalus adamanteus'
137 | p68
138 | aS'sidewinder, horned rattlesnake, Crotalus cerastes'
139 | p69
140 | aS'trilobite'
141 | p70
142 | aS'harvestman, daddy longlegs, Phalangium opilio'
143 | p71
144 | aS'scorpion'
145 | p72
146 | aS'black and gold garden spider, Argiope aurantia'
147 | p73
148 | aS'barn spider, Araneus cavaticus'
149 | p74
150 | aS'garden spider, Aranea diademata'
151 | p75
152 | aS'black widow, Latrodectus mactans'
153 | p76
154 | aS'tarantula'
155 | p77
156 | aS'wolf spider, hunting spider'
157 | p78
158 | aS'tick'
159 | p79
160 | aS'centipede'
161 | p80
162 | aS'black grouse'
163 | p81
164 | aS'ptarmigan'
165 | p82
166 | aS'ruffed grouse, partridge, Bonasa umbellus'
167 | p83
168 | aS'prairie chicken, prairie grouse, prairie fowl'
169 | p84
170 | aS'peacock'
171 | p85
172 | aS'quail'
173 | p86
174 | aS'partridge'
175 | p87
176 | aS'African grey, African gray, Psittacus erithacus'
177 | p88
178 | aS'macaw'
179 | p89
180 | aS'sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita'
181 | p90
182 | aS'lorikeet'
183 | p91
184 | aS'coucal'
185 | p92
186 | aS'bee eater'
187 | p93
188 | aS'hornbill'
189 | p94
190 | aS'hummingbird'
191 | p95
192 | aS'jacamar'
193 | p96
194 | aS'toucan'
195 | p97
196 | aS'drake'
197 | p98
198 | aS'red-breasted merganser, Mergus serrator'
199 | p99
200 | aS'goose'
201 | p100
202 | aS'black swan, Cygnus atratus'
203 | p101
204 | aS'tusker'
205 | p102
206 | aS'echidna, spiny anteater, anteater'
207 | p103
208 | aS'platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus'
209 | p104
210 | aS'wallaby, brush kangaroo'
211 | p105
212 | aS'koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus'
213 | p106
214 | aS'wombat'
215 | p107
216 | aS'jellyfish'
217 | p108
218 | aS'sea anemone, anemone'
219 | p109
220 | aS'brain coral'
221 | p110
222 | aS'flatworm, platyhelminth'
223 | p111
224 | aS'nematode, nematode worm, roundworm'
225 | p112
226 | aS'conch'
227 | p113
228 | aS'snail'
229 | p114
230 | aS'slug'
231 | p115
232 | aS'sea slug, nudibranch'
233 | p116
234 | aS'chiton, coat-of-mail shell, sea cradle, polyplacophore'
235 | p117
236 | aS'chambered nautilus, pearly nautilus, nautilus'
237 | p118
238 | aS'Dungeness crab, Cancer magister'
239 | p119
240 | aS'rock crab, Cancer irroratus'
241 | p120
242 | aS'fiddler crab'
243 | p121
244 | aS'king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica'
245 | p122
246 | aS'American lobster, Northern lobster, Maine lobster, Homarus americanus'
247 | p123
248 | aS'spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish'
249 | p124
250 | aS'crayfish, crawfish, crawdad, crawdaddy'
251 | p125
252 | aS'hermit crab'
253 | p126
254 | aS'isopod'
255 | p127
256 | aS'white stork, Ciconia ciconia'
257 | p128
258 | aS'black stork, Ciconia nigra'
259 | p129
260 | aS'spoonbill'
261 | p130
262 | aS'flamingo'
263 | p131
264 | aS'little blue heron, Egretta caerulea'
265 | p132
266 | aS'American egret, great white heron, Egretta albus'
267 | p133
268 | aS'bittern'
269 | p134
270 | aS'crane'
271 | p135
272 | aS'limpkin, Aramus pictus'
273 | p136
274 | aS'European gallinule, Porphyrio porphyrio'
275 | p137
276 | aS'American coot, marsh hen, mud hen, water hen, Fulica americana'
277 | p138
278 | aS'bustard'
279 | p139
280 | aS'ruddy turnstone, Arenaria interpres'
281 | p140
282 | aS'red-backed sandpiper, dunlin, Erolia alpina'
283 | p141
284 | aS'redshank, Tringa totanus'
285 | p142
286 | aS'dowitcher'
287 | p143
288 | aS'oystercatcher, oyster catcher'
289 | p144
290 | aS'pelican'
291 | p145
292 | aS'king penguin, Aptenodytes patagonica'
293 | p146
294 | aS'albatross, mollymawk'
295 | p147
296 | aS'grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus'
297 | p148
298 | aS'killer whale, killer, orca, grampus, sea wolf, Orcinus orca'
299 | p149
300 | aS'dugong, Dugong dugon'
301 | p150
302 | aS'sea lion'
303 | p151
304 | aS'Chihuahua'
305 | p152
306 | aS'Japanese spaniel'
307 | p153
308 | aS'Maltese dog, Maltese terrier, Maltese'
309 | p154
310 | aS'Pekinese, Pekingese, Peke'
311 | p155
312 | aS'Shih-Tzu'
313 | p156
314 | aS'Blenheim spaniel'
315 | p157
316 | aS'papillon'
317 | p158
318 | aS'toy terrier'
319 | p159
320 | aS'Rhodesian ridgeback'
321 | p160
322 | aS'Afghan hound, Afghan'
323 | p161
324 | aS'basset, basset hound'
325 | p162
326 | aS'beagle'
327 | p163
328 | aS'bloodhound, sleuthhound'
329 | p164
330 | aS'bluetick'
331 | p165
332 | aS'black-and-tan coonhound'
333 | p166
334 | aS'Walker hound, Walker foxhound'
335 | p167
336 | aS'English foxhound'
337 | p168
338 | aS'redbone'
339 | p169
340 | aS'borzoi, Russian wolfhound'
341 | p170
342 | aS'Irish wolfhound'
343 | p171
344 | aS'Italian greyhound'
345 | p172
346 | aS'whippet'
347 | p173
348 | aS'Ibizan hound, Ibizan Podenco'
349 | p174
350 | aS'Norwegian elkhound, elkhound'
351 | p175
352 | aS'otterhound, otter hound'
353 | p176
354 | aS'Saluki, gazelle hound'
355 | p177
356 | aS'Scottish deerhound, deerhound'
357 | p178
358 | aS'Weimaraner'
359 | p179
360 | aS'Staffordshire bullterrier, Staffordshire bull terrier'
361 | p180
362 | aS'American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier'
363 | p181
364 | aS'Bedlington terrier'
365 | p182
366 | aS'Border terrier'
367 | p183
368 | aS'Kerry blue terrier'
369 | p184
370 | aS'Irish terrier'
371 | p185
372 | aS'Norfolk terrier'
373 | p186
374 | aS'Norwich terrier'
375 | p187
376 | aS'Yorkshire terrier'
377 | p188
378 | aS'wire-haired fox terrier'
379 | p189
380 | aS'Lakeland terrier'
381 | p190
382 | aS'Sealyham terrier, Sealyham'
383 | p191
384 | aS'Airedale, Airedale terrier'
385 | p192
386 | aS'cairn, cairn terrier'
387 | p193
388 | aS'Australian terrier'
389 | p194
390 | aS'Dandie Dinmont, Dandie Dinmont terrier'
391 | p195
392 | aS'Boston bull, Boston terrier'
393 | p196
394 | aS'miniature schnauzer'
395 | p197
396 | aS'giant schnauzer'
397 | p198
398 | aS'standard schnauzer'
399 | p199
400 | aS'Scotch terrier, Scottish terrier, Scottie'
401 | p200
402 | aS'Tibetan terrier, chrysanthemum dog'
403 | p201
404 | aS'silky terrier, Sydney silky'
405 | p202
406 | aS'soft-coated wheaten terrier'
407 | p203
408 | aS'West Highland white terrier'
409 | p204
410 | aS'Lhasa, Lhasa apso'
411 | p205
412 | aS'flat-coated retriever'
413 | p206
414 | aS'curly-coated retriever'
415 | p207
416 | aS'golden retriever'
417 | p208
418 | aS'Labrador retriever'
419 | p209
420 | aS'Chesapeake Bay retriever'
421 | p210
422 | aS'German short-haired pointer'
423 | p211
424 | aS'vizsla, Hungarian pointer'
425 | p212
426 | aS'English setter'
427 | p213
428 | aS'Irish setter, red setter'
429 | p214
430 | aS'Gordon setter'
431 | p215
432 | aS'Brittany spaniel'
433 | p216
434 | aS'clumber, clumber spaniel'
435 | p217
436 | aS'English springer, English springer spaniel'
437 | p218
438 | aS'Welsh springer spaniel'
439 | p219
440 | aS'cocker spaniel, English cocker spaniel, cocker'
441 | p220
442 | aS'Sussex spaniel'
443 | p221
444 | aS'Irish water spaniel'
445 | p222
446 | aS'kuvasz'
447 | p223
448 | aS'schipperke'
449 | p224
450 | aS'groenendael'
451 | p225
452 | aS'malinois'
453 | p226
454 | aS'briard'
455 | p227
456 | aS'kelpie'
457 | p228
458 | aS'komondor'
459 | p229
460 | aS'Old English sheepdog, bobtail'
461 | p230
462 | aS'Shetland sheepdog, Shetland sheep dog, Shetland'
463 | p231
464 | aS'collie'
465 | p232
466 | aS'Border collie'
467 | p233
468 | aS'Bouvier des Flandres, Bouviers des Flandres'
469 | p234
470 | aS'Rottweiler'
471 | p235
472 | aS'German shepherd, German shepherd dog, German police dog, alsatian'
473 | p236
474 | aS'Doberman, Doberman pinscher'
475 | p237
476 | aS'miniature pinscher'
477 | p238
478 | aS'Greater Swiss Mountain dog'
479 | p239
480 | aS'Bernese mountain dog'
481 | p240
482 | aS'Appenzeller'
483 | p241
484 | aS'EntleBucher'
485 | p242
486 | aS'boxer'
487 | p243
488 | aS'bull mastiff'
489 | p244
490 | aS'Tibetan mastiff'
491 | p245
492 | aS'French bulldog'
493 | p246
494 | aS'Great Dane'
495 | p247
496 | aS'Saint Bernard, St Bernard'
497 | p248
498 | aS'Eskimo dog, husky'
499 | p249
500 | aS'malamute, malemute, Alaskan malamute'
501 | p250
502 | aS'Siberian husky'
503 | p251
504 | aS'dalmatian, coach dog, carriage dog'
505 | p252
506 | aS'affenpinscher, monkey pinscher, monkey dog'
507 | p253
508 | aS'basenji'
509 | p254
510 | aS'pug, pug-dog'
511 | p255
512 | aS'Leonberg'
513 | p256
514 | aS'Newfoundland, Newfoundland dog'
515 | p257
516 | aS'Great Pyrenees'
517 | p258
518 | aS'Samoyed, Samoyede'
519 | p259
520 | aS'Pomeranian'
521 | p260
522 | aS'chow, chow chow'
523 | p261
524 | aS'keeshond'
525 | p262
526 | aS'Brabancon griffon'
527 | p263
528 | aS'Pembroke, Pembroke Welsh corgi'
529 | p264
530 | aS'Cardigan, Cardigan Welsh corgi'
531 | p265
532 | aS'toy poodle'
533 | p266
534 | aS'miniature poodle'
535 | p267
536 | aS'standard poodle'
537 | p268
538 | aS'Mexican hairless'
539 | p269
540 | aS'timber wolf, grey wolf, gray wolf, Canis lupus'
541 | p270
542 | aS'white wolf, Arctic wolf, Canis lupus tundrarum'
543 | p271
544 | aS'red wolf, maned wolf, Canis rufus, Canis niger'
545 | p272
546 | aS'coyote, prairie wolf, brush wolf, Canis latrans'
547 | p273
548 | aS'dingo, warrigal, warragal, Canis dingo'
549 | p274
550 | aS'dhole, Cuon alpinus'
551 | p275
552 | aS'African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus'
553 | p276
554 | aS'hyena, hyaena'
555 | p277
556 | aS'red fox, Vulpes vulpes'
557 | p278
558 | aS'kit fox, Vulpes macrotis'
559 | p279
560 | aS'Arctic fox, white fox, Alopex lagopus'
561 | p280
562 | aS'grey fox, gray fox, Urocyon cinereoargenteus'
563 | p281
564 | aS'tabby, tabby cat'
565 | p282
566 | aS'tiger cat'
567 | p283
568 | aS'Persian cat'
569 | p284
570 | aS'Siamese cat, Siamese'
571 | p285
572 | aS'Egyptian cat'
573 | p286
574 | aS'cougar, puma, catamount, mountain lion, painter, panther, Felis concolor'
575 | p287
576 | aS'lynx, catamount'
577 | p288
578 | aS'leopard, Panthera pardus'
579 | p289
580 | aS'snow leopard, ounce, Panthera uncia'
581 | p290
582 | aS'jaguar, panther, Panthera onca, Felis onca'
583 | p291
584 | aS'lion, king of beasts, Panthera leo'
585 | p292
586 | aS'tiger, Panthera tigris'
587 | p293
588 | aS'cheetah, chetah, Acinonyx jubatus'
589 | p294
590 | aS'brown bear, bruin, Ursus arctos'
591 | p295
592 | aS'American black bear, black bear, Ursus americanus, Euarctos americanus'
593 | p296
594 | aS'ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus'
595 | p297
596 | aS'sloth bear, Melursus ursinus, Ursus ursinus'
597 | p298
598 | aS'mongoose'
599 | p299
600 | aS'meerkat, mierkat'
601 | p300
602 | aS'tiger beetle'
603 | p301
604 | aS'ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle'
605 | p302
606 | aS'ground beetle, carabid beetle'
607 | p303
608 | aS'long-horned beetle, longicorn, longicorn beetle'
609 | p304
610 | aS'leaf beetle, chrysomelid'
611 | p305
612 | aS'dung beetle'
613 | p306
614 | aS'rhinoceros beetle'
615 | p307
616 | aS'weevil'
617 | p308
618 | aS'fly'
619 | p309
620 | aS'bee'
621 | p310
622 | aS'ant, emmet, pismire'
623 | p311
624 | aS'grasshopper, hopper'
625 | p312
626 | aS'cricket'
627 | p313
628 | aS'walking stick, walkingstick, stick insect'
629 | p314
630 | aS'cockroach, roach'
631 | p315
632 | aS'mantis, mantid'
633 | p316
634 | aS'cicada, cicala'
635 | p317
636 | aS'leafhopper'
637 | p318
638 | aS'lacewing, lacewing fly'
639 | p319
640 | aS"dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk"
641 | p320
642 | aS'damselfly'
643 | p321
644 | aS'admiral'
645 | p322
646 | aS'ringlet, ringlet butterfly'
647 | p323
648 | aS'monarch, monarch butterfly, milkweed butterfly, Danaus plexippus'
649 | p324
650 | aS'cabbage butterfly'
651 | p325
652 | aS'sulphur butterfly, sulfur butterfly'
653 | p326
654 | aS'lycaenid, lycaenid butterfly'
655 | p327
656 | aS'starfish, sea star'
657 | p328
658 | aS'sea urchin'
659 | p329
660 | aS'sea cucumber, holothurian'
661 | p330
662 | aS'wood rabbit, cottontail, cottontail rabbit'
663 | p331
664 | aS'hare'
665 | p332
666 | aS'Angora, Angora rabbit'
667 | p333
668 | aS'hamster'
669 | p334
670 | aS'porcupine, hedgehog'
671 | p335
672 | aS'fox squirrel, eastern fox squirrel, Sciurus niger'
673 | p336
674 | aS'marmot'
675 | p337
676 | aS'beaver'
677 | p338
678 | aS'guinea pig, Cavia cobaya'
679 | p339
680 | aS'sorrel'
681 | p340
682 | aS'zebra'
683 | p341
684 | aS'hog, pig, grunter, squealer, Sus scrofa'
685 | p342
686 | aS'wild boar, boar, Sus scrofa'
687 | p343
688 | aS'warthog'
689 | p344
690 | aS'hippopotamus, hippo, river horse, Hippopotamus amphibius'
691 | p345
692 | aS'ox'
693 | p346
694 | aS'water buffalo, water ox, Asiatic buffalo, Bubalus bubalis'
695 | p347
696 | aS'bison'
697 | p348
698 | aS'ram, tup'
699 | p349
700 | aS'bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis'
701 | p350
702 | aS'ibex, Capra ibex'
703 | p351
704 | aS'hartebeest'
705 | p352
706 | aS'impala, Aepyceros melampus'
707 | p353
708 | aS'gazelle'
709 | p354
710 | aS'Arabian camel, dromedary, Camelus dromedarius'
711 | p355
712 | aS'llama'
713 | p356
714 | aS'weasel'
715 | p357
716 | aS'mink'
717 | p358
718 | aS'polecat, fitch, foulmart, foumart, Mustela putorius'
719 | p359
720 | aS'black-footed ferret, ferret, Mustela nigripes'
721 | p360
722 | aS'otter'
723 | p361
724 | aS'skunk, polecat, wood pussy'
725 | p362
726 | aS'badger'
727 | p363
728 | aS'armadillo'
729 | p364
730 | aS'three-toed sloth, ai, Bradypus tridactylus'
731 | p365
732 | aS'orangutan, orang, orangutang, Pongo pygmaeus'
733 | p366
734 | aS'gorilla, Gorilla gorilla'
735 | p367
736 | aS'chimpanzee, chimp, Pan troglodytes'
737 | p368
738 | aS'gibbon, Hylobates lar'
739 | p369
740 | aS'siamang, Hylobates syndactylus, Symphalangus syndactylus'
741 | p370
742 | aS'guenon, guenon monkey'
743 | p371
744 | aS'patas, hussar monkey, Erythrocebus patas'
745 | p372
746 | aS'baboon'
747 | p373
748 | aS'macaque'
749 | p374
750 | aS'langur'
751 | p375
752 | aS'colobus, colobus monkey'
753 | p376
754 | aS'proboscis monkey, Nasalis larvatus'
755 | p377
756 | aS'marmoset'
757 | p378
758 | aS'capuchin, ringtail, Cebus capucinus'
759 | p379
760 | aS'howler monkey, howler'
761 | p380
762 | aS'titi, titi monkey'
763 | p381
764 | aS'spider monkey, Ateles geoffroyi'
765 | p382
766 | aS'squirrel monkey, Saimiri sciureus'
767 | p383
768 | aS'Madagascar cat, ring-tailed lemur, Lemur catta'
769 | p384
770 | aS'indri, indris, Indri indri, Indri brevicaudatus'
771 | p385
772 | aS'Indian elephant, Elephas maximus'
773 | p386
774 | aS'African elephant, Loxodonta africana'
775 | p387
776 | aS'lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens'
777 | p388
778 | aS'giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca'
779 | p389
780 | aS'barracouta, snoek'
781 | p390
782 | aS'eel'
783 | p391
784 | aS'coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch'
785 | p392
786 | aS'rock beauty, Holocanthus tricolor'
787 | p393
788 | aS'anemone fish'
789 | p394
790 | aS'sturgeon'
791 | p395
792 | aS'gar, garfish, garpike, billfish, Lepisosteus osseus'
793 | p396
794 | aS'lionfish'
795 | p397
796 | aS'puffer, pufferfish, blowfish, globefish'
797 | p398
798 | aS'abacus'
799 | p399
800 | aS'abaya'
801 | p400
802 | aS"academic gown, academic robe, judge's robe"
803 | p401
804 | aS'accordion, piano accordion, squeeze box'
805 | p402
806 | aS'acoustic guitar'
807 | p403
808 | aS'aircraft carrier, carrier, flattop, attack aircraft carrier'
809 | p404
810 | aS'airliner'
811 | p405
812 | aS'airship, dirigible'
813 | p406
814 | aS'altar'
815 | p407
816 | aS'ambulance'
817 | p408
818 | aS'amphibian, amphibious vehicle'
819 | p409
820 | aS'analog clock'
821 | p410
822 | aS'apiary, bee house'
823 | p411
824 | aS'apron'
825 | p412
826 | aS'ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin'
827 | p413
828 | aS'assault rifle, assault gun'
829 | p414
830 | aS'backpack, back pack, knapsack, packsack, rucksack, haversack'
831 | p415
832 | aS'bakery, bakeshop, bakehouse'
833 | p416
834 | aS'balance beam, beam'
835 | p417
836 | aS'balloon'
837 | p418
838 | aS'ballpoint, ballpoint pen, ballpen, Biro'
839 | p419
840 | aS'Band Aid'
841 | p420
842 | aS'banjo'
843 | p421
844 | aS'bannister, banister, balustrade, balusters, handrail'
845 | p422
846 | aS'barbell'
847 | p423
848 | aS'barber chair'
849 | p424
850 | aS'barbershop'
851 | p425
852 | aS'barn'
853 | p426
854 | aS'barometer'
855 | p427
856 | aS'barrel, cask'
857 | p428
858 | aS'barrow, garden cart, lawn cart, wheelbarrow'
859 | p429
860 | aS'baseball'
861 | p430
862 | aS'basketball'
863 | p431
864 | aS'bassinet'
865 | p432
866 | aS'bassoon'
867 | p433
868 | aS'bathing cap, swimming cap'
869 | p434
870 | aS'bath towel'
871 | p435
872 | aS'bathtub, bathing tub, bath, tub'
873 | p436
874 | aS'beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon'
875 | p437
876 | aS'beacon, lighthouse, beacon light, pharos'
877 | p438
878 | aS'beaker'
879 | p439
880 | aS'bearskin, busby, shako'
881 | p440
882 | aS'beer bottle'
883 | p441
884 | aS'beer glass'
885 | p442
886 | aS'bell cote, bell cot'
887 | p443
888 | aS'bib'
889 | p444
890 | aS'bicycle-built-for-two, tandem bicycle, tandem'
891 | p445
892 | aS'bikini, two-piece'
893 | p446
894 | aS'binder, ring-binder'
895 | p447
896 | aS'binoculars, field glasses, opera glasses'
897 | p448
898 | aS'birdhouse'
899 | p449
900 | aS'boathouse'
901 | p450
902 | aS'bobsled, bobsleigh, bob'
903 | p451
904 | aS'bolo tie, bolo, bola tie, bola'
905 | p452
906 | aS'bonnet, poke bonnet'
907 | p453
908 | aS'bookcase'
909 | p454
910 | aS'bookshop, bookstore, bookstall'
911 | p455
912 | aS'bottlecap'
913 | p456
914 | aS'bow'
915 | p457
916 | aS'bow tie, bow-tie, bowtie'
917 | p458
918 | aS'brass, memorial tablet, plaque'
919 | p459
920 | aS'brassiere, bra, bandeau'
921 | p460
922 | aS'breakwater, groin, groyne, mole, bulwark, seawall, jetty'
923 | p461
924 | aS'breastplate, aegis, egis'
925 | p462
926 | aS'broom'
927 | p463
928 | aS'bucket, pail'
929 | p464
930 | aS'buckle'
931 | p465
932 | aS'bulletproof vest'
933 | p466
934 | aS'bullet train, bullet'
935 | p467
936 | aS'butcher shop, meat market'
937 | p468
938 | aS'cab, hack, taxi, taxicab'
939 | p469
940 | aS'caldron, cauldron'
941 | p470
942 | aS'candle, taper, wax light'
943 | p471
944 | aS'cannon'
945 | p472
946 | aS'canoe'
947 | p473
948 | aS'can opener, tin opener'
949 | p474
950 | aS'cardigan'
951 | p475
952 | aS'car mirror'
953 | p476
954 | aS'carousel, carrousel, merry-go-round, roundabout, whirligig'
955 | p477
956 | aS"carpenter's kit, tool kit"
957 | p478
958 | aS'carton'
959 | p479
960 | aS'car wheel'
961 | p480
962 | aS'cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM'
963 | p481
964 | aS'cassette'
965 | p482
966 | aS'cassette player'
967 | p483
968 | aS'castle'
969 | p484
970 | aS'catamaran'
971 | p485
972 | aS'CD player'
973 | p486
974 | aS'cello, violoncello'
975 | p487
976 | aS'cellular telephone, cellular phone, cellphone, cell, mobile phone'
977 | p488
978 | aS'chain'
979 | p489
980 | aS'chainlink fence'
981 | p490
982 | aS'chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour'
983 | p491
984 | aS'chain saw, chainsaw'
985 | p492
986 | aS'chest'
987 | p493
988 | aS'chiffonier, commode'
989 | p494
990 | aS'chime, bell, gong'
991 | p495
992 | aS'china cabinet, china closet'
993 | p496
994 | aS'Christmas stocking'
995 | p497
996 | aS'church, church building'
997 | p498
998 | aS'cinema, movie theater, movie theatre, movie house, picture palace'
999 | p499
1000 | aS'cleaver, meat cleaver, chopper'
1001 | p500
1002 | aS'cliff dwelling'
1003 | p501
1004 | aS'cloak'
1005 | p502
1006 | aS'clog, geta, patten, sabot'
1007 | p503
1008 | aS'cocktail shaker'
1009 | p504
1010 | aS'coffee mug'
1011 | p505
1012 | aS'coffeepot'
1013 | p506
1014 | aS'coil, spiral, volute, whorl, helix'
1015 | p507
1016 | aS'combination lock'
1017 | p508
1018 | aS'computer keyboard, keypad'
1019 | p509
1020 | aS'confectionery, confectionary, candy store'
1021 | p510
1022 | aS'container ship, containership, container vessel'
1023 | p511
1024 | aS'convertible'
1025 | p512
1026 | aS'corkscrew, bottle screw'
1027 | p513
1028 | aS'cornet, horn, trumpet, trump'
1029 | p514
1030 | aS'cowboy boot'
1031 | p515
1032 | aS'cowboy hat, ten-gallon hat'
1033 | p516
1034 | aS'cradle'
1035 | p517
1036 | aS'crane'
1037 | p518
1038 | aS'crash helmet'
1039 | p519
1040 | aS'crate'
1041 | p520
1042 | aS'crib, cot'
1043 | p521
1044 | aS'Crock Pot'
1045 | p522
1046 | aS'croquet ball'
1047 | p523
1048 | aS'crutch'
1049 | p524
1050 | aS'cuirass'
1051 | p525
1052 | aS'dam, dike, dyke'
1053 | p526
1054 | aS'desk'
1055 | p527
1056 | aS'desktop computer'
1057 | p528
1058 | aS'dial telephone, dial phone'
1059 | p529
1060 | aS'diaper, nappy, napkin'
1061 | p530
1062 | aS'digital clock'
1063 | p531
1064 | aS'digital watch'
1065 | p532
1066 | aS'dining table, board'
1067 | p533
1068 | aS'dishrag, dishcloth'
1069 | p534
1070 | aS'dishwasher, dish washer, dishwashing machine'
1071 | p535
1072 | aS'disk brake, disc brake'
1073 | p536
1074 | aS'dock, dockage, docking facility'
1075 | p537
1076 | aS'dogsled, dog sled, dog sleigh'
1077 | p538
1078 | aS'dome'
1079 | p539
1080 | aS'doormat, welcome mat'
1081 | p540
1082 | aS'drilling platform, offshore rig'
1083 | p541
1084 | aS'drum, membranophone, tympan'
1085 | p542
1086 | aS'drumstick'
1087 | p543
1088 | aS'dumbbell'
1089 | p544
1090 | aS'Dutch oven'
1091 | p545
1092 | aS'electric fan, blower'
1093 | p546
1094 | aS'electric guitar'
1095 | p547
1096 | aS'electric locomotive'
1097 | p548
1098 | aS'entertainment center'
1099 | p549
1100 | aS'envelope'
1101 | p550
1102 | aS'espresso maker'
1103 | p551
1104 | aS'face powder'
1105 | p552
1106 | aS'feather boa, boa'
1107 | p553
1108 | aS'file, file cabinet, filing cabinet'
1109 | p554
1110 | aS'fireboat'
1111 | p555
1112 | aS'fire engine, fire truck'
1113 | p556
1114 | aS'fire screen, fireguard'
1115 | p557
1116 | aS'flagpole, flagstaff'
1117 | p558
1118 | aS'flute, transverse flute'
1119 | p559
1120 | aS'folding chair'
1121 | p560
1122 | aS'football helmet'
1123 | p561
1124 | aS'forklift'
1125 | p562
1126 | aS'fountain'
1127 | p563
1128 | aS'fountain pen'
1129 | p564
1130 | aS'four-poster'
1131 | p565
1132 | aS'freight car'
1133 | p566
1134 | aS'French horn, horn'
1135 | p567
1136 | aS'frying pan, frypan, skillet'
1137 | p568
1138 | aS'fur coat'
1139 | p569
1140 | aS'garbage truck, dustcart'
1141 | p570
1142 | aS'gasmask, respirator, gas helmet'
1143 | p571
1144 | aS'gas pump, gasoline pump, petrol pump, island dispenser'
1145 | p572
1146 | aS'goblet'
1147 | p573
1148 | aS'go-kart'
1149 | p574
1150 | aS'golf ball'
1151 | p575
1152 | aS'golfcart, golf cart'
1153 | p576
1154 | aS'gondola'
1155 | p577
1156 | aS'gong, tam-tam'
1157 | p578
1158 | aS'gown'
1159 | p579
1160 | aS'grand piano, grand'
1161 | p580
1162 | aS'greenhouse, nursery, glasshouse'
1163 | p581
1164 | aS'grille, radiator grille'
1165 | p582
1166 | aS'grocery store, grocery, food market, market'
1167 | p583
1168 | aS'guillotine'
1169 | p584
1170 | aS'hair slide'
1171 | p585
1172 | aS'hair spray'
1173 | p586
1174 | aS'half track'
1175 | p587
1176 | aS'hammer'
1177 | p588
1178 | aS'hamper'
1179 | p589
1180 | aS'hand blower, blow dryer, blow drier, hair dryer, hair drier'
1181 | p590
1182 | aS'hand-held computer, hand-held microcomputer'
1183 | p591
1184 | aS'handkerchief, hankie, hanky, hankey'
1185 | p592
1186 | aS'hard disc, hard disk, fixed disk'
1187 | p593
1188 | aS'harmonica, mouth organ, harp, mouth harp'
1189 | p594
1190 | aS'harp'
1191 | p595
1192 | aS'harvester, reaper'
1193 | p596
1194 | aS'hatchet'
1195 | p597
1196 | aS'holster'
1197 | p598
1198 | aS'home theater, home theatre'
1199 | p599
1200 | aS'honeycomb'
1201 | p600
1202 | aS'hook, claw'
1203 | p601
1204 | aS'hoopskirt, crinoline'
1205 | p602
1206 | aS'horizontal bar, high bar'
1207 | p603
1208 | aS'horse cart, horse-cart'
1209 | p604
1210 | aS'hourglass'
1211 | p605
1212 | aS'iPod'
1213 | p606
1214 | aS'iron, smoothing iron'
1215 | p607
1216 | aS"jack-o'-lantern"
1217 | p608
1218 | aS'jean, blue jean, denim'
1219 | p609
1220 | aS'jeep, landrover'
1221 | p610
1222 | aS'jersey, T-shirt, tee shirt'
1223 | p611
1224 | aS'jigsaw puzzle'
1225 | p612
1226 | aS'jinrikisha, ricksha, rickshaw'
1227 | p613
1228 | aS'joystick'
1229 | p614
1230 | aS'kimono'
1231 | p615
1232 | aS'knee pad'
1233 | p616
1234 | aS'knot'
1235 | p617
1236 | aS'lab coat, laboratory coat'
1237 | p618
1238 | aS'ladle'
1239 | p619
1240 | aS'lampshade, lamp shade'
1241 | p620
1242 | aS'laptop, laptop computer'
1243 | p621
1244 | aS'lawn mower, mower'
1245 | p622
1246 | aS'lens cap, lens cover'
1247 | p623
1248 | aS'letter opener, paper knife, paperknife'
1249 | p624
1250 | aS'library'
1251 | p625
1252 | aS'lifeboat'
1253 | p626
1254 | aS'lighter, light, igniter, ignitor'
1255 | p627
1256 | aS'limousine, limo'
1257 | p628
1258 | aS'liner, ocean liner'
1259 | p629
1260 | aS'lipstick, lip rouge'
1261 | p630
1262 | aS'Loafer'
1263 | p631
1264 | aS'lotion'
1265 | p632
1266 | aS'loudspeaker, speaker, speaker unit, loudspeaker system, speaker system'
1267 | p633
1268 | aS"loupe, jeweler's loupe"
1269 | p634
1270 | aS'lumbermill, sawmill'
1271 | p635
1272 | aS'magnetic compass'
1273 | p636
1274 | aS'mailbag, postbag'
1275 | p637
1276 | aS'mailbox, letter box'
1277 | p638
1278 | aS'maillot'
1279 | p639
1280 | aS'maillot, tank suit'
1281 | p640
1282 | aS'manhole cover'
1283 | p641
1284 | aS'maraca'
1285 | p642
1286 | aS'marimba, xylophone'
1287 | p643
1288 | aS'mask'
1289 | p644
1290 | aS'matchstick'
1291 | p645
1292 | aS'maypole'
1293 | p646
1294 | aS'maze, labyrinth'
1295 | p647
1296 | aS'measuring cup'
1297 | p648
1298 | aS'medicine chest, medicine cabinet'
1299 | p649
1300 | aS'megalith, megalithic structure'
1301 | p650
1302 | aS'microphone, mike'
1303 | p651
1304 | aS'microwave, microwave oven'
1305 | p652
1306 | aS'military uniform'
1307 | p653
1308 | aS'milk can'
1309 | p654
1310 | aS'minibus'
1311 | p655
1312 | aS'miniskirt, mini'
1313 | p656
1314 | aS'minivan'
1315 | p657
1316 | aS'missile'
1317 | p658
1318 | aS'mitten'
1319 | p659
1320 | aS'mixing bowl'
1321 | p660
1322 | aS'mobile home, manufactured home'
1323 | p661
1324 | aS'Model T'
1325 | p662
1326 | aS'modem'
1327 | p663
1328 | aS'monastery'
1329 | p664
1330 | aS'monitor'
1331 | p665
1332 | aS'moped'
1333 | p666
1334 | aS'mortar'
1335 | p667
1336 | aS'mortarboard'
1337 | p668
1338 | aS'mosque'
1339 | p669
1340 | aS'mosquito net'
1341 | p670
1342 | aS'motor scooter, scooter'
1343 | p671
1344 | aS'mountain bike, all-terrain bike, off-roader'
1345 | p672
1346 | aS'mountain tent'
1347 | p673
1348 | aS'mouse, computer mouse'
1349 | p674
1350 | aS'mousetrap'
1351 | p675
1352 | aS'moving van'
1353 | p676
1354 | aS'muzzle'
1355 | p677
1356 | aS'nail'
1357 | p678
1358 | aS'neck brace'
1359 | p679
1360 | aS'necklace'
1361 | p680
1362 | aS'nipple'
1363 | p681
1364 | aS'notebook, notebook computer'
1365 | p682
1366 | aS'obelisk'
1367 | p683
1368 | aS'oboe, hautboy, hautbois'
1369 | p684
1370 | aS'ocarina, sweet potato'
1371 | p685
1372 | aS'odometer, hodometer, mileometer, milometer'
1373 | p686
1374 | aS'oil filter'
1375 | p687
1376 | aS'organ, pipe organ'
1377 | p688
1378 | aS'oscilloscope, scope, cathode-ray oscilloscope, CRO'
1379 | p689
1380 | aS'overskirt'
1381 | p690
1382 | aS'oxcart'
1383 | p691
1384 | aS'oxygen mask'
1385 | p692
1386 | aS'packet'
1387 | p693
1388 | aS'paddle, boat paddle'
1389 | p694
1390 | aS'paddlewheel, paddle wheel'
1391 | p695
1392 | aS'padlock'
1393 | p696
1394 | aS'paintbrush'
1395 | p697
1396 | aS"pajama, pyjama, pj's, jammies"
1397 | p698
1398 | aS'palace'
1399 | p699
1400 | aS'panpipe, pandean pipe, syrinx'
1401 | p700
1402 | aS'paper towel'
1403 | p701
1404 | aS'parachute, chute'
1405 | p702
1406 | aS'parallel bars, bars'
1407 | p703
1408 | aS'park bench'
1409 | p704
1410 | aS'parking meter'
1411 | p705
1412 | aS'passenger car, coach, carriage'
1413 | p706
1414 | aS'patio, terrace'
1415 | p707
1416 | aS'pay-phone, pay-station'
1417 | p708
1418 | aS'pedestal, plinth, footstall'
1419 | p709
1420 | aS'pencil box, pencil case'
1421 | p710
1422 | aS'pencil sharpener'
1423 | p711
1424 | aS'perfume, essence'
1425 | p712
1426 | aS'Petri dish'
1427 | p713
1428 | aS'photocopier'
1429 | p714
1430 | aS'pick, plectrum, plectron'
1431 | p715
1432 | aS'pickelhaube'
1433 | p716
1434 | aS'picket fence, paling'
1435 | p717
1436 | aS'pickup, pickup truck'
1437 | p718
1438 | aS'pier'
1439 | p719
1440 | aS'piggy bank, penny bank'
1441 | p720
1442 | aS'pill bottle'
1443 | p721
1444 | aS'pillow'
1445 | p722
1446 | aS'ping-pong ball'
1447 | p723
1448 | aS'pinwheel'
1449 | p724
1450 | aS'pirate, pirate ship'
1451 | p725
1452 | aS'pitcher, ewer'
1453 | p726
1454 | aS"plane, carpenter's plane, woodworking plane"
1455 | p727
1456 | aS'planetarium'
1457 | p728
1458 | aS'plastic bag'
1459 | p729
1460 | aS'plate rack'
1461 | p730
1462 | aS'plow, plough'
1463 | p731
1464 | aS"plunger, plumber's helper"
1465 | p732
1466 | aS'Polaroid camera, Polaroid Land camera'
1467 | p733
1468 | aS'pole'
1469 | p734
1470 | aS'police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria'
1471 | p735
1472 | aS'poncho'
1473 | p736
1474 | aS'pool table, billiard table, snooker table'
1475 | p737
1476 | aS'pop bottle, soda bottle'
1477 | p738
1478 | aS'pot, flowerpot'
1479 | p739
1480 | aS"potter's wheel"
1481 | p740
1482 | aS'power drill'
1483 | p741
1484 | aS'prayer rug, prayer mat'
1485 | p742
1486 | aS'printer'
1487 | p743
1488 | aS'prison, prison house'
1489 | p744
1490 | aS'projectile, missile'
1491 | p745
1492 | aS'projector'
1493 | p746
1494 | aS'puck, hockey puck'
1495 | p747
1496 | aS'punching bag, punch bag, punching ball, punchball'
1497 | p748
1498 | aS'purse'
1499 | p749
1500 | aS'quill, quill pen'
1501 | p750
1502 | aS'quilt, comforter, comfort, puff'
1503 | p751
1504 | aS'racer, race car, racing car'
1505 | p752
1506 | aS'racket, racquet'
1507 | p753
1508 | aS'radiator'
1509 | p754
1510 | aS'radio, wireless'
1511 | p755
1512 | aS'radio telescope, radio reflector'
1513 | p756
1514 | aS'rain barrel'
1515 | p757
1516 | aS'recreational vehicle, RV, R.V.'
1517 | p758
1518 | aS'reel'
1519 | p759
1520 | aS'reflex camera'
1521 | p760
1522 | aS'refrigerator, icebox'
1523 | p761
1524 | aS'remote control, remote'
1525 | p762
1526 | aS'restaurant, eating house, eating place, eatery'
1527 | p763
1528 | aS'revolver, six-gun, six-shooter'
1529 | p764
1530 | aS'rifle'
1531 | p765
1532 | aS'rocking chair, rocker'
1533 | p766
1534 | aS'rotisserie'
1535 | p767
1536 | aS'rubber eraser, rubber, pencil eraser'
1537 | p768
1538 | aS'rugby ball'
1539 | p769
1540 | aS'rule, ruler'
1541 | p770
1542 | aS'running shoe'
1543 | p771
1544 | aS'safe'
1545 | p772
1546 | aS'safety pin'
1547 | p773
1548 | aS'saltshaker, salt shaker'
1549 | p774
1550 | aS'sandal'
1551 | p775
1552 | aS'sarong'
1553 | p776
1554 | aS'sax, saxophone'
1555 | p777
1556 | aS'scabbard'
1557 | p778
1558 | aS'scale, weighing machine'
1559 | p779
1560 | aS'school bus'
1561 | p780
1562 | aS'schooner'
1563 | p781
1564 | aS'scoreboard'
1565 | p782
1566 | aS'screen, CRT screen'
1567 | p783
1568 | aS'screw'
1569 | p784
1570 | aS'screwdriver'
1571 | p785
1572 | aS'seat belt, seatbelt'
1573 | p786
1574 | aS'sewing machine'
1575 | p787
1576 | aS'shield, buckler'
1577 | p788
1578 | aS'shoe shop, shoe-shop, shoe store'
1579 | p789
1580 | aS'shoji'
1581 | p790
1582 | aS'shopping basket'
1583 | p791
1584 | aS'shopping cart'
1585 | p792
1586 | aS'shovel'
1587 | p793
1588 | aS'shower cap'
1589 | p794
1590 | aS'shower curtain'
1591 | p795
1592 | aS'ski'
1593 | p796
1594 | aS'ski mask'
1595 | p797
1596 | aS'sleeping bag'
1597 | p798
1598 | aS'slide rule, slipstick'
1599 | p799
1600 | aS'sliding door'
1601 | p800
1602 | aS'slot, one-armed bandit'
1603 | p801
1604 | aS'snorkel'
1605 | p802
1606 | aS'snowmobile'
1607 | p803
1608 | aS'snowplow, snowplough'
1609 | p804
1610 | aS'soap dispenser'
1611 | p805
1612 | aS'soccer ball'
1613 | p806
1614 | aS'sock'
1615 | p807
1616 | aS'solar dish, solar collector, solar furnace'
1617 | p808
1618 | aS'sombrero'
1619 | p809
1620 | aS'soup bowl'
1621 | p810
1622 | aS'space bar'
1623 | p811
1624 | aS'space heater'
1625 | p812
1626 | aS'space shuttle'
1627 | p813
1628 | aS'spatula'
1629 | p814
1630 | aS'speedboat'
1631 | p815
1632 | aS"spider web, spider's web"
1633 | p816
1634 | aS'spindle'
1635 | p817
1636 | aS'sports car, sport car'
1637 | p818
1638 | aS'spotlight, spot'
1639 | p819
1640 | aS'stage'
1641 | p820
1642 | aS'steam locomotive'
1643 | p821
1644 | aS'steel arch bridge'
1645 | p822
1646 | aS'steel drum'
1647 | p823
1648 | aS'stethoscope'
1649 | p824
1650 | aS'stole'
1651 | p825
1652 | aS'stone wall'
1653 | p826
1654 | aS'stopwatch, stop watch'
1655 | p827
1656 | aS'stove'
1657 | p828
1658 | aS'strainer'
1659 | p829
1660 | aS'streetcar, tram, tramcar, trolley, trolley car'
1661 | p830
1662 | aS'stretcher'
1663 | p831
1664 | aS'studio couch, day bed'
1665 | p832
1666 | aS'stupa, tope'
1667 | p833
1668 | aS'submarine, pigboat, sub, U-boat'
1669 | p834
1670 | aS'suit, suit of clothes'
1671 | p835
1672 | aS'sundial'
1673 | p836
1674 | aS'sunglass'
1675 | p837
1676 | aS'sunglasses, dark glasses, shades'
1677 | p838
1678 | aS'sunscreen, sunblock, sun blocker'
1679 | p839
1680 | aS'suspension bridge'
1681 | p840
1682 | aS'swab, swob, mop'
1683 | p841
1684 | aS'sweatshirt'
1685 | p842
1686 | aS'swimming trunks, bathing trunks'
1687 | p843
1688 | aS'swing'
1689 | p844
1690 | aS'switch, electric switch, electrical switch'
1691 | p845
1692 | aS'syringe'
1693 | p846
1694 | aS'table lamp'
1695 | p847
1696 | aS'tank, army tank, armored combat vehicle, armoured combat vehicle'
1697 | p848
1698 | aS'tape player'
1699 | p849
1700 | aS'teapot'
1701 | p850
1702 | aS'teddy, teddy bear'
1703 | p851
1704 | aS'television, television system'
1705 | p852
1706 | aS'tennis ball'
1707 | p853
1708 | aS'thatch, thatched roof'
1709 | p854
1710 | aS'theater curtain, theatre curtain'
1711 | p855
1712 | aS'thimble'
1713 | p856
1714 | aS'thresher, thrasher, threshing machine'
1715 | p857
1716 | aS'throne'
1717 | p858
1718 | aS'tile roof'
1719 | p859
1720 | aS'toaster'
1721 | p860
1722 | aS'tobacco shop, tobacconist shop, tobacconist'
1723 | p861
1724 | aS'toilet seat'
1725 | p862
1726 | aS'torch'
1727 | p863
1728 | aS'totem pole'
1729 | p864
1730 | aS'tow truck, tow car, wrecker'
1731 | p865
1732 | aS'toyshop'
1733 | p866
1734 | aS'tractor'
1735 | p867
1736 | aS'trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi'
1737 | p868
1738 | aS'tray'
1739 | p869
1740 | aS'trench coat'
1741 | p870
1742 | aS'tricycle, trike, velocipede'
1743 | p871
1744 | aS'trimaran'
1745 | p872
1746 | aS'tripod'
1747 | p873
1748 | aS'triumphal arch'
1749 | p874
1750 | aS'trolleybus, trolley coach, trackless trolley'
1751 | p875
1752 | aS'trombone'
1753 | p876
1754 | aS'tub, vat'
1755 | p877
1756 | aS'turnstile'
1757 | p878
1758 | aS'typewriter keyboard'
1759 | p879
1760 | aS'umbrella'
1761 | p880
1762 | aS'unicycle, monocycle'
1763 | p881
1764 | aS'upright, upright piano'
1765 | p882
1766 | aS'vacuum, vacuum cleaner'
1767 | p883
1768 | aS'vase'
1769 | p884
1770 | aS'vault'
1771 | p885
1772 | aS'velvet'
1773 | p886
1774 | aS'vending machine'
1775 | p887
1776 | aS'vestment'
1777 | p888
1778 | aS'viaduct'
1779 | p889
1780 | aS'violin, fiddle'
1781 | p890
1782 | aS'volleyball'
1783 | p891
1784 | aS'waffle iron'
1785 | p892
1786 | aS'wall clock'
1787 | p893
1788 | aS'wallet, billfold, notecase, pocketbook'
1789 | p894
1790 | aS'wardrobe, closet, press'
1791 | p895
1792 | aS'warplane, military plane'
1793 | p896
1794 | aS'washbasin, handbasin, washbowl, lavabo, wash-hand basin'
1795 | p897
1796 | aS'washer, automatic washer, washing machine'
1797 | p898
1798 | aS'water bottle'
1799 | p899
1800 | aS'water jug'
1801 | p900
1802 | aS'water tower'
1803 | p901
1804 | aS'whiskey jug'
1805 | p902
1806 | aS'whistle'
1807 | p903
1808 | aS'wig'
1809 | p904
1810 | aS'window screen'
1811 | p905
1812 | aS'window shade'
1813 | p906
1814 | aS'Windsor tie'
1815 | p907
1816 | aS'wine bottle'
1817 | p908
1818 | aS'wing'
1819 | p909
1820 | aS'wok'
1821 | p910
1822 | aS'wooden spoon'
1823 | p911
1824 | aS'wool, woolen, woollen'
1825 | p912
1826 | aS'worm fence, snake fence, snake-rail fence, Virginia fence'
1827 | p913
1828 | aS'wreck'
1829 | p914
1830 | aS'yawl'
1831 | p915
1832 | aS'yurt'
1833 | p916
1834 | aS'web site, website, internet site, site'
1835 | p917
1836 | aS'comic book'
1837 | p918
1838 | aS'crossword puzzle, crossword'
1839 | p919
1840 | aS'street sign'
1841 | p920
1842 | aS'traffic light, traffic signal, stoplight'
1843 | p921
1844 | aS'book jacket, dust cover, dust jacket, dust wrapper'
1845 | p922
1846 | aS'menu'
1847 | p923
1848 | aS'plate'
1849 | p924
1850 | aS'guacamole'
1851 | p925
1852 | aS'consomme'
1853 | p926
1854 | aS'hot pot, hotpot'
1855 | p927
1856 | aS'trifle'
1857 | p928
1858 | aS'ice cream, icecream'
1859 | p929
1860 | aS'ice lolly, lolly, lollipop, popsicle'
1861 | p930
1862 | aS'French loaf'
1863 | p931
1864 | aS'bagel, beigel'
1865 | p932
1866 | aS'pretzel'
1867 | p933
1868 | aS'cheeseburger'
1869 | p934
1870 | aS'hotdog, hot dog, red hot'
1871 | p935
1872 | aS'mashed potato'
1873 | p936
1874 | aS'head cabbage'
1875 | p937
1876 | aS'broccoli'
1877 | p938
1878 | aS'cauliflower'
1879 | p939
1880 | aS'zucchini, courgette'
1881 | p940
1882 | aS'spaghetti squash'
1883 | p941
1884 | aS'acorn squash'
1885 | p942
1886 | aS'butternut squash'
1887 | p943
1888 | aS'cucumber, cuke'
1889 | p944
1890 | aS'artichoke, globe artichoke'
1891 | p945
1892 | aS'bell pepper'
1893 | p946
1894 | aS'cardoon'
1895 | p947
1896 | aS'mushroom'
1897 | p948
1898 | aS'Granny Smith'
1899 | p949
1900 | aS'strawberry'
1901 | p950
1902 | aS'orange'
1903 | p951
1904 | aS'lemon'
1905 | p952
1906 | aS'fig'
1907 | p953
1908 | aS'pineapple, ananas'
1909 | p954
1910 | aS'banana'
1911 | p955
1912 | aS'jackfruit, jak, jack'
1913 | p956
1914 | aS'custard apple'
1915 | p957
1916 | aS'pomegranate'
1917 | p958
1918 | aS'hay'
1919 | p959
1920 | aS'carbonara'
1921 | p960
1922 | aS'chocolate sauce, chocolate syrup'
1923 | p961
1924 | aS'dough'
1925 | p962
1926 | aS'meat loaf, meatloaf'
1927 | p963
1928 | aS'pizza, pizza pie'
1929 | p964
1930 | aS'potpie'
1931 | p965
1932 | aS'burrito'
1933 | p966
1934 | aS'red wine'
1935 | p967
1936 | aS'espresso'
1937 | p968
1938 | aS'cup'
1939 | p969
1940 | aS'eggnog'
1941 | p970
1942 | aS'alp'
1943 | p971
1944 | aS'bubble'
1945 | p972
1946 | aS'cliff, drop, drop-off'
1947 | p973
1948 | aS'coral reef'
1949 | p974
1950 | aS'geyser'
1951 | p975
1952 | aS'lakeside, lakeshore'
1953 | p976
1954 | aS'promontory, headland, head, foreland'
1955 | p977
1956 | aS'sandbar, sand bar'
1957 | p978
1958 | aS'seashore, coast, seacoast, sea-coast'
1959 | p979
1960 | aS'valley, vale'
1961 | p980
1962 | aS'volcano'
1963 | p981
1964 | aS'ballplayer, baseball player'
1965 | p982
1966 | aS'groom, bridegroom'
1967 | p983
1968 | aS'scuba diver'
1969 | p984
1970 | aS'rapeseed'
1971 | p985
1972 | aS'daisy'
1973 | p986
1974 | aS"yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum"
1975 | p987
1976 | aS'corn'
1977 | p988
1978 | aS'acorn'
1979 | p989
1980 | aS'hip, rose hip, rosehip'
1981 | p990
1982 | aS'buckeye, horse chestnut, conker'
1983 | p991
1984 | aS'coral fungus'
1985 | p992
1986 | aS'agaric'
1987 | p993
1988 | aS'gyromitra'
1989 | p994
1990 | aS'stinkhorn, carrion fungus'
1991 | p995
1992 | aS'earthstar'
1993 | p996
1994 | aS'hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa'
1995 | p997
1996 | aS'bolete'
1997 | p998
1998 | aS'ear, spike, capitulum'
1999 | p999
2000 | aS'toilet tissue, toilet paper, bathroom tissue'
2001 | p1000
2002 | a.
--------------------------------------------------------------------------------
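
The list above is ordered by ImageNet class index (the pN markers run from p1 to p1000), so an integer prediction from a 1000-way classifier indexes directly into it; classid_to_wnid.pkl, shown next, carries the WordNet IDs in the same order. A small illustration, with the prediction value assumed for the example:

```python
import pickle

with open('Data/CLASSES.pkl', 'rb') as f:
    CLASSES = pickle.load(f)     # the list dumped above, in class-index order

pred_class = 281                 # hypothetical classifier output
print(CLASSES[pred_class])       # 'tabby, tabby cat' (the entry followed by p282 above)
```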
/Data/classid_to_wnid.pkl:
--------------------------------------------------------------------------------
1 | (lp0
2 | S'n01440764'
3 | p1
4 | aS'n01443537'
5 | p2
6 | aS'n01484850'
7 | p3
8 | aS'n01491361'
9 | p4
10 | aS'n01494475'
11 | p5
12 | aS'n01496331'
13 | p6
14 | aS'n01498041'
15 | p7
16 | aS'n01514668'
17 | p8
18 | aS'n01514859'
19 | p9
20 | aS'n01518878'
21 | p10
22 | aS'n01530575'
23 | p11
24 | aS'n01531178'
25 | p12
26 | aS'n01532829'
27 | p13
28 | aS'n01534433'
29 | p14
30 | aS'n01537544'
31 | p15
32 | aS'n01558993'
33 | p16
34 | aS'n01560419'
35 | p17
36 | aS'n01580077'
37 | p18
38 | aS'n01582220'
39 | p19
40 | aS'n01592084'
41 | p20
42 | aS'n01601694'
43 | p21
44 | aS'n01608432'
45 | p22
46 | aS'n01614925'
47 | p23
48 | aS'n01616318'
49 | p24
50 | aS'n01622779'
51 | p25
52 | aS'n01629819'
53 | p26
54 | aS'n01630670'
55 | p27
56 | aS'n01631663'
57 | p28
58 | aS'n01632458'
59 | p29
60 | aS'n01632777'
61 | p30
62 | aS'n01641577'
63 | p31
64 | aS'n01644373'
65 | p32
66 | aS'n01644900'
67 | p33
68 | aS'n01664065'
69 | p34
70 | aS'n01665541'
71 | p35
72 | aS'n01667114'
73 | p36
74 | aS'n01667778'
75 | p37
76 | aS'n01669191'
77 | p38
78 | aS'n01675722'
79 | p39
80 | aS'n01677366'
81 | p40
82 | aS'n01682714'
83 | p41
84 | aS'n01685808'
85 | p42
86 | aS'n01687978'
87 | p43
88 | aS'n01688243'
89 | p44
90 | aS'n01689811'
91 | p45
92 | aS'n01692333'
93 | p46
94 | aS'n01693334'
95 | p47
96 | aS'n01694178'
97 | p48
98 | aS'n01695060'
99 | p49
100 | aS'n01697457'
101 | p50
102 | aS'n01698640'
103 | p51
104 | aS'n01704323'
105 | p52
106 | aS'n01728572'
107 | p53
108 | aS'n01728920'
109 | p54
110 | aS'n01729322'
111 | p55
112 | aS'n01729977'
113 | p56
114 | aS'n01734418'
115 | p57
116 | aS'n01735189'
117 | p58
118 | aS'n01737021'
119 | p59
120 | aS'n01739381'
121 | p60
122 | aS'n01740131'
123 | p61
124 | aS'n01742172'
125 | p62
126 | aS'n01744401'
127 | p63
128 | aS'n01748264'
129 | p64
130 | aS'n01749939'
131 | p65
132 | aS'n01751748'
133 | p66
134 | aS'n01753488'
135 | p67
136 | aS'n01755581'
137 | p68
138 | aS'n01756291'
139 | p69
140 | aS'n01768244'
141 | p70
142 | aS'n01770081'
143 | p71
144 | aS'n01770393'
145 | p72
146 | aS'n01773157'
147 | p73
148 | aS'n01773549'
149 | p74
150 | aS'n01773797'
151 | p75
152 | aS'n01774384'
153 | p76
154 | aS'n01774750'
155 | p77
156 | aS'n01775062'
157 | p78
158 | aS'n01776313'
159 | p79
160 | aS'n01784675'
161 | p80
162 | aS'n01795545'
163 | p81
164 | aS'n01796340'
165 | p82
166 | aS'n01797886'
167 | p83
168 | aS'n01798484'
169 | p84
170 | aS'n01806143'
171 | p85
172 | aS'n01806567'
173 | p86
174 | aS'n01807496'
175 | p87
176 | aS'n01817953'
177 | p88
178 | aS'n01818515'
179 | p89
180 | aS'n01819313'
181 | p90
182 | aS'n01820546'
183 | p91
184 | aS'n01824575'
185 | p92
186 | aS'n01828970'
187 | p93
188 | aS'n01829413'
189 | p94
190 | aS'n01833805'
191 | p95
192 | aS'n01843065'
193 | p96
194 | aS'n01843383'
195 | p97
196 | aS'n01847000'
197 | p98
198 | aS'n01855032'
199 | p99
200 | aS'n01855672'
201 | p100
202 | aS'n01860187'
203 | p101
204 | aS'n01871265'
205 | p102
206 | aS'n01872401'
207 | p103
208 | aS'n01873310'
209 | p104
210 | aS'n01877812'
211 | p105
212 | aS'n01882714'
213 | p106
214 | aS'n01883070'
215 | p107
216 | aS'n01910747'
217 | p108
218 | aS'n01914609'
219 | p109
220 | aS'n01917289'
221 | p110
222 | aS'n01924916'
223 | p111
224 | aS'n01930112'
225 | p112
226 | aS'n01943899'
227 | p113
228 | aS'n01944390'
229 | p114
230 | aS'n01945685'
231 | p115
232 | aS'n01950731'
233 | p116
234 | aS'n01955084'
235 | p117
236 | aS'n01968897'
237 | p118
238 | aS'n01978287'
239 | p119
240 | aS'n01978455'
241 | p120
242 | aS'n01980166'
243 | p121
244 | aS'n01981276'
245 | p122
246 | aS'n01983481'
247 | p123
248 | aS'n01984695'
249 | p124
250 | aS'n01985128'
251 | p125
252 | aS'n01986214'
253 | p126
254 | aS'n01990800'
255 | p127
256 | aS'n02002556'
257 | p128
258 | aS'n02002724'
259 | p129
260 | aS'n02006656'
261 | p130
262 | aS'n02007558'
263 | p131
264 | aS'n02009229'
265 | p132
266 | aS'n02009912'
267 | p133
268 | aS'n02011460'
269 | p134
270 | aS'n03126707'
271 | p135
272 | aS'n02013706'
273 | p136
274 | aS'n02017213'
275 | p137
276 | aS'n02018207'
277 | p138
278 | aS'n02018795'
279 | p139
280 | aS'n02025239'
281 | p140
282 | aS'n02027492'
283 | p141
284 | aS'n02028035'
285 | p142
286 | aS'n02033041'
287 | p143
288 | aS'n02037110'
289 | p144
290 | aS'n02051845'
291 | p145
292 | aS'n02056570'
293 | p146
294 | aS'n02058221'
295 | p147
296 | aS'n02066245'
297 | p148
298 | aS'n02071294'
299 | p149
300 | aS'n02074367'
301 | p150
302 | aS'n02077923'
303 | p151
304 | aS'n02085620'
305 | p152
306 | aS'n02085782'
307 | p153
308 | aS'n02085936'
309 | p154
310 | aS'n02086079'
311 | p155
312 | aS'n02086240'
313 | p156
314 | aS'n02086646'
315 | p157
316 | aS'n02086910'
317 | p158
318 | aS'n02087046'
319 | p159
320 | aS'n02087394'
321 | p160
322 | aS'n02088094'
323 | p161
324 | aS'n02088238'
325 | p162
326 | aS'n02088364'
327 | p163
328 | aS'n02088466'
329 | p164
330 | aS'n02088632'
331 | p165
332 | aS'n02089078'
333 | p166
334 | aS'n02089867'
335 | p167
336 | aS'n02089973'
337 | p168
338 | aS'n02090379'
339 | p169
340 | aS'n02090622'
341 | p170
342 | aS'n02090721'
343 | p171
344 | aS'n02091032'
345 | p172
346 | aS'n02091134'
347 | p173
348 | aS'n02091244'
349 | p174
350 | aS'n02091467'
351 | p175
352 | aS'n02091635'
353 | p176
354 | aS'n02091831'
355 | p177
356 | aS'n02092002'
357 | p178
358 | aS'n02092339'
359 | p179
360 | aS'n02093256'
361 | p180
362 | aS'n02093428'
363 | p181
364 | aS'n02093647'
365 | p182
366 | aS'n02093754'
367 | p183
368 | aS'n02093859'
369 | p184
370 | aS'n02093991'
371 | p185
372 | aS'n02094114'
373 | p186
374 | aS'n02094258'
375 | p187
376 | aS'n02094433'
377 | p188
378 | aS'n02095314'
379 | p189
380 | aS'n02095570'
381 | p190
382 | aS'n02095889'
383 | p191
384 | aS'n02096051'
385 | p192
386 | aS'n02096177'
387 | p193
388 | aS'n02096294'
389 | p194
390 | aS'n02096437'
391 | p195
392 | aS'n02096585'
393 | p196
394 | aS'n02097047'
395 | p197
396 | aS'n02097130'
397 | p198
398 | aS'n02097209'
399 | p199
400 | aS'n02097298'
401 | p200
402 | aS'n02097474'
403 | p201
404 | aS'n02097658'
405 | p202
406 | aS'n02098105'
407 | p203
408 | aS'n02098286'
409 | p204
410 | aS'n02098413'
411 | p205
412 | aS'n02099267'
413 | p206
414 | aS'n02099429'
415 | p207
416 | aS'n02099601'
417 | p208
418 | aS'n02099712'
419 | p209
420 | aS'n02099849'
421 | p210
422 | aS'n02100236'
423 | p211
424 | aS'n02100583'
425 | p212
426 | aS'n02100735'
427 | p213
428 | aS'n02100877'
429 | p214
430 | aS'n02101006'
431 | p215
432 | aS'n02101388'
433 | p216
434 | aS'n02101556'
435 | p217
436 | aS'n02102040'
437 | p218
438 | aS'n02102177'
439 | p219
440 | aS'n02102318'
441 | p220
442 | aS'n02102480'
443 | p221
444 | aS'n02102973'
445 | p222
446 | aS'n02104029'
447 | p223
448 | aS'n02104365'
449 | p224
450 | aS'n02105056'
451 | p225
452 | aS'n02105162'
453 | p226
454 | aS'n02105251'
455 | p227
456 | aS'n02105412'
457 | p228
458 | aS'n02105505'
459 | p229
460 | aS'n02105641'
461 | p230
462 | aS'n02105855'
463 | p231
464 | aS'n02106030'
465 | p232
466 | aS'n02106166'
467 | p233
468 | aS'n02106382'
469 | p234
470 | aS'n02106550'
471 | p235
472 | aS'n02106662'
473 | p236
474 | aS'n02107142'
475 | p237
476 | aS'n02107312'
477 | p238
478 | aS'n02107574'
479 | p239
480 | aS'n02107683'
481 | p240
482 | aS'n02107908'
483 | p241
484 | aS'n02108000'
485 | p242
486 | aS'n02108089'
487 | p243
488 | aS'n02108422'
489 | p244
490 | aS'n02108551'
491 | p245
492 | aS'n02108915'
493 | p246
494 | aS'n02109047'
495 | p247
496 | aS'n02109525'
497 | p248
498 | aS'n02109961'
499 | p249
500 | aS'n02110063'
501 | p250
502 | aS'n02110185'
503 | p251
504 | aS'n02110341'
505 | p252
506 | aS'n02110627'
507 | p253
508 | aS'n02110806'
509 | p254
510 | aS'n02110958'
511 | p255
512 | aS'n02111129'
513 | p256
514 | aS'n02111277'
515 | p257
516 | aS'n02111500'
517 | p258
518 | aS'n02111889'
519 | p259
520 | aS'n02112018'
521 | p260
522 | aS'n02112137'
523 | p261
524 | aS'n02112350'
525 | p262
526 | aS'n02112706'
527 | p263
528 | aS'n02113023'
529 | p264
530 | aS'n02113186'
531 | p265
532 | aS'n02113624'
533 | p266
534 | aS'n02113712'
535 | p267
536 | aS'n02113799'
537 | p268
538 | aS'n02113978'
539 | p269
540 | aS'n02114367'
541 | p270
542 | aS'n02114548'
543 | p271
544 | aS'n02114712'
545 | p272
546 | aS'n02114855'
547 | p273
548 | aS'n02115641'
549 | p274
550 | aS'n02115913'
551 | p275
552 | aS'n02116738'
553 | p276
554 | aS'n02117135'
555 | p277
556 | aS'n02119022'
557 | p278
558 | aS'n02119789'
559 | p279
560 | aS'n02120079'
561 | p280
562 | aS'n02120505'
563 | p281
564 | aS'n02123045'
565 | p282
566 | aS'n02123159'
567 | p283
568 | aS'n02123394'
569 | p284
570 | aS'n02123597'
571 | p285
572 | aS'n02124075'
573 | p286
574 | aS'n02125311'
575 | p287
576 | aS'n02127052'
577 | p288
578 | aS'n02128385'
579 | p289
580 | aS'n02128757'
581 | p290
582 | aS'n02128925'
583 | p291
584 | aS'n02129165'
585 | p292
586 | aS'n02129604'
587 | p293
588 | aS'n02130308'
589 | p294
590 | aS'n02132136'
591 | p295
592 | aS'n02133161'
593 | p296
594 | aS'n02134084'
595 | p297
596 | aS'n02134418'
597 | p298
598 | aS'n02137549'
599 | p299
600 | aS'n02138441'
601 | p300
602 | aS'n02165105'
603 | p301
604 | aS'n02165456'
605 | p302
606 | aS'n02167151'
607 | p303
608 | aS'n02168699'
609 | p304
610 | aS'n02169497'
611 | p305
612 | aS'n02172182'
613 | p306
614 | aS'n02174001'
615 | p307
616 | aS'n02177972'
617 | p308
618 | aS'n02190166'
619 | p309
620 | aS'n02206856'
621 | p310
622 | aS'n02219486'
623 | p311
624 | aS'n02226429'
625 | p312
626 | aS'n02229544'
627 | p313
628 | aS'n02231487'
629 | p314
630 | aS'n02233338'
631 | p315
632 | aS'n02236044'
633 | p316
634 | aS'n02256656'
635 | p317
636 | aS'n02259212'
637 | p318
638 | aS'n02264363'
639 | p319
640 | aS'n02268443'
641 | p320
642 | aS'n02268853'
643 | p321
644 | aS'n02276258'
645 | p322
646 | aS'n02277742'
647 | p323
648 | aS'n02279972'
649 | p324
650 | aS'n02280649'
651 | p325
652 | aS'n02281406'
653 | p326
654 | aS'n02281787'
655 | p327
656 | aS'n02317335'
657 | p328
658 | aS'n02319095'
659 | p329
660 | aS'n02321529'
661 | p330
662 | aS'n02325366'
663 | p331
664 | aS'n02326432'
665 | p332
666 | aS'n02328150'
667 | p333
668 | aS'n02342885'
669 | p334
670 | aS'n02346627'
671 | p335
672 | aS'n02356798'
673 | p336
674 | aS'n02361337'
675 | p337
676 | aS'n02363005'
677 | p338
678 | aS'n02364673'
679 | p339
680 | aS'n02389026'
681 | p340
682 | aS'n02391049'
683 | p341
684 | aS'n02395406'
685 | p342
686 | aS'n02396427'
687 | p343
688 | aS'n02397096'
689 | p344
690 | aS'n02398521'
691 | p345
692 | aS'n02403003'
693 | p346
694 | aS'n02408429'
695 | p347
696 | aS'n02410509'
697 | p348
698 | aS'n02412080'
699 | p349
700 | aS'n02415577'
701 | p350
702 | aS'n02417914'
703 | p351
704 | aS'n02422106'
705 | p352
706 | aS'n02422699'
707 | p353
708 | aS'n02423022'
709 | p354
710 | aS'n02437312'
711 | p355
712 | aS'n02437616'
713 | p356
714 | aS'n02441942'
715 | p357
716 | aS'n02442845'
717 | p358
718 | aS'n02443114'
719 | p359
720 | aS'n02443484'
721 | p360
722 | aS'n02444819'
723 | p361
724 | aS'n02445715'
725 | p362
726 | aS'n02447366'
727 | p363
728 | aS'n02454379'
729 | p364
730 | aS'n02457408'
731 | p365
732 | aS'n02480495'
733 | p366
734 | aS'n02480855'
735 | p367
736 | aS'n02481823'
737 | p368
738 | aS'n02483362'
739 | p369
740 | aS'n02483708'
741 | p370
742 | aS'n02484975'
743 | p371
744 | aS'n02486261'
745 | p372
746 | aS'n02486410'
747 | p373
748 | aS'n02487347'
749 | p374
750 | aS'n02488291'
751 | p375
752 | aS'n02488702'
753 | p376
754 | aS'n02489166'
755 | p377
756 | aS'n02490219'
757 | p378
758 | aS'n02492035'
759 | p379
760 | aS'n02492660'
761 | p380
762 | aS'n02493509'
763 | p381
764 | aS'n02493793'
765 | p382
766 | aS'n02494079'
767 | p383
768 | aS'n02497673'
769 | p384
770 | aS'n02500267'
771 | p385
772 | aS'n02504013'
773 | p386
774 | aS'n02504458'
775 | p387
776 | aS'n02509815'
777 | p388
778 | aS'n02510455'
779 | p389
780 | aS'n02514041'
781 | p390
782 | aS'n02526121'
783 | p391
784 | aS'n02536864'
785 | p392
786 | aS'n02606052'
787 | p393
788 | aS'n02607072'
789 | p394
790 | aS'n02640242'
791 | p395
792 | aS'n02641379'
793 | p396
794 | aS'n02643566'
795 | p397
796 | aS'n02655020'
797 | p398
798 | aS'n02666196'
799 | p399
800 | aS'n02667093'
801 | p400
802 | aS'n02669723'
803 | p401
804 | aS'n02672831'
805 | p402
806 | aS'n02676566'
807 | p403
808 | aS'n02687172'
809 | p404
810 | aS'n02690373'
811 | p405
812 | aS'n02692877'
813 | p406
814 | aS'n02699494'
815 | p407
816 | aS'n02701002'
817 | p408
818 | aS'n02704792'
819 | p409
820 | aS'n02708093'
821 | p410
822 | aS'n02727426'
823 | p411
824 | aS'n02730930'
825 | p412
826 | aS'n02747177'
827 | p413
828 | aS'n02749479'
829 | p414
830 | aS'n02769748'
831 | p415
832 | aS'n02776631'
833 | p416
834 | aS'n02777292'
835 | p417
836 | aS'n02782093'
837 | p418
838 | aS'n02783161'
839 | p419
840 | aS'n02786058'
841 | p420
842 | aS'n02787622'
843 | p421
844 | aS'n02788148'
845 | p422
846 | aS'n02790996'
847 | p423
848 | aS'n02791124'
849 | p424
850 | aS'n02791270'
851 | p425
852 | aS'n02793495'
853 | p426
854 | aS'n02794156'
855 | p427
856 | aS'n02795169'
857 | p428
858 | aS'n02797295'
859 | p429
860 | aS'n02799071'
861 | p430
862 | aS'n02802426'
863 | p431
864 | aS'n02804414'
865 | p432
866 | aS'n02804610'
867 | p433
868 | aS'n02807133'
869 | p434
870 | aS'n02808304'
871 | p435
872 | aS'n02808440'
873 | p436
874 | aS'n02814533'
875 | p437
876 | aS'n02814860'
877 | p438
878 | aS'n02815834'
879 | p439
880 | aS'n02817516'
881 | p440
882 | aS'n02823428'
883 | p441
884 | aS'n02823750'
885 | p442
886 | aS'n02825657'
887 | p443
888 | aS'n02834397'
889 | p444
890 | aS'n02835271'
891 | p445
892 | aS'n02837789'
893 | p446
894 | aS'n02840245'
895 | p447
896 | aS'n02841315'
897 | p448
898 | aS'n02843684'
899 | p449
900 | aS'n02859443'
901 | p450
902 | aS'n02860847'
903 | p451
904 | aS'n02865351'
905 | p452
906 | aS'n02869837'
907 | p453
908 | aS'n02870880'
909 | p454
910 | aS'n02871525'
911 | p455
912 | aS'n02877765'
913 | p456
914 | aS'n02879718'
915 | p457
916 | aS'n02883205'
917 | p458
918 | aS'n02892201'
919 | p459
920 | aS'n02892767'
921 | p460
922 | aS'n02894605'
923 | p461
924 | aS'n02895154'
925 | p462
926 | aS'n02906734'
927 | p463
928 | aS'n02909870'
929 | p464
930 | aS'n02910353'
931 | p465
932 | aS'n02916936'
933 | p466
934 | aS'n02917067'
935 | p467
936 | aS'n02927161'
937 | p468
938 | aS'n02930766'
939 | p469
940 | aS'n02939185'
941 | p470
942 | aS'n02948072'
943 | p471
944 | aS'n02950826'
945 | p472
946 | aS'n02951358'
947 | p473
948 | aS'n02951585'
949 | p474
950 | aS'n02963159'
951 | p475
952 | aS'n02965783'
953 | p476
954 | aS'n02966193'
955 | p477
956 | aS'n02966687'
957 | p478
958 | aS'n02971356'
959 | p479
960 | aS'n02974003'
961 | p480
962 | aS'n02977058'
963 | p481
964 | aS'n02978881'
965 | p482
966 | aS'n02979186'
967 | p483
968 | aS'n02980441'
969 | p484
970 | aS'n02981792'
971 | p485
972 | aS'n02988304'
973 | p486
974 | aS'n02992211'
975 | p487
976 | aS'n02992529'
977 | p488
978 | aS'n02999410'
979 | p489
980 | aS'n03000134'
981 | p490
982 | aS'n03000247'
983 | p491
984 | aS'n03000684'
985 | p492
986 | aS'n03014705'
987 | p493
988 | aS'n03016953'
989 | p494
990 | aS'n03017168'
991 | p495
992 | aS'n03018349'
993 | p496
994 | aS'n03026506'
995 | p497
996 | aS'n03028079'
997 | p498
998 | aS'n03032252'
999 | p499
1000 | aS'n03041632'
1001 | p500
1002 | aS'n03042490'
1003 | p501
1004 | aS'n03045698'
1005 | p502
1006 | aS'n03047690'
1007 | p503
1008 | aS'n03062245'
1009 | p504
1010 | aS'n03063599'
1011 | p505
1012 | aS'n03063689'
1013 | p506
1014 | aS'n03065424'
1015 | p507
1016 | aS'n03075370'
1017 | p508
1018 | aS'n03085013'
1019 | p509
1020 | aS'n03089624'
1021 | p510
1022 | aS'n03095699'
1023 | p511
1024 | aS'n03100240'
1025 | p512
1026 | aS'n03109150'
1027 | p513
1028 | aS'n03110669'
1029 | p514
1030 | aS'n03124043'
1031 | p515
1032 | aS'n03124170'
1033 | p516
1034 | aS'n03125729'
1035 | p517
1036 | ag135
1037 | aS'n03127747'
1038 | p518
1039 | aS'n03127925'
1040 | p519
1041 | aS'n03131574'
1042 | p520
1043 | aS'n03133878'
1044 | p521
1045 | aS'n03134739'
1046 | p522
1047 | aS'n03141823'
1048 | p523
1049 | aS'n03146219'
1050 | p524
1051 | aS'n03160309'
1052 | p525
1053 | aS'n03179701'
1054 | p526
1055 | aS'n03180011'
1056 | p527
1057 | aS'n03187595'
1058 | p528
1059 | aS'n03188531'
1060 | p529
1061 | aS'n03196217'
1062 | p530
1063 | aS'n03197337'
1064 | p531
1065 | aS'n03201208'
1066 | p532
1067 | aS'n03207743'
1068 | p533
1069 | aS'n03207941'
1070 | p534
1071 | aS'n03208938'
1072 | p535
1073 | aS'n03216828'
1074 | p536
1075 | aS'n03218198'
1076 | p537
1077 | aS'n03220513'
1078 | p538
1079 | aS'n03223299'
1080 | p539
1081 | aS'n03240683'
1082 | p540
1083 | aS'n03249569'
1084 | p541
1085 | aS'n03250847'
1086 | p542
1087 | aS'n03255030'
1088 | p543
1089 | aS'n03259280'
1090 | p544
1091 | aS'n03271574'
1092 | p545
1093 | aS'n03272010'
1094 | p546
1095 | aS'n03272562'
1096 | p547
1097 | aS'n03290653'
1098 | p548
1099 | aS'n03291819'
1100 | p549
1101 | aS'n03297495'
1102 | p550
1103 | aS'n03314780'
1104 | p551
1105 | aS'n03325584'
1106 | p552
1107 | aS'n03337140'
1108 | p553
1109 | aS'n03344393'
1110 | p554
1111 | aS'n03345487'
1112 | p555
1113 | aS'n03347037'
1114 | p556
1115 | aS'n03355925'
1116 | p557
1117 | aS'n03372029'
1118 | p558
1119 | aS'n03376595'
1120 | p559
1121 | aS'n03379051'
1122 | p560
1123 | aS'n03384352'
1124 | p561
1125 | aS'n03388043'
1126 | p562
1127 | aS'n03388183'
1128 | p563
1129 | aS'n03388549'
1130 | p564
1131 | aS'n03393912'
1132 | p565
1133 | aS'n03394916'
1134 | p566
1135 | aS'n03400231'
1136 | p567
1137 | aS'n03404251'
1138 | p568
1139 | aS'n03417042'
1140 | p569
1141 | aS'n03424325'
1142 | p570
1143 | aS'n03425413'
1144 | p571
1145 | aS'n03443371'
1146 | p572
1147 | aS'n03444034'
1148 | p573
1149 | aS'n03445777'
1150 | p574
1151 | aS'n03445924'
1152 | p575
1153 | aS'n03447447'
1154 | p576
1155 | aS'n03447721'
1156 | p577
1157 | aS'n03450230'
1158 | p578
1159 | aS'n03452741'
1160 | p579
1161 | aS'n03457902'
1162 | p580
1163 | aS'n03459775'
1164 | p581
1165 | aS'n03461385'
1166 | p582
1167 | aS'n03467068'
1168 | p583
1169 | aS'n03476684'
1170 | p584
1171 | aS'n03476991'
1172 | p585
1173 | aS'n03478589'
1174 | p586
1175 | aS'n03481172'
1176 | p587
1177 | aS'n03482405'
1178 | p588
1179 | aS'n03483316'
1180 | p589
1181 | aS'n03485407'
1182 | p590
1183 | aS'n03485794'
1184 | p591
1185 | aS'n03492542'
1186 | p592
1187 | aS'n03494278'
1188 | p593
1189 | aS'n03495258'
1190 | p594
1191 | aS'n03496892'
1192 | p595
1193 | aS'n03498962'
1194 | p596
1195 | aS'n03527444'
1196 | p597
1197 | aS'n03529860'
1198 | p598
1199 | aS'n03530642'
1200 | p599
1201 | aS'n03532672'
1202 | p600
1203 | aS'n03534580'
1204 | p601
1205 | aS'n03535780'
1206 | p602
1207 | aS'n03538406'
1208 | p603
1209 | aS'n03544143'
1210 | p604
1211 | aS'n03584254'
1212 | p605
1213 | aS'n03584829'
1214 | p606
1215 | aS'n03590841'
1216 | p607
1217 | aS'n03594734'
1218 | p608
1219 | aS'n03594945'
1220 | p609
1221 | aS'n03595614'
1222 | p610
1223 | aS'n03598930'
1224 | p611
1225 | aS'n03599486'
1226 | p612
1227 | aS'n03602883'
1228 | p613
1229 | aS'n03617480'
1230 | p614
1231 | aS'n03623198'
1232 | p615
1233 | aS'n03627232'
1234 | p616
1235 | aS'n03630383'
1236 | p617
1237 | aS'n03633091'
1238 | p618
1239 | aS'n03637318'
1240 | p619
1241 | aS'n03642806'
1242 | p620
1243 | aS'n03649909'
1244 | p621
1245 | aS'n03657121'
1246 | p622
1247 | aS'n03658185'
1248 | p623
1249 | aS'n03661043'
1250 | p624
1251 | aS'n03662601'
1252 | p625
1253 | aS'n03666591'
1254 | p626
1255 | aS'n03670208'
1256 | p627
1257 | aS'n03673027'
1258 | p628
1259 | aS'n03676483'
1260 | p629
1261 | aS'n03680355'
1262 | p630
1263 | aS'n03690938'
1264 | p631
1265 | aS'n03691459'
1266 | p632
1267 | aS'n03692522'
1268 | p633
1269 | aS'n03697007'
1270 | p634
1271 | aS'n03706229'
1272 | p635
1273 | aS'n03709823'
1274 | p636
1275 | aS'n03710193'
1276 | p637
1277 | aS'n03710637'
1278 | p638
1279 | aS'n03710721'
1280 | p639
1281 | aS'n03717622'
1282 | p640
1283 | aS'n03720891'
1284 | p641
1285 | aS'n03721384'
1286 | p642
1287 | aS'n03724870'
1288 | p643
1289 | aS'n03729826'
1290 | p644
1291 | aS'n03733131'
1292 | p645
1293 | aS'n03733281'
1294 | p646
1295 | aS'n03733805'
1296 | p647
1297 | aS'n03742115'
1298 | p648
1299 | aS'n03743016'
1300 | p649
1301 | aS'n03759954'
1302 | p650
1303 | aS'n03761084'
1304 | p651
1305 | aS'n03763968'
1306 | p652
1307 | aS'n03764736'
1308 | p653
1309 | aS'n03769881'
1310 | p654
1311 | aS'n03770439'
1312 | p655
1313 | aS'n03770679'
1314 | p656
1315 | aS'n03773504'
1316 | p657
1317 | aS'n03775071'
1318 | p658
1319 | aS'n03775546'
1320 | p659
1321 | aS'n03776460'
1322 | p660
1323 | aS'n03777568'
1324 | p661
1325 | aS'n03777754'
1326 | p662
1327 | aS'n03781244'
1328 | p663
1329 | aS'n03782006'
1330 | p664
1331 | aS'n03785016'
1332 | p665
1333 | aS'n03786901'
1334 | p666
1335 | aS'n03787032'
1336 | p667
1337 | aS'n03788195'
1338 | p668
1339 | aS'n03788365'
1340 | p669
1341 | aS'n03791053'
1342 | p670
1343 | aS'n03792782'
1344 | p671
1345 | aS'n03792972'
1346 | p672
1347 | aS'n03793489'
1348 | p673
1349 | aS'n03794056'
1350 | p674
1351 | aS'n03796401'
1352 | p675
1353 | aS'n03803284'
1354 | p676
1355 | aS'n03804744'
1356 | p677
1357 | aS'n03814639'
1358 | p678
1359 | aS'n03814906'
1360 | p679
1361 | aS'n03825788'
1362 | p680
1363 | aS'n03832673'
1364 | p681
1365 | aS'n03837869'
1366 | p682
1367 | aS'n03838899'
1368 | p683
1369 | aS'n03840681'
1370 | p684
1371 | aS'n03841143'
1372 | p685
1373 | aS'n03843555'
1374 | p686
1375 | aS'n03854065'
1376 | p687
1377 | aS'n03857828'
1378 | p688
1379 | aS'n03866082'
1380 | p689
1381 | aS'n03868242'
1382 | p690
1383 | aS'n03868863'
1384 | p691
1385 | aS'n03871628'
1386 | p692
1387 | aS'n03873416'
1388 | p693
1389 | aS'n03874293'
1390 | p694
1391 | aS'n03874599'
1392 | p695
1393 | aS'n03876231'
1394 | p696
1395 | aS'n03877472'
1396 | p697
1397 | aS'n03877845'
1398 | p698
1399 | aS'n03884397'
1400 | p699
1401 | aS'n03887697'
1402 | p700
1403 | aS'n03888257'
1404 | p701
1405 | aS'n03888605'
1406 | p702
1407 | aS'n03891251'
1408 | p703
1409 | aS'n03891332'
1410 | p704
1411 | aS'n03895866'
1412 | p705
1413 | aS'n03899768'
1414 | p706
1415 | aS'n03902125'
1416 | p707
1417 | aS'n03903868'
1418 | p708
1419 | aS'n03908618'
1420 | p709
1421 | aS'n03908714'
1422 | p710
1423 | aS'n03916031'
1424 | p711
1425 | aS'n03920288'
1426 | p712
1427 | aS'n03924679'
1428 | p713
1429 | aS'n03929660'
1430 | p714
1431 | aS'n03929855'
1432 | p715
1433 | aS'n03930313'
1434 | p716
1435 | aS'n03930630'
1436 | p717
1437 | aS'n03933933'
1438 | p718
1439 | aS'n03935335'
1440 | p719
1441 | aS'n03937543'
1442 | p720
1443 | aS'n03938244'
1444 | p721
1445 | aS'n03942813'
1446 | p722
1447 | aS'n03944341'
1448 | p723
1449 | aS'n03947888'
1450 | p724
1451 | aS'n03950228'
1452 | p725
1453 | aS'n03954731'
1454 | p726
1455 | aS'n03956157'
1456 | p727
1457 | aS'n03958227'
1458 | p728
1459 | aS'n03961711'
1460 | p729
1461 | aS'n03967562'
1462 | p730
1463 | aS'n03970156'
1464 | p731
1465 | aS'n03976467'
1466 | p732
1467 | aS'n03976657'
1468 | p733
1469 | aS'n03977966'
1470 | p734
1471 | aS'n03980874'
1472 | p735
1473 | aS'n03982430'
1474 | p736
1475 | aS'n03983396'
1476 | p737
1477 | aS'n03991062'
1478 | p738
1479 | aS'n03992509'
1480 | p739
1481 | aS'n03995372'
1482 | p740
1483 | aS'n03998194'
1484 | p741
1485 | aS'n04004767'
1486 | p742
1487 | aS'n04005630'
1488 | p743
1489 | aS'n04008634'
1490 | p744
1491 | aS'n04009552'
1492 | p745
1493 | aS'n04019541'
1494 | p746
1495 | aS'n04023962'
1496 | p747
1497 | aS'n04026417'
1498 | p748
1499 | aS'n04033901'
1500 | p749
1501 | aS'n04033995'
1502 | p750
1503 | aS'n04037443'
1504 | p751
1505 | aS'n04039381'
1506 | p752
1507 | aS'n04040759'
1508 | p753
1509 | aS'n04041544'
1510 | p754
1511 | aS'n04044716'
1512 | p755
1513 | aS'n04049303'
1514 | p756
1515 | aS'n04065272'
1516 | p757
1517 | aS'n04067472'
1518 | p758
1519 | aS'n04069434'
1520 | p759
1521 | aS'n04070727'
1522 | p760
1523 | aS'n04074963'
1524 | p761
1525 | aS'n04081281'
1526 | p762
1527 | aS'n04086273'
1528 | p763
1529 | aS'n04090263'
1530 | p764
1531 | aS'n04099969'
1532 | p765
1533 | aS'n04111531'
1534 | p766
1535 | aS'n04116512'
1536 | p767
1537 | aS'n04118538'
1538 | p768
1539 | aS'n04118776'
1540 | p769
1541 | aS'n04120489'
1542 | p770
1543 | aS'n04125021'
1544 | p771
1545 | aS'n04127249'
1546 | p772
1547 | aS'n04131690'
1548 | p773
1549 | aS'n04133789'
1550 | p774
1551 | aS'n04136333'
1552 | p775
1553 | aS'n04141076'
1554 | p776
1555 | aS'n04141327'
1556 | p777
1557 | aS'n04141975'
1558 | p778
1559 | aS'n04146614'
1560 | p779
1561 | aS'n04147183'
1562 | p780
1563 | aS'n04149813'
1564 | p781
1565 | aS'n04152593'
1566 | p782
1567 | aS'n04153751'
1568 | p783
1569 | aS'n04154565'
1570 | p784
1571 | aS'n04162706'
1572 | p785
1573 | aS'n04179913'
1574 | p786
1575 | aS'n04192698'
1576 | p787
1577 | aS'n04200800'
1578 | p788
1579 | aS'n04201297'
1580 | p789
1581 | aS'n04204238'
1582 | p790
1583 | aS'n04204347'
1584 | p791
1585 | aS'n04208210'
1586 | p792
1587 | aS'n04209133'
1588 | p793
1589 | aS'n04209239'
1590 | p794
1591 | aS'n04228054'
1592 | p795
1593 | aS'n04229816'
1594 | p796
1595 | aS'n04235860'
1596 | p797
1597 | aS'n04238763'
1598 | p798
1599 | aS'n04239074'
1600 | p799
1601 | aS'n04243546'
1602 | p800
1603 | aS'n04251144'
1604 | p801
1605 | aS'n04252077'
1606 | p802
1607 | aS'n04252225'
1608 | p803
1609 | aS'n04254120'
1610 | p804
1611 | aS'n04254680'
1612 | p805
1613 | aS'n04254777'
1614 | p806
1615 | aS'n04258138'
1616 | p807
1617 | aS'n04259630'
1618 | p808
1619 | aS'n04263257'
1620 | p809
1621 | aS'n04264628'
1622 | p810
1623 | aS'n04265275'
1624 | p811
1625 | aS'n04266014'
1626 | p812
1627 | aS'n04270147'
1628 | p813
1629 | aS'n04273569'
1630 | p814
1631 | aS'n04275548'
1632 | p815
1633 | aS'n04277352'
1634 | p816
1635 | aS'n04285008'
1636 | p817
1637 | aS'n04286575'
1638 | p818
1639 | aS'n04296562'
1640 | p819
1641 | aS'n04310018'
1642 | p820
1643 | aS'n04311004'
1644 | p821
1645 | aS'n04311174'
1646 | p822
1647 | aS'n04317175'
1648 | p823
1649 | aS'n04325704'
1650 | p824
1651 | aS'n04326547'
1652 | p825
1653 | aS'n04328186'
1654 | p826
1655 | aS'n04330267'
1656 | p827
1657 | aS'n04332243'
1658 | p828
1659 | aS'n04335435'
1660 | p829
1661 | aS'n04336792'
1662 | p830
1663 | aS'n04344873'
1664 | p831
1665 | aS'n04346328'
1666 | p832
1667 | aS'n04347754'
1668 | p833
1669 | aS'n04350905'
1670 | p834
1671 | aS'n04355338'
1672 | p835
1673 | aS'n04355933'
1674 | p836
1675 | aS'n04356056'
1676 | p837
1677 | aS'n04357314'
1678 | p838
1679 | aS'n04366367'
1680 | p839
1681 | aS'n04367480'
1682 | p840
1683 | aS'n04370456'
1684 | p841
1685 | aS'n04371430'
1686 | p842
1687 | aS'n04371774'
1688 | p843
1689 | aS'n04372370'
1690 | p844
1691 | aS'n04376876'
1692 | p845
1693 | aS'n04380533'
1694 | p846
1695 | aS'n04389033'
1696 | p847
1697 | aS'n04392985'
1698 | p848
1699 | aS'n04398044'
1700 | p849
1701 | aS'n04399382'
1702 | p850
1703 | aS'n04404412'
1704 | p851
1705 | aS'n04409515'
1706 | p852
1707 | aS'n04417672'
1708 | p853
1709 | aS'n04418357'
1710 | p854
1711 | aS'n04423845'
1712 | p855
1713 | aS'n04428191'
1714 | p856
1715 | aS'n04429376'
1716 | p857
1717 | aS'n04435653'
1718 | p858
1719 | aS'n04442312'
1720 | p859
1721 | aS'n04443257'
1722 | p860
1723 | aS'n04447861'
1724 | p861
1725 | aS'n04456115'
1726 | p862
1727 | aS'n04458633'
1728 | p863
1729 | aS'n04461696'
1730 | p864
1731 | aS'n04462240'
1732 | p865
1733 | aS'n04465501'
1734 | p866
1735 | aS'n04467665'
1736 | p867
1737 | aS'n04476259'
1738 | p868
1739 | aS'n04479046'
1740 | p869
1741 | aS'n04482393'
1742 | p870
1743 | aS'n04483307'
1744 | p871
1745 | aS'n04485082'
1746 | p872
1747 | aS'n04486054'
1748 | p873
1749 | aS'n04487081'
1750 | p874
1751 | aS'n04487394'
1752 | p875
1753 | aS'n04493381'
1754 | p876
1755 | aS'n04501370'
1756 | p877
1757 | aS'n04505470'
1758 | p878
1759 | aS'n04507155'
1760 | p879
1761 | aS'n04509417'
1762 | p880
1763 | aS'n04515003'
1764 | p881
1765 | aS'n04517823'
1766 | p882
1767 | aS'n04522168'
1768 | p883
1769 | aS'n04523525'
1770 | p884
1771 | aS'n04525038'
1772 | p885
1773 | aS'n04525305'
1774 | p886
1775 | aS'n04532106'
1776 | p887
1777 | aS'n04532670'
1778 | p888
1779 | aS'n04536866'
1780 | p889
1781 | aS'n04540053'
1782 | p890
1783 | aS'n04542943'
1784 | p891
1785 | aS'n04548280'
1786 | p892
1787 | aS'n04548362'
1788 | p893
1789 | aS'n04550184'
1790 | p894
1791 | aS'n04552348'
1792 | p895
1793 | aS'n04553703'
1794 | p896
1795 | aS'n04554684'
1796 | p897
1797 | aS'n04557648'
1798 | p898
1799 | aS'n04560804'
1800 | p899
1801 | aS'n04562935'
1802 | p900
1803 | aS'n04579145'
1804 | p901
1805 | aS'n04579432'
1806 | p902
1807 | aS'n04584207'
1808 | p903
1809 | aS'n04589890'
1810 | p904
1811 | aS'n04590129'
1812 | p905
1813 | aS'n04591157'
1814 | p906
1815 | aS'n04591713'
1816 | p907
1817 | aS'n04592741'
1818 | p908
1819 | aS'n04596742'
1820 | p909
1821 | aS'n04597913'
1822 | p910
1823 | aS'n04599235'
1824 | p911
1825 | aS'n04604644'
1826 | p912
1827 | aS'n04606251'
1828 | p913
1829 | aS'n04612504'
1830 | p914
1831 | aS'n04613696'
1832 | p915
1833 | aS'n06359193'
1834 | p916
1835 | aS'n06596364'
1836 | p917
1837 | aS'n06785654'
1838 | p918
1839 | aS'n06794110'
1840 | p919
1841 | aS'n06874185'
1842 | p920
1843 | aS'n07248320'
1844 | p921
1845 | aS'n07565083'
1846 | p922
1847 | aS'n07579787'
1848 | p923
1849 | aS'n07583066'
1850 | p924
1851 | aS'n07584110'
1852 | p925
1853 | aS'n07590611'
1854 | p926
1855 | aS'n07613480'
1856 | p927
1857 | aS'n07614500'
1858 | p928
1859 | aS'n07615774'
1860 | p929
1861 | aS'n07684084'
1862 | p930
1863 | aS'n07693725'
1864 | p931
1865 | aS'n07695742'
1866 | p932
1867 | aS'n07697313'
1868 | p933
1869 | aS'n07697537'
1870 | p934
1871 | aS'n07711569'
1872 | p935
1873 | aS'n07714571'
1874 | p936
1875 | aS'n07714990'
1876 | p937
1877 | aS'n07715103'
1878 | p938
1879 | aS'n07716358'
1880 | p939
1881 | aS'n07716906'
1882 | p940
1883 | aS'n07717410'
1884 | p941
1885 | aS'n07717556'
1886 | p942
1887 | aS'n07718472'
1888 | p943
1889 | aS'n07718747'
1890 | p944
1891 | aS'n07720875'
1892 | p945
1893 | aS'n07730033'
1894 | p946
1895 | aS'n07734744'
1896 | p947
1897 | aS'n07742313'
1898 | p948
1899 | aS'n07745940'
1900 | p949
1901 | aS'n07747607'
1902 | p950
1903 | aS'n07749582'
1904 | p951
1905 | aS'n07753113'
1906 | p952
1907 | aS'n07753275'
1908 | p953
1909 | aS'n07753592'
1910 | p954
1911 | aS'n07754684'
1912 | p955
1913 | aS'n07760859'
1914 | p956
1915 | aS'n07768694'
1916 | p957
1917 | aS'n07802026'
1918 | p958
1919 | aS'n07831146'
1920 | p959
1921 | aS'n07836838'
1922 | p960
1923 | aS'n07860988'
1924 | p961
1925 | aS'n07871810'
1926 | p962
1927 | aS'n07873807'
1928 | p963
1929 | aS'n07875152'
1930 | p964
1931 | aS'n07880968'
1932 | p965
1933 | aS'n07892512'
1934 | p966
1935 | aS'n07920052'
1936 | p967
1937 | aS'n07930864'
1938 | p968
1939 | aS'n07932039'
1940 | p969
1941 | aS'n09193705'
1942 | p970
1943 | aS'n09229709'
1944 | p971
1945 | aS'n09246464'
1946 | p972
1947 | aS'n09256479'
1948 | p973
1949 | aS'n09288635'
1950 | p974
1951 | aS'n09332890'
1952 | p975
1953 | aS'n09399592'
1954 | p976
1955 | aS'n09421951'
1956 | p977
1957 | aS'n09428293'
1958 | p978
1959 | aS'n09468604'
1960 | p979
1961 | aS'n09472597'
1962 | p980
1963 | aS'n09835506'
1964 | p981
1965 | aS'n10148035'
1966 | p982
1967 | aS'n10565667'
1968 | p983
1969 | aS'n11879895'
1970 | p984
1971 | aS'n11939491'
1972 | p985
1973 | aS'n12057211'
1974 | p986
1975 | aS'n12144580'
1976 | p987
1977 | aS'n12267677'
1978 | p988
1979 | aS'n12620546'
1980 | p989
1981 | aS'n12768682'
1982 | p990
1983 | aS'n12985857'
1984 | p991
1985 | aS'n12998815'
1986 | p992
1987 | aS'n13037406'
1988 | p993
1989 | aS'n13040303'
1990 | p994
1991 | aS'n13044778'
1992 | p995
1993 | aS'n13052670'
1994 | p996
1995 | aS'n13054560'
1996 | p997
1997 | aS'n13133613'
1998 | p998
1999 | aS'n15075141'
2000 | p999
2001 | a.
--------------------------------------------------------------------------------
/Images/apple.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/Images/apple.jpg
--------------------------------------------------------------------------------
/Images/beach.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/Images/beach.jpg
--------------------------------------------------------------------------------
/Images/beetroot.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/Images/beetroot.jpg
--------------------------------------------------------------------------------
/Images/cat.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/Images/cat.jpg
--------------------------------------------------------------------------------
/Images/dog.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/Images/dog.jpg
--------------------------------------------------------------------------------
/Images/dolph.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/Images/dolph.jpg
--------------------------------------------------------------------------------
/Images/einstein.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/Images/einstein.jpg
--------------------------------------------------------------------------------
/Images/kitten.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/Images/kitten.jpg
--------------------------------------------------------------------------------
/Images/orange.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/Images/orange.jpg
--------------------------------------------------------------------------------
/Images/pig.html:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/Images/pig.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/Images/pig.jpg
--------------------------------------------------------------------------------
/Images/pig_with_dog.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/Images/pig_with_dog.jpg
--------------------------------------------------------------------------------
/Images/rose.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/Images/rose.jpg
--------------------------------------------------------------------------------
/Images/sky.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/Images/sky.jpg
--------------------------------------------------------------------------------
/Poster.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/Poster.pdf
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Repository uses code from assignments of Stanford's course CS231n: Convolutional Neural Networks for Visual Recognition
2 |
3 | Setup:
4 |
5 | 1. Inside the library/ directory, run
6 | python setup.py build_ext --inplace
7 |
8 | 2. Download the file vgg16_weights.h5 from https://drive.google.com/file/d/0Bz7KyqmuGsilT0J5dmRCM0ROVHc/view?usp=sharing
9 |    Place the downloaded weight file (vgg16_weights.h5) in the Data folder
10 |
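Once both steps are done, the weights and class list load the same way the notebooks load them; a minimal sketch in the repo's Python 2 style, assuming the paths described above:

    from library.classifiers.pretrained_vgg16 import PretrainedVGG
    import pickle

    # assumes vgg16_weights.h5 was placed in Data/ as in step 2
    model = PretrainedVGG(h5_file='Data/vgg16_weights.h5')
    CLASSES = pickle.load(open('Data/CLASSES.pkl'))
    print "Loaded model and %d ImageNet classes" % len(CLASSES)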
--------------------------------------------------------------------------------
/Validation.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 37,
6 | "metadata": {
7 | "collapsed": false
8 | },
9 | "outputs": [
10 | {
11 | "name": "stdout",
12 | "output_type": "stream",
13 | "text": [
14 | "The autoreload extension is already loaded. To reload it, use:\n",
15 | " %reload_ext autoreload\n",
16 | "Loaded Model\n",
17 | "Loaded Class File\n",
18 | "Loaded Input Data Set\n",
19 | "[8] 11,349,0.414949\n",
20 | "[9] Skip (208,65): http://farm2.static.flickr.com/1246/905331985_1f0065e586.jpg : URL is Empty(White) Image\n",
21 | "[10] 338,246,0.404768\n"
22 | ]
23 | },
24 | {
25 | "ename": "KeyboardInterrupt",
26 | "evalue": "",
27 | "output_type": "error",
28 | "traceback": [
29 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
30 | "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
31 | "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 57\u001b[0m \u001b[0;32mprint\u001b[0m \u001b[0;34m\"[%d] Skip (%d,%d): %s : URL is Empty(White) Image\"\u001b[0m\u001b[0;34m%\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mclass_idx\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mxmlidx\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0murl\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 58\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 59\u001b[0;31m \u001b[0mbbox_coords\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbbox\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mim\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mclass_no\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mclass_idx\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mn_neurons\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mkmax\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m10\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 60\u001b[0m \u001b[0mxmin_out\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mxmax_out\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mymin_out\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mymax_out\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbbox_coords\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 61\u001b[0m \u001b[0mprecision\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0meval_precision\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbbox_coords\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mxmin\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mxmax\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mymin\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mymax\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
32 | "\u001b[0;32m/Users/deepakmenghani/Acads/Stanford/Q5/CS231n/project/FastLocalization/library/localization.py\u001b[0m in \u001b[0;36mbbox\u001b[0;34m(im, model, layer, n_neurons, kmax, class_no)\u001b[0m\n\u001b[1;32m 22\u001b[0m \u001b[0;31m# k_max = Maximum Number of neurons to evaluate\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 23\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 24\u001b[0;31m \u001b[0mmask\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcache\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mget_localization_mask\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mim\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlayer\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mn_neurons\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkmax\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mclass_no\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 25\u001b[0m \u001b[0mclass_no\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0moriginal_size\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcache\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 26\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mget_box_from_mask\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmask\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moriginal_size\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
33 | "\u001b[0;32m/Users/deepakmenghani/Acads/Stanford/Q5/CS231n/project/FastLocalization/library/localization.py\u001b[0m in \u001b[0;36mget_localization_mask\u001b[0;34m(im, model, layer, n_neurons, kmax, class_no)\u001b[0m\n\u001b[1;32m 45\u001b[0m \u001b[0;31m#Get the Filters of Interest in Sorted Order\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 46\u001b[0m \u001b[0mamax\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfilter_of_intr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mactivs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mback_grad\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mkmax\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlayer\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 47\u001b[0;31m \u001b[0mfilter_scores\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mget_filter_scores\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mamax\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mim\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mactivs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mcaches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlayer\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mclass_no\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpercentile_thresh\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m40\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0muse_blob\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 48\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 49\u001b[0m \u001b[0;31m#Union Blobs\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
34 | "\u001b[0;32m/Users/deepakmenghani/Acads/Stanford/Q5/CS231n/project/FastLocalization/deconv_utils.pyc\u001b[0m in \u001b[0;36mget_filter_scores\u001b[0;34m(amax, model, im, activs, caches, layer, class_no, percentile_thresh, use_blob, verbose)\u001b[0m\n\u001b[1;32m 28\u001b[0m \u001b[0mfilter_scores\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 29\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mzero\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0my\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mamax\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 30\u001b[0;31m \u001b[0mback_grad\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdeconv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mactivs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mcaches\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlayer\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0my\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 31\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0muse_blob\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 32\u001b[0m \u001b[0mblob\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfind_blob\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mback_grad\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mpercentile_thresh\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m80\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
35 | "\u001b[0;32m/Users/deepakmenghani/Acads/Stanford/Q5/CS231n/project/FastLocalization/deconv_utils.pyc\u001b[0m in \u001b[0;36mdeconv\u001b[0;34m(model, activs, caches, layer, neuron)\u001b[0m\n\u001b[1;32m 83\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mreversed\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlayer\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 84\u001b[0m \u001b[0mback_grad\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mback_grad\u001b[0m\u001b[0;34m>\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mback_grad\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 85\u001b[0;31m \u001b[0mback_grad\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbackward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mback_grad\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mcaches\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 86\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mback_grad\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 87\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
36 | "\u001b[0;32m/Users/deepakmenghani/Acads/Stanford/Q5/CS231n/project/FastLocalization/library/classifiers/pretrained_vgg16.pyc\u001b[0m in \u001b[0;36mbackward\u001b[0;34m(self, dout, cache)\u001b[0m\n\u001b[1;32m 290\u001b[0m \u001b[0;31m# This is a conv layer\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 291\u001b[0m \u001b[0;32mif\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mmax_pool_layers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 292\u001b[0;31m \u001b[0mtemp\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mconv_relu_pool_backward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdnext_a\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlayer_caches\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mj\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 293\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 294\u001b[0m \u001b[0mtemp\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mconv_relu_backward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdnext_a\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlayer_caches\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mj\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
37 | "\u001b[0;32m/Users/deepakmenghani/Acads/Stanford/Q5/CS231n/project/FastLocalization/library/layer_utils.pyc\u001b[0m in \u001b[0;36mconv_relu_pool_backward\u001b[0;34m(dout, cache)\u001b[0m\n\u001b[1;32m 138\u001b[0m \u001b[0mds\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmax_pool_backward_fast\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdout\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpool_cache\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 139\u001b[0m \u001b[0mda\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mrelu_backward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mds\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrelu_cache\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 140\u001b[0;31m \u001b[0mdx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdw\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdb\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mconv_backward_fast\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mda\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mconv_cache\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 141\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mdx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdw\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdb\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 142\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
38 | "\u001b[0;32m/Users/deepakmenghani/Acads/Stanford/Q5/CS231n/project/FastLocalization/library/fast_layers.pyc\u001b[0m in \u001b[0;36mconv_backward_strides\u001b[0;34m(dout, cache)\u001b[0m\n\u001b[1;32m 97\u001b[0m \u001b[0mdw\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdout_reshaped\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx_cols\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mT\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreshape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mw\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 98\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 99\u001b[0;31m \u001b[0mdx_cols\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mw\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreshape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mF\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mT\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdout_reshaped\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 100\u001b[0m \u001b[0mdx_cols\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mC\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mHH\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mWW\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mN\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mout_h\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mout_w\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 101\u001b[0m \u001b[0mdx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcol2im_6d_cython\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdx_cols\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mN\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mC\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mH\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mW\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mHH\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mWW\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpad\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstride\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
39 | "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
40 | ]
41 | }
42 | ],
43 | "source": [
44 | "import types\n",
45 | "from library.localization import *\n",
46 | "from deconv_utils import *\n",
47 | "from library.image_utils import *\n",
48 | "import csv\n",
49 | "import sys\n",
50 | "\n",
51 | "# for auto-reloading external modules\n",
52 | "# see http://stackoverflow.com/questions/1907993/autoreload-of-modules-in-ipython\n",
53 | "%load_ext autoreload\n",
54 | "%autoreload 2\n",
55 | "\n",
56 | "#Validation & Model File Inputs\n",
57 | "data_file = \"Data/validation_1\"\n",
58 | "data_extn = \".pkl\"\n",
59 | "results_file = data_file + \"_results.csv\"\n",
60 | "model_file = 'Data/vgg16_weights.h5'\n",
61 | "class_file = 'Data/CLASSES.pkl'\n",
62 | "\n",
63 | "#Image Processing Inputs\n",
64 | "Param.num_dilation = 15\n",
65 | "\n",
66 | "start_index = 0\n",
67 | "try:\n",
68 | " #Check if csv file exists and read the last index outputted\n",
69 | " results_csv = open(results_file,'r')\n",
70 | " lastline = results_csv.read().split('\\n')[-2]\n",
71 | " start_index = int(lastline.split(',')[0]) + 1\n",
72 | "except IOError as e:\n",
73 | " pass\n",
74 | "\n",
75 | "#Write to CSV file\n",
76 | "results_csv = open(results_file,'a')\n",
77 | "\n",
78 | "#Load the VGG Model and ImageNet Class Mappings\n",
79 | "from library.classifiers.pretrained_vgg16 import PretrainedVGG\n",
80 | "model = PretrainedVGG(h5_file = model_file)\n",
81 | "print \"Loaded Model\"\n",
82 | "CLASSES = pickle.load(open(class_file))\n",
83 | "print \"Loaded Class File\"\n",
84 | "\n",
85 | "#Read Input File for the candidates\n",
86 | "candidates = pickle.load(open(data_file+data_extn))\n",
87 | "print \"Loaded Input Data Set\"\n",
88 | "\n",
89 | "#Read Input File and start at the index higher than that of last line\n",
90 | "\n",
91 | "for index in range(start_index,len(candidates)):\n",
92 | " ret = candidates[index]\n",
93 | " class_wnid,imgid,class_idx,xmlidx,url,xmin,xmax,ymin,ymax = ret\n",
94 | " im = image_from_url(url)\n",
95 | " if type(im) == types.NoneType:\n",
96 | " #Debug Print\n",
97 | " print \"[%d] Skip (%d,%d): %s : URL is bad.\"%(index, class_idx,xmlidx, url)\n",
98 | " elif np.mean(im)>=253:\n",
99 | " #Debug Print\n",
100 | " print \"[%d] Skip (%d,%d): %s : URL is Empty(White) Image\"%(index, class_idx,xmlidx,url)\n",
101 | " else: \n",
102 | " bbox_coords = bbox(im,model,class_no=class_idx,n_neurons = 5,kmax=10)\n",
103 | " xmin_out, xmax_out, ymin_out, ymax_out = bbox_coords\n",
104 | " precision = eval_precision(bbox_coords, (xmin,xmax,ymin,ymax))\n",
105 | " #Write Results to file\n",
106 | " result_row = \"%d, %s, %d, %d, %f,\"%(index, imgid, class_idx, xmlidx, precision)\n",
107 | " result_row += \"%d, %d, %d, %d,\"%(xmin, xmax, ymin, ymax,)\n",
108 | " result_row += \"%d, %d, %d, %d\"%(xmin_out, xmax_out, ymin_out, ymax_out)\n",
109 | " result_row += \"\\n\"\n",
110 | " results_csv.write(result_row)\n",
111 | " results_csv.flush()\n",
112 | " #Debug Print\n",
113 | " print \"[%d] %d,%d,%f\"%(index, class_idx,xmlidx,precision)"
114 | ]
115 | },
116 | {
117 | "cell_type": "code",
118 | "execution_count": null,
119 | "metadata": {
120 | "collapsed": true
121 | },
122 | "outputs": [],
123 | "source": []
124 | }
125 | ],
126 | "metadata": {
127 | "kernelspec": {
128 | "display_name": "Python 2",
129 | "language": "python",
130 | "name": "python2"
131 | },
132 | "language_info": {
133 | "codemirror_mode": {
134 | "name": "ipython",
135 | "version": 2
136 | },
137 | "file_extension": ".py",
138 | "mimetype": "text/x-python",
139 | "name": "python",
140 | "nbconvert_exporter": "python",
141 | "pygments_lexer": "ipython2",
142 | "version": "2.7.8"
143 | }
144 | },
145 | "nbformat": 4,
146 | "nbformat_minor": 0
147 | }
148 |
--------------------------------------------------------------------------------
/deconv_utils.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import numpy as np
3 | import matplotlib.pyplot as plt
4 | from scipy.misc import imread, imresize
5 | from scipy import ndimage
6 |
7 | #Global Variables
8 |
9 | #Parameters to Process blob
10 | class Param:
11 | process_conv_size = (4,4)
12 | num_dilation = 20
13 | num_erosion = 15
14 |
15 | def get_filter_scores2(amax, model, im, activs,caches,layer, class_no, percentile_thresh=80):
16 | filter_scores = []
17 | for zero,i,x,y in amax:
18 | #print i,(x,y)
19 | back_grad = deconv(model,activs,caches,layer,(0,i,x,y))
20 | xmin,xmax,ymin,ymax = find_box(back_grad,percentile_thresh=80)
21 | if (xmin,xmax,ymin,ymax) == (0,0,0,0):
22 | continue
23 | n_score = get_score(im,class_no,model, xmin,xmax,ymin,ymax)
24 | filter_scores += [[i,n_score,xmin,xmax,ymin,ymax]]
25 | return filter_scores
26 |
27 | def get_filter_scores(amax, model, im, activs,caches,layer, class_no, percentile_thresh=80,use_blob = False, verbose=False):
28 | filter_scores = []
29 | for zero,i,x,y in amax:
30 | back_grad = deconv(model,activs,caches,layer,(0,i,x,y))
31 | plt.figure()
32 | plt.imshow(back_grad[0].transpose(1,2,0))
33 | if use_blob:
34 | blob = find_blob(back_grad,percentile_thresh=80)
35 | else:
36 | xmin,xmax,ymin,ymax = find_box(back_grad,percentile_thresh=percentile_thresh)
37 | blob = np.zeros(im.shape)
38 | blob[0,:,xmin:(xmax+1),ymin:(ymax+1)] = 1
39 | if np.sum(blob) == 0:
40 | continue
41 | n_score = get_score(im,class_no,model, mask = blob)
42 | if (verbose):
43 | print "score =" ,n_score
44 | if use_blob:
45 | filter_scores += [[i,n_score,blob,0,0,0,0]]
46 | else:
47 | filter_scores += [[i,n_score,blob,xmin,xmax,ymin,ymax]]
48 | return filter_scores
49 |
50 | def get_fast_filter_scores(amax, model, im, activs,caches,layer, class_no, percentile_thresh=80,use_blob = False, verbose=False):
51 | filter_scores = []
52 | k=0
53 | for zero,i,x,y in amax:
54 | #if np.sum(activs[layer][0,i]>0)
55 | back_grad = deconv(model,activs,caches,layer,(0,i,x,y))
56 | if use_blob:
57 | blob = find_blob(back_grad,percentile_thresh=80)
58 | else:
59 | xmin,xmax,ymin,ymax = find_box(back_grad,percentile_thresh=percentile_thresh)
60 | blob = np.zeros(im.shape)
61 | blob[0,:,xmin:(xmax+1),ymin:(ymax+1)] = 1
62 | if np.sum(blob) == 0:
63 | continue
64 | n_score = -k
65 | if verbose:
66 | print "score =" ,n_score
67 | if use_blob:
68 | filter_scores += [[i,n_score,blob,0,0,0,0]]
69 | else:
70 | filter_scores += [[i,n_score,blob,xmin,xmax,ymin,ymax]]
71 | k=k+1
72 | return filter_scores
73 |
74 | def get_backgrad (activs, model, class_no, layer, caches):
75 | back_grad= np.zeros(activs[15].shape)
76 | back_grad[0,class_no]=1
77 | for i in reversed(range(layer,16)):
78 | back_grad = (back_grad>0)*back_grad
79 | back_grad, _ = model.backward(back_grad,caches[i])
80 | return back_grad
81 |
82 | def deconv(model,activs,caches,layer,neuron):
83 | back_grad = np.zeros(activs[layer].shape)
84 | back_grad[neuron] = 1
85 | for i in reversed(range(layer+1)):
86 | back_grad = (back_grad>0)*back_grad
87 | back_grad, _ = model.backward(back_grad,caches[i])
88 | return back_grad
89 |
90 | #Define Function for deconv
91 | def deconv_2(model,activs, caches,layer,neuron,slayer):
92 | back_grad = np.zeros(activs[slayer].shape)
93 | print back_grad.shape
94 | if(len(neuron)==3):
95 | back_grad[neuron[0],neuron[1]] = 1
96 | else:
97 | back_grad[neuron] = 1
98 | for i in reversed(range(layer,slayer+1)):
99 | back_grad = (back_grad>0)*back_grad
100 | back_grad, _ = model.backward(back_grad,caches[i])
101 | return back_grad
102 |
103 | #Takes the preprocessed image and returns the activations and caches for the entire forward pass
104 | def get_activs(model, im, num_layers=16):
105 | activ = im
106 | caches = []
107 | activs = []
108 | for i in range(num_layers):
109 | out,cache = model.forward(activ,start = i, end=i)
110 | activ = out;
111 | activs += [activ]
112 | caches += [cache]
113 | return activs,caches
114 |
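get_activs and deconv are the two halves of the deconvolution trick driven by library/localization.py: one cached forward pass, then a backward pass that projects a single neuron back to pixel space (get_backgrad plays the analogous trick for a whole class score). A rough sketch of how they compose; the image, block index and the purely activation-based neuron choice below are arbitrary illustrations, and model is a PretrainedVGG instance as in Validation.ipynb:

    im = load_image('Images/cat.jpg')          # preprocessed to (1,3,224,224)
    activs, caches = get_activs(model, im)     # per-block outputs and caches
    layer = 10                                 # arbitrary conv block index
    # strongest-responding neuron at that block, projected back to the input
    zero, i, x, y = np.unravel_index(np.argmax(activs[layer]), activs[layer].shape)
    grad_im = deconv(model, activs, caches, layer, (0, i, x, y))
    plt.imshow(np.mean(grad_im[0], axis=0))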
115 | ###############TO-DO###################
116 | def deconv_batch(model, ims, layer=10):
117 | #Function to get deconv of a batch of images
118 | #ims : N X 3 X 224 X 224
119 | pass
120 |
121 | ##############TO DO###########
122 | #Function to get blob
123 | def get_blob():
124 | pass
125 |
126 | #Image Utilities
127 | def load_image(imgf):
128 | im = imread(imgf)
129 | im = resize_image(im)
130 | return process_image(im)
131 |
132 | def load_image_cv2(imgf):
133 | im = cv2.imread(imgf)
134 | im = resize_image(im)
135 | return process_image(im)
136 |
137 | def resize_image(im):
138 | return cv2.resize(im, (224, 224)).astype(np.float32)
139 |
140 | def process_image(im):
141 | im[:,:,0] -= 103.939
142 | im[:,:,1] -= 116.779
143 | im[:,:,2] -= 123.68
144 | im = im.transpose((2,0,1))
145 | im = np.expand_dims(im, axis=0)
146 | return im
147 |
148 | def deprocess_image(img):
149 | im = img[0].transpose(1,2,0)
150 | im[:,:,0] += 103.939
151 | im[:,:,1] += 116.779
152 | im[:,:,2] += 123.68
153 | im = im.astype(np.uint8)
154 | return im
155 |
156 | #Takes a preprocessed image, deprocesses it and plots it
157 | def plot_image(im):
158 | im = deprocess_image(im)
159 | plt.figure()
160 | plt.imshow(im)
161 |
162 | def plot_image_cv2(im):
163 | im = deprocess_image(im)
164 | im = cv2.cvtColor(im, cv2.cv.CV_BGR2RGB)
165 | plt.figure()
166 | plt.imshow(im)
167 | #cv.imshow("Image",im)
168 |
169 | #import matplotlib.pyplot as plt
170 | def grid_plot_activs(act):
171 | grid = visualize_grid((act).transpose(1,2,3,0))
172 | plt.imshow(grid.transpose(2,0,1)[0])
173 | #plt.axis('off')
174 | plt.gcf().set_size_inches(10, 10)
175 | plt.show()
176 |
177 | def find_box(back_grad, percentile_thresh = 40):
178 | meanimg = np.mean(back_grad[0],axis=0)
179 | if np.sum(abs(meanimg)) < 1e-16:
180 | return 0,0,0,0
181 | thresh = np.percentile(meanimg[meanimg>0],[percentile_thresh])[0]
182 | meanimg = np.mean(back_grad[0],axis=0)
183 | threshimg = np.mean(back_grad[0],axis=0)>thresh
184 | #plt.imshow(np.mean(back_grad[0],axis=0)>thresh)
185 | idxs = np.where(np.sum(threshimg,axis = 1)>0)[0]
186 | xmin,xmax = min(idxs),max(idxs)
187 | idxs = np.where(np.sum(threshimg,axis = 0)>0)[0]
188 | ymin,ymax = min(idxs),max(idxs)
189 | return xmin,xmax,ymin,ymax
190 |
191 | #Get the class score for a given image and class number. If mask != 0, the mask area is used; otherwise it defaults to the rectangle spanned by (xmin,ymin) and (xmax,ymax)
192 | def get_score(im,class_no, model, xmin=0,xmax=0,ymin=0,ymax=0,mask=0):
193 | #print xmin,xmax,ymin,ymax
194 |     if np.sum(mask) == 0:
195 | mask = np.zeros(im.shape)
196 | mask[0,:,xmin:(xmax+1),ymin:(ymax+1)] = 1
197 | newim = im.copy()*mask
198 | scores,_ = model.forward(newim)
199 | return scores[0,class_no]
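
In other words, a candidate region is scored by blanking out everything outside it and re-running the classifier. For example (box coordinates and class index are arbitrary; im is a preprocessed image and model a PretrainedVGG instance):

    # arbitrary box and class index, just to show the two call styles
    s_box  = get_score(im, 281, model, xmin=40, xmax=180, ymin=30, ymax=200)
    s_full = get_score(im, 281, model, mask=np.ones(im.shape))
    print "box score vs full-image score:", s_box, s_full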
200 |
201 | #Given Activations, Deconv, Layer Number
202 | #k = No of Neurons of Interest
203 | #Returns the filters of interest
204 | def filter_of_intr(activs,back_grad,kmax,layer):
205 | cum =[]
206 | tally = []
207 | tmp1 = activs[layer] *back_grad
208 | k=0
209 |     while k < kmax:
210 |         #Pick the neuron with the largest (activation * backward-gradient) response
211 |         idx = np.unravel_index(np.argmax(tmp1), tmp1.shape)
212 |         cum += [idx]
213 |         tally += [tmp1[idx]]
214 |         #Zero it out so the next iteration picks the next-strongest neuron
215 |         tmp1[idx] = 0
216 |         k = k+1
217 |     return cum
218 | 
230 | #Morphological clean-up of a thresholded gradient image: remove speckle with
231 | #erosion/dilation, join nearby regions with a small convolution, then close the
232 | #result into a single blob with Param.num_dilation / Param.num_erosion passes
233 | def process_blob(threshimg):
234 |     cim = ndimage.binary_dilation(threshimg>0)
235 | for i in range(4):
236 | cim = ndimage.binary_erosion(cim>0)
237 | cim=ndimage.binary_dilation(cim>0)
238 |
239 | filterk = np.ones(Param.process_conv_size);
240 | cim = ndimage.convolve(cim, filterk, mode='constant', cval=0.0)
241 |
242 | for i in range(Param.num_dilation):
243 | cim = ndimage.binary_dilation(cim>0)
244 | for i in range(Param.num_erosion):
245 | cim = ndimage.binary_erosion(cim>0)
246 | return cim
247 |
248 | def find_blob(back_grad, percentile_thresh = 40):
249 | meanimg = np.mean(back_grad[0],axis=0)
250 | if np.sum(abs(meanimg)) < 1e-16:
251 | return 0,0,0,0
252 | thresh = np.percentile(meanimg[meanimg>0],[percentile_thresh])[0]
253 | meanimg = np.mean(back_grad[0],axis=0)
254 | threshimg = np.mean(back_grad[0],axis=0)>thresh
255 |
256 | return process_blob(threshimg)
257 |
--------------------------------------------------------------------------------
/frameworkpython:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # what real Python executable to use
4 | PYVER=2.7
5 | PATHTOPYTHON=/usr/local/bin/
6 | PYTHON=${PATHTOPYTHON}python${PYVER}
7 |
8 | # find the root of the virtualenv, it should be the parent of the dir this script is in
9 | ENV=`$PYTHON -c "import os; print os.path.abspath(os.path.join(os.path.dirname(\"$0\"), '..'))"`
10 |
11 | # now run Python with the virtualenv set as Python's HOME
12 | export PYTHONHOME=$ENV
13 | exec $PYTHON "$@"
14 |
--------------------------------------------------------------------------------
/library/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/library/__init__.py
--------------------------------------------------------------------------------
/library/classifiers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/deepakrox/FastObjectLocalization/5dcb923600b0ca3bb7f855f31bc606d85c3b07e8/library/classifiers/__init__.py
--------------------------------------------------------------------------------
/library/classifiers/pretrained_vgg16.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import h5py
3 |
4 | from library.layers import *
5 | from library.fast_layers import *
6 | from library.layer_utils import *
7 |
8 |
9 | class PretrainedVGG(object):
10 | def __init__(self, dtype=np.float32, num_classes=1000, input_size=224, h5_file=None,verbose=False):
11 | #Input Size is 224X224
12 | #Num classes = 1000 for ImageNet
13 |
14 | self.dtype = dtype
15 | self.conv_params = []
16 | self.input_size = input_size
17 | self.num_classes = num_classes
18 |
19 | # TODO: In the future it would be nice if the architecture could be loaded from
20 | # the HDF5 file rather than being hardcoded. For now this will have to do.
21 |
22 | self.conv_params.append({'stride': 1, 'pad': 1})
23 | self.conv_params.append({'stride': 1, 'pad': 1})
24 |
25 | self.conv_params.append({'stride': 1, 'pad': 1})
26 | self.conv_params.append({'stride': 1, 'pad': 1})
27 |
28 | self.conv_params.append({'stride': 1, 'pad': 1})
29 | self.conv_params.append({'stride': 1, 'pad': 1})
30 | self.conv_params.append({'stride': 1, 'pad': 1})
31 |
32 | self.conv_params.append({'stride': 1, 'pad': 1})
33 | self.conv_params.append({'stride': 1, 'pad': 1})
34 | self.conv_params.append({'stride': 1, 'pad': 1})
35 |
36 | self.conv_params.append({'stride': 1, 'pad': 1})
37 | self.conv_params.append({'stride': 1, 'pad': 1})
38 | self.conv_params.append({'stride': 1, 'pad': 1})
39 |
40 | #Map Describing the layer type
41 | self.layer_map = {1:'conv', 2:'relu', 3:'conv', 4: 'relu', 5:'maxpool',
42 | 6:'conv', 7:'relu', 8:'conv', 9: 'relu', 10:'maxpool',
43 | 11:'conv', 12:'relu', 13:'conv', 14: 'relu', 15:'conv', 16:'relu' ,17:'maxpool',
44 | 18:'conv', 19:'relu', 20:'conv', 21: 'relu', 22:'conv', 23:'relu' ,24:'maxpool',
45 | 25:'conv', 26:'relu', 27:'conv', 28: 'relu', 29:'conv', 30:'relu' ,31:'maxpool',
46 | 32:'affine', 33:'relu', 34:'affine', 35:'relu', 36:'affine',37:'softmax'
47 | }
48 | #self. weight_layers = {1:1,2:3,3:6,4:8,5:11,6:13,7:15,8:18,9:20,10:22,11:25,12:27,13:29,14:32,15:34,16:36}
49 | self.weight_layers_inv = {1:1,3:2,6:3,8:4,11:5,13:6,15:7,18:8,20:9,22:10,25:11,27:12,29:13,32:14,34:15,36:16}
50 |
51 | self.filter_sizes = [3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3]
52 | self.num_filters = [64, 64, 128, 128, 256, 256, 256, 512, 512, 512, 512, 512, 512]
53 |
54 | hidden_dim_1 = 4096
55 | hidden_dim_2 = 4096
56 |
57 | cur_size = input_size
58 | prev_dim = 3
59 | self.params = {}
60 |
61 | for i, (f, next_dim) in enumerate(zip(self.filter_sizes, self.num_filters)):
62 | self.params['W%d' % (i + 1)] = np.random.randn(next_dim, prev_dim, f, f)
63 | self.params['b%d' % (i + 1)] = np.zeros(next_dim)
64 | prev_dim = next_dim
65 |
66 | cur_size = cur_size/32
67 |     # Add the fully-connected layers
68 | fan_in = cur_size * cur_size * self.num_filters[-1]
69 | self.params['W%d' % (i + 2)] = np.zeros((fan_in,hidden_dim_1))
70 | self.params['b%d' % (i + 2)] = np.zeros(hidden_dim_1)
71 |
72 | self.params['W%d' % (i + 3)] = np.zeros((hidden_dim_1, hidden_dim_2))
73 | self.params['b%d' % (i + 3)] = np.zeros(hidden_dim_2)
74 |
75 | self.params['W%d' % (i + 4)] = np.zeros((hidden_dim_2, num_classes))
76 | self.params['b%d' % (i + 4)] = np.zeros(num_classes)
77 |
78 | for k, v in self.params.iteritems():
79 | self.params[k] = v.astype(dtype)
80 |
81 | if h5_file is not None:
82 | self.load_weights(h5_file, verbose)
83 | pass
84 |
85 |
86 | def load_weights(self, h5_file, verbose=False):
87 | """
88 | Load pretrained weights from an HDF5 file.
89 |
90 | Inputs:
91 | - h5_file: Path to the HDF5 file where pretrained weights are stored.
92 | - verbose: Whether to print debugging info
93 | """
94 | with h5py.File(h5_file, 'r') as f:
95 | for k, v in f.iteritems():
96 | layer_no = int(k.split('_')[1])
97 | if(layer_no in self.weight_layers_inv.keys() ):
98 | for key,value in v.iteritems():
99 | wt_layer_no = self.weight_layers_inv[layer_no]
100 | if('param_0' in value.name):
101 | param_name = 'W%d' % wt_layer_no
102 | param_value = np.asarray(value.value)
103 | if self.layer_map[layer_no] == 'conv':
104 | NF, D,W,H = param_value.shape
105 | param_value[:,:,range(W-1,-1,-1),:] =param_value.copy()
106 | param_value[:,:,:,range(H-1,-1,-1)] =param_value.copy()
107 | elif ('param_1' in value.name):
108 | param_name = 'b%d' % wt_layer_no
109 | param_value = np.asarray(value.value)
110 | if verbose: print param_name, self.params[param_name].shape
111 | if param_value.shape == self.params[param_name].shape:
112 | self.params[param_name] = param_value
113 | elif param_value.T.shape == self.params[param_name].shape:
114 |           self.params[param_name] = param_value.T
115 | else:
116 | raise ValueError('shapes for %s do not match' % param_name)
117 | for k, v in self.params.iteritems():
118 | self.params[k] = v.astype(self.dtype)
119 |
120 |
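The forward method below, together with the matching backward used by deconv_utils.py, is the block-level API the rest of the project drives. A small sketch (the zero tensor is just a placeholder for a preprocessed image, and the weight path follows the README):

    # placeholder input; the project feeds images preprocessed to (1,3,224,224)
    X = np.zeros((1, 3, 224, 224), dtype=np.float32)
    model = PretrainedVGG(h5_file='Data/vgg16_weights.h5')
    scores, _ = model.forward(X)                    # full pass, scores of shape (1, 1000)
    out, cache = model.forward(X, start=0, end=0)   # only the first conv-relu block
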
121 | def forward(self, X, start=None, end=None, mode='test'):
122 | """
123 | Run part of the model forward, starting and ending at an arbitrary layer,
124 | in either training mode or testing mode.
125 |
126 | You can pass arbitrary input to the starting layer, and you will receive
127 | output from the ending layer and a cache object that can be used to run
128 | the model backward over the same set of layers.
129 |
130 | For the purposes of this function, a "layer" is one of the following blocks:
131 |
132 | [conv3-64 - relu] X 2
133 | MaxPool
134 | [conv3-128 - relu] X 2
135 | MaxPool
136 | [conv3-256 - relu] X 3
137 | MaxPool
138 | [conv3-512 - relu] X 2
139 | [conv3-512 - relu] X 1
140 | MaxPool
141 | [conv3-512 - relu] X 2
142 | [conv3-512 - relu] X 1
143 | MaxPool
144 | [affine - relu] X 2
145 | [affine]
146 | Softmax
147 |
148 | Layer 0
149 | 1 - Conv3-64
150 | 2 - Relu
151 | 3 - Conv3-64
152 | 4 - Relu
153 | 5 - Maxpool
154 |
155 | 6 - Conv3-128
156 | 7 - Relu
157 | 8 - Conv3-128
158 | 9 - Relu
159 | 10 - Maxpool
160 |
161 | 11 - Conv3-256
162 | 12 - Relu
163 | 13 - Conv3-256
164 | 14 - Relu
165 | 15 - Conv3-256
166 | 16 - Relu
167 | 17 - Maxpool
168 |
169 | 18 - Conv3-512
170 | 19 - Relu
171 | 20 - Conv3-512
172 | 21 - Relu
173 | 22 - Conv3-512
174 | 23 - Relu
175 | 24 - Maxpool
176 |
177 | 25 - Conv3-512
178 | 26 - Relu
179 | 27 - Conv3-512
180 | 28 - Relu
181 |
182 | 29 - Conv3-512
183 | 30 - Relu
184 | 31 - Maxpool
185 |
186 | 32 - Affine FC-4096
187 | 33 - Relu
188 | 34 - Affine FC-4096
189 | 35 - Relu
190 | 36 - Affine FC-1000
191 |
192 | 37 - Softmax
193 |
194 |
195 | Inputs:
196 | - X: The input to the starting layer. If start=0, then this should be an
197 | array of shape (N, 3, 224, 224).
198 | - start: The index of the layer to start from. start=0 starts from the first
199 | convolutional layer. Default is 0.
200 | - end: The index of the layer to end at. end=15 ends at the last
201 | fully-connected layer, returning class scores. Default is 15.
202 | - mode: The mode to use, either 'test' or 'train'. This pretrained model has
203 | no batch normalization, so both modes currently behave the same.
204 |
205 | Returns:
206 | - out: Output from the end layer.
207 | - cache: A cache object that can be passed to the backward method to run the
208 | network backward over the same range of layers.
209 | """
210 | max_pool_layers = [2,4,7,10,13]
211 | X = X.astype(self.dtype)
212 | if start is None: start = 0
213 | if end is None: end = len(self.conv_params) + 2
214 | layer_caches = []
215 | pool_params = {'stride':2 , 'pool_height':2, 'pool_width':2 }
216 | prev_a = X
217 | for i in xrange(start, end + 1):
218 | i1 = i + 1
219 | if 0 <= i < len(self.conv_params):
220 | # This is a conv layer
221 | w, b = self.params['W%d' % i1], self.params['b%d' % i1]
222 | conv_param = self.conv_params[i]
223 | if((i+1) in max_pool_layers):
224 | next_a, cache = conv_relu_pool_forward(prev_a, w, b, conv_param, pool_params)
225 | else:
226 | next_a, cache = conv_relu_forward(prev_a, w, b, conv_param)
227 | elif i < len(self.conv_params) + 2:
228 | # This is the fully-connected hidden layer
229 | w, b = self.params['W%d' % i1], self.params['b%d' % i1]
230 | prev_a = prev_a.reshape((-1,w.shape[0]))
231 | next_a, cache = affine_relu_forward(prev_a, w, b)
232 | elif i == len(self.conv_params) + 2:
233 | # This is the last fully-connected layer that produces scores
234 | w, b = self.params['W%d' % i1], self.params['b%d' % i1]
235 | next_a, cache = affine_forward(prev_a, w, b)
236 | else:
237 | raise ValueError('Invalid layer index %d' % i)
238 |
239 | layer_caches.append(cache)
240 | prev_a = next_a
241 |
242 | out = prev_a
243 | cache = (start, end, layer_caches)
244 | return out, cache
245 |
246 |
247 | def backward(self, dout, cache):
248 | """
249 | Run the model backward over a sequence of layers that were previously run
250 | forward using the self.forward method.
251 |
252 | Inputs:
253 | - dout: Gradient with respect to the ending layer; this should have the same
254 | shape as the out variable returned from the corresponding call to forward.
255 | - cache: A cache object returned from self.forward.
256 |
257 | Returns:
258 | - dX: Gradient with respect to the start layer. This will have the same
259 | shape as the input X passed to self.forward.
260 | - grads: Gradient of all parameters in the layers. For example if you run
261 | forward through two convolutional layers, then on the corresponding call
262 | to backward grads will contain the gradients with respect to the weights,
263 | biases, and spatial batchnorm parameters of those two convolutional
264 | layers. The grads dictionary will therefore contain a subset of the keys
265 | of self.params, and grads[k] and self.params[k] will have the same shape.
266 | """
267 | start, end, layer_caches = cache
268 | dnext_a = dout
269 | grads = {}
270 | max_pool_layers = [2,4,7,10,13]
271 |
272 | j = len(layer_caches) - 1
273 | for i in reversed(range(start, end + 1)):
274 | i1 = i + 1
275 | if i == len(self.conv_params) + 2:
276 | # This is the last fully-connected layer
277 | dprev_a, dw, db = affine_backward(dnext_a, layer_caches[j])
278 | grads['W%d' % i1] = dw
279 | grads['b%d' % i1] = db
280 | elif i >= len(self.conv_params):
281 | # This is the fully-connected hidden layer
283 | temp = affine_relu_backward(dnext_a, layer_caches[j])
284 | dprev_a, dw, db = temp
285 | if i == len(self.conv_params):
286 | dprev_a = dprev_a.reshape((dprev_a.shape[0],512,7,7))
287 | grads['W%d' % i1] = dw
288 | grads['b%d' % i1] = db
289 | elif 0 <= i < len(self.conv_params):
290 | # This is a conv layer
291 | if((i+1) in max_pool_layers):
292 | temp = conv_relu_pool_backward(dnext_a,layer_caches[j] )
293 | else:
294 | temp = conv_relu_backward(dnext_a, layer_caches[j])
295 | dprev_a, dw, db = temp
296 | grads['W%d' % i1] = dw
297 | grads['b%d' % i1] = db
298 | else:
299 | raise ValueError('Invalid layer index %d' % i)
300 | dnext_a = dprev_a
301 | j = j-1
302 |
303 | dX = dnext_a
304 | return dX, grads
305 |
306 |
307 | def loss(self, X, y=None):
308 | """
309 | Classification loss used to train the network.
310 |
311 | Inputs:
312 | - X: Array of data, of shape (N, 3, 224, 224)
313 | - y: Array of labels, of shape (N,)
314 |
315 | If y is None, then run a test-time forward pass and return:
316 | - scores: Array of shape (N, 1000) giving class scores.
317 |
318 | If y is not None, then run a training-time forward and backward pass and
319 | return a tuple of:
320 | - loss: Scalar giving loss
321 | - grads: Dictionary of gradients, with the same keys as self.params.
322 | """
323 | # Note that we implement this by just calling self.forward and self.backward
324 | mode = 'test' if y is None else 'train'
325 | scores, cache = self.forward(X, mode=mode)
326 | if mode == 'test':
327 | return scores
328 | loss, dscores = softmax_loss(scores, y)
329 | dX, grads = self.backward(dscores, cache)
330 | return loss, grads
331 |
332 |
--------------------------------------------------------------------------------
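A minimal usage sketch of the partial forward/backward API documented above (not part of the repository): it runs a full test-mode forward pass and then backpropagates a one-hot gradient on the predicted class to get an input-space gradient. The weight-file path and the random stand-in batch are assumptions.

# Usage sketch; the h5 path is an assumption and X is a random stand-in
# for a preprocessed (N, 3, 224, 224) batch.
import numpy as np
from library.classifiers.pretrained_vgg16 import PretrainedVGG

model = PretrainedVGG(h5_file='Data/vgg16_weights.h5')
X = np.random.randn(1, 3, 224, 224).astype(np.float32)

scores, cache = model.forward(X, mode='test')   # (1, 1000) class scores
dscores = np.zeros_like(scores)
dscores[0, np.argmax(scores[0])] = 1.0          # one-hot on the top class
dX, grads = model.backward(dscores, cache)      # dX has the same shape as X
print dX.shape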
/library/data_utils.py:
--------------------------------------------------------------------------------
1 | import cPickle as pickle
2 | import numpy as np
3 | import os
4 | from scipy.misc import imread
5 |
6 | def load_CIFAR_batch(filename):
7 | """ load single batch of cifar """
8 | with open(filename, 'rb') as f:
9 | datadict = pickle.load(f)
10 | X = datadict['data']
11 | Y = datadict['labels']
12 | X = X.reshape(10000, 3, 32, 32).transpose(0,2,3,1).astype("float")
13 | Y = np.array(Y)
14 | return X, Y
15 |
16 | def load_CIFAR10(ROOT):
17 | """ load all of cifar """
18 | xs = []
19 | ys = []
20 | for b in range(1,6):
21 | f = os.path.join(ROOT, 'data_batch_%d' % (b, ))
22 | X, Y = load_CIFAR_batch(f)
23 | xs.append(X)
24 | ys.append(Y)
25 | Xtr = np.concatenate(xs)
26 | Ytr = np.concatenate(ys)
27 | del X, Y
28 | Xte, Yte = load_CIFAR_batch(os.path.join(ROOT, 'test_batch'))
29 | return Xtr, Ytr, Xte, Yte
30 |
31 |
32 | def get_CIFAR10_data(num_training=49000, num_validation=1000, num_test=1000,
33 | subtract_mean=True):
34 | """
35 | Load the CIFAR-10 dataset from disk and perform preprocessing to prepare
36 | it for classifiers. These are the same steps as we used for the SVM, but
37 | condensed to a single function.
38 | """
39 | # Load the raw CIFAR-10 data
40 | cifar10_dir = 'cs231n/datasets/cifar-10-batches-py'
41 | X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)
42 |
43 | # Subsample the data
44 | mask = range(num_training, num_training + num_validation)
45 | X_val = X_train[mask]
46 | y_val = y_train[mask]
47 | mask = range(num_training)
48 | X_train = X_train[mask]
49 | y_train = y_train[mask]
50 | mask = range(num_test)
51 | X_test = X_test[mask]
52 | y_test = y_test[mask]
53 |
54 | # Normalize the data: subtract the mean image
55 | if subtract_mean:
56 | mean_image = np.mean(X_train, axis=0)
57 | X_train -= mean_image
58 | X_val -= mean_image
59 | X_test -= mean_image
60 |
61 | # Transpose so that channels come first
62 | X_train = X_train.transpose(0, 3, 1, 2).copy()
63 | X_val = X_val.transpose(0, 3, 1, 2).copy()
64 | X_test = X_test.transpose(0, 3, 1, 2).copy()
65 |
66 | # Package data into a dictionary
67 | return {
68 | 'X_train': X_train, 'y_train': y_train,
69 | 'X_val': X_val, 'y_val': y_val,
70 | 'X_test': X_test, 'y_test': y_test,
71 | }
72 |
73 |
74 | def load_tiny_imagenet(path, dtype=np.float32, subtract_mean=True):
75 | """
76 | Load TinyImageNet. Each of TinyImageNet-100-A, TinyImageNet-100-B, and
77 | TinyImageNet-200 have the same directory structure, so this can be used
78 | to load any of them.
79 |
80 | Inputs:
81 | - path: String giving path to the directory to load.
82 | - dtype: numpy datatype used to load the data.
83 | - subtract_mean: Whether to subtract the mean training image.
84 |
85 | Returns: A dictionary with the following entries:
86 | - class_names: A list where class_names[i] is a list of strings giving the
87 | WordNet names for class i in the loaded dataset.
88 | - X_train: (N_tr, 3, 64, 64) array of training images
89 | - y_train: (N_tr,) array of training labels
90 | - X_val: (N_val, 3, 64, 64) array of validation images
91 | - y_val: (N_val,) array of validation labels
92 | - X_test: (N_test, 3, 64, 64) array of testing images.
93 | - y_test: (N_test,) array of test labels; if test labels are not available
94 | (such as in student code) then y_test will be None.
95 | - mean_image: (3, 64, 64) array giving mean training image
96 | """
97 | # First load wnids
98 | with open(os.path.join(path, 'wnids.txt'), 'r') as f:
99 | wnids = [x.strip() for x in f]
100 |
101 | # Map wnids to integer labels
102 | wnid_to_label = {wnid: i for i, wnid in enumerate(wnids)}
103 |
104 | # Use words.txt to get names for each class
105 | with open(os.path.join(path, 'words.txt'), 'r') as f:
106 | wnid_to_words = dict(line.split('\t') for line in f)
107 | for wnid, words in wnid_to_words.iteritems():
108 | wnid_to_words[wnid] = [w.strip() for w in words.split(',')]
109 | class_names = [wnid_to_words[wnid] for wnid in wnids]
110 |
111 | # Next load training data.
112 | X_train = []
113 | y_train = []
114 | for i, wnid in enumerate(wnids):
115 | if (i + 1) % 20 == 0:
116 | print 'loading training data for synset %d / %d' % (i + 1, len(wnids))
117 | # To figure out the filenames we need to open the boxes file
118 | boxes_file = os.path.join(path, 'train', wnid, '%s_boxes.txt' % wnid)
119 | with open(boxes_file, 'r') as f:
120 | filenames = [x.split('\t')[0] for x in f]
121 | num_images = len(filenames)
122 |
123 | X_train_block = np.zeros((num_images, 3, 64, 64), dtype=dtype)
124 | y_train_block = wnid_to_label[wnid] * np.ones(num_images, dtype=np.int64)
125 | for j, img_file in enumerate(filenames):
126 | img_file = os.path.join(path, 'train', wnid, 'images', img_file)
127 | img = imread(img_file)
128 | if img.ndim == 2:
129 | ## grayscale file
130 | img.shape = (64, 64, 1)
131 | X_train_block[j] = img.transpose(2, 0, 1)
132 | X_train.append(X_train_block)
133 | y_train.append(y_train_block)
134 |
135 | # We need to concatenate all training data
136 | X_train = np.concatenate(X_train, axis=0)
137 | y_train = np.concatenate(y_train, axis=0)
138 |
139 | # Next load validation data
140 | with open(os.path.join(path, 'val', 'val_annotations.txt'), 'r') as f:
141 | img_files = []
142 | val_wnids = []
143 | for line in f:
144 | img_file, wnid = line.split('\t')[:2]
145 | img_files.append(img_file)
146 | val_wnids.append(wnid)
147 | num_val = len(img_files)
148 | y_val = np.array([wnid_to_label[wnid] for wnid in val_wnids])
149 | X_val = np.zeros((num_val, 3, 64, 64), dtype=dtype)
150 | for i, img_file in enumerate(img_files):
151 | img_file = os.path.join(path, 'val', 'images', img_file)
152 | img = imread(img_file)
153 | if img.ndim == 2:
154 | img.shape = (64, 64, 1)
155 | X_val[i] = img.transpose(2, 0, 1)
156 |
157 | # Next load test images
158 | # Students won't have test labels, so we need to iterate over files in the
159 | # images directory.
160 | img_files = os.listdir(os.path.join(path, 'test', 'images'))
161 | X_test = np.zeros((len(img_files), 3, 64, 64), dtype=dtype)
162 | for i, img_file in enumerate(img_files):
163 | img_file = os.path.join(path, 'test', 'images', img_file)
164 | img = imread(img_file)
165 | if img.ndim == 2:
166 | img.shape = (64, 64, 1)
167 | X_test[i] = img.transpose(2, 0, 1)
168 |
169 | y_test = None
170 | y_test_file = os.path.join(path, 'test', 'test_annotations.txt')
171 | if os.path.isfile(y_test_file):
172 | with open(y_test_file, 'r') as f:
173 | img_file_to_wnid = {}
174 | for line in f:
175 | line = line.split('\t')
176 | img_file_to_wnid[line[0]] = line[1]
177 | y_test = [wnid_to_label[img_file_to_wnid[img_file]] for img_file in img_files]
178 | y_test = np.array(y_test)
179 |
180 | mean_image = X_train.mean(axis=0)
181 | if subtract_mean:
182 | X_train -= mean_image[None]
183 | X_val -= mean_image[None]
184 | X_test -= mean_image[None]
185 |
186 | return {
187 | 'class_names': class_names,
188 | 'X_train': X_train,
189 | 'y_train': y_train,
190 | 'X_val': X_val,
191 | 'y_val': y_val,
192 | 'X_test': X_test,
193 | 'y_test': y_test,
195 | 'mean_image': mean_image,
196 | }
197 |
198 |
199 | def load_models(models_dir):
200 | """
201 | Load saved models from disk. This will attempt to unpickle all files in a
202 | directory; any files that give errors on unpickling (such as README.txt) will
203 | be skipped.
204 |
205 | Inputs:
206 | - models_dir: String giving the path to a directory containing model files.
207 | Each model file is a pickled dictionary with a 'model' field.
208 |
209 | Returns:
210 | A dictionary mapping model file names to models.
211 | """
212 | models = {}
213 | for model_file in os.listdir(models_dir):
214 | with open(os.path.join(models_dir, model_file), 'rb') as f:
215 | try:
216 | models[model_file] = pickle.load(f)['model']
217 | except pickle.UnpicklingError:
218 | continue
219 | return models
220 |
--------------------------------------------------------------------------------
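A brief usage sketch for the loader above (not part of the repository); the TinyImageNet directory path is an assumption and should point at a local copy with the standard wnids.txt/words.txt layout.

# Usage sketch; the dataset path is an assumption.
from library.data_utils import load_tiny_imagenet

data = load_tiny_imagenet('Data/tiny-imagenet-100-A', subtract_mean=True)
print data['X_train'].shape     # (N_tr, 3, 64, 64)
print data['y_train'].shape     # (N_tr,)
print data['mean_image'].shape  # (3, 64, 64)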
/library/fast_layers.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | try:
3 | from library.im2col_cython import col2im_cython, im2col_cython
4 | from library.im2col_cython import col2im_6d_cython
5 | except ImportError:
6 | print 'run the following from the library directory and try again:'
7 | print 'python setup.py build_ext --inplace'
8 | print 'You may also need to restart your iPython kernel'
9 |
10 | from library.im2col import *
11 |
12 |
13 | def conv_forward_im2col(x, w, b, conv_param):
14 | """
15 | A fast implementation of the forward pass for a convolutional layer
16 | based on im2col and col2im.
17 | """
18 | N, C, H, W = x.shape
19 | num_filters, _, filter_height, filter_width = w.shape
20 | stride, pad = conv_param['stride'], conv_param['pad']
21 |
22 | # Check dimensions
23 | assert (W + 2 * pad - filter_width) % stride == 0, 'width does not work'
24 | assert (H + 2 * pad - filter_height) % stride == 0, 'height does not work'
25 |
26 | # Create output
27 | out_height = (H + 2 * pad - filter_height) / stride + 1
28 | out_width = (W + 2 * pad - filter_width) / stride + 1
29 | out = np.zeros((N, num_filters, out_height, out_width), dtype=x.dtype)
30 |
31 | # x_cols = im2col_indices(x, w.shape[2], w.shape[3], pad, stride)
32 | x_cols = im2col_cython(x, w.shape[2], w.shape[3], pad, stride)
33 | res = w.reshape((w.shape[0], -1)).dot(x_cols) + b.reshape(-1, 1)
34 |
35 | out = res.reshape(w.shape[0], out.shape[2], out.shape[3], x.shape[0])
36 | out = out.transpose(3, 0, 1, 2)
37 |
38 | cache = (x, w, b, conv_param, x_cols)
39 | return out, cache
40 |
41 |
42 | def conv_forward_strides(x, w, b, conv_param):
43 | N, C, H, W = x.shape
44 | F, _, HH, WW = w.shape
45 | stride, pad = conv_param['stride'], conv_param['pad']
46 |
47 | # Check dimensions
48 | #assert (W + 2 * pad - WW) % stride == 0, 'width does not work'
49 | #assert (H + 2 * pad - HH) % stride == 0, 'height does not work'
50 |
51 | # Pad the input
52 | p = pad
53 | x_padded = np.pad(x, ((0, 0), (0, 0), (p, p), (p, p)), mode='constant')
54 |
55 | # Figure out output dimensions
56 | H += 2 * pad
57 | W += 2 * pad
58 | out_h = (H - HH) / stride + 1
59 | out_w = (W - WW) / stride + 1
60 |
61 | # Perform an im2col operation by picking clever strides
62 | shape = (C, HH, WW, N, out_h, out_w)
63 | strides = (H * W, W, 1, C * H * W, stride * W, stride)
64 | strides = x.itemsize * np.array(strides)
65 | x_stride = np.lib.stride_tricks.as_strided(x_padded,
66 | shape=shape, strides=strides)
67 | x_cols = np.ascontiguousarray(x_stride)
68 | x_cols.shape = (C * HH * WW, N * out_h * out_w)
69 |
70 | # Now all our convolutions are a big matrix multiply
71 | res = w.reshape(F, -1).dot(x_cols) + b.reshape(-1, 1)
72 |
73 | # Reshape the output
74 | res.shape = (F, N, out_h, out_w)
75 | out = res.transpose(1, 0, 2, 3)
76 |
77 | # Be nice and return a contiguous array
78 | # The old version of conv_forward_fast doesn't do this, so for a fair
79 | # comparison we won't either
80 | out = np.ascontiguousarray(out)
81 |
82 | cache = (x, w, b, conv_param, x_cols)
83 | return out, cache
84 |
85 |
86 | def conv_backward_strides(dout, cache):
87 | x, w, b, conv_param, x_cols = cache
88 | stride, pad = conv_param['stride'], conv_param['pad']
89 |
90 | N, C, H, W = x.shape
91 | F, _, HH, WW = w.shape
92 | _, _, out_h, out_w = dout.shape
93 |
94 | db = np.sum(dout, axis=(0, 2, 3))
95 |
96 | dout_reshaped = dout.transpose(1, 0, 2, 3).reshape(F, -1)
97 | dw = dout_reshaped.dot(x_cols.T).reshape(w.shape)
98 |
99 | dx_cols = w.reshape(F, -1).T.dot(dout_reshaped)
100 | dx_cols.shape = (C, HH, WW, N, out_h, out_w)
101 | dx = col2im_6d_cython(dx_cols, N, C, H, W, HH, WW, pad, stride)
102 |
103 | return dx, dw, db
104 |
105 |
106 | def conv_backward_im2col(dout, cache):
107 | """
108 | A fast implementation of the backward pass for a convolutional layer
109 | based on im2col and col2im.
110 | """
111 | x, w, b, conv_param, x_cols = cache
112 | stride, pad = conv_param['stride'], conv_param['pad']
113 |
114 | db = np.sum(dout, axis=(0, 2, 3))
115 |
116 | num_filters, _, filter_height, filter_width = w.shape
117 | dout_reshaped = dout.transpose(1, 2, 3, 0).reshape(num_filters, -1)
118 | dw = dout_reshaped.dot(x_cols.T).reshape(w.shape)
119 |
120 | dx_cols = w.reshape(num_filters, -1).T.dot(dout_reshaped)
121 | # dx = col2im_indices(dx_cols, x.shape, filter_height, filter_width, pad, stride)
122 | dx = col2im_cython(dx_cols, x.shape[0], x.shape[1], x.shape[2], x.shape[3],
123 | filter_height, filter_width, pad, stride)
124 |
125 | return dx, dw, db
126 |
127 |
128 | conv_forward_fast = conv_forward_strides
129 | conv_backward_fast = conv_backward_strides
130 |
131 |
132 | def max_pool_forward_fast(x, pool_param):
133 | """
134 | A fast implementation of the forward pass for a max pooling layer.
135 |
136 | This chooses between the reshape method and the im2col method. If the pooling
137 | regions are square and tile the input image, then we can use the reshape
138 | method which is very fast. Otherwise we fall back on the im2col method, which
139 | is not much faster than the naive method.
140 | """
141 | N, C, H, W = x.shape
142 | pool_height, pool_width = pool_param['pool_height'], pool_param['pool_width']
143 | stride = pool_param['stride']
144 |
145 | same_size = pool_height == pool_width == stride
146 | tiles = H % pool_height == 0 and W % pool_width == 0
147 | if same_size and tiles:
148 | out, reshape_cache = max_pool_forward_reshape(x, pool_param)
149 | cache = ('reshape', reshape_cache)
150 | else:
151 | out, im2col_cache = max_pool_forward_im2col(x, pool_param)
152 | cache = ('im2col', im2col_cache)
153 | return out, cache
154 |
155 |
156 | def max_pool_backward_fast(dout, cache):
157 | """
158 | A fast implementation of the backward pass for a max pooling layer.
159 |
160 | This switches between the reshape method and the im2col method depending on
161 | which method was used to generate the cache.
162 | """
163 | method, real_cache = cache
164 | if method == 'reshape':
165 | return max_pool_backward_reshape(dout, real_cache)
166 | elif method == 'im2col':
167 | return max_pool_backward_im2col(dout, real_cache)
168 | else:
169 | raise ValueError('Unrecognized method "%s"' % method)
170 |
171 |
172 | def max_pool_forward_reshape(x, pool_param):
173 | """
174 | A fast implementation of the forward pass for the max pooling layer that uses
175 | some clever reshaping.
176 |
177 | This can only be used for square pooling regions that tile the input.
178 | """
179 | N, C, H, W = x.shape
180 | pool_height, pool_width = pool_param['pool_height'], pool_param['pool_width']
181 | stride = pool_param['stride']
182 | assert pool_height == pool_width == stride, 'Invalid pool params'
183 | assert H % pool_height == 0
184 | assert W % pool_height == 0
185 | x_reshaped = x.reshape(N, C, H / pool_height, pool_height,
186 | W / pool_width, pool_width)
187 | out = x_reshaped.max(axis=3).max(axis=4)
188 |
189 | cache = (x, x_reshaped, out)
190 | return out, cache
191 |
192 |
193 | def max_pool_backward_reshape(dout, cache):
194 | """
195 | A fast implementation of the backward pass for the max pooling layer that
196 | uses some clever broadcasting and reshaping.
197 |
198 | This can only be used if the forward pass was computed using
199 | max_pool_forward_reshape.
200 |
201 | NOTE: If there are multiple argmaxes, this method will assign gradient to
202 | ALL argmax elements of the input rather than picking one. In this case the
203 | gradient will actually be incorrect. However this is unlikely to occur in
204 | practice, so it shouldn't matter much. One possible solution is to split the
205 | upstream gradient equally among all argmax elements; this should result in a
206 | valid subgradient. The normalization of dx_reshaped by the mask counts below
207 | implements this split; it adds some extra computation but handles ties
208 | correctly.
209 | """
210 | x, x_reshaped, out = cache
211 |
212 | dx_reshaped = np.zeros_like(x_reshaped)
213 | out_newaxis = out[:, :, :, np.newaxis, :, np.newaxis]
214 | mask = (x_reshaped == out_newaxis)
215 | dout_newaxis = dout[:, :, :, np.newaxis, :, np.newaxis]
216 | dout_broadcast, _ = np.broadcast_arrays(dout_newaxis, dx_reshaped)
217 | dx_reshaped[mask] = dout_broadcast[mask]
218 | dx_reshaped /= np.sum(mask, axis=(3, 5), keepdims=True)
219 | dx = dx_reshaped.reshape(x.shape)
220 |
221 | return dx
222 |
223 |
224 | def max_pool_forward_im2col(x, pool_param):
225 | """
226 | An implementation of the forward pass for max pooling based on im2col.
227 |
228 | This isn't much faster than the naive version, so it should be avoided if
229 | possible.
230 | """
231 | N, C, H, W = x.shape
232 | pool_height, pool_width = pool_param['pool_height'], pool_param['pool_width']
233 | stride = pool_param['stride']
234 |
235 | assert (H - pool_height) % stride == 0, 'Invalid height'
236 | assert (W - pool_width) % stride == 0, 'Invalid width'
237 |
238 | out_height = (H - pool_height) / stride + 1
239 | out_width = (W - pool_width) / stride + 1
240 |
241 | x_split = x.reshape(N * C, 1, H, W)
242 | x_cols = im2col(x_split, pool_height, pool_width, padding=0, stride=stride)
243 | x_cols_argmax = np.argmax(x_cols, axis=0)
244 | x_cols_max = x_cols[x_cols_argmax, np.arange(x_cols.shape[1])]
245 | out = x_cols_max.reshape(out_height, out_width, N, C).transpose(2, 3, 0, 1)
246 |
247 | cache = (x, x_cols, x_cols_argmax, pool_param)
248 | return out, cache
249 |
250 |
251 | def max_pool_backward_im2col(dout, cache):
252 | """
253 | An implementation of the backward pass for max pooling based on im2col.
254 |
255 | This isn't much faster than the naive version, so it should be avoided if
256 | possible.
257 | """
258 | x, x_cols, x_cols_argmax, pool_param = cache
259 | N, C, H, W = x.shape
260 | pool_height, pool_width = pool_param['pool_height'], pool_param['pool_width']
261 | stride = pool_param['stride']
262 |
263 | dout_reshaped = dout.transpose(2, 3, 0, 1).flatten()
264 | dx_cols = np.zeros_like(x_cols)
265 | dx_cols[x_cols_argmax, np.arange(dx_cols.shape[1])] = dout_reshaped
266 | dx = col2im_indices(dx_cols, (N * C, 1, H, W), pool_height, pool_width,
267 | padding=0, stride=stride)
268 | dx = dx.reshape(x.shape)
269 |
270 | return dx
271 |
--------------------------------------------------------------------------------
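A small consistency sketch (not part of the repository) comparing the two fast convolution forward passes above on random data; it assumes the Cython extension has been built (see library/setup.py), since conv_forward_im2col calls im2col_cython.

# The strided and im2col forward passes should agree to floating-point precision.
import numpy as np
from library.fast_layers import conv_forward_strides, conv_forward_im2col

x = np.random.randn(2, 3, 8, 8)
w = np.random.randn(4, 3, 3, 3)
b = np.random.randn(4)
conv_param = {'stride': 1, 'pad': 1}

out_strides, _ = conv_forward_strides(x, w, b, conv_param)
out_im2col, _ = conv_forward_im2col(x, w, b, conv_param)
print np.max(np.abs(out_strides - out_im2col))  # expected to be ~1e-12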
/library/frameworkpython:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # what real Python executable to use
4 | PYVER=2.7
5 | PATHTOPYTHON=/usr/local/bin/
6 | PYTHON=${PATHTOPYTHON}python${PYVER}
7 |
8 | # find the root of the virtualenv, it should be the parent of the dir this script is in
9 | ENV=`$PYTHON -c "import os; print os.path.abspath(os.path.join(os.path.dirname(\"$0\"), '..'))"`
10 |
11 | # now run Python with the virtualenv set as Python's HOME
12 | export PYTHONHOME=$ENV
13 | exec $PYTHON "$@"
14 |
--------------------------------------------------------------------------------
/library/im2col.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
4 | def get_im2col_indices(x_shape, field_height, field_width, padding=1, stride=1):
5 | # First figure out what the size of the output should be
6 | N, C, H, W = x_shape
7 | assert (H + 2 * padding - field_height) % stride == 0
8 | assert (W + 2 * padding - field_width) % stride == 0
9 | out_height = (H + 2 * padding - field_height) / stride + 1
10 | out_width = (W + 2 * padding - field_width) / stride + 1
11 |
12 | i0 = np.repeat(np.arange(field_height), field_width)
13 | i0 = np.tile(i0, C)
14 | i1 = stride * np.repeat(np.arange(out_height), out_width)
15 | j0 = np.tile(np.arange(field_width), field_height * C)
16 | j1 = stride * np.tile(np.arange(out_width), out_height)
17 | i = i0.reshape(-1, 1) + i1.reshape(1, -1)
18 | j = j0.reshape(-1, 1) + j1.reshape(1, -1)
19 |
20 | k = np.repeat(np.arange(C), field_height * field_width).reshape(-1, 1)
21 |
22 | return (k, i, j)
23 |
24 |
25 | def im2col_indices(x, field_height, field_width, padding=1, stride=1):
26 | """ An implementation of im2col based on some fancy indexing """
27 | # Zero-pad the input
28 | p = padding
29 | x_padded = np.pad(x, ((0, 0), (0, 0), (p, p), (p, p)), mode='constant')
30 |
31 | k, i, j = get_im2col_indices(x.shape, field_height, field_width, padding,
32 | stride)
33 |
34 | cols = x_padded[:, k, i, j]
35 | C = x.shape[1]
36 | cols = cols.transpose(1, 2, 0).reshape(field_height * field_width * C, -1)
37 | return cols
38 |
39 |
40 | def col2im_indices(cols, x_shape, field_height=3, field_width=3, padding=1,
41 | stride=1):
42 | """ An implementation of col2im based on fancy indexing and np.add.at """
43 | N, C, H, W = x_shape
44 | H_padded, W_padded = H + 2 * padding, W + 2 * padding
45 | x_padded = np.zeros((N, C, H_padded, W_padded), dtype=cols.dtype)
46 | k, i, j = get_im2col_indices(x_shape, field_height, field_width, padding,
47 | stride)
48 | cols_reshaped = cols.reshape(C * field_height * field_width, -1, N)
49 | cols_reshaped = cols_reshaped.transpose(2, 0, 1)
50 | np.add.at(x_padded, (slice(None), k, i, j), cols_reshaped)
51 | if padding == 0:
52 | return x_padded
53 | return x_padded[:, :, padding:-padding, padding:-padding]
54 |
55 | pass
56 |
--------------------------------------------------------------------------------
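A shape sketch for the pure-NumPy im2col/col2im above (not part of the repository); no compiled extension is needed. Note that col2im_indices sums contributions from overlapping patches, so it is not an exact inverse of im2col_indices.

# Shape sketch for im2col_indices / col2im_indices.
import numpy as np
from library.im2col import im2col_indices, col2im_indices

x = np.random.randn(2, 3, 5, 5)
cols = im2col_indices(x, 3, 3, padding=1, stride=1)
print cols.shape    # (3*3*3, 5*5*2) = (27, 50)

x_back = col2im_indices(cols, x.shape, 3, 3, padding=1, stride=1)
print x_back.shape  # (2, 3, 5, 5); overlapping patch entries are summed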
/library/im2col_cython.pyx:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | cimport numpy as np
3 | cimport cython
4 |
5 | # DTYPE = np.float64
6 | # ctypedef np.float64_t DTYPE_t
7 |
8 | ctypedef fused DTYPE_t:
9 | np.float32_t
10 | np.float64_t
11 |
12 | def im2col_cython(np.ndarray[DTYPE_t, ndim=4] x, int field_height,
13 | int field_width, int padding, int stride):
14 | cdef int N = x.shape[0]
15 | cdef int C = x.shape[1]
16 | cdef int H = x.shape[2]
17 | cdef int W = x.shape[3]
18 |
19 | cdef int HH = (H + 2 * padding - field_height) / stride + 1
20 | cdef int WW = (W + 2 * padding - field_width) / stride + 1
21 |
22 | cdef int p = padding
23 | cdef np.ndarray[DTYPE_t, ndim=4] x_padded = np.pad(x,
24 | ((0, 0), (0, 0), (p, p), (p, p)), mode='constant')
25 |
26 | cdef np.ndarray[DTYPE_t, ndim=2] cols = np.zeros(
27 | (C * field_height * field_width, N * HH * WW),
28 | dtype=x.dtype)
29 |
30 | # Moving the inner loop to a C function with no bounds checking works, but does
31 | # not seem to help performance in any measurable way.
32 |
33 | im2col_cython_inner(cols, x_padded, N, C, H, W, HH, WW,
34 | field_height, field_width, padding, stride)
35 | return cols
36 |
37 |
38 | @cython.boundscheck(False)
39 | cdef int im2col_cython_inner(np.ndarray[DTYPE_t, ndim=2] cols,
40 | np.ndarray[DTYPE_t, ndim=4] x_padded,
41 | int N, int C, int H, int W, int HH, int WW,
42 | int field_height, int field_width, int padding, int stride) except? -1:
43 | cdef int c, ii, jj, row, yy, xx, i, col
44 |
45 | for c in range(C):
46 | for yy in range(HH):
47 | for xx in range(WW):
48 | for ii in range(field_height):
49 | for jj in range(field_width):
50 | row = c * field_width * field_height + ii * field_height + jj
51 | for i in range(N):
52 | col = yy * WW * N + xx * N + i
53 | cols[row, col] = x_padded[i, c, stride * yy + ii, stride * xx + jj]
54 |
55 |
56 |
57 | def col2im_cython(np.ndarray[DTYPE_t, ndim=2] cols, int N, int C, int H, int W,
58 | int field_height, int field_width, int padding, int stride):
59 | cdef np.ndarray x = np.empty((N, C, H, W), dtype=cols.dtype)
60 | cdef int HH = (H + 2 * padding - field_height) / stride + 1
61 | cdef int WW = (W + 2 * padding - field_width) / stride + 1
62 | cdef np.ndarray[DTYPE_t, ndim=4] x_padded = np.zeros((N, C, H + 2 * padding, W + 2 * padding),
63 | dtype=cols.dtype)
64 |
65 | # Moving the inner loop to a C-function with no bounds checking improves
66 | # performance quite a bit for col2im.
67 | col2im_cython_inner(cols, x_padded, N, C, H, W, HH, WW,
68 | field_height, field_width, padding, stride)
69 | if padding > 0:
70 | return x_padded[:, :, padding:-padding, padding:-padding]
71 | return x_padded
72 |
73 |
74 | @cython.boundscheck(False)
75 | cdef int col2im_cython_inner(np.ndarray[DTYPE_t, ndim=2] cols,
76 | np.ndarray[DTYPE_t, ndim=4] x_padded,
77 | int N, int C, int H, int W, int HH, int WW,
78 | int field_height, int field_width, int padding, int stride) except? -1:
79 | cdef int c, ii, jj, row, yy, xx, i, col
80 |
81 | for c in range(C):
82 | for ii in range(field_height):
83 | for jj in range(field_width):
84 | row = c * field_width * field_height + ii * field_height + jj
85 | for yy in range(HH):
86 | for xx in range(WW):
87 | for i in range(N):
88 | col = yy * WW * N + xx * N + i
89 | x_padded[i, c, stride * yy + ii, stride * xx + jj] += cols[row, col]
90 |
91 |
92 | @cython.boundscheck(False)
93 | @cython.wraparound(False)
94 | cdef col2im_6d_cython_inner(np.ndarray[DTYPE_t, ndim=6] cols,
95 | np.ndarray[DTYPE_t, ndim=4] x_padded,
96 | int N, int C, int H, int W, int HH, int WW,
97 | int out_h, int out_w, int pad, int stride):
98 |
99 | cdef int c, hh, ww, n, h, w
100 | for n in range(N):
101 | for c in range(C):
102 | for hh in range(HH):
103 | for ww in range(WW):
104 | for h in range(out_h):
105 | for w in range(out_w):
106 | x_padded[n, c, stride * h + hh, stride * w + ww] += cols[c, hh, ww, n, h, w]
107 |
108 |
109 | def col2im_6d_cython(np.ndarray[DTYPE_t, ndim=6] cols, int N, int C, int H, int W,
110 | int HH, int WW, int pad, int stride):
111 | cdef np.ndarray x = np.empty((N, C, H, W), dtype=cols.dtype)
112 | cdef int out_h = (H + 2 * pad - HH) / stride + 1
113 | cdef int out_w = (W + 2 * pad - WW) / stride + 1
114 | cdef np.ndarray[DTYPE_t, ndim=4] x_padded = np.zeros((N, C, H + 2 * pad, W + 2 * pad),
115 | dtype=cols.dtype)
116 |
117 | col2im_6d_cython_inner(cols, x_padded, N, C, H, W, HH, WW, out_h, out_w, pad, stride)
118 |
119 | if pad > 0:
120 | return x_padded[:, :, pad:-pad, pad:-pad]
121 | return x_padded
122 |
--------------------------------------------------------------------------------
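An agreement sketch (not part of the repository) checking the Cython im2col against the pure-Python im2col_indices; it assumes the extension has been compiled with "python setup.py build_ext --inplace" from the library directory. For square receptive fields the two should produce identical columns.

import numpy as np
from library.im2col import im2col_indices
from library.im2col_cython import im2col_cython

x = np.random.randn(2, 3, 7, 7)
cols_py = im2col_indices(x, 3, 3, padding=1, stride=1)
cols_cy = im2col_cython(x, 3, 3, 1, 1)
print np.max(np.abs(cols_py - cols_cy))  # expected to be exactly 0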
/library/image_utils.py:
--------------------------------------------------------------------------------
1 | import urllib2, httplib, os, tempfile
2 | import sys
3 | import cv2
4 | from socket import error as SocketError
5 |
6 | def image_from_url(url):
7 | """
8 | Read an image from a URL. Returns a numpy array with the pixel data.
9 | We write the image to a temporary file then read it back. Kinda gross.
10 | """
11 | try:
12 | f = urllib2.urlopen(url)
13 | _, fname = tempfile.mkstemp()
14 | with open(fname, 'wb') as ff:
15 | ff.write(f.read())
16 | img = cv2.imread(fname)
17 | os.remove(fname)
18 | return img
19 | except urllib2.HTTPError as e:
20 | print 'HTTP Error: ', e.code, url
21 | except urllib2.URLError as e:
22 | print 'URL Error: ', e.reason, url
23 | except ValueError as e:
24 | print 'Value Error: ', url
25 | except SocketError as e:
26 | print 'Socket Error: ', url
27 | except httplib.BadStatusLine as e:
28 | print 'Bad StatusLine Error: ', url
29 |
--------------------------------------------------------------------------------
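A usage sketch for the helper above (not part of the repository); the URL below is a placeholder, not a real dataset entry.

from library.image_utils import image_from_url

im = image_from_url('http://example.com/some_image.jpg')  # placeholder URL
if im is not None:
    print im.shape  # (H, W, 3), BGR channel order from cv2.imread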
/library/layer_utils.py:
--------------------------------------------------------------------------------
1 | from library.layers import *
2 | from library.fast_layers import *
3 |
4 |
5 | def affine_relu_forward(x, w, b):
6 | """
7 | Convenience layer that performs an affine transform followed by a ReLU
8 |
9 | Inputs:
10 | - x: Input to the affine layer
11 | - w, b: Weights for the affine layer
12 |
13 | Returns a tuple of:
14 | - out: Output from the ReLU
15 | - cache: Object to give to the backward pass
16 | """
17 | a, fc_cache = affine_forward(x, w, b)
18 | out, relu_cache = relu_forward(a)
19 | cache = (fc_cache, relu_cache)
20 | return out, cache
21 |
22 |
23 | def affine_relu_backward(dout, cache):
24 | """
25 | Backward pass for the affine-relu convenience layer
26 | """
27 | fc_cache, relu_cache = cache
28 | da = relu_backward(dout, relu_cache)
29 | dx, dw, db = affine_backward(da, fc_cache)
30 | return dx, dw, db
31 |
32 |
33 | def affine_bn_relu_forward(x, w, b, gamma, beta, bn_param):
34 | """
35 | Convenience layer that performs an affine transform, batch normalization,
36 | and ReLU.
37 |
38 | Inputs:
39 | - x: Array of shape (N, D1); input to the affine layer
40 | - w, b: Arrays of shape (D1, D2) and (D2,) giving the weight and bias for
41 | the affine transform.
42 | - gamma, beta: Arrays of shape (D2,) and (D2,) giving scale and shift
43 | parameters for batch normalization.
44 | - bn_param: Dictionary of parameters for batch normalization.
45 |
46 | Returns:
47 | - out: Output from ReLU, of shape (N, D2)
48 | - cache: Object to give to the backward pass.
49 | """
50 | a, fc_cache = affine_forward(x, w, b)
51 | a_bn, bn_cache = batchnorm_forward(a, gamma, beta, bn_param)
52 | out, relu_cache = relu_forward(a_bn)
53 | cache = (fc_cache, bn_cache, relu_cache)
54 | return out, cache
55 |
56 |
57 | def affine_bn_relu_backward(dout, cache):
58 | """
59 | Backward pass for the affine-batchnorm-relu convenience layer.
60 | """
61 | fc_cache, bn_cache, relu_cache = cache
62 | da_bn = relu_backward(dout, relu_cache)
63 | da, dgamma, dbeta = batchnorm_backward(da_bn, bn_cache)
64 | dx, dw, db = affine_backward(da, fc_cache)
65 | return dx, dw, db, dgamma, dbeta
66 |
67 |
68 | def conv_relu_forward(x, w, b, conv_param):
69 | """
70 | A convenience layer that performs a convolution followed by a ReLU.
71 |
72 | Inputs:
73 | - x: Input to the convolutional layer
74 | - w, b, conv_param: Weights and parameters for the convolutional layer
75 |
76 | Returns a tuple of:
77 | - out: Output from the ReLU
78 | - cache: Object to give to the backward pass
79 | """
80 | a, conv_cache = conv_forward_fast(x, w, b, conv_param)
81 | out, relu_cache = relu_forward(a)
82 | cache = (conv_cache, relu_cache)
83 | return out, cache
84 |
85 |
86 | def conv_relu_backward(dout, cache):
87 | """
88 | Backward pass for the conv-relu convenience layer.
89 | """
90 | conv_cache, relu_cache = cache
91 | da = relu_backward(dout, relu_cache)
92 | dx, dw, db = conv_backward_fast(da, conv_cache)
93 | return dx, dw, db
94 |
95 |
96 | def conv_bn_relu_forward(x, w, b, gamma, beta, conv_param, bn_param):
97 | a, conv_cache = conv_forward_fast(x, w, b, conv_param)
98 | an, bn_cache = spatial_batchnorm_forward(a, gamma, beta, bn_param)
99 | out, relu_cache = relu_forward(an)
100 | cache = (conv_cache, bn_cache, relu_cache)
101 | return out, cache
102 |
103 |
104 | def conv_bn_relu_backward(dout, cache):
105 | conv_cache, bn_cache, relu_cache = cache
106 | dan = relu_backward(dout, relu_cache)
107 | da, dgamma, dbeta = spatial_batchnorm_backward(dan, bn_cache)
108 | dx, dw, db = conv_backward_fast(da, conv_cache)
109 | return dx, dw, db, dgamma, dbeta
110 |
111 |
112 | def conv_relu_pool_forward(x, w, b, conv_param, pool_param):
113 | """
114 | Convenience layer that performs a convolution, a ReLU, and a pool.
115 |
116 | Inputs:
117 | - x: Input to the convolutional layer
118 | - w, b, conv_param: Weights and parameters for the convolutional layer
119 | - pool_param: Parameters for the pooling layer
120 |
121 | Returns a tuple of:
122 | - out: Output from the pooling layer
123 | - cache: Object to give to the backward pass
124 | """
125 | a, conv_cache = conv_forward_fast(x, w, b, conv_param)
126 | s, relu_cache = relu_forward(a)
127 | out, pool_cache = max_pool_forward_fast(s, pool_param)
128 | cache = (conv_cache, relu_cache, pool_cache)
129 | return out, cache
130 |
131 |
132 | def conv_relu_pool_backward(dout, cache):
133 | """
134 | Backward pass for the conv-relu-pool convenience layer
135 | """
136 | conv_cache, relu_cache, pool_cache = cache
137 | #print pool_cache
138 | ds = max_pool_backward_fast(dout, pool_cache)
139 | da = relu_backward(ds, relu_cache)
140 | dx, dw, db = conv_backward_fast(da, conv_cache)
141 | return dx, dw, db
142 |
143 |
--------------------------------------------------------------------------------
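A shape sketch for the conv-relu-pool convenience layer above (not part of the repository); the backward pass goes through conv_backward_fast, so it assumes the Cython extension has been built.

import numpy as np
from library.layer_utils import conv_relu_pool_forward, conv_relu_pool_backward

x = np.random.randn(2, 3, 16, 16)
w = np.random.randn(8, 3, 3, 3)
b = np.zeros(8)
conv_param = {'stride': 1, 'pad': 1}
pool_param = {'pool_height': 2, 'pool_width': 2, 'stride': 2}

out, cache = conv_relu_pool_forward(x, w, b, conv_param, pool_param)
print out.shape                     # (2, 8, 8, 8)
dx, dw, db = conv_relu_pool_backward(np.random.randn(*out.shape), cache)
print dx.shape, dw.shape, db.shape  # match x, w and b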
/library/layers.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
4 | def affine_forward(x, w, b):
5 | """
6 | Computes the forward pass for an affine (fully-connected) layer.
7 |
8 | The input x has shape (N, d_1, ..., d_k) where x[i] is the ith input.
9 | We multiply this against a weight matrix of shape (D, M) where
10 | D = \prod_i d_i
11 |
12 | Inputs:
13 | x - Input data, of shape (N, d_1, ..., d_k)
14 | w - Weights, of shape (D, M)
15 | b - Biases, of shape (M,)
16 |
17 | Returns a tuple of:
18 | - out: output, of shape (N, M)
19 | - cache: (x, w, b)
20 | """
21 | out = x.reshape(x.shape[0], -1).dot(w) + b
22 | cache = (x, w, b)
23 | return out, cache
24 |
25 |
26 | def affine_backward(dout, cache):
27 | """
28 | Computes the backward pass for an affine layer.
29 |
30 | Inputs:
31 | - dout: Upstream derivative, of shape (N, M)
32 | - cache: Tuple of:
33 | - x: Input data, of shape (N, d_1, ... d_k)
34 | - w: Weights, of shape (D, M)
35 |
36 | Returns a tuple of:
37 | - dx: Gradient with respect to x, of shape (N, d1, ..., d_k)
38 | - dw: Gradient with respect to w, of shape (D, M)
39 | - db: Gradient with respect to b, of shape (M,)
40 | """
41 | x, w, b = cache
42 | dx = dout.dot(w.T).reshape(x.shape)
43 | dw = x.reshape(x.shape[0], -1).T.dot(dout)
44 | db = np.sum(dout, axis=0)
45 | return dx, dw, db
46 |
47 |
48 | def relu_forward(x):
49 | """
50 | Computes the forward pass for a layer of rectified linear units (ReLUs).
51 |
52 | Input:
53 | - x: Inputs, of any shape
54 |
55 | Returns a tuple of:
56 | - out: Output, of the same shape as x
57 | - cache: x
58 | """
59 | out = np.maximum(0, x)
60 | cache = x
61 | return out, cache
62 |
63 |
64 | def relu_backward(dout, cache):
65 | """
66 | Computes the backward pass for a layer of rectified linear units (ReLUs).
67 |
68 | Input:
69 | - dout: Upstream derivatives, of any shape
70 | - cache: Input x, of same shape as dout
71 |
72 | Returns:
73 | - dx: Gradient with respect to x
74 | """
75 | x = cache
76 | dx = np.where(x > 0, dout, 0)
77 | return dx
78 |
79 |
80 | def batchnorm_forward(x, gamma, beta, bn_param):
81 | """
82 | Forward pass for batch normalization.
83 |
84 | During training the sample mean and (uncorrected) sample variance are
85 | computed from minibatch statistics and used to normalize the incoming data.
86 | During training we also keep an exponentially decaying running mean of the mean
87 | and variance of each feature, and these averages are used to normalize data
88 | at test-time.
89 |
90 | At each timestep we update the running averages for mean and variance using
91 | an exponential decay based on the momentum parameter:
92 |
93 | running_mean = momentum * running_mean + (1 - momentum) * sample_mean
94 | running_var = momentum * running_var + (1 - momentum) * sample_var
95 |
96 | Note that the batch normalization paper suggests a different test-time
97 | behavior: they compute sample mean and variance for each feature using a
98 | large number of training images rather than using a running average. For
99 | this implementation we have chosen to use running averages instead since
100 | they do not require an additional estimation step; the torch7 implementation
101 | of batch normalization also uses running averages.
102 |
103 | Input:
104 | - x: Data of shape (N, D)
105 | - gamma: Scale parameter of shape (D,)
106 | - beta: Shift paremeter of shape (D,)
107 | - bn_param: Dictionary with the following keys:
108 | - mode: 'train' or 'test'; required
109 | - eps: Constant for numeric stability
110 | - momentum: Constant for running mean / variance.
111 | - running_mean: Array of shape (D,) giving running mean of features
112 | - running_var Array of shape (D,) giving running variance of features
113 |
114 | Returns a tuple of:
115 | - out: of shape (N, D)
116 | - cache: A tuple of values needed in the backward pass
117 | """
118 | mode = bn_param['mode']
119 | eps = bn_param.get('eps', 1e-5)
120 | momentum = bn_param.get('momentum', 0.9)
121 |
122 | N, D = x.shape
123 | running_mean = bn_param.get('running_mean', np.zeros(D, dtype=x.dtype))
124 | running_var = bn_param.get('running_var', np.zeros(D, dtype=x.dtype))
125 |
126 | out, cache = None, None
127 | if mode == 'train':
128 | # Compute output
129 | mu = x.mean(axis=0)
130 | xc = x - mu
131 | var = np.mean(xc ** 2, axis=0)
132 | std = np.sqrt(var + eps)
133 | xn = xc / std
134 | out = gamma * xn + beta
135 |
136 | cache = (mode, x, gamma, xc, std, xn, out)
137 |
138 | # Update running average of mean
139 | running_mean *= momentum
140 | running_mean += (1 - momentum) * mu
141 |
142 | # Update running average of variance
143 | running_var *= momentum
144 | running_var += (1 - momentum) * var
145 | elif mode == 'test':
146 | # Using running mean and variance to normalize
147 | std = np.sqrt(running_var + eps)
148 | xn = (x - running_mean) / std
149 | out = gamma * xn + beta
150 | cache = (mode, x, xn, gamma, beta, std)
151 | else:
152 | raise ValueError('Invalid forward batchnorm mode "%s"' % mode)
153 |
154 | # Store the updated running means back into bn_param
155 | bn_param['running_mean'] = running_mean
156 | bn_param['running_var'] = running_var
157 |
158 | return out, cache
159 |
160 |
161 | def batchnorm_backward(dout, cache):
162 | """
163 | Backward pass for batch normalization.
164 |
165 | For this implementation, you should write out a computation graph for
166 | batch normalization on paper and propagate gradients backward through
167 | intermediate nodes.
168 |
169 | Inputs:
170 | - dout: Upstream derivatives, of shape (N, D)
171 | - cache: Variable of intermediates from batchnorm_forward.
172 |
173 | Returns a tuple of:
174 | - dx: Gradient with respect to inputs x, of shape (N, D)
175 | - dgamma: Gradient with respect to scale parameter gamma, of shape (D,)
176 | - dbeta: Gradient with respect to shift parameter beta, of shape (D,)
177 | """
178 | mode = cache[0]
179 | if mode == 'train':
180 | mode, x, gamma, xc, std, xn, out = cache
181 |
182 | N = x.shape[0]
183 | dbeta = dout.sum(axis=0)
184 | dgamma = np.sum(xn * dout, axis=0)
185 | dxn = gamma * dout
186 | dxc = dxn / std
187 | dstd = -np.sum((dxn * xc) / (std * std), axis=0)
188 | dvar = 0.5 * dstd / std
189 | dxc += (2.0 / N) * xc * dvar
190 | dmu = np.sum(dxc, axis=0)
191 | dx = dxc - dmu / N
192 | elif mode == 'test':
193 | mode, x, xn, gamma, beta, std = cache
194 | dbeta = dout.sum(axis=0)
195 | dgamma = np.sum(xn * dout, axis=0)
196 | dxn = gamma * dout
197 | dx = dxn / std
198 | else:
199 | raise ValueError(mode)
200 |
201 | return dx, dgamma, dbeta
202 |
203 |
204 | def spatial_batchnorm_forward(x, gamma, beta, bn_param):
205 | """
206 | Computes the forward pass for spatial batch normalization.
207 |
208 | Inputs:
209 | - x: Input data of shape (N, C, H, W)
210 | - gamma: Scale parameter, of shape (C,)
211 | - beta: Shift parameter, of shape (C,)
212 | - bn_param: Dictionary with the following keys:
213 | - mode: 'train' or 'test'; required
214 | - eps: Constant for numeric stability
215 | - momentum: Constant for running mean / variance. momentum=0 means that
216 | old information is discarded completely at every time step, while
217 | momentum=1 means that new information is never incorporated. The
218 | default of momentum=0.9 should work well in most situations.
219 | - running_mean: Array of shape (D,) giving running mean of features
220 | - running_var Array of shape (D,) giving running variance of features
221 |
222 | Returns a tuple of:
223 | - out: Output data, of shape (N, C, H, W)
224 | - cache: Values needed for the backward pass
225 | """
226 | N, C, H, W = x.shape
227 | x_flat = x.transpose(0, 2, 3, 1).reshape(-1, C)
228 | out_flat, cache = batchnorm_forward(x_flat, gamma, beta, bn_param)
229 | out = out_flat.reshape(N, H, W, C).transpose(0, 3, 1, 2)
230 | return out, cache
231 |
232 |
233 | def spatial_batchnorm_backward(dout, cache):
234 | """
235 | Computes the backward pass for spatial batch normalization.
236 |
237 | Inputs:
238 | - dout: Upstream derivatives, of shape (N, C, H, W)
239 | - cache: Values from the forward pass
240 |
241 | Returns a tuple of:
242 | - dx: Gradient with respect to inputs, of shape (N, C, H, W)
243 | - dgamma: Gradient with respect to scale parameter, of shape (C,)
244 | - dbeta: Gradient with respect to shift parameter, of shape (C,)
245 | """
246 | N, C, H, W = dout.shape
247 | dout_flat = dout.transpose(0, 2, 3, 1).reshape(-1, C)
248 | dx_flat, dgamma, dbeta = batchnorm_backward(dout_flat, cache)
249 | dx = dx_flat.reshape(N, H, W, C).transpose(0, 3, 1, 2)
250 | return dx, dgamma, dbeta
251 |
252 |
253 | def svm_loss(x, y):
254 | """
255 | Computes the loss and gradient for multiclass SVM classification.
256 |
257 | Inputs:
258 | - x: Input data, of shape (N, C) where x[i, j] is the score for the jth class
259 | for the ith input.
260 | - y: Vector of labels, of shape (N,) where y[i] is the label for x[i] and
261 | 0 <= y[i] < C
262 |
263 | Returns a tuple of:
264 | - loss: Scalar giving the loss
265 | - dx: Gradient of the loss with respect to x
266 | """
267 | N = x.shape[0]
268 | correct_class_scores = x[np.arange(N), y]
269 | margins = np.maximum(0, x - correct_class_scores[:, np.newaxis] + 1.0)
270 | margins[np.arange(N), y] = 0
271 | loss = np.sum(margins) / N
272 | num_pos = np.sum(margins > 0, axis=1)
273 | dx = np.zeros_like(x)
274 | dx[margins > 0] = 1
275 | dx[np.arange(N), y] -= num_pos
276 | dx /= N
277 | return loss, dx
278 |
279 |
280 | def softmax_loss(x, y):
281 | """
282 | Computes the loss and gradient for softmax classification.
283 |
284 | Inputs:
285 | - x: Input data, of shape (N, C) where x[i, j] is the score for the jth class
286 | for the ith input.
287 | - y: Vector of labels, of shape (N,) where y[i] is the label for x[i] and
288 | 0 <= y[i] < C
289 |
290 | Returns a tuple of:
291 | - loss: Scalar giving the loss
292 | - dx: Gradient of the loss with respect to x
293 | """
294 | probs = np.exp(x - np.max(x, axis=1, keepdims=True))
295 | probs /= np.sum(probs, axis=1, keepdims=True)
296 | N = x.shape[0]
297 | loss = -np.sum(np.log(probs[np.arange(N), y])) / N
298 | dx = probs.copy()
299 | dx[np.arange(N), y] -= 1
300 | dx /= N
301 | return loss, dx
302 |
303 |
--------------------------------------------------------------------------------
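A finite-difference spot check for softmax_loss (not part of the repository): the analytic gradient on one score entry should match a centered numerical estimate.

import numpy as np
from library.layers import softmax_loss

np.random.seed(0)
x = np.random.randn(5, 10)
y = np.random.randint(10, size=5)
loss, dx = softmax_loss(x, y)

h = 1e-5
xp, xm = x.copy(), x.copy()
xp[0, 3] += h
xm[0, 3] -= h
num_grad = (softmax_loss(xp, y)[0] - softmax_loss(xm, y)[0]) / (2 * h)
print dx[0, 3], num_grad  # should agree to roughly 1e-8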
/library/localization.py:
--------------------------------------------------------------------------------
1 | #Load the model and dependencies
2 | import time, os, json
3 | import numpy as np
4 | from scipy.misc import imread, imresize
5 | import matplotlib.pyplot as plt
6 | import pickle
7 | import cv2, numpy as np
8 | from deconv_utils import *
9 | import copy
10 | from library.classifiers.pretrained_vgg16 import PretrainedVGG
11 |
12 | #Global Variables
13 | percentile_threshold = 40
14 |
15 | #Function to get the bounding box given an image
16 | def bbox(im, model, layer=11, n_neurons=5, kmax=10,class_no=0):
17 | #Inputs
18 | # im = Output of Image file read by cv2 - Not Resized
19 | # model = CNN model
20 | # layer = Layer from which the image has to be extracted
21 | # n_neurons = Number of Neurons to use to build the image
22 | # k_max = Maximum Number of neurons to evaluate
23 |
24 | mask, cache = get_localization_mask(im, model, layer, n_neurons, kmax,class_no)
25 | class_no,original_size = cache
26 | return get_box_from_mask(mask, original_size)
27 |
28 | def get_localization_mask(im, model, layer, n_neurons, kmax, class_no=0 ):
29 | #Get localization Mask for Image
30 |
31 | #Inputs
32 | # im = Output of Image file read by cv2 - Not Resized
33 | # model = CNN model
34 |
35 | original_size = im.shape
36 | im = resize_image(im)
37 | im = process_image(im)
38 | activs, caches = get_activs(model, im)
39 |
40 | if (class_no==0):
41 | class_no = np.argmax(activs[15][0])
42 |
43 | back_grad = get_backgrad(activs, model, class_no, layer, caches)
44 |
45 | #Get the Filters of Interest in Sorted Order
46 | amax = filter_of_intr(activs,back_grad,kmax,layer)
47 | filter_scores = get_filter_scores(amax, model, im, activs, caches, layer, class_no, percentile_thresh=percentile_threshold, use_blob=True)
48 |
49 | #Union Blobs
50 | sorted_scores = sorted(filter_scores,key=lambda x:-x[1])
51 | mask = np.zeros(im.shape)
52 | for k in range(n_neurons):
53 | i,n_score,blob,xmin,xmax,ymin,ymax=sorted_scores[k]
54 | mask = (mask+blob)>0
55 |
56 | localization_cache = class_no,original_size
57 | return mask, localization_cache
58 |
59 | def get_box_from_mask(mask, original_size=0):
60 | #To get the co-ordinates of Rectangle around the box
61 | #Inputs
62 | # Mask - N X D X W X H
63 | # original_size is the raw cv2 image shape - (H_orig, W_orig, channels)
64 |
65 | mask = mask[0]
66 | flat_mask = np.sum(mask, axis = 0)
67 |
68 | col_max = np.max(flat_mask, axis = 0)
69 | row_max = np.max(flat_mask, axis = 1)
70 |
71 | col_idxs = np.where(col_max>0)
72 | xmin = col_idxs[0][0]
73 | xmax = col_idxs[0][-1]
74 |
75 | row_idxs = np.where(row_max>0)
76 | ymin = row_idxs[0][0]
77 | ymax = row_idxs[0][-1]
78 |
79 | if (original_size!=0):
80 | H_orig, W_orig, _ = original_size
81 | xmin = int(xmin*W_orig/224)
82 | xmax = int(xmax*W_orig/224)
83 | ymin = int(ymin*H_orig/224)
84 | ymax = int(ymax*H_orig/224)
85 |
86 | bbox = (xmin, xmax, ymin, ymax)
87 | return bbox
88 |
89 | def visualize(im, bbox_cords):
90 | xmin, xmax, ymin, ymax = bbox_cords
91 | im = im.astype(np.uint8)
92 | new_im = im.copy()
93 | mask = np.zeros(im.shape)
94 | mask[ymin:ymax,xmin:xmax] = 1
95 | new_im = new_im*mask
96 | new_im = new_im.astype(np.uint8)
97 | new_im = cv2.cvtColor(new_im, cv2.cv.CV_BGR2RGB)
98 | plt.imshow(new_im)
99 |
100 | def calculate_area(c):
101 | xmin,xmax,ymin,ymax = c
102 | return ((xmax-xmin)*(ymax-ymin))
103 |
104 | def calculate_overlap(xL1,xH1, xL2, xH2):
105 | if(xH1>=xL2):
106 | if(xH2>=xH1):
107 | return (xH1-xL2)
108 | else:
109 | #Box 2 axis lies inside 1
110 | return (xH2-xL2)
111 | else:
112 | return 0
113 |
114 | def eval_precision(c1,c2):
115 | xmin1, xmax1, ymin1, ymax1 = c1
116 | xmin2, xmax2, ymin2, ymax2 = c2
117 |
118 | #Order by X
119 | if (xmin2>=xmin1):
120 | x_overlap = calculate_overlap(xmin1,xmax1,xmin2,xmax2)
121 | else:
122 | x_overlap = calculate_overlap(xmin2,xmax2,xmin1,xmax1)
123 |
124 | if (ymin2>=ymin1):
125 | y_overlap = calculate_overlap(ymin1,ymax1,ymin2,ymax2)
126 | else:
127 | y_overlap = calculate_overlap(ymin2,ymax2,ymin1,ymax1)
128 |
129 | intersection = x_overlap*y_overlap
130 | union = calculate_area(c1) + calculate_area(c2) - intersection
131 |
132 | return float(intersection)/float(union)
133 |
134 |
135 | def main():
136 | #Sample Code for an image
137 | #Load the Image
138 | imgf = 'Images/dog.jpg'
139 | im = cv2.imread(imgf)
140 |
141 | #Load the VGG Model and ImageNet Class Mappings
142 | model = PretrainedVGG(h5_file = 'Data/vgg16_weights.h5')
143 | CLASSES = pickle.load(open('Data/CLASSES.pkl'))
144 |
145 | #Layer from which the image will be segmented
146 | layer = 11
147 | #No of neurons to evaluate from ranked list
148 | kmax = 10
149 | #No of neurons to use
150 | n_neurons = 5
151 |
152 | #Get Bounding Box
153 | bbox_coords = bbox(im,model,layer,n_neurons,kmax)
154 | #Visualize the Box on the original image
155 | visualize(im, bbox_coords)
156 | return bbox_coords
--------------------------------------------------------------------------------
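A worked example for eval_precision above (not part of the repository); boxes are given as (xmin, xmax, ymin, ymax). Importing library.localization assumes its dependencies (cv2, deconv_utils) are available.

from library.localization import eval_precision

box_a = (0, 10, 0, 10)   # 10x10 box
box_b = (5, 15, 5, 15)   # 10x10 box shifted by (5, 5)
print eval_precision(box_a, box_b)  # intersection 25, union 175 -> ~0.143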
/library/setup.py:
--------------------------------------------------------------------------------
1 | from distutils.core import setup
2 | from distutils.extension import Extension
3 | from Cython.Build import cythonize
4 | import numpy
5 |
6 | extensions = [
7 | Extension('im2col_cython', ['im2col_cython.pyx'],
8 | include_dirs = [numpy.get_include()]
9 | ),
10 | ]
11 |
12 | setup(
13 | ext_modules = cythonize(extensions),
14 | )
15 |
--------------------------------------------------------------------------------
/validation_script.py:
--------------------------------------------------------------------------------
1 | # Script to run validation for the localization algorithm.
2 | # Takes as input the path to a dataset file; the .pkl extension is appended by the script.
3 | # The dataset is a pkl file whose rows have the format:
4 | # class_wnid,imgid,class_idx,xmlidx,url,xmin,xmax,ymin,ymax = ret
5 | # It uses a VGG16 model and writes results to a results_k*_n* subfolder next to the input, with _results.csv appended to the input file name.
6 | # Hyperparameters used by the algorithm are currently hardcoded in the script.
7 |
8 | import types
9 | from library.localization import *
10 | from deconv_utils import *
11 | from library.image_utils import *
12 | import csv
13 | import sys
14 | import datetime
15 | import time
16 | import os
17 |
18 | #############################################################
19 | #Image Processing Inputs
20 | Param.num_dilation = 15
21 | #############################################################
22 |
23 | def get_time_stamp():
24 | return datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
25 |
26 | def print_and_log(text, filename):
27 | print (get_time_stamp()+text)
28 | filename.write(get_time_stamp()+ " "+text+"\n")
29 |
30 | #HyperParams
31 | kmax_input = 30
32 | n_neurons_input = 5
33 |
34 | #Validation & Model File Inputs
35 | data_file = sys.argv[1]
36 | path = os.path.dirname(data_file)
37 | name = os.path.basename(data_file)
38 | data_extn = ".pkl"
39 | results_path = path + "/results_k"+str(kmax_input)+"_n"+str(n_neurons_input)+"/"
40 | results_file = results_path+ name + "_results.csv"
41 | log_file = results_path+ name + "_log.txt"
42 | model_file = 'Data/vgg16_weights.h5'
43 | class_file = 'Data/CLASSES.pkl'
44 |
45 | start_index = 0
46 | results_csv = None
47 | try:
48 | #Check if csv file exists and read the last index outputted
49 | results_csv = open(results_file,'r')
50 | lastline = results_csv.read()
51 | if lastline!="":
52 | lastline = lastline.split('\n')[-2]
53 | start_index = int(lastline.split(',')[0]) + 1
54 | except IOError as e:
55 | pass
56 | finally:
57 | if results_csv is not None:
58 | results_csv.close()
59 |
60 | try:
61 | #Write to CSV file
62 | results_csv = open(results_file,'a')
63 | log_txt = open(log_file,'a')
64 | msg = ("#################################################################################################\n")
65 | print_and_log(msg, log_txt)
66 |
67 | #Load the VGG Model and ImageNet Class Mappings
68 | from library.classifiers.pretrained_vgg16 import PretrainedVGG
69 | model = PretrainedVGG(h5_file = model_file)
70 | print_and_log("Loaded Model", log_txt)
71 |
72 | CLASSES = pickle.load(open(class_file))
73 | print_and_log("Loaded Class File",log_txt)
74 |
75 | #Read Input File for the candidates
76 | candidates = pickle.load(open(data_file+data_extn))
77 | print_and_log("Loaded Input Data Set",log_txt)
78 |
79 | #Read Input File and start at the index higher than that of last line
80 |
81 | for index in range(start_index,len(candidates)):
82 | log_txt.flush()
83 | ret = candidates[index]
84 | class_wnid,imgid,class_idx,xmlidx,url,xmin,xmax,ymin,ymax = ret
85 | im = image_from_url(url)
86 | if type(im) == types.NoneType:
87 | #Debug Print
88 | msg = "[%d]: Skip (%d,%d): %s : URL is bad."%(index, class_idx,xmlidx, url)
89 | print_and_log(msg,log_txt)
90 | elif np.mean(im)>=253:
91 | #Debug Print
92 | msg = "[%d]: Skip (%d,%d): %s : URL is Empty(White) Image"%(index, class_idx,xmlidx,url)
93 | print_and_log(msg,log_txt)
94 | else:
95 | bbox_coords = bbox(im,model,class_no=class_idx,n_neurons = n_neurons_input,kmax=kmax_input)
96 | xmin_out, xmax_out, ymin_out, ymax_out = bbox_coords
97 | precision = eval_precision(bbox_coords, (xmin,xmax,ymin,ymax))
98 | #Write Results to file
99 | result_row = "%d, %s, %d, %d, %f,"%(index, imgid, class_idx, xmlidx, precision)
100 | result_row += "%d, %d, %d, %d,"%(xmin, xmax, ymin, ymax,)
101 | result_row += "%d, %d, %d, %d"%(xmin_out, xmax_out, ymin_out, ymax_out)
102 | result_row += "\n"
103 | results_csv.write(result_row)
104 | results_csv.flush()
105 | #Debug Print
106 | msg = "[%d]: %d,%d,%f"%(index, class_idx,xmlidx,precision)
107 | print_and_log(msg,log_txt)
108 | except IOError as e:
109 | print 'IO Error: ', e
110 | finally:
111 | results_csv.close()
112 | log_txt.close()
--------------------------------------------------------------------------------
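A post-processing sketch (not part of the repository) for the results CSV written above; per the result_row format, column 4 holds the IoU precision. The file path is an assumption and depends on the input file name and the hyperparameters.

import csv
import numpy as np

# Path is an assumption; it follows results_path + name + "_results.csv" above.
results_file = 'Data/results_k30_n5/validation_1_results.csv'

precisions = []
with open(results_file) as f:
    for row in csv.reader(f):
        precisions.append(float(row[4]))

precisions = np.array(precisions)
print 'mean IoU:', precisions.mean()
print 'fraction with IoU >= 0.5:', np.mean(precisions >= 0.5)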