├── classes.t7 ├── word2vec ├── vectors.h5 ├── vectors.h5.t7 ├── pkl2h5.py └── rows.json ├── scripts ├── plots │ ├── ALL.png │ ├── sem.png │ ├── class.png │ ├── humans.png │ ├── pixels.png │ ├── softsem.png │ └── words.png ├── create_test_data.py ├── preprocess_val.py ├── preprocess_train_data.py └── compute_RDM.py ├── README.txt ├── imagenet_labels ├── get_label_vectors.py ├── words_to_labels.py ├── labels.txt └── ILSVRC2012_mapping.txt ├── main.lua ├── models ├── overfeat.lua ├── ninbn.lua ├── alexnetowt.lua ├── vgg.lua ├── vggbn.lua ├── alexnetowtbn.lua ├── alexnet.lua └── googlenet.lua ├── model.lua ├── eval.lua ├── data.lua ├── util.lua ├── opts.lua ├── LinearNB.lua ├── word2vec.lua ├── test.lua ├── apply_alexnet.lua ├── README.md ├── donkey.lua ├── train.lua └── dataset.lua /classes.t7: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fh295/semanticCNN/HEAD/classes.t7 -------------------------------------------------------------------------------- /word2vec/vectors.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fh295/semanticCNN/HEAD/word2vec/vectors.h5 -------------------------------------------------------------------------------- /scripts/plots/ALL.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fh295/semanticCNN/HEAD/scripts/plots/ALL.png -------------------------------------------------------------------------------- /scripts/plots/sem.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fh295/semanticCNN/HEAD/scripts/plots/sem.png -------------------------------------------------------------------------------- /word2vec/vectors.h5.t7: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fh295/semanticCNN/HEAD/word2vec/vectors.h5.t7 -------------------------------------------------------------------------------- /scripts/plots/class.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fh295/semanticCNN/HEAD/scripts/plots/class.png -------------------------------------------------------------------------------- /scripts/plots/humans.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fh295/semanticCNN/HEAD/scripts/plots/humans.png -------------------------------------------------------------------------------- /scripts/plots/pixels.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fh295/semanticCNN/HEAD/scripts/plots/pixels.png -------------------------------------------------------------------------------- /scripts/plots/softsem.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fh295/semanticCNN/HEAD/scripts/plots/softsem.png -------------------------------------------------------------------------------- /scripts/plots/words.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fh295/semanticCNN/HEAD/scripts/plots/words.png
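
A minimal sketch (not a file from this repo) of reading the word2vec artifacts above back into Python; the dataset name 'vectors' and the row ordering in rows.json are taken from word2vec/pkl2h5.py below, and the file paths are illustrative:

import json
import h5py

# rows.json lists the vocabulary, one word per row of the embedding matrix
with open('word2vec/rows.json') as f:
    rows = json.load(f)

# vectors.h5 stores the matrix under the dataset name 'vectors' (see pkl2h5.py)
with h5py.File('word2vec/vectors.h5', 'r') as f:
    vectors = f['vectors'][:]  # shape: (len(rows), wvectors_dim)

# map each word back to its row index in the matrix
word2row = {w: i for i, w in enumerate(rows)}
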
-------------------------------------------------------------------------------- /README.txt: -------------------------------------------------------------------------------- 1 | get top1 predictions: 2 | 3 | softsem 4 | CUDA_VISIBLE_DEVICES=3 th eval.lua -modelPath imagenet/checkpoint/alexnet\,batchSize=32\,crit=class/MonApr1118\:43\:422016/model_29.t7 -crit softsem -data ../DATA/ > res.prediction.softsem 5 | 6 | sem 7 | CUDA_VISIBLE_DEVICES=3 th eval.lua -modelPath imagenet/checkpoint/MODELS_SEM/model_55.t7 -data ../DATA/ -crit sem > res.prediction.sem 8 | 9 | 10 | class 11 | CUDA_VISIBLE_DEVICES=3 th eval.lua -modelPath imagenet/checkpoint/alexnet\,batchSize=32\,crit=class/MonApr1118\:43\:422016/model_50.t7 -data ../DATA/ -crit class > res.prediction.class 12 | -------------------------------------------------------------------------------- /imagenet_labels/get_label_vectors.py: -------------------------------------------------------------------------------- 1 | import cPickle 2 | import pdb 3 | 4 | with open('mapping.txt') as inp: 5 | D = inp.read().splitlines() 6 | D_split = [x.split('\t') for x in D] 7 | words = [x[1].strip() for x in D_split] 8 | 9 | with open('/home/fh295/Documents/Deep_learning_Bengio/arctic/defgen/D_cbow_pdw_8B.pkl') as inp: 10 | D = cPickle.load(inp) 11 | 12 | 13 | W = {} 14 | for w in words: 15 | try: 16 | W[w] = D[w] 17 | except: 18 | print 'we do not have this word: %s' % (w) 19 | pass 20 | 21 | with open('semantic_vector_dict.pkl','w') as out: 22 | cPickle.dump(W,out) 23 | 24 | -------------------------------------------------------------------------------- /word2vec/pkl2h5.py: -------------------------------------------------------------------------------- 1 | #converts a pkl file to an h5 file 2 | 3 | import numpy as np 4 | import json 5 | import h5py 6 | import pickle 7 | 8 | 9 | #load vectors 10 | vec_file = "semantic_vector_dict.pkl" 11 | with open(vec_file,"r") as f: 12 | vecs = pickle.load(f) 13 | 14 | #write keys 15 | rows = vecs.keys() 16 | keys_file = "rows.json" 17 | with open(keys_file,"w") as f: 18 | json.dump(rows, f) 19 | 20 | a = np.zeros([len(rows),vecs[rows[0]].shape[0]]) 21 | for i,k in enumerate(rows): 22 | a[i] = vecs[k] 23 | 24 | #write vectors 25 | vec_file = "vectors.h5" 26 | f = h5py.File(vec_file, "w") 27 | dset = f.create_dataset("vectors", a.shape, dtype='f', data=a) 28 | 29 | 30 | 31 | -------------------------------------------------------------------------------- /scripts/create_test_data.py: -------------------------------------------------------------------------------- 1 | # creates a small test set by subsampling from training data 2 | 3 | from os import listdir, mkdir 4 | from os.path import join, exists 5 | from random import shuffle 6 | import shutil 7 | 8 
| train_dir = '/local/filespace-2/fh295/DATA/train/' 9 | val_dir = '/local/filespace-2/fh295/DATA/val/' 10 | test_dir = '/local/filespace-2/fh295/DATA/test-small/' 11 | 12 | TST_SIZE = 100 13 | 14 | for cls in listdir(val_dir): 15 | cls_path = join(train_dir, cls) 16 | all_files = listdir(cls_path) 17 | shuffle(all_files) 18 | print(cls) 19 | 20 | new_dir = join(test_dir, cls) 21 | if not exists(new_dir): 22 | mkdir(new_dir) 23 | 24 | 25 | for f in all_files[:TST_SIZE]: 26 | file_path = join(cls_path, f) 27 | shutil.copy(file_path, new_dir) 28 | 29 | 30 | -------------------------------------------------------------------------------- /imagenet_labels/words_to_labels.py: -------------------------------------------------------------------------------- 1 | import pdb 2 | frequency = {} 3 | with open('sorted.uk.word.unigrams','r') as f: 4 | for line in f.readlines(): 5 | els = line.strip().split("\t") 6 | if int(els[0])<10: 7 | break 8 | frequency[els[1]] = els[0] 9 | 10 | 11 | labels = {} 12 | with open('labels.txt') as f: 13 | for line in f.readlines(): 14 | els = line.strip().split("\t") 15 | synset = els[0] 16 | descr = els[1] 17 | candidates = [] 18 | skip = False 19 | for el in descr.strip().split(','): 20 | el = el.strip().rstrip().lower() 21 | if not ' ' in el: 22 | if el in frequency: 23 | candidates.append((el,int(frequency[el]))) 24 | else: 25 | last = el.strip().split(' ')[-1] 26 | if last in frequency: 27 | candidates.append((last, int(frequency[last]))) 28 | if len(candidates)>0: 29 | s = sorted(candidates, key=lambda t: t[1], reverse=True) 30 | labels[synset] = (s[0][0], descr) 31 | for s in labels: 32 | print s,"\t",labels[s][0],"\t",labels[s][1] 33 | 34 | 35 | 36 | -------------------------------------------------------------------------------- /main.lua: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2014, Facebook, Inc. 3 | -- All rights reserved. 4 | -- 5 | -- This source code is licensed under the BSD-style license found in the 6 | -- LICENSE file in the root directory of this source tree. An additional grant 7 | -- of patent rights can be found in the PATENTS file in the same directory. 8 | -- 9 | require 'torch' 10 | require 'cutorch' 11 | require 'paths' 12 | require 'xlua' 13 | require 'optim' 14 | require 'nn' 15 | 16 | torch.setdefaulttensortype('torch.FloatTensor') 17 | 18 | local opts = paths.dofile('opts.lua') 19 | 20 | opt = opts.parse(arg) 21 | 22 | nClasses = opt.nClasses 23 | --THIS IS UGLY!!! 24 | if opt.crit == 'sem' or opt.crit == 'mse' then 25 | topLayer = opt.wvectors_dim 26 | else 27 | topLayer = nClasses 28 | end 29 | print(topLayer) 30 | paths.dofile('util.lua') 31 | paths.dofile('model.lua') 32 | opt.imageSize = model.imageSize or opt.imageSize 33 | opt.imageCrop = model.imageCrop or opt.imageCrop 34 | 35 | print(opt) 36 | 37 | cutorch.setDevice(opt.GPU) -- by default, use GPU 1 38 | torch.manualSeed(opt.manualSeed) 39 | 40 | print('Saving everything to: ' .. opt.save) 41 | os.execute('mkdir -p ' .. 
opt.save) 42 | 43 | paths.dofile('data.lua') 44 | paths.dofile('train.lua') 45 | paths.dofile('test.lua') 46 | 47 | epoch = opt.epochNumber 48 | 49 | for i=1,opt.nEpochs do 50 | train() 51 | -- Need to fix test 52 | -- test() 53 | epoch = epoch + 1 54 | end 55 | -------------------------------------------------------------------------------- /models/overfeat.lua: -------------------------------------------------------------------------------- 1 | function createModel(nGPU) 2 | local features = nn.Sequential() 3 | 4 | features:add(nn.SpatialConvolution(3, 96, 11, 11, 4, 4)) 5 | features:add(nn.ReLU(true)) 6 | features:add(nn.SpatialMaxPooling(2, 2, 2, 2)) 7 | 8 | features:add(nn.SpatialConvolution(96, 256, 5, 5, 1, 1)) 9 | features:add(nn.ReLU(true)) 10 | features:add(nn.SpatialMaxPooling(2, 2, 2, 2)) 11 | 12 | features:add(nn.SpatialConvolution(256, 512, 3, 3, 1, 1, 1, 1)) 13 | features:add(nn.ReLU(true)) 14 | 15 | features:add(nn.SpatialConvolution(512, 1024, 3, 3, 1, 1, 1, 1)) 16 | features:add(nn.ReLU(true)) 17 | 18 | features:add(nn.SpatialConvolution(1024, 1024, 3, 3, 1, 1, 1, 1)) 19 | features:add(nn.ReLU(true)) 20 | features:add(nn.SpatialMaxPooling(2, 2, 2, 2)) 21 | 22 | features:cuda() 23 | features = makeDataParallel(features, nGPU) -- defined in util.lua 24 | 25 | -- 1.3. Create Classifier (fully connected layers) 26 | local classifier = nn.Sequential() 27 | classifier:add(nn.View(1024*5*5)) 28 | classifier:add(nn.Dropout(0.5)) 29 | classifier:add(nn.Linear(1024*5*5, 3072)) 30 | classifier:add(nn.Threshold(0, 1e-6)) 31 | 32 | classifier:add(nn.Dropout(0.5)) 33 | classifier:add(nn.Linear(3072, 4096)) 34 | classifier:add(nn.Threshold(0, 1e-6)) 35 | 36 | classifier:add(nn.Linear(4096, nClasses)) 37 | classifier:add(nn.LogSoftMax()) 38 | 39 | classifier:cuda() 40 | 41 | -- 1.4. Combine 1.2 and 1.3 to produce final model 42 | local model = nn.Sequential():add(features):add(classifier) 43 | model.imageSize = 256 44 | model.imageCrop = 224 45 | return model 46 | end 47 | -------------------------------------------------------------------------------- /models/ninbn.lua: -------------------------------------------------------------------------------- 1 | -- Achieves 62.6% top1 on validation set at 35 epochs with this regime: 2 | -- { 1, 9, 1e-1, 5e-4, }, 3 | -- { 10, 19, 1e-2, 5e-4 }, 4 | -- { 20, 25, 1e-3, 0 }, 5 | -- { 26, 30, 1e-4, 0 }, 6 | -- Trained model: 7 | -- https://gist.github.com/szagoruyko/0f5b4c5e2d2b18472854 8 | 9 | function createModel(nGPU) 10 | local nin = nn.Sequential() 11 | local function block(...) 12 | local arg = {...} 13 | local no = arg[2] 14 | nin:add(nn.SpatialConvolution(...)) 15 | nin:add(nn.SpatialBatchNormalization(no,1e-3)) 16 | nin:add(nn.ReLU(true)) 17 | nin:add(nn.SpatialConvolution(no, no, 1, 1, 1, 1, 0, 0)) 18 | nin:add(nn.SpatialBatchNormalization(no,1e-3)) 19 | nin:add(nn.ReLU(true)) 20 | nin:add(nn.SpatialConvolution(no, no, 1, 1, 1, 1, 0, 0)) 21 | nin:add(nn.SpatialBatchNormalization(no,1e-3)) 22 | nin:add(nn.ReLU(true)) 23 | end 24 | 25 | local function mp(...)
26 | nin:add(nn.SpatialMaxPooling(...)) 27 | end 28 | 29 | block(3, 96, 11, 11, 4, 4, 5, 5) 30 | mp(3, 3, 2, 2, 1, 1) 31 | block(96, 256, 5, 5, 1, 1, 2, 2) 32 | mp(3, 3, 2, 2, 1, 1) 33 | block(256, 384, 3, 3, 1, 1, 1, 1) 34 | mp(3, 3, 2, 2, 1, 1) 35 | block(384, 1024, 3, 3, 1, 1, 1, 1) 36 | 37 | nin:add(nn.SpatialAveragePooling(7, 7, 1, 1)) 38 | nin:add(nn.View(-1):setNumInputDims(3)) 39 | 40 | local model = nn.Sequential() 41 | :add(makeDataParallel(nin, nGPU)) 42 | :add(nn.Linear(1024,1000)) 43 | :add(nn.LogSoftMax()) 44 | 45 | model.imageSize = 256 46 | model.imageCrop = 224 47 | 48 | return model:cuda() 49 | end 50 | -------------------------------------------------------------------------------- /models/alexnetowt.lua: -------------------------------------------------------------------------------- 1 | function createModel(nGPU) 2 | -- from https://code.google.com/p/cuda-convnet2/source/browse/layers/layers-imagenet-1gpu.cfg 3 | -- this is AlexNet that was presented in the One Weird Trick paper. http://arxiv.org/abs/1404.5997 4 | local features = nn.Sequential() 5 | features:add(nn.SpatialConvolution(3,64,11,11,4,4,2,2)) -- 224 -> 55 6 | features:add(nn.ReLU(true)) 7 | features:add(nn.SpatialMaxPooling(3,3,2,2)) -- 55 -> 27 8 | features:add(nn.SpatialConvolution(64,192,5,5,1,1,2,2)) -- 27 -> 27 9 | features:add(nn.ReLU(true)) 10 | features:add(nn.SpatialMaxPooling(3,3,2,2)) -- 27 -> 13 11 | features:add(nn.SpatialConvolution(192,384,3,3,1,1,1,1)) -- 13 -> 13 12 | features:add(nn.ReLU(true)) 13 | features:add(nn.SpatialConvolution(384,256,3,3,1,1,1,1)) -- 13 -> 13 14 | features:add(nn.ReLU(true)) 15 | features:add(nn.SpatialConvolution(256,256,3,3,1,1,1,1)) -- 13 -> 13 16 | features:add(nn.ReLU(true)) 17 | features:add(nn.SpatialMaxPooling(3,3,2,2)) -- 13 -> 6 18 | 19 | features:cuda() 20 | features = makeDataParallel(features, nGPU) -- defined in util.lua 21 | 22 | local classifier = nn.Sequential() 23 | classifier:add(nn.View(256*6*6)) 24 | 25 | classifier:add(nn.Dropout(0.5)) 26 | classifier:add(nn.Linear(256*6*6, 4096)) 27 | classifier:add(nn.ReLU()) 28 | 29 | classifier:add(nn.Dropout(0.5)) 30 | classifier:add(nn.Linear(4096, 4096)) 31 | classifier:add(nn.ReLU()) 32 | 33 | classifier:add(nn.Linear(4096, nClasses)) 34 | classifier:add(nn.LogSoftMax()) 35 | 36 | classifier:cuda() 37 | 38 | local model = nn.Sequential():add(features):add(classifier) 39 | model.imageSize = 256 40 | model.imageCrop = 224 41 | 42 | return model 43 | end 44 | -------------------------------------------------------------------------------- /model.lua: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2014, Facebook, Inc. 3 | -- All rights reserved. 4 | -- 5 | -- This source code is licensed under the BSD-style license found in the 6 | -- LICENSE file in the root directory of this source tree. An additional grant 7 | -- of patent rights can be found in the PATENTS file in the same directory. 8 | -- 9 | require 'nn' 10 | require 'cunn' 11 | require 'optim' 12 | 13 | --[[ 14 | 1. Create Model 15 | 2. Create Criterion 16 | 3. Convert model to CUDA 17 | ]]-- 18 | 19 | -- 1. Create Network 20 | -- 1.1 If preloading option is set, preload weights from existing models appropriately 21 | if opt.retrain ~= 'none' then 22 | assert(paths.filep(opt.retrain), 'File not found: ' .. opt.retrain) 23 | print('Loading model from file: ' .. 
opt.retrain); 24 | model = loadDataParallel(opt.retrain, opt.nGPU) -- defined in util.lua 25 | else 26 | paths.dofile('models/' .. opt.netType .. '.lua') 27 | print('=> Creating model from file: models/' .. opt.netType .. '.lua') 28 | model = createModel(opt.nGPU) -- for the model creation code, check the models/ folder 29 | if opt.backend == 'cudnn' then 30 | require 'cudnn' 31 | cudnn.convert(model, cudnn) 32 | elseif opt.backend ~= 'nn' then 33 | error'Unsupported backend' 34 | end 35 | end 36 | 37 | -- 2. Create Criterion based on word embeddings 38 | if opt.crit == 'sem' then 39 | criterion = nn.CosineEmbeddingCriterion(opt.margin) 40 | elseif opt.crit == 'class' or opt.crit == 'softsem' then 41 | criterion = nn.ClassNLLCriterion() 42 | else 43 | criterion = nn.MSECriterion() 44 | end 45 | 46 | print('=> Model') 47 | print(model) 48 | 49 | print('=> Criterion') 50 | print(criterion) 51 | 52 | -- 3. Convert model to CUDA 53 | print('==> Converting model to CUDA') 54 | -- model is converted to CUDA in the init script itself 55 | -- model = model:cuda() 56 | criterion:cuda() 57 | 58 | collectgarbage() 59 | -------------------------------------------------------------------------------- /scripts/preprocess_val.py: -------------------------------------------------------------------------------- 1 | import os 2 | import random 3 | import shutil 4 | import json 5 | import pickle 6 | SIZE = 50 7 | 8 | concepts = [] 9 | #with open('../word2vec/rows.json') as f: 10 | # concepts = json.load(f) 11 | 12 | with open('../word2vec/semantic_vector_dict.pkl') as f: 13 | concepts = pickle.load(f).keys() 14 | 15 | 16 | mappings = {} 17 | with open('../imagenet_labels/mapping.txt') as f: 18 | for line in f.readlines(): 19 | line = line.strip() 20 | synset = line.split()[0] 21 | label = line.split()[1] 22 | if label in concepts: 23 | mappings[synset] = label 24 | 25 | ids2synsets = {} 26 | with open('../imagenet_labels/ILSVRC2012_mapping.txt') as f: 27 | for line in f.readlines(): 28 | line = line.strip() 29 | ID = line.split()[0] 30 | synset = line.split()[1] 31 | ids2synsets[ID] = synset 32 | 33 | 34 | byClass = {} 35 | images2labels = {} 36 | imagedir_in = "../../DATA/val" 37 | base = "ILSVRC2012_val_" 38 | i = 1 39 | with open('../imagenet_labels/ILSVRC2012_validation_ground_truth.txt') as f: 40 | for line in f.readlines(): 41 | ID = line.strip() 42 | instance = str(i).zfill(8) 43 | fileName = base+instance+".JPEG" 44 | synset = ids2synsets[ID] 45 | if synset in mappings: 46 | l = mappings[ids2synsets[ID]] 47 | if l not in byClass: 48 | byClass[l] = [] 49 | byClass[l].append(fileName) 50 | i = i+1 51 | 52 | print "read ",i," images" 53 | print len(byClass) 54 | #imagedir_out = "/home/angeliki/git/imagenet-multiGPU.torch/DATA/semantic_tmp/val" 55 | imagedir_out = "../../DATA/val" 56 | towrite = {} 57 | for label in byClass: 58 | random.shuffle(byClass[label]) 59 | towrite = byClass[label][:SIZE] 60 | newdir = os.path.join(imagedir_out,label) 61 | os.mkdir(newdir) 62 | for f in towrite: 63 | print os.path.join(imagedir_in,f), os.path.join(newdir,f) 64 | shutil.copyfile(os.path.join(imagedir_in,f),os.path.join(newdir,f)) 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 
-------------------------------------------------------------------------------- /models/vgg.lua: -------------------------------------------------------------------------------- 1 | function createModel(nGPU) 2 | local modelType = 'A' -- on a titan black, B/D/E run out of memory even for batch-size 32 3 | 4 | -- Create tables describing VGG configurations A, B, D, E 5 | local cfg = {} 6 | if modelType == 'A' then 7 | cfg = {64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'} 8 | elseif modelType == 'B' then 9 | cfg = {64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'} 10 | elseif modelType == 'D' then 11 | cfg = {64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'} 12 | elseif modelType == 'E' then 13 | cfg = {64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'} 14 | else 15 | error('Unknown model type: ' .. modelType .. 
' | Please specify a modelType A or B or D or E') 16 | end 17 | 18 | local features = nn.Sequential() 19 | do 20 | local iChannels = 3; 21 | for k,v in ipairs(cfg) do 22 | if v == 'M' then 23 | features:add(nn.SpatialMaxPooling(2,2,2,2)) 24 | else 25 | local oChannels = v; 26 | local conv3 = nn.SpatialConvolution(iChannels,oChannels,3,3,1,1,1,1); 27 | features:add(conv3) 28 | features:add(nn.ReLU(true)) 29 | iChannels = oChannels; 30 | end 31 | end 32 | end 33 | 34 | features:cuda() 35 | features = makeDataParallel(features, nGPU) -- defined in util.lua 36 | 37 | local classifier = nn.Sequential() 38 | classifier:add(nn.View(512*7*7)) 39 | classifier:add(nn.Linear(512*7*7, 4096)) 40 | classifier:add(nn.Threshold(0, 1e-6)) 41 | classifier:add(nn.Dropout(0.5)) 42 | classifier:add(nn.Linear(4096, 4096)) 43 | classifier:add(nn.Threshold(0, 1e-6)) 44 | classifier:add(nn.Dropout(0.5)) 45 | classifier:add(nn.Linear(4096, nClasses)) 46 | classifier:add(nn.LogSoftMax()) 47 | classifier:cuda() 48 | 49 | local model = nn.Sequential() 50 | model:add(features):add(classifier) 51 | model.imageSize = 256 52 | model.imageCrop = 224 53 | 54 | return model 55 | end 56 | -------------------------------------------------------------------------------- /eval.lua: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2014, Facebook, Inc. 3 | -- All rights reserved. 4 | -- 5 | -- This source code is licensed under the BSD-style license found in the 6 | -- LICENSE file in the root directory of this source tree. An additional grant 7 | -- of patent rights can be found in the PATENTS file in the same directory. 8 | -- 9 | require 'torch' 10 | require 'cutorch' 11 | require 'paths' 12 | require 'xlua' 13 | require 'optim' 14 | require 'nn' 15 | require 'LinearNB' 16 | 17 | torch.setdefaulttensortype('torch.FloatTensor') 18 | 19 | local opts = paths.dofile('opts.lua') 20 | 21 | opt = opts.parse(arg) 22 | print(opt) 23 | model = torch.load(opt.modelPath) 24 | print(model) 25 | 26 | require 'image' 27 | 28 | paths.dofile('util.lua') 29 | paths.dofile('donkey.lua') 30 | 31 | opt.imageSize = model.imageSize or opt.imageSize 32 | opt.imageCrop = model.imageCrop or opt.imageCrop 33 | 34 | cutorch.setDevice(opt.GPU) -- by default, use GPU 1 35 | torch.manualSeed(opt.manualSeed) 36 | 37 | local ffi = require 'ffi' 38 | local Threads = require 'threads' 39 | Threads.serialization('threads.sharedserialize') 40 | 41 | do -- start K datathreads (donkeys) 42 | if opt.nDonkeys > 0 then 43 | local options = opt -- make an upvalue to serialize over to donkey threads 44 | donkeys = Threads( 45 | opt.nDonkeys, 46 | function() 47 | require 'torch' 48 | end, 49 | function(idx) 50 | opt = options -- pass to all donkeys via upvalue 51 | tid = idx 52 | local seed = opt.manualSeed + idx 53 | torch.manualSeed(seed) 54 | print(string.format('Starting donkey with id: %d seed: %d', tid, seed)) 55 | paths.dofile('donkey.lua') 56 | end 57 | ); 58 | else -- single threaded data loading. 
useful for debugging 59 | paths.dofile('donkey.lua') 60 | donkeys = {} 61 | function donkeys:addjob(f1, f2) f2(f1()) end 62 | function donkeys:synchronize() end 63 | end 64 | end 65 | 66 | 67 | 68 | paths.dofile('test.lua') 69 | 70 | epoch = opt.epochNumber 71 | 72 | nTest = testLoader:size() 73 | test() 74 | -------------------------------------------------------------------------------- /models/vggbn.lua: -------------------------------------------------------------------------------- 1 | function createModel(nGPU) 2 | local modelType = 'A' -- on a titan black, B/D/E run out of memory even for batch-size 32 3 | 4 | -- Create tables describing VGG configurations A, B, D, E 5 | local cfg = {} 6 | if modelType == 'A' then 7 | cfg = {64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'} 8 | elseif modelType == 'B' then 9 | cfg = {64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'} 10 | elseif modelType == 'D' then 11 | cfg = {64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'} 12 | elseif modelType == 'E' then 13 | cfg = {64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'} 14 | else 15 | error('Unknown model type: ' .. modelType .. 
' | Please specify a modelType A or B or D or E') 16 | end 17 | 18 | local features = nn.Sequential() 19 | do 20 | local iChannels = 3; 21 | for k,v in ipairs(cfg) do 22 | if v == 'M' then 23 | features:add(nn.SpatialMaxPooling(2,2,2,2)) 24 | else 25 | local oChannels = v; 26 | local conv3 = nn.SpatialConvolution(iChannels,oChannels,3,3,1,1,1,1); 27 | features:add(conv3) 28 | features:add(nn.ReLU(true)) 29 | iChannels = oChannels; 30 | end 31 | end 32 | end 33 | 34 | features:cuda() 35 | features = makeDataParallel(features, nGPU) -- defined in util.lua 36 | 37 | local classifier = nn.Sequential() 38 | classifier:add(nn.View(512*7*7)) 39 | classifier:add(nn.Linear(512*7*7, 4096)) 40 | classifier:add(nn.Threshold(0, 1e-6)) 41 | classifier:add(nn.BatchNormalization(4096, 1e-3)) 42 | classifier:add(nn.Dropout(0.5)) 43 | classifier:add(nn.Linear(4096, 4096)) 44 | classifier:add(nn.Threshold(0, 1e-6)) 45 | classifier:add(nn.BatchNormalization(4096, 1e-3)) 46 | classifier:add(nn.Dropout(0.5)) 47 | classifier:add(nn.Linear(4096, 1000)) 48 | classifier:add(nn.LogSoftMax()) 49 | classifier:cuda() 50 | 51 | local model = nn.Sequential() 52 | model:add(features):add(classifier) 53 | 54 | return model 55 | end 56 | -------------------------------------------------------------------------------- /models/alexnetowtbn.lua: -------------------------------------------------------------------------------- 1 | function createModel(nGPU) 2 | -- from https://code.google.com/p/cuda-convnet2/source/browse/layers/layers-imagenet-1gpu.cfg 3 | -- this is AlexNet that was presented in the One Weird Trick paper. http://arxiv.org/abs/1404.5997 4 | local features = nn.Sequential() 5 | features:add(nn.SpatialConvolution(3,64,11,11,4,4,2,2)) -- 224 -> 55 6 | features:add(nn.SpatialBatchNormalization(64,1e-3)) 7 | features:add(nn.ReLU(true)) 8 | features:add(nn.SpatialMaxPooling(3,3,2,2)) -- 55 -> 27 9 | features:add(nn.SpatialConvolution(64,192,5,5,1,1,2,2)) -- 27 -> 27 10 | features:add(nn.SpatialBatchNormalization(192,1e-3)) 11 | features:add(nn.ReLU(true)) 12 | features:add(nn.SpatialMaxPooling(3,3,2,2)) -- 27 -> 13 13 | features:add(nn.SpatialConvolution(192,384,3,3,1,1,1,1)) -- 13 -> 13 14 | features:add(nn.SpatialBatchNormalization(384,1e-3)) 15 | features:add(nn.ReLU(true)) 16 | features:add(nn.SpatialConvolution(384,256,3,3,1,1,1,1)) -- 13 -> 13 17 | features:add(nn.SpatialBatchNormalization(256,1e-3)) 18 | features:add(nn.ReLU(true)) 19 | features:add(nn.SpatialConvolution(256,256,3,3,1,1,1,1)) -- 13 -> 13 20 | features:add(nn.SpatialBatchNormalization(256,1e-3)) 21 | features:add(nn.ReLU(true)) 22 | features:add(nn.SpatialMaxPooling(3,3,2,2)) -- 13 -> 6 23 | 24 | features:cuda() 25 | features = makeDataParallel(features, nGPU) -- defined in util.lua 26 | 27 | local classifier = nn.Sequential() 28 | classifier:add(nn.View(256*6*6)) 29 | 30 | classifier:add(nn.Dropout(0.5)) 31 | classifier:add(nn.Linear(256*6*6, 4096)) 32 | classifier:add(nn.BatchNormalization(4096, 1e-3)) 33 | classifier:add(nn.ReLU()) 34 | 35 | classifier:add(nn.Dropout(0.5)) 36 | classifier:add(nn.Linear(4096, 4096)) 37 | classifier:add(nn.BatchNormalization(4096, 1e-3)) 38 | classifier:add(nn.ReLU()) 39 | 40 | classifier:add(nn.Linear(4096, nClasses)) 41 | classifier:add(nn.LogSoftMax()) 42 | 43 | classifier:cuda() 44 | 45 | local model = nn.Sequential():add(features):add(classifier) 46 | model.imageSize = 256 47 | model.imageCrop = 224 48 | 49 | return model 50 | end 51 | 
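
A minimal sketch (not a file from this repo) for sanity-checking the directory layout the data loader below assumes: one subdirectory per class label under the -data root, as produced by the preprocessing scripts above; data.lua then asserts that the number of class directories equals -nClasses (733 by default in opts.lua). The path here is illustrative:

import os

data_root = '../DATA/train'  # illustrative: the -data option plus the train split
classes = sorted(d for d in os.listdir(data_root)
                 if os.path.isdir(os.path.join(data_root, d)))
print('%d class directories; opts.lua defaults -nClasses to 733' % len(classes))
for cls in classes[:5]:
    # number of images collected for this label
    print('%s: %d images' % (cls, len(os.listdir(os.path.join(data_root, cls)))))
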
-------------------------------------------------------------------------------- /data.lua: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2014, Facebook, Inc. 3 | -- All rights reserved. 4 | -- 5 | -- This source code is licensed under the BSD-style license found in the 6 | -- LICENSE file in the root directory of this source tree. An additional grant 7 | -- of patent rights can be found in the PATENTS file in the same directory. 8 | -- 9 | local ffi = require 'ffi' 10 | local Threads = require 'threads' 11 | Threads.serialization('threads.sharedserialize') 12 | 13 | -- This script contains the logic to create K threads for parallel data-loading. 14 | -- For the data-loading details, look at donkey.lua 15 | ------------------------------------------------------------------------------- 16 | do -- start K datathreads (donkeys) 17 | if opt.nDonkeys > 0 then 18 | local options = opt -- make an upvalue to serialize over to donkey threads 19 | donkeys = Threads( 20 | opt.nDonkeys, 21 | function() 22 | require 'torch' 23 | end, 24 | function(idx) 25 | opt = options -- pass to all donkeys via upvalue 26 | tid = idx 27 | local seed = opt.manualSeed + idx 28 | torch.manualSeed(seed) 29 | print(string.format('Starting donkey with id: %d seed: %d', tid, seed)) 30 | paths.dofile('donkey.lua') 31 | end 32 | ); 33 | else -- single threaded data loading. useful for debugging 34 | paths.dofile('donkey.lua') 35 | donkeys = {} 36 | function donkeys:addjob(f1, f2) f2(f1()) end 37 | function donkeys:synchronize() end 38 | end 39 | end 40 | print('Tralala!') 41 | nClasses = nil 42 | classes = nil 43 | donkeys:addjob(function() return trainLoader.classes end, function(c) classes = c end) 44 | donkeys:synchronize() 45 | nClasses = #classes 46 | assert(nClasses, "Failed to get nClasses") 47 | assert(nClasses == opt.nClasses, 48 | "nClasses is reported different in the data loader, and in the commandline options") 49 | print('nClasses: ', nClasses) 50 | torch.save(paths.concat(opt.save, 'classes.t7'), classes) 51 | 52 | nTest = 0 53 | donkeys:addjob(function() return testLoader:size() end, function(c) nTest = c end) 54 | donkeys:synchronize() 55 | assert(nTest > 0, "Failed to get nTest") 56 | print('nTest: ', nTest) 57 | -------------------------------------------------------------------------------- /models/alexnet.lua: -------------------------------------------------------------------------------- 1 | require 'LinearNB' 2 | 3 | function createModel(nGPU) 4 | local features = nn.Concat(2) 5 | local fb1 = nn.Sequential() -- branch 1 6 | fb1:add(nn.SpatialConvolution(3,48,11,11,4,4,2,2)) -- 224 -> 55 7 | fb1:add(nn.ReLU(true)) 8 | fb1:add(nn.SpatialMaxPooling(3,3,2,2)) -- 55 -> 27 9 | fb1:add(nn.SpatialConvolution(48,128,5,5,1,1,2,2)) -- 27 -> 27 10 | fb1:add(nn.ReLU(true)) 11 | fb1:add(nn.SpatialMaxPooling(3,3,2,2)) -- 27 -> 13 12 | fb1:add(nn.SpatialConvolution(128,192,3,3,1,1,1,1)) -- 13 -> 13 13 | fb1:add(nn.ReLU(true)) 14 | fb1:add(nn.SpatialConvolution(192,192,3,3,1,1,1,1)) -- 13 -> 13 15 | fb1:add(nn.ReLU(true)) 16 | fb1:add(nn.SpatialConvolution(192,128,3,3,1,1,1,1)) -- 13 -> 13 17 | fb1:add(nn.ReLU(true)) 18 | fb1:add(nn.SpatialMaxPooling(3,3,2,2)) -- 13 -> 6 19 | 20 | local fb2 = fb1:clone() -- branch 2 21 | for k,v in ipairs(fb2:findModules('nn.SpatialConvolution')) do 22 | v:reset() -- reset branch 2's weights 23 | end 24 | 25 | features:add(fb1) 26 | features:add(fb2) 27 | features:cuda() 28 | features = makeDataParallel(features, nGPU) -- 
defined in util.lua 29 | 30 | -- 1.3. Create Classifier (fully connected layers) 31 | local classifier = nn.Sequential() 32 | classifier:add(nn.View(256*6*6)) 33 | classifier:add(nn.Dropout(0.5)) 34 | classifier:add(nn.Linear(256*6*6, 4096)) 35 | classifier:add(nn.Threshold(0, 1e-6)) 36 | classifier:add(nn.Dropout(0.5)) 37 | classifier:add(nn.Linear(4096, 4096)) 38 | classifier:add(nn.Threshold(0, 1e-6)) 39 | if opt.crit == 'softsem' then 40 | classifier:add(nn.Linear(4096, opt.wvectors_dim)) 41 | classifier:add(nn.LinearNB(opt.wvectors_dim, topLayer)) -- word vectors 42 | else 43 | classifier:add(nn.LinearNB(4096, topLayer)) 44 | end 45 | if opt.crit == 'class' or opt.crit == 'softsem' then 46 | classifier:add(nn.LogSoftMax()) 47 | end 48 | classifier:cuda() 49 | 50 | -- 1.4. Combine 1.1 and 1.3 to produce final model 51 | local model = nn.Sequential():add(features):add(classifier) 52 | model.imageSize = 256 53 | model.imageCrop = 224 54 | 55 | return model 56 | end 57 | -------------------------------------------------------------------------------- /util.lua: -------------------------------------------------------------------------------- 1 | require 'cunn' 2 | local ffi=require 'ffi' 3 | 4 | function makeDataParallel(model, nGPU) 5 | if nGPU > 1 then 6 | print('converting module to nn.DataParallelTable') 7 | assert(nGPU <= cutorch.getDeviceCount(), 'number of GPUs less than nGPU specified') 8 | local model_single = model 9 | model = nn.DataParallelTable(1) 10 | for i=1, nGPU do 11 | cutorch.setDevice(i) 12 | model:add(model_single:clone():cuda(), i) 13 | end 14 | end 15 | cutorch.setDevice(opt.GPU) 16 | 17 | return model 18 | end 19 | 20 | local function cleanDPT(module) 21 | -- This assumes this DPT was created by the function above: all the 22 | -- module.modules are clones of the same network on different GPUs 23 | -- hence we only need to keep one when saving the model to the disk. 
local newDPT = nn.DataParallelTable(1) 25 | cutorch.setDevice(opt.GPU) 26 | newDPT:add(module:get(1), opt.GPU) 27 | return newDPT 28 | end 29 | 30 | function saveDataParallel(filename, model) 31 | if torch.type(model) == 'nn.DataParallelTable' then 32 | torch.save(filename, cleanDPT(model)) 33 | elseif torch.type(model) == 'nn.Sequential' then 34 | local temp_model = nn.Sequential() 35 | for i, module in ipairs(model.modules) do 36 | if torch.type(module) == 'nn.DataParallelTable' then 37 | temp_model:add(cleanDPT(module)) 38 | else 39 | temp_model:add(module) 40 | end 41 | end 42 | torch.save(filename, temp_model) 43 | else 44 | error('This saving function only works with Sequential or DataParallelTable modules.') 45 | end 46 | end 47 | 48 | function loadDataParallel(filename, nGPU) 49 | if opt.backend == 'cudnn' then 50 | require 'cudnn' 51 | end 52 | local model = torch.load(filename) 53 | if torch.type(model) == 'nn.DataParallelTable' then 54 | return makeDataParallel(model:get(1):float(), nGPU) 55 | elseif torch.type(model) == 'nn.Sequential' then 56 | for i,module in ipairs(model.modules) do 57 | if torch.type(module) == 'nn.DataParallelTable' then 58 | model.modules[i] = makeDataParallel(module:get(1):float(), nGPU) 59 | end 60 | end 61 | return model 62 | else 63 | error('The loaded model is not a Sequential or DataParallelTable module.') 64 | end 65 | end 66 | 67 | 68 | 
-------------------------------------------------------------------------------- /scripts/preprocess_train_data.py: -------------------------------------------------------------------------------- 1 | # organise the training data according to the mapping in mappings.txt 2 | # start with a directory full of .tar files 3 | # this will extract everything and organise it into directories 4 | # so that the training code can run on that directory 5 | 6 | import os 7 | import random 8 | import shutil 9 | import json 10 | import tarfile 11 | from collections import defaultdict 12 | import pdb 13 | 14 | concepts = [] 15 | with open('../word2vec/rows.json') as f: 16 | concepts = json.load(f) 17 | 18 | mappings = {} 19 | with open('../imagenet_labels/mapping.txt') as f: 20 | for line in f.readlines(): 21 | line = line.strip() 22 | synset = line.split()[0] 23 | label = line.split()[1] 24 | if label in concepts: 25 | mappings[synset] = label 26 | 27 | imagedir_in = '/local/filespace-2/fh295/DATA/train' 28 | tar_filenames = [code for code in os.listdir(imagedir_in) if \ 29 | code.startswith('n') and code.endswith('tar') \ 30 | and code.split('.')[0] in mappings] 31 | 32 | print 'total of %s tar files in the directory' % (len(tar_filenames)) 33 | 34 | #extract every file 35 | for i,f in enumerate(tar_filenames): 36 | full_f = os.path.join(imagedir_in,f) 37 | t = tarfile.open(full_f,'r') 38 | t.extractall(path=imagedir_in) 39 | print 'extracted %s tar files' % (i) 40 | os.remove(full_f) 41 | 42 | 43 | img_filenames = [f for f in os.listdir(imagedir_in) if \ 44 | f.endswith('JPEG') and f.split('_')[0] in mappings] 45 | class_image_dict = defaultdict(list) 46 | pdb.set_trace() 47 | 48 | # fill class image dict 49 | for img in img_filenames: 50 | cls = img.split('_')[0] 51 | class_image_dict[mappings[cls]].append(img) 52 | 53 | 54 | min_class_size = 1000 # min([len(imgs) for cls,imgs in class_image_dict.iteritems()]) 55 | print 'min class size of training set is %s' % (min_class_size) 56 | 57 | # fill up new directories up to this size 58 | # by moving files 59 | for cls,imgs in class_image_dict.items(): 60 | random.shuffle(imgs) 61 | imgs_chosen = imgs[:min(len(imgs)-1,min_class_size)] 62 | newdir = os.path.join(imagedir_in,cls) 63 | if not os.path.exists(newdir): 64 | os.mkdir(newdir) 65 | print 'made new directory %s' % (cls) 66 | for img in imgs: 67 | full_img = os.path.join(imagedir_in,img) 68 | if img in imgs_chosen: 69 | shutil.copy(full_img,newdir) 70 | os.remove(full_img) 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 
-------------------------------------------------------------------------------- /opts.lua: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2014, Facebook, Inc. 3 | -- All rights reserved. 4 | -- 5 | -- This source code is licensed under the BSD-style license found in the 6 | -- LICENSE file in the root directory of this source tree. An additional grant 7 | -- of patent rights can be found in the PATENTS file in the same directory. 
8 | -- 9 | local M = { } 10 | 11 | function M.parse(arg) 12 | local cmd = torch.CmdLine() 13 | cmd:text() 14 | cmd:text('Torch-7 Imagenet Training script') 15 | cmd:text() 16 | cmd:text('Options:') 17 | ------------ General options -------------------- 18 | 19 | cmd:option('-cache', './imagenet/checkpoint/', 'subdirectory in which to save/log experiments') 20 | cmd:option('-data', '../DATA/', 'Home of ImageNet dataset') 21 | cmd:option('-modelPath','','Load model to do eval') 22 | cmd:option('-class_map','./imagenet/checkpoint/MODELS_CLASS/classes.t7') 23 | cmd:option('-manualSeed', 2, 'Manually set RNG seed') 24 | cmd:option('-GPU', 3, 'Default preferred GPU') 25 | cmd:option('-nGPU', 1, 'Number of GPUs to use by default') 26 | cmd:option('-backend', 'nn', 'Options: cudnn | nn') 27 | ------------- Data options ------------------------ 28 | cmd:option('-nDonkeys', 2, 'number of donkeys to initialize (data loading threads)') 29 | cmd:option('-imageSize', 256, 'Smallest side of the resized image') 30 | cmd:option('-cropSize', 224, 'Height and Width of image crop to be used as input layer') 31 | cmd:option('-nClasses', 733, 'number of classes in the dataset') 32 | ------------- Training options -------------------- 33 | cmd:option('-nEpochs', 55, 'Number of total epochs to run') 34 | cmd:option('-epochSize', 10000, 'Number of batches per epoch') 35 | cmd:option('-epochNumber', 1, 'Manual epoch number (useful on restarts)') 36 | cmd:option('-batchSize', 128, 'mini-batch size (1 = pure stochastic)') 37 | ---------- Optimization options ---------------------- 38 | cmd:option('-LR', 0.0, 'learning rate; if set, overrides default LR/WD recipe') 39 | cmd:option('-momentum', 0.9, 'momentum') 40 | cmd:option('-weightDecay', 5e-4, 'weight decay') 41 | ---------- Model options ---------------------------------- 42 | cmd:option('-netType', 'alexnet', 'Options: alexnet | overfeat | alexnetowtbn | vgg | googlenet') 43 | cmd:option('-crit', 'sem', 'Options: Semantic | ClassNLL') 44 | cmd:option('-margin', 0.5, 'Margin for semantic training with Embedding criterion') 45 | cmd:option('-neg_samples',0,'Negative samples') 46 | cmd:option('-wvectors','word2vec','Directory where word vectors are') 47 | cmd:option('-wvectors_dim',500, ' The dimensionality of the word vectors') 48 | cmd:option('-retrain', 'none', 'provide path to model to retrain with') 49 | cmd:option('-optimState', 'none', 'provide path to an optimState to reload from') 50 | cmd:text() 51 | 52 | local opt = cmd:parse(arg or {}) 53 | -- add commandline specified options 54 | opt.save = paths.concat(opt.cache, 55 | cmd:string(opt.netType, opt, 56 | {netType=true, retrain=true, optimState=true, cache=true, data=true})) 57 | -- add date/time 58 | opt.save = paths.concat(opt.save, '' .. os.date():gsub(' ','')) 59 | return opt 60 | end 61 | 62 | return M 63 | -------------------------------------------------------------------------------- /LinearNB.lua: -------------------------------------------------------------------------------- 1 | -- Copyright (c) 2015-present, Facebook, Inc. 2 | -- All rights reserved. 3 | -- 4 | -- This source code is licensed under the BSD-style license found in the 5 | -- LICENSE file in the root directory of this source tree. An additional grant 6 | -- of patent rights can be found in the PATENTS file in the same directory. 
7 | 8 | local LinearNB, parent = torch.class('nn.LinearNB', 'nn.Module') 9 | 10 | function LinearNB:__init(inputSize, outputSize) 11 | parent.__init(self) 12 | 13 | self.outputSize = outputSize 14 | self.inputSize = inputSize 15 | self.weight = torch.Tensor(outputSize, inputSize) 16 | self.gradWeight = torch.Tensor(outputSize, inputSize) 17 | 18 | self:reset() 19 | end 20 | 21 | function LinearNB:reset(stdv) 22 | if stdv then 23 | stdv = stdv * math.sqrt(3) 24 | else 25 | stdv = 1./math.sqrt(self.weight:size(2)) 26 | end 27 | if nn.oldSeed then 28 | for i=1,self.weight:size(1) do 29 | self.weight:select(1, i):apply(function() 30 | return torch.uniform(-stdv, stdv) 31 | end) 32 | end 33 | else 34 | self.weight:uniform(-stdv, stdv) 35 | end 36 | end 37 | 38 | function LinearNB:updateOutput(input) 39 | if input:dim() == 1 then 40 | self.output:resize(self.outputSize) 41 | self.output:zero() 42 | self.output:addmv(1, self.weight, input) 43 | elseif input:dim() == 2 then 44 | local nframe = input:size(1) 45 | local nunit = self.outputSize 46 | self.output:resize(nframe, nunit):zero() 47 | if not self.addBuffer or self.addBuffer:size(1) ~= nframe then 48 | self.addBuffer = input.new(nframe):fill(1) 49 | end 50 | if nunit == 1 then 51 | -- Special case to fix output size of 1 bug: 52 | self.output:select(2,1):addmv(1, input, self.weight:select(1,1)) 53 | else 54 | self.output:addmm(1, input, self.weight:t()) 55 | end 56 | else 57 | error('input must be vector or matrix') 58 | end 59 | 60 | return self.output 61 | end 62 | 63 | function LinearNB:updateGradInput(input, gradOutput) 64 | if self.gradInput then 65 | 66 | local nElement = self.gradInput:nElement() 67 | self.gradInput:resizeAs(input) 68 | if self.gradInput:nElement() ~= nElement then 69 | self.gradInput:zero() 70 | end 71 | if input:dim() == 1 then 72 | self.gradInput:addmv(0, 1, self.weight:t(), gradOutput) 73 | elseif input:dim() == 2 then 74 | self.gradInput:addmm(0, 1, gradOutput, self.weight) 75 | end 76 | 77 | return self.gradInput 78 | end 79 | end 80 | 81 | function LinearNB:accGradParameters(input, gradOutput, scale) 82 | scale = scale or 1 83 | 84 | if input:dim() == 1 then 85 | self.gradWeight:addr(scale, gradOutput, input) 86 | elseif input:dim() == 2 then 87 | local nunit = self.outputSize 88 | if nunit == 1 then 89 | -- Special case to fix output size of 1 bug: 90 | self.gradWeight:select(1,1):addmv(scale, 91 | input:t(), gradOutput:select(2,1)) 92 | else 93 | self.gradWeight:addmm(scale, gradOutput:t(), input) 94 | end 95 | end 96 | end 97 | 98 | -- we do not need to accumulate parameters when sharing 99 | LinearNB.sharedAccUpdateGradParameters = LinearNB.accUpdateGradParameters 100 | -------------------------------------------------------------------------------- /word2vec.lua: -------------------------------------------------------------------------------- 1 | --[[ 2 | from here: https://github.com/rotmanmi/word2vec.torch 3 | --]] 4 | require 'paths' 5 | require 'hdf5' 6 | local cjson = require 'cjson' 7 | 8 | torch.setdefaulttensortype('torch.FloatTensor') 9 | 10 | local Word2vec = torch.class('Word2vec') 11 | 12 | function Word2vec:__init(dir, classes) 13 | 14 | self.classes = classes 15 | self.w2v = {} 16 | local f = dir .. '/vectors.h5' 17 | if not paths.filep(f .. '.t7') then 18 | print(string.format('Ready to load word vectors from %s', f)) 19 | self.w2v = self:_bintot7(dir) 20 | else 21 | print(string.format('Ready to load word vectors from %s', f .. '.t7')) 22 | self.w2v = torch.load(dir ..'/vectors.h5'.. 
'.t7') 23 | end 24 | print('Done reading word2vec data.') 25 | end 26 | 27 | function Word2vec:_read_json(path) 28 | local file = io.open(path, 'r') 29 | local text = file:read() 30 | file:close() 31 | local info = cjson.decode(text) 32 | return info 33 | end 34 | 35 | 36 | function Word2vec:_bintot7(dir) 37 | 38 | 39 | local f = dir .. '/vectors.h5' 40 | local h5_file = hdf5.open(f, 'r') 41 | 42 | local vecs_size = h5_file:read('/vectors'):dataspaceSize() 43 | local words = vecs_size[1] 44 | local dims = vecs_size[2] 45 | 46 | local M = h5_file:read('/vectors'):partial({1,words},{1,dims}) 47 | 48 | local rows = self:_read_json(dir .. '/rows.json') 49 | --Writing Files 50 | word2vec = {} 51 | word2vec.M = torch.FloatTensor(M:size()) 52 | word2vec.w2vvocab = {} 53 | word2vec.v2wvocab = {} 54 | for i=1,words do 55 | local w = rows[i] 56 | word2vec.v2wvocab[i] = w 57 | word2vec.w2vvocab[w] = i 58 | --normalize to unit norm 59 | local n = M[i]:norm() 60 | word2vec.M[i] = M[i]/n 61 | end 62 | 63 | torch.save(f .. '.t7',word2vec) 64 | print('Writing t7 File for future usage.') 65 | 66 | return word2vec 67 | end 68 | 69 | 70 | 71 | function Word2vec:getVector(label) 72 | local ind = self.w2v.w2vvocab[self.classes[label]] 73 | return self.w2v.M[ind] 74 | end 75 | 76 | function Word2vec:getWordVector(label) 77 | print(label) 78 | local ind = self.w2v.w2vvocab[label] 79 | print(ind) 80 | return self.w2v.M[ind] 81 | end 82 | 83 | function Word2vec:eval_ranking(predictions, labels, classes, k, neg_samples) 84 | 85 | local els = predictions:size(1) 86 | 87 | -- normalize to have multiplication be cosine 88 | local p_norm = predictions:norm(2,2) 89 | predictions:cdiv(p_norm:expandAs(predictions)) 90 | 91 | -- cosine 92 | local cosine = predictions * self.w2v.M:transpose(1,2) 93 | 94 | local sim = 0 95 | -- ranking 96 | local ranking = torch.Tensor(els/(neg_samples+1)) 97 | local topk = 0 98 | local tot = 0 99 | 100 | for s = 1,els do 101 | 102 | if labels[s] == 1 then 103 | local _,index = torch.sort(cosine:select(1,s),true) -- sort rows 104 | 105 | local ind = self.w2v.w2vvocab[self.classes[classes[s]]] 106 | local not_found = true 107 | local r = 1 108 | while not_found do 109 | if index[r]==ind then 110 | ranking[tot+1] = r 111 | not_found = false 112 | if r <= k then topk = topk+1 end 113 | end 114 | r = r + 1 115 | end 116 | tot = tot + 1 117 | sim = sim + cosine[s][s] 118 | --print(string.format('Gold: %s -- Predicted %s ',self.classes[classes[s]], self.w2v.v2wvocab[index[1]])) 119 | end 120 | end 121 | 122 | median = torch.median(ranking)[1] 123 | 124 | return topk * 100 /tot , sim/tot, median 125 | end 126 | 127 | -------------------------------------------------------------------------------- /models/googlenet.lua: -------------------------------------------------------------------------------- 1 | local function inception(input_size, config) 2 | local concat = nn.Concat(2) 3 | if config[1][1] ~= 0 then 4 | local conv1 = nn.Sequential() 5 | conv1:add(nn.SpatialConvolution(input_size, config[1][1],1,1,1,1)):add(nn.ReLU(true)) 6 | concat:add(conv1) 7 | end 8 | 9 | local conv3 = nn.Sequential() 10 | conv3:add(nn.SpatialConvolution( input_size, config[2][1],1,1,1,1)):add(nn.ReLU(true)) 11 | conv3:add(nn.SpatialConvolution(config[2][1], config[2][2],3,3,1,1,1,1)):add(nn.ReLU(true)) 12 | concat:add(conv3) 13 | 14 | local conv3xx = nn.Sequential() 15 | conv3xx:add(nn.SpatialConvolution( input_size, config[3][1],1,1,1,1)):add(nn.ReLU(true)) 16 | conv3xx:add(nn.SpatialConvolution(config[3][1], 
config[3][2],3,3,1,1,1,1)):add(nn.ReLU(true)) 17 | conv3xx:add(nn.SpatialConvolution(config[3][2], config[3][2],3,3,1,1,1,1)):add(nn.ReLU(true)) 18 | concat:add(conv3xx) 19 | 20 | local pool = nn.Sequential() 21 | pool:add(nn.SpatialZeroPadding(1,1,1,1)) -- remove after getting nn R2 into fbcode 22 | if config[4][1] == 'max' then 23 | pool:add(nn.SpatialMaxPooling(3,3,1,1):ceil()) 24 | elseif config[4][1] == 'avg' then 25 | pool:add(nn.SpatialAveragePooling(3,3,1,1):ceil()) 26 | else 27 | error('Unknown pooling') 28 | end 29 | if config[4][2] ~= 0 then 30 | pool:add(nn.SpatialConvolution(input_size, config[4][2],1,1,1,1)):add(nn.ReLU(true)) 31 | end 32 | concat:add(pool) 33 | 34 | return concat 35 | end 36 | 37 | function createModel(nGPU) 38 | local features = nn.Sequential() 39 | features:add(nn.SpatialConvolution(3,64,7,7,2,2,3,3)):add(nn.ReLU(true)) 40 | features:add(nn.SpatialMaxPooling(3,3,2,2):ceil()) 41 | features:add(nn.SpatialConvolution(64,64,1,1)):add(nn.ReLU(true)) 42 | features:add(nn.SpatialConvolution(64,192,3,3,1,1,1,1)):add(nn.ReLU(true)) 43 | features:add(nn.SpatialMaxPooling(3,3,2,2):ceil()) 44 | features:add(inception( 192, {{ 64},{ 64, 64},{ 64, 96},{'avg', 32}})) -- 3(a) 45 | features:add(inception( 256, {{ 64},{ 64, 96},{ 64, 96},{'avg', 64}})) -- 3(b) 46 | features:add(inception( 320, {{ 0},{128,160},{ 64, 96},{'max', 0}})) -- 3(c) 47 | features:add(nn.SpatialConvolution(576,576,2,2,2,2)) 48 | features:add(inception( 576, {{224},{ 64, 96},{ 96,128},{'avg',128}})) -- 4(a) 49 | features:add(inception( 576, {{192},{ 96,128},{ 96,128},{'avg',128}})) -- 4(b) 50 | features:add(inception( 576, {{160},{128,160},{128,160},{'avg', 96}})) -- 4(c) 51 | features:add(inception( 576, {{ 96},{128,192},{160,192},{'avg', 96}})) -- 4(d) 52 | 53 | local main_branch = nn.Sequential() 54 | main_branch:add(inception( 576, {{ 0},{128,192},{192,256},{'max', 0}})) -- 4(e) 55 | main_branch:add(nn.SpatialConvolution(1024,1024,2,2,2,2)) 56 | main_branch:add(inception(1024, {{352},{192,320},{160,224},{'avg',128}})) -- 5(a) 57 | main_branch:add(inception(1024, {{352},{192,320},{192,224},{'max',128}})) -- 5(b) 58 | main_branch:add(nn.SpatialAveragePooling(7,7,1,1)) 59 | main_branch:add(nn.View(1024):setNumInputDims(3)) 60 | main_branch:add(nn.Linear(1024,nClasses)) 61 | main_branch:add(nn.LogSoftMax()) 62 | 63 | -- add auxillary classifier here (thanks to Christian Szegedy for the details) 64 | local aux_classifier = nn.Sequential() 65 | aux_classifier:add(nn.SpatialAveragePooling(5,5,3,3):ceil()) 66 | aux_classifier:add(nn.SpatialConvolution(576,128,1,1,1,1)) 67 | aux_classifier:add(nn.View(128*4*4):setNumInputDims(3)) 68 | aux_classifier:add(nn.Linear(128*4*4,768)) 69 | aux_classifier:add(nn.ReLU()) 70 | aux_classifier:add(nn.Linear(768,nClasses)) 71 | aux_classifier:add(nn.LogSoftMax()) 72 | 73 | local splitter = nn.Concat(2) 74 | splitter:add(main_branch):add(aux_classifier) 75 | local model = nn.Sequential():add(features):add(splitter) 76 | 77 | model:cuda() 78 | model = makeDataParallel(model, nGPU) -- defined in util.lua 79 | model.imageSize = 256 80 | model.imageCrop = 224 81 | 82 | 83 | return model 84 | end 85 | -------------------------------------------------------------------------------- /test.lua: -------------------------------------------------------------------------------- 1 | --TODO: FIX THE EVALUATION CODE 2 | 3 | 4 | -- 5 | -- Copyright (c) 2014, Facebook, Inc. 6 | -- All rights reserved. 
7 | -- 8 | -- This source code is licensed under the BSD-style license found in the 9 | -- LICENSE file in the root directory of this source tree. An additional grant 10 | -- of patent rights can be found in the PATENTS file in the same directory. 11 | -- 12 | testLogger = optim.Logger(paths.concat(opt.save, 'test.log')) 13 | 14 | local batchNumber 15 | local top1_center, loss 16 | local timer = torch.Timer() 17 | 18 | if opt.crit == 'sem' or opt.crit == 'mse' then -- NB: each operand needs its own comparison; a bare 'mse' is always truthy in Lua 19 | dummy = dataLoader{ 20 | paths = {paths.concat(opt.data, 'val')}, --train 21 | loadSize = {3, opt.imageSize, opt.imageSize}, --doesn't really matter 22 | sampleSize = {3, opt.cropSize, opt.cropSize}, -- doesn't really matter 23 | split = 100, 24 | verbose = true, 25 | wvectors = opt.wvectors, 26 | neg_samples = 0, 27 | } 28 | w2v = dummy:get_w2v() 29 | end 30 | 31 | 32 | function test() 33 | print('==> doing epoch on validation data:') 34 | print("==> online epoch # " .. epoch) 35 | 36 | batchNumber = 0 37 | cutorch.synchronize() 38 | timer:reset() 39 | 40 | -- set the dropouts to evaluate mode 41 | model:evaluate() 42 | 43 | top1_center = 0 44 | loss = 0 45 | for i=1,nTest/opt.batchSize do -- nTest is set in data.lua 46 | local indexStart = (i-1) * opt.batchSize + 1 47 | local indexEnd = (indexStart + opt.batchSize - 1) 48 | donkeys:addjob( 49 | -- work to be done by donkey thread 50 | function() 51 | local inputs, labels 52 | if opt.crit == 'class' or opt.crit == 'mse' or opt.crit == 'softsem' then 53 | inputs, labels = testLoader:get(indexStart, indexEnd) 54 | else 55 | inputs, labels = testLoader:getSemantic(indexStart, indexEnd) 56 | end 57 | return inputs, labels 58 | end, 59 | -- callback that is run in the main thread once the work is done 60 | testBatch 61 | ) 62 | end 63 | 64 | donkeys:synchronize() 65 | cutorch.synchronize() 66 | 67 | top1_center = top1_center * 100 / nTest 68 | testLogger:add{ 69 | ['% top1 accuracy (test set) (center crop)'] = top1_center, 70 | ['avg loss (test set)'] = loss 71 | } 72 | print(string.format('Epoch: [%d][TESTING SUMMARY] Total Time(s): %.2f \t' 73 | .. 
'accuracy [Center](%%):\t top-1 %.2f\t ', 74 | epoch, timer:time().real, top1_center)) 75 | 76 | print('\n') 77 | 78 | 79 | end -- of test() 80 | ----------------------------------------------------------------------------- 81 | local inputs = torch.CudaTensor() 82 | local labels = torch.CudaTensor() 83 | 84 | function testBatch(inputsCPU, labelsCPU) 85 | batchNumber = batchNumber + opt.batchSize 86 | 87 | inputs:resize(inputsCPU:size()):copy(inputsCPU) 88 | if opt.crit == 'sem' then 89 | labels:resize(labelsCPU[2]:size()):copy(labelsCPU[2]) 90 | else 91 | labels:resize(labelsCPU:size()):copy(labelsCPU) 92 | end 93 | 94 | local outputs = model:forward(inputs) 95 | cutorch.synchronize() 96 | local pred = outputs:float() 97 | 98 | local median = 0 99 | local sim = 0 100 | local top1 = 0 101 | if opt.crit == 'class' or opt.crit == 'softsem' then 102 | local _, pred_sorted = pred:sort(2, true) 103 | for i=1,pred:size(1) do 104 | local g = labelsCPU[i] 105 | if pred_sorted[i][1] == g then top1 = top1 + 1 end 106 | end 107 | top1_center = top1 + top1_center 108 | elseif opt.crit == 'sem' then 109 | top1, sim, median = w2v:eval_ranking(pred, labelsCPU[1], labelsCPU[2],1, opt.neg_samples) 110 | top1 = top1*opt.batchSize/100 111 | top1_center = top1_center + top1 112 | end 113 | if batchNumber % opt.batchSize == 0 then 114 | print(('Epoch: Testing [%d][%d/%d] -- (top1: %d, median: %d)'):format(epoch, batchNumber, nTest, top1, median)) 115 | end 116 | end 117 | -------------------------------------------------------------------------------- /scripts/compute_RDM.py: -------------------------------------------------------------------------------- 1 | # takes learned representations and computes the RDM 2 | # representations stored in h5 format 3 | 4 | import pickle 5 | import numpy as np 6 | from matplotlib import pyplot as plt 7 | import sys 8 | from scipy.spatial.distance import pdist# 9 | import h5py 10 | 11 | model = sys.argv[1] 12 | sim = sys.argv[2] 13 | layer = sys.argv[3] 14 | 15 | ALL = [] 16 | 17 | if model == 'words' or model=='ALL': 18 | 19 | word_vecs_file = "/home/fh295/Documents/Deep_learning_Bengio/arctic/defgen/D_medium_cbow_pdw_8B.pkl" 20 | with open(word_vecs_file,'r') as f: 21 | word_vecs = pickle.load(f) 22 | 23 | stimuli_words_file = "/home/fh295/filespace2/DATA/stimuli/mapping.txt" 24 | with open(stimuli_words_file,'r') as f: 25 | stimuli_words = [w.strip() for w in f.readlines()] 26 | 27 | vecs = [] 28 | rows = [] 29 | for w in stimuli_words: 30 | if w in word_vecs: 31 | vecs.append(word_vecs[w]) 32 | rows.append(w) 33 | else: 34 | print w," not in dict" 35 | 36 | #compute all pairwise similarities 37 | if sim == 'corr': 38 | m = 1-np.corrcoef(vecs) 39 | ALL.append([m[i,j] for i in range(m.shape[0]) for j in range(i+1,m.shape[0])]) 40 | else: 41 | m = pdist(vecs, 'cosine') 42 | ALL.append(m) 43 | 44 | if model == 'humans' or model == 'ALL': 45 | m = np.loadtxt("/home/fh295/filespace2/DATA/stimuli/RDM_hIT_fig1.txt") 46 | ALL.append([m[i,j] for i in range(m.shape[0]) for j in range(i+1,m.shape[0])]) 47 | 48 | if model == "pixels" or model == "ALL": 49 | f = h5py.File('/home/fh295/filespace2/DATA/PREDICTIONS/class.h5','r') 50 | vecs = f["pixels"] 51 | #compute all pairwise similarities 52 | if sim == 'corr': 53 | m = 1-np.corrcoef(vecs) 54 | ALL.append([m[i,j] for i in range(m.shape[0]) for j in range(i+1,m.shape[0])]) 55 | else: 56 | m = pdist(vecs, 'cosine') 57 | ALL.append(m) 58 | f.close() 59 | 60 | 61 | if model == "sem" or model == "ALL": 62 | f = 
h5py.File('/home/fh295/filespace2/DATA/PREDICTIONS/sem.h5','r') 63 | vecs = f[layer] 64 | #compute all pairwise similarities 65 | if sim == 'corr': 66 | m = 1-np.corrcoef(vecs) 67 | ALL.append([m[i,j] for i in range(m.shape[0]) for j in range(i+1,m.shape[0])]) 68 | print m[0][0],m[0][1],m[0][2] 69 | else: 70 | m = pdist(vecs, 'cosine') 71 | ALL.append(m) 72 | f.close() 73 | 74 | if model == "class" or model == "ALL": 75 | f = h5py.File('/home/fh295/filespace2/DATA/PREDICTIONS/class.h5','r') 76 | vecs = f[layer] 77 | #compute all pairwise similarities 78 | if sim == 'corr': 79 | m = 1-np.corrcoef(vecs) 80 | ALL.append([m[i,j] for i in range(m.shape[0]) for j in range(i+1,m.shape[0])]) 81 | else: 82 | m = pdist(vecs, 'cosine') 83 | ALL.append(m) 84 | f.close() 85 | 86 | if model == "softsem" or model == "ALL": 87 | f = h5py.File('/home/fh295/filespace2/DATA/PREDICTIONS/softsem.h5','r') 88 | vecs = f[layer] 89 | #compute all pairwise similarities 90 | if sim == 'corr': 91 | m = 1-np.corrcoef(vecs) 92 | ALL.append([m[i,j] for i in range(m.shape[0]) for j in range(i+1,m.shape[0])]) 93 | else: 94 | m = pdist(vecs, 'cosine') 95 | ALL.append(m) 96 | f.close() 97 | 98 | if model == "vgg" or model == "ALL": 99 | f = h5py.File('/home/fh295/filespace2/DATA/PREDICTIONS/vgg.h5','r') 100 | vecs = np.array(f["topLayer"]) 101 | vecs = vecs.transpose() 102 | #compute all pairwise similarities 103 | if sim == 'corr': 104 | m = 1-np.corrcoef(vecs) 105 | ALL.append([m[i,j] for i in range(m.shape[0]) for j in range(i+1,m.shape[0])]) 106 | else: 107 | m = pdist(vecs, 'cosine') 108 | ALL.append(m) 109 | f.close() 110 | 111 | 112 | if model == "ALL": 113 | m = np.corrcoef(ALL) 114 | print "Humans vs Words: ",m[1,0] 115 | print "Humans vs Pixels: ",m[1,2] 116 | print "Humans vs Sem: ",m[1,3] 117 | print "Humans vs Class: ",m[1,4] 118 | print "Humans vs SoftSem: ",m[1,5] 119 | print "Humans vs VGG: ",m[1,6] 120 | print 121 | print "Words vs Sem: ",m[0,3] 122 | print "Pixels vs Sem: ",m[2,3] 123 | print 124 | print "Words vs Class: ",m[0,4] 125 | print "Pixels vs Class: ",m[2,4] 126 | print 127 | print "Words vs VGG: ",m[0,6] 128 | print "Pixels vs VGG: ",m[2,6] 129 | 130 | else: 131 | fig, ax = plt.subplots() 132 | heatmap = ax.pcolor(m) 133 | 134 | # want a more natural, table-like display 135 | ax.invert_yaxis() 136 | ax.xaxis.tick_top() 137 | plt.xlim(xmax=m.shape[0]) 138 | plt.ylim(ymin=m.shape[1]) 139 | 140 | plt.savefig(model+'.png') 141 | 142 | 143 | 144 | -------------------------------------------------------------------------------- /apply_alexnet.lua: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2014, Facebook, Inc. 3 | -- All rights reserved. 4 | -- 5 | -- This source code is licensed under the BSD-style license found in the 6 | -- LICENSE file in the root directory of this source tree. An additional grant 7 | -- of patent rights can be found in the PATENTS file in the same directory. 
8 | -- 9 | require 'image' 10 | require 'hdf5' 11 | require 'gnuplot' 12 | require 'nn' 13 | require 'LinearNB' 14 | 15 | 16 | paths.dofile('dataset.lua') 17 | paths.dofile('util.lua') 18 | 19 | 20 | local cmd = torch.CmdLine() 21 | cmd:text() 22 | cmd:text('Apply trained model to extract features from images from ALL layers') 23 | cmd:text() 24 | cmd:text('Options:') 25 | ------------ General options -------------------- 26 | cmd:option('-cache', 'imagenet/checkpoint', 'subdirectory containing cached files like mean, std etc') 27 | cmd:option('-cropSize',224,'Crop size') 28 | cmd:option('-imageSize',256,'Size of images') 29 | cmd:option('-modelPath','','File to load the trained model') 30 | cmd:option('-type_of_model','', 'Type of model considered (class or sem or softsem') 31 | cmd:option('-imageList','/home/fh295/filespace2/DATA/stimuli/stimuliPaths.txt','A file containing all the images to extract features for') 32 | cmd:option('-n_imgs',92,'How many images are in the imageList?') 33 | 34 | local opt = cmd:parse(arg or {}) 35 | 36 | cache = opt.cache 37 | cropSize = opt.cropSize 38 | imageSize = opt.imageSize 39 | 40 | -- This file contains the data-loading logic and details. 41 | -- It is run by each data-loader thread. 42 | ------------------------------------------ 43 | 44 | -- a cache file of the training metadata (if doesnt exist, will be created) 45 | local trainCache = paths.concat(cache, 'trainCache.t7') 46 | local testCache = paths.concat(cache, 'testCache.t7') 47 | local meanstdCache = paths.concat(cache, 'meanstdCache.t7') 48 | 49 | local loadSize = {3, imageSize, imageSize} 50 | local sampleSize = {3, cropSize, cropSize} 51 | 52 | 53 | local function loadImage(path) 54 | local input = image.load(path, 3, 'float') 55 | -- find the smaller dimension, and resize it to loadSize (while keeping aspect ratio) 56 | if input:size(3) < input:size(2) then 57 | input = image.scale(input, loadSize[2], loadSize[3] * input:size(2) / input:size(3)) 58 | else 59 | input = image.scale(input, loadSize[2] * input:size(3) / input:size(2), loadSize[3]) 60 | end 61 | return input 62 | end 63 | 64 | -- channel-wise mean and std. Calculate or load them from disk later in the script. 
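--[[
For reference, a worked example of the center-crop arithmetic used by testHook
below (hypothetical sizes, not executed by this script): given an input rescaled
to 340x256 (shorter side = imageSize) and cropSize = 224, we get
w1 = ceil((340-224)/2) = 58 and h1 = ceil((256-224)/2) = 16, so image.crop takes
the central 224x224 patch spanning x in [58,282) and y in [16,240); the cached
per-channel mean/std are then applied to that patch.
--]]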
65 | local mean,std 66 | -------------------------------------------------------------------------------- 67 | --[[ 68 | Section 2: Create a test data loader (testLoader), 69 | which can iterate over the test set and returns an image's 70 | --]] 71 | 72 | -- function to load the image 73 | testHook = function(self, path) 74 | collectgarbage() 75 | local input = loadImage(path) 76 | local oH = sampleSize[2] 77 | local oW = sampleSize[3] 78 | local iW = input:size(3) 79 | local iH = input:size(2) 80 | local w1 = math.ceil((iW-oW)/2) 81 | local h1 = math.ceil((iH-oH)/2) 82 | local out = image.crop(input, w1, h1, w1+oW, h1+oH) -- center patch 83 | -- mean/std 84 | for i=1,3 do -- channels 85 | if mean then out[{{i},{},{}}]:add(-mean[i]) end 86 | if std then out[{{i},{},{}}]:div(std[i]) end 87 | end 88 | return out 89 | end 90 | 91 | if paths.filep(meanstdCache) then 92 | local meanstd = torch.load(meanstdCache) 93 | mean = meanstd.mean 94 | std = meanstd.std 95 | print('Loaded mean and std from cache.') 96 | end 97 | 98 | 99 | 100 | print(string.format('Loading file %s',opt.modelPath)) 101 | local model = torch.load(opt.modelPath) 102 | model:evaluate() 103 | print(model) 104 | 105 | ----------------------------- main starts here --------------------- 106 | -- read imageList 107 | local fh,err = io.open(opt.imageList) 108 | local n = opt.n_imgs 109 | ii =1 110 | -- line by line 111 | 112 | all_images = torch.DoubleTensor(n, 3, cropSize, cropSize) 113 | while true do 114 | imagePath = fh:read() 115 | if imagePath == nil then break end 116 | print(string.format('Extracting features for %s',imagePath)) 117 | img = testHook({loadSize}, imagePath) 118 | if img:dim() == 3 then 119 | img = img:view(1, img:size(1), img:size(2), img:size(3)) 120 | end 121 | all_images[ii] = img 122 | ii = ii + 1 123 | end 124 | fh:close() 125 | 126 | local saveFile = hdf5.open('../DATA/PREDICTIONS/'..opt.type_of_model.. 
'.h5','w') 127 | 128 | ---- save pixels 129 | local pixels = all_images:view(n,all_images[1]:nElement()) 130 | saveFile:write('pixels',pixels) 131 | 132 | all_images = all_images:cuda() 133 | ---------------------------------------------------------------------- 134 | -- extract and save features from progressively earlier layers by removing top modules 135 | local predictions = model:forward(all_images:cuda()):float() 136 | saveFile:write('topLayer',predictions) 137 | model:get(2):remove() 138 | model:get(2):remove() 139 | print(model) 140 | predictions = model:forward(all_images:cuda()):float() 141 | saveFile:write('bottomLayer',predictions) 142 | model:get(2):remove() 143 | model:get(2):remove() 144 | model:get(2):remove() 145 | print(model) 146 | predictions = model:forward(all_images:cuda()):float() 147 | saveFile:write('evenbottomLayer',predictions) 148 | 149 | 150 | --------------------------------------- 151 | 152 | --[[ 153 | model:remove(2) 154 | predictions = model:forward(all_images:cuda()):float() 155 | predictions= predictions:view(n, predictions:size(2) * predictions:size(3) * predictions:size(4)) 156 | p_norm = predictions:norm(2,2) 157 | predictions:cdiv(p_norm:expandAs(predictions)) 158 | sims = predictions * predictions:transpose(1,2) 159 | gnuplot.imagesc(sims,'color') 160 | --]] 161 | 162 | saveFile:close() 163 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Training an Object Classifier in Torch-7 on multiple GPUs over [ImageNet](http://image-net.org/download-images) 2 | 3 | In this concise example (1200 lines including a general-purpose and highly scalable data loader for images), we showcase: 4 | - training [AlexNet](http://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks), [Overfeat](http://arxiv.org/abs/1312.6229), VGG and GoogLeNet on ImageNet 5 | - multiple backends: CuDNN, CuNN 6 | - nn.DataParallelTable to speed up training over multiple GPUs 7 | - multithreaded data-loading from disk (showcases sending tensors from one thread to another without serialization) 8 | 9 | ### Requirements 10 | - [Install torch on a machine with CUDA GPU](http://torch.ch/docs/getting-started.html#_) 11 | - If on Mac OSX, run `brew install coreutils findutils` to get GNU versions of `wc`, `find`, and `cut` 12 | - Download the Imagenet-12 dataset from http://image-net.org/download-images . It has 1000 classes and 1.2 million images. 13 | 14 | ### Data processing 15 | **The images don't need to be preprocessed or packaged in any database.** It is preferred to keep the dataset on an [SSD](http://en.wikipedia.org/wiki/Solid-state_drive), but we have used the data loader comfortably over NFS without loss in speed. 16 | We just use a simple convention: SubFolderName == ClassName. 17 | So, for example: if you have classes {cat,dog}, cat images go into the folder dataset/cat and dog images go into dataset/dog. 18 | 19 | The training images for imagenet are already in appropriate subfolders (like n07579787, n07880968). 20 | You need to get the validation groundtruth and move the validation images into appropriate subfolders. 21 | To do this, download ILSVRC2012_img_train.tar and ILSVRC2012_img_val.tar, and use the following commands: 22 | ```bash 23 | # extract train data 24 | mkdir train && mv ILSVRC2012_img_train.tar train/ && cd train 25 | tar -xvf ILSVRC2012_img_train.tar && rm -f ILSVRC2012_img_train.tar 26 | find . 
-name "*.tar" | while read NAME ; do mkdir -p "${NAME%.tar}"; tar -xvf "${NAME}" -C "${NAME%.tar}"; rm -f "${NAME}"; done 27 | # extract validation data 28 | cd ../ && mkdir val && mv ILSVRC2012_img_val.tar val/ && cd val && tar -xvf ILSVRC2012_img_val.tar 29 | wget -qO- https://raw.githubusercontent.com/soumith/imagenetloader.torch/master/valprep.sh | bash 30 | ``` 31 | 32 | Now you are all set! 33 | 34 | If your imagenet dataset is on HDD or a slow SSD, run this command to resize all the images such that the smaller dimension is 256 and the aspect ratio is intact. 35 | This helps with loading the data from disk faster. 36 | ```bash 37 | find . -name "*.JPEG" | xargs -I {} convert {} -resize "256^>" {} 38 | ``` 39 | 40 | ### Running 41 | The training scripts come with several options which can be listed by running the script with the flag --help 42 | ```bash 43 | th main.lua --help 44 | ``` 45 | 46 | To run the training, simply run main.lua 47 | By default, the script runs 1-GPU AlexNet with the CuDNN backend and 2 data-loader threads. 48 | ```bash 49 | th main.lua -data [imagenet-folder with train and val folders] 50 | ``` 51 | 52 | For 2-GPU model parallel AlexNet + CuDNN, you can run it this way: 53 | ```bash 54 | th main.lua -data [imagenet-folder with train and val folders] -nGPU 2 -backend cudnn -netType alexnet 55 | ``` 56 | Similarly, you can switch the backends to 'cunn' to use a different set of CUDA kernels. 57 | 58 | You can also alternatively train OverFeat using this following command: 59 | ```bash 60 | th main.lua -data [imagenet-folder with train and val folders] -netType overfeat 61 | 62 | # multi-GPU overfeat (let's say 2-GPU) 63 | th main.lua -data [imagenet-folder with train and val folders] -netType overfeat -nGPU 2 64 | ``` 65 | 66 | The training script prints the current Top-1 and Top-5 error as well as the objective loss at every mini-batch. 67 | We hard-coded a learning rate schedule so that AlexNet converges to an error of 42.5% at the end of 53 epochs. 68 | 69 | At the end of every epoch, the model is saved to disk (as model_[xx].t7 where xx is the epoch number). 70 | You can reload this model into torch at any time using torch.load 71 | ```lua 72 | model = torch.load('model_10.t7') -- loading back a saved model 73 | ``` 74 | 75 | Similarly, if you would like to test your model on a new image, you can use testHook from line 103 in donkey.lua to load your image, and send it through the model for predictions. For example: 76 | ```lua 77 | dofile('donkey.lua') 78 | img = testHook({loadSize}, 'test.jpg') 79 | model = torch.load('model_10.t7') 80 | if img:dim() == 3 then 81 | img = img:view(1, img:size(1), img:size(2), img:size(3)) 82 | end 83 | predictions = model:forward(img:cuda()) 84 | ``` 85 | 86 | If you ever want to reuse this example, and debug your scripts, it is suggested to debug and develop in the single-threaded mode, so that stack traces are printed fully. 87 | ```lua 88 | th main.lua -nDonkeys 0 [...options...] 89 | ``` 90 | 91 | ### Code Description 92 | - `main.lua` (~30 lines) - loads all other files, starts training. 93 | - `opts.lua` (~50 lines) - all the command-line options and description 94 | - `data.lua` (~60 lines) - contains the logic to create K threads for parallel data-loading. 95 | - `donkey.lua` (~200 lines) - contains the data-loading logic and details. It is run by each data-loader thread. random image cropping, generating 10-crops etc. are in here. 
96 | - `model.lua` (~80 lines) - creates AlexNet model and criterion 97 | - `train.lua` (~190 lines) - logic for training the network. we hard-code a learning rate + weight decay schedule that produces good results. 98 | - `test.lua` (~120 lines) - logic for testing the network on validation set (including calculating top-1 and top-5 errors) 99 | - `dataset.lua` (~430 lines) - a general purpose data loader, mostly derived from [here: imagenetloader.torch](https://github.com/soumith/imagenetloader.torch). That repo has docs and more examples of using this loader. 100 | -------------------------------------------------------------------------------- /word2vec/rows.json: -------------------------------------------------------------------------------- 1 | ["tigris", "peacock", "coach", "chain", "gown", "maypole", "viaduct", "hopper", "woods", "aegis", "snowmobile", "dish", "chair", "dumbbell", "alp", "mitten", "muzzle", "weimaraner", "mosque", "mower", "bike", "mug", "schooner", "viridis", "ladle", "crate", "jacamar", "screwdriver", "vase", "spoon", "fan", "jack", "llama", "boot", "ox", "nursery", "courgette", "abaya", "rack", "waggon", "merganser", "bicycle", "fence", "bullfrog", "sundial", "spoonbill", "horn", "nail", "guillotine", "crutch", "hamper", "disk", "sub", "pyrenees", "espresso", "revolver", "paddle", "uniform", "unicycle", "tarantula", "carbonara", "admiral", "tusker", "barometer", "urchin", "kangaroo", "bookcase", "baseball", "totanus", "box", "mantis", "bow", "concolor", "galerita", "hamster", "lorikeet", "boa", "bob", "head", "maker", "patagonica", "wig", "puffer", "shovel", "guacamole", "agaric", "weevil", "terrapin", "moped", "strainer", "bra", "projector", "alligator", "canoe", "poodle", "frog", "odometer", "camera", "crab", "vehicle", "postbag", "limousine", "lorry", "chestnut", "throne", "door", "oboe", "obelisk", "altar", "magister", "envelope", "missile", "glass", "train", "armadillo", "stick", "rabbit", "fly", "car", "pedestal", "cap", "worm", "roof", "cat", "wallet", "can", "drill", "mortar", "vulpes", "hatchet", "control", "shark", "nautilus", "reflector", "lock", "anemone", "parachute", "anteater", "violin", "stole", "egret", "pinscher", "speedboat", "comfort", "magpie", "machine", "lamp", "elephant", "wolf", "furnace", "coucal", "mat", "beauty", "ptarmigan", "swing", "spot", "fungus", "plane", "niloticus", "cabbage", "arch", "lar", "jellyfish", "beam", "coat", "platypus", "liner", "volleyball", "dragon", "switch", "truck", "shade", "ambulance", "necklace", "turnstile", "wing", "wine", "rottweiler", "shield", "heater", "drumstick", "essence", "meter", "spatula", "dough", "artichoke", "oven", "keyboard", "pointer", "iguana", "trifle", "papillon", "monitor", "accordion", "broom", "chickadee", "fig", "pussy", "orange", "mail", "carton", "apron", "dam", "safe", "salamander", "snorkel", "snake", "abacus", "bank", "slipper", "scrofa", "dingo", "drop", "ridgeback", "drake", "slide", "pygmaeus", "volcano", "tray", "glasses", "turtle", "trunks", "grouse", "house", "fish", "bookshop", "spider", "sled", "goose", "zebra", "beetle", "sweatshirt", "poncho", "harp", "silky", "toyshop", "velvet", "badger", "opener", "fountain", "migratorius", "washer", "langur", "eagle", "umbrella", "monster", "sarong", "yurt", "ipod", "pug", "cart", "flatworm", "loafer", "acorn", "dishcloth", "retriever", "dowitcher", "albatross", "montifringilla", "padlock", "locomotive", "filter", "chicken", "place", "daisy", "castle", "clothes", "buckle", "jubatus", "pelican", "dwelling", "gazelle", "puck", 
"bonnet", "hook", "niger", "trimaran", "whale", "horse", "basketball", "monkey", "terrier", "system", "wrapper", "stork", "hedgehog", "putorius", "flagpole", "syrup", "banana", "store", "bulldog", "xylophone", "rickshaw", "rifle", "sign", "hen", "stocking", "cuirass", "schnauzer", "minibus", "catamaran", "trike", "scabbard", "bee", "panda", "iron", "light", "dispenser", "dane", "tractor", "cleaner", "mink", "macaw", "handkerchief", "ram", "greyhound", "banjo", "ant", "sax", "saw", "siamang", "ray", "screw", "microphone", "cardigan", "loupe", "bassoon", "capuchin", "eft", "bakery", "couch", "bagel", "carduelis", "chest", "ibex", "hourglass", "brace", "sawmill", "honeycomb", "butterfly", "consomme", "printer", "troglodytes", "theatre", "track", "monastery", "eater", "minivan", "ouzel", "basket", "yawl", "sunglass", "cabinet", "jean", "refrigerator", "bearskin", "modem", "clock", "cover", "dog", "bustard", "pipe", "tinca", "pineapple", "barrel", "bolete", "shop", "caerulea", "cobra", "kite", "stove", "scoreboard", "barbell", "radio", "shoe", "rouge", "spindle", "setter", "vulture", "geyser", "jay", "longlegs", "menu", "scooter", "scorpion", "cheeseburger", "cloak", "bernard", "cassette", "blocker", "piano", "wool", "hip", "plate", "bittern", "cannon", "watch", "bear", "coast", "whistle", "ferret", "boathouse", "roundabout", "compass", "bridge", "net", "gray", "catcher", "bar", "organ", "maculatum", "pretzel", "lolly", "bag", "bison", "corgi", "widow", "bars", "packet", "seat", "lion", "computer", "powder", "racket", "hammer", "diver", "barn", "atratus", "closet", "strawberry", "sombrero", "whippet", "pump", "viper", "wok", "baboon", "beasts", "sharpener", "tent", "cauliflower", "trilobite", "screen", "terrace", "dome", "vestment", "lotion", "guitar", "spray", "chihuahua", "convertible", "newt", "eggnog", "bloodhound", "cot", "pepper", "conch", "lagopus", "bandit", "vault", "microcomputer", "sloth", "loaf", "stretcher", "sheepdog", "mini", "wall", "pot", "forklift", "bikini", "isopod", "bassinet", "weasel", "boar", "table", "tub", "belt", "palace", "maraca", "gondola", "airliner", "eraser", "caretta", "curtain", "basin", "crinoline", "mushroom", "kelpie", "squash", "grille", "spaniel", "bighorn", "turnstone", "bull", "drum", "teapot", "case", "tank", "hat", "alpinus", "lifeboat", "lizard", "aid", "cock", "groin", "pack", "vest", "helper", "handrail", "lakeside", "purse", "site", "player", "tissue", "brake", "beaver", "warthog", "mouse", "wreck", "leafhopper", "helmet", "shirt", "doctor", "balloon", "toaster", "bowl", "gallinule", "kimono", "pad", "shuttle", "oscilloscope", "snoek", "pan", "foxhound", "wheel", "ball", "snail", "commode", "pomegranate", "pick", "leopard", "kit", "cucumber", "pardus", "lemon", "tope", "gong", "broccoli", "hummingbird", "bubalis", "burrito", "center", "pillow", "dryer", "bottle", "joystick", "crayfish", "taxi", "macaque", "clog", "camel", "coffeepot", "deerhound", "mongoose", "constrictor", "apple", "web", "sandal", "goblet", "tablet", "cup", "bell", "smith", "bench", "beagle", "hay", "thimble", "ski", "radiator", "wagon", "mop", "bib", "photocopier", "boxer", "nappy", "knot", "desk", "press", "partridge", "roach", "tick", "pier", "hairless", "flamingo", "towel", "signal", "plough", "pole", "marmoset", "eel", "griffon", "nipple", "paintbrush", "tower", "hornbill", "triceratops", "library", "cicada", "lobster", "mirror", "trombone", "home", "samoyed", "fox", "rattlesnake", "mousetrap", "pen", "cradle", "market", "stingray", "t", "shaker", "flute", "pyjama", "corn", 
"binder", "lionfish", "agama", "slug", "stage", "beaker", "mastiff", "jeep", "coral", "sturgeon", "carrier", "auratus", "chow", "maritimus", "damselfly", "meerkat", "owl", "facility", "maillot", "puzzle", "matchstick", "cauldron", "hartebeest", "holster", "syringe", "mamba", "mortarboard", "rig", "van", "pictus", "birdhouse", "sock", "spiral", "airship", "suit", "maze", "pinwheel", "valley", "bubble", "hare", "vat", "cricket", "sorrel", "jug", "bus", "pitcher", "cardoon", "tripod", "torch", "toucan", "reef", "crane", "ear", "reel", "otter", "harvester", "stethoscope", "potato", "bucket", "cyanea", "centipede", "barbershop", "cello", "lynx", "fireboat", "snowplough", "rotisserie", "pin", "wolfhound", "mask", "marmot", "bulbul", "pie", "pig", "planetarium", "ship", "hound", "gecko", "sandpiper", "pekinese", "cream", "quail", "hyena", "collie", "melampus", "nigra", "book", "shoji", "finch", "tie", "puppy", "wombat", "star", "gar", "macrotis", "rapeseed", "cleaver", "chameleon", "structure", "building", "phone", "groom", "dugong", "camelus", "rule", "gorilla", "husky", "onca"] -------------------------------------------------------------------------------- /donkey.lua: -------------------------------------------------------------------------------- 1 | -- 2 | -- Copyright (c) 2014, Facebook, Inc. 3 | -- All rights reserved. 4 | -- 5 | -- This source code is licensed under the BSD-style license found in the 6 | -- LICENSE file in the root directory of this source tree. An additional grant 7 | -- of patent rights can be found in the PATENTS file in the same directory. 8 | -- 9 | require 'image' 10 | paths.dofile('dataset.lua') 11 | paths.dofile('util.lua') 12 | 13 | -- This file contains the data-loading logic and details. 14 | -- It is run by each data-loader thread. 15 | ------------------------------------------ 16 | 17 | 18 | -- a cache file of the training metadata (if doesnt exist, will be created) 19 | local trainCache = paths.concat(opt.cache, 'trainCache.t7') 20 | local testCache = paths.concat(opt.cache, 'testCache.t7') 21 | local meanstdCache = paths.concat(opt.cache, 'meanstdCache.t7') 22 | -- Check for existence of opt.data 23 | if not os.execute('cd ' .. opt.data) then 24 | error(("could not chdir to '%s'"):format(opt.data)) 25 | end 26 | 27 | local loadSize = {3, opt.imageSize, opt.imageSize} 28 | local sampleSize = {3, opt.cropSize, opt.cropSize} 29 | 30 | 31 | local function loadImage(path) 32 | local input = image.load(path, 3, 'float') 33 | -- find the smaller dimension, and resize it to loadSize (while keeping aspect ratio) 34 | if input:size(3) < input:size(2) then 35 | input = image.scale(input, loadSize[2], loadSize[3] * input:size(2) / input:size(3)) 36 | else 37 | input = image.scale(input, loadSize[2] * input:size(3) / input:size(2), loadSize[3]) 38 | end 39 | return input 40 | end 41 | 42 | -- channel-wise mean and std. Calculate or load them from disk later in the script. 43 | local mean,std 44 | -------------------------------------------------------------------------------- 45 | --[[ 46 | Section 1: Create a train data loader (trainLoader), 47 | which does class-balanced sampling from the dataset and does a random crop 48 | --]] 49 | 50 | -- function to load the image, jitter it appropriately (random crops etc.) 
51 | local trainHook = function(self, path) 52 | collectgarbage() 53 | local input = loadImage(path) 54 | local iW = input:size(3) 55 | local iH = input:size(2) 56 | 57 | -- do random crop 58 | local oW = sampleSize[3] 59 | local oH = sampleSize[2] 60 | local h1 = math.ceil(torch.uniform(1e-2, iH-oH)) 61 | local w1 = math.ceil(torch.uniform(1e-2, iW-oW)) 62 | local out = image.crop(input, w1, h1, w1 + oW, h1 + oH) 63 | assert(out:size(3) == oW) 64 | assert(out:size(2) == oH) 65 | -- do hflip with probability 0.5 66 | if torch.uniform() > 0.5 then out = image.hflip(out) end 67 | -- mean/std 68 | for i=1,3 do -- channels 69 | if mean then out[{{i},{},{}}]:add(-mean[i]) end 70 | if std then out[{{i},{},{}}]:div(std[i]) end 71 | end 72 | return out 73 | end 74 | 75 | if paths.filep(trainCache) then 76 | print('Loading train metadata from cache') 77 | trainLoader = torch.load(trainCache) 78 | trainLoader.sampleHookTrain = trainHook 79 | assert(trainLoader.paths[1] == paths.concat(opt.data, 'train'), 80 | 'cached files dont have the same path as opt.data. Remove your cached files at: ' 81 | .. trainCache .. ' and rerun the program') 82 | else 83 | print('Creating train metadata') 84 | trainLoader = dataLoader{ 85 | paths = {paths.concat(opt.data, 'train')}, --train 86 | loadSize = loadSize, 87 | sampleSize = sampleSize, 88 | split = 100, 89 | verbose = true, 90 | wvectors = opt.wvectors, 91 | neg_samples = opt.neg_samples 92 | } 93 | torch.save(trainCache, trainLoader) 94 | trainLoader.sampleHookTrain = trainHook 95 | end 96 | collectgarbage() 97 | 98 | -- do some sanity checks on trainLoader 99 | do 100 | local class = trainLoader.imageClass 101 | local nClasses = #trainLoader.classes 102 | assert(class:max() <= nClasses, "class logic has error") 103 | assert(class:min() >= 1, "class logic has error") 104 | 105 | end 106 | 107 | -- End of train loader section 108 | -------------------------------------------------------------------------------- 109 | --[[ 110 | Section 2: Create a test data loader (testLoader), 111 | which can iterate over the test set and returns an image's 112 | --]] 113 | 114 | -- function to load the image 115 | testHook = function(self, path) 116 | class_map = torch.load(opt.class_map) 117 | collectgarbage() 118 | local input = loadImage(path) 119 | local oH = sampleSize[2] 120 | local oW = sampleSize[3] 121 | local iW = input:size(3) 122 | local iH = input:size(2) 123 | local w1 = math.ceil((iW-oW)/2) 124 | local h1 = math.ceil((iH-oH)/2) 125 | local out = image.crop(input, w1, h1, w1+oW, h1+oH) -- center patch 126 | -- mean/std 127 | for i=1,3 do -- channels 128 | if mean then out[{{i},{},{}}]:add(-mean[i]) end 129 | if std then out[{{i},{},{}}]:div(std[i]) end 130 | end 131 | return out 132 | end 133 | 134 | if paths.filep(testCache) then 135 | print('Loading test metadata from cache') 136 | testLoader = torch.load(testCache) 137 | testLoader.sampleHookTest = testHook 138 | assert(testLoader.paths[1] == paths.concat(opt.data, 'val'), 139 | 'cached files dont have the same path as opt.data. Remove your cached files at: ' 140 | .. testCache .. 
' and rerun the program') 141 | else 142 | print('Creating test metadata') 143 | testLoader = dataLoader{ 144 | paths = {paths.concat(opt.data, 'val')}, 145 | loadSize = loadSize, 146 | sampleSize = sampleSize, 147 | split = 0, 148 | verbose = true, 149 | forceClasses = classes_map, -- intended to force consistent class indices between trainLoader and testLoader; NB: 'classes_map' does not appear to be defined anywhere (testHook loads 'class_map'), so this is effectively nil here 150 | wvectors = opt.wvectors, 151 | neg_samples = 0, 152 | } 153 | torch.save(testCache, testLoader) 154 | testLoader.sampleHookTest = testHook 155 | end 156 | collectgarbage() 157 | -- End of test loader section 158 | print('Done setting up train and test loaders') 159 | -- Estimate the per-channel mean/std (so that the loaders can normalize appropriately) 160 | if paths.filep(meanstdCache) then 161 | local meanstd = torch.load(meanstdCache) 162 | mean = meanstd.mean 163 | std = meanstd.std 164 | print('Loaded mean and std from cache.') 165 | else 166 | local tm = torch.Timer() 167 | local nSamples = 10000 168 | print('Estimating the mean (per-channel, shared for all pixels) over ' .. nSamples .. ' randomly sampled training images') 169 | local meanEstimate = {0,0,0} 170 | for i=1,nSamples do 171 | local img = trainLoader:sample(1)[1] 172 | for j=1,3 do 173 | meanEstimate[j] = meanEstimate[j] + img[j]:mean() 174 | end 175 | end 176 | for j=1,3 do 177 | meanEstimate[j] = meanEstimate[j] / nSamples 178 | end 179 | mean = meanEstimate 180 | 181 | print('Estimating the std (per-channel, shared for all pixels) over ' .. nSamples .. ' randomly sampled training images') 182 | local stdEstimate = {0,0,0} 183 | for i=1,nSamples do 184 | local img = trainLoader:sample(1)[1] 185 | for j=1,3 do 186 | stdEstimate[j] = stdEstimate[j] + img[j]:std() 187 | end 188 | end 189 | for j=1,3 do 190 | stdEstimate[j] = stdEstimate[j] / nSamples 191 | end 192 | std = stdEstimate 193 | 194 | local cache = {} 195 | cache.mean = mean 196 | cache.std = std 197 | torch.save(meanstdCache, cache) 198 | print('Time to estimate:', tm:time().real) 199 | end 200 | -------------------------------------------------------------------------------- /train.lua: -------------------------------------------------------------------------------- 1 | 2 | -- 3 | -- Copyright (c) 2014, Facebook, Inc. 4 | -- All rights reserved. 5 | -- 6 | -- This source code is licensed under the BSD-style license found in the 7 | -- LICENSE file in the root directory of this source tree. An additional grant 8 | -- of patent rights can be found in the PATENTS file in the same directory. 9 | -- 10 | require 'optim' 11 | require 'dataset' 12 | 13 | 14 | 15 | local w2v 16 | --[[ 17 | NOTE!!!! 
18 | FIND A WAY TO NOT HAVE TO CREATE A DUMMY DATALOADER JUST TO 19 | BE ABLE TO GRAB THE W2V 20 | --]] 21 | 22 | if opt.crit == 'sem' or opt.crit == 'mse' or opt.crit == 'softsem' then 23 | dummy = dataLoader{ 24 | paths = {paths.concat(opt.data, 'train')}, --train 25 | loadSize = {3, opt.imageSize, opt.imageSize}, --doesn't really matter 26 | sampleSize = {3, opt.cropSize, opt.cropSize}, -- doesn't really matter 27 | split = 100, 28 | verbose = true, 29 | wvectors = opt.wvectors, 30 | neg_samples = opt.neg_samples, 31 | } 32 | w2v = dummy:get_w2v() 33 | end 34 | 35 | if w2v then -- debug: dump the word2vec vocabulary; w2v is only built for the sem/mse/softsem criteria, so guard against nil 36 | local counter=0 37 | for i,v in pairs(w2v.w2v.w2vvocab) do 38 | print(i) print(v) 39 | counter = counter + 1 40 | end 41 | print('vocabulary size: ' .. counter) 42 | end 43 | 44 | 45 | if opt.crit == 'softsem' then 46 | local class_labels = torch.load(paths.concat(opt.save, 'classes.t7')) 47 | semantic_array = torch.Tensor(nClasses, opt.wvectors_dim) 48 | 49 | -- fill up semantic array 50 | for i,c in ipairs(class_labels) do 51 | print(c) 52 | local vector = w2v:getWordVector(c) 53 | semantic_array[i] = vector:clone() 54 | end 55 | nFixedParams = semantic_array:nElement() 56 | semantic_array = semantic_array:view(nFixedParams) 57 | semantic_array = semantic_array:cuda() 58 | end 59 | 60 | 61 | 62 | --[[ 63 | 1. Setup SGD optimization state and learning rate schedule 64 | 2. Create loggers. 65 | 3. train - this function handles the high-level training loop, 66 | i.e. load data, train model, save model and state to disk 67 | 4. trainBatch - Used by train() to train a single batch after the data is loaded. 68 | ]]-- 69 | 70 | -- Setup a reused optimization state (for sgd). If needed, reload it from disk 71 | local optimState = { 72 | learningRate = opt.LR, 73 | learningRateDecay = 0.0, 74 | momentum = opt.momentum, 75 | dampening = 0.0, 76 | weightDecay = opt.weightDecay 77 | } 78 | 79 | if opt.optimState ~= 'none' then 80 | assert(paths.filep(opt.optimState), 'File not found: ' .. opt.optimState) 81 | print('Loading optimState from file: ' .. opt.optimState) 82 | optimState = torch.load(opt.optimState) 83 | end 84 | 85 | -- Learning rate annealing schedule. We will build a new optimizer for 86 | -- each epoch. 87 | -- 88 | -- By default we follow a known recipe for a 55-epoch training. If 89 | -- the learningRate command-line parameter has been specified, though, 90 | -- we trust the user is doing something manual, and will use her 91 | -- exact settings for all optimization. 92 | -- 93 | -- Return values: 94 | -- diff to apply to optimState, 95 | -- true IFF this is the first epoch of a new regime 96 | local function paramsForEpoch(epoch) 97 | if opt.LR ~= 0.0 then -- if manually specified 98 | return { } 99 | end 100 | local regimes = { 101 | -- start, end, LR, WD, 102 | { 1, 18, 1e-2, 5e-4, }, 103 | { 19, 29, 5e-3, 5e-4 }, 104 | { 30, 43, 1e-3, 0 }, 105 | { 44, 52, 5e-4, 0 }, 106 | { 53, 1e8, 1e-4, 0 }, 107 | } 108 | 109 | for _, row in ipairs(regimes) do 110 | if epoch >= row[1] and epoch <= row[2] then 111 | return { learningRate=row[3], weightDecay=row[4] }, epoch == row[1] 112 | end 113 | end 114 | end 115 | 116 | -- 2. Create loggers. 117 | trainLogger = optim.Logger(paths.concat(opt.save, 'train.log')) 118 | local batchNumber 119 | local top1_epoch, loss_epoch 120 | 121 | -- 3. train - this function handles the high-level training loop, 122 | -- i.e. 
load data, train model, save model and state to disk 123 | function train() 124 | print('==> doing epoch on training data:') 125 | print("==> online epoch # " .. epoch) 126 | local params, newRegime = paramsForEpoch(epoch) 127 | if newRegime then 128 | optimState = { 129 | learningRate = params.learningRate, 130 | learningRateDecay = 0.0, 131 | momentum = opt.momentum, 132 | dampening = 0.0, 133 | weightDecay = params.weightDecay 134 | } 135 | end 136 | batchNumber = 0 137 | cutorch.synchronize() 138 | 139 | -- set the dropouts to training mode 140 | model:training() 141 | 142 | local tm = torch.Timer() 143 | top1_epoch = 0 144 | loss_epoch = 0 145 | for i=1,opt.epochSize do 146 | -- queue jobs to data-workers 147 | donkeys:addjob( 148 | -- the job callback (runs in data-worker thread) 149 | function() 150 | local inputs, labels, vectors 151 | vectors = torch.rand(1) 152 | if opt.crit == 'class' or opt.crit == 'softsem' then 153 | inputs, labels = trainLoader:sample(opt.batchSize) 154 | else 155 | inputs, vectors, labels = trainLoader:semanticsample(opt.batchSize, opt.neg_samples) 156 | end 157 | return inputs, vectors, labels 158 | end, 159 | -- the end callback (runs in the main thread) 160 | trainBatch 161 | ) 162 | end 163 | 164 | donkeys:synchronize() 165 | cutorch.synchronize() 166 | 167 | top1_epoch = top1_epoch * 100 / (opt.batchSize * opt.epochSize) 168 | loss_epoch = loss_epoch / opt.epochSize 169 | 170 | trainLogger:add{ 171 | ['% top1 accuracy (train set)'] = top1_epoch, 172 | ['avg loss (train set)'] = loss_epoch 173 | } 174 | print(string.format('Epoch: [%d][TRAINING SUMMARY] Total Time(s): %.2f\t' 175 | .. 'average loss (per batch): %.2f \t ' 176 | .. 'accuracy(%%):\t top-1 %.2f\t', 177 | epoch, tm:time().real, loss_epoch, top1_epoch)) 178 | print('\n') 179 | 180 | -- save model 181 | collectgarbage() 182 | 183 | -- clear the intermediate states in the model before saving to disk 184 | -- this saves lots of disk space 185 | model:clearState() 186 | saveDataParallel(paths.concat(opt.save, 'model_' .. epoch .. '.t7'), model) -- defined in util.lua 187 | torch.save(paths.concat(opt.save, 'optimState_' .. epoch .. '.t7'), optimState) 188 | end -- of train() 189 | ------------------------------------------------------------------------------------------- 190 | -- GPU inputs (preallocate) 191 | local inputs = torch.CudaTensor() 192 | local vectors = torch.CudaTensor() 193 | local labels = torch.CudaTensor() 194 | 195 | local timer = torch.Timer() 196 | local dataTimer = torch.Timer() 197 | 198 | local parameters, gradParameters = model:getParameters() 199 | if opt.crit == 'softsem' then 200 | nParameters = parameters:size()[1] 201 | parameters[{{nParameters-nFixedParams+1,nParameters}}] = semantic_array:clone() 202 | end 203 | 204 | 205 | --semantic initialisation of 206 | 207 | -- 4. trainBatch - Used by train() to train a single batch after the data is loaded. 
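--[[
Note on the softsem weight pinning above: model:getParameters() returns a single
flattened parameter vector in which the weights of the final layer occupy the last
nFixedParams entries. Copying semantic_array into that tail (here, and again after
every optim.sgd step inside trainBatch below) keeps the output embedding frozen at
the word vectors while the rest of the network trains. A minimal sketch of the same
idea, assuming only a flat parameter vector `params` and a fixed block `pinned`:

   local function pinTail(params, pinned)
      local n, k = params:size(1), pinned:nElement()
      params[{{n - k + 1, n}}] = pinned -- restore the frozen block after each update
   end
--]]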
208 | function trainBatch(inputsCPU, vectorsCPU, labelsCPU) 209 | cutorch.synchronize() 210 | collectgarbage() 211 | local dataLoadingTime = dataTimer:time().real 212 | timer:reset() 213 | 214 | -- transfer over to GPU 215 | inputs:resize(inputsCPU:size()):copy(inputsCPU) 216 | if opt.crit == 'sem' then 217 | vectors:resize(vectorsCPU:size()):copy(vectorsCPU) 218 | labels:resize(labelsCPU[1]:size()):copy(labelsCPU[1]) 219 | elseif opt.crit == 'class' or opt.crit == 'softsem' then 220 | labels:resize(labelsCPU:size()):copy(labelsCPU) 221 | else 222 | labels:resize(vectorsCPU:size()):copy(vectorsCPU) 223 | end 224 | 225 | local err, outputs 226 | feval = function(x) 227 | model:zeroGradParameters() 228 | -- format input data to be {images} 229 | local output = model:forward(inputs) 230 | 231 | -- format input to criterion to be either {prediction} or {predictions, w_vectors} 232 | if opt.crit == 'class' or opt.crit == 'mse' or opt.crit == 'softsem' then 233 | outputs = output 234 | else 235 | outputs = {output, vectors} 236 | end 237 | err = criterion:forward(outputs, labels) 238 | 239 | local grads = criterion:backward(outputs, labels) 240 | 241 | local gradOutputs 242 | if opt.crit == 'class' or opt.crit == 'mse' or opt.crit == 'softsem' then 243 | gradOutputs = grads 244 | else 245 | gradOutputs = grads[1] -- because we throw away the grads for the word embeddings 246 | end 247 | 248 | model:backward(inputs, gradOutputs) 249 | 250 | return err, gradParameters 251 | 252 | end 253 | 254 | optim.sgd(feval, parameters, optimState) 255 | if opt.crit == 'softsem' then 256 | parameters[{{nParameters-nFixedParams+1,nParameters}}] = semantic_array:clone() 257 | end 258 | 259 | -- DataParallelTable's syncParameters 260 | if model.needsSync then 261 | model:syncParameters() 262 | end 263 | 264 | 265 | cutorch.synchronize() 266 | batchNumber = batchNumber + 1 267 | loss_epoch = loss_epoch + err 268 | -- top-1 error 269 | local top1 = 0 270 | local median = 0 271 | local sim = 0 272 | if opt.crit == 'class' or opt.crit == 'softsem' then 273 | do 274 | local _,prediction_sorted = outputs:float():sort(2, true) -- descending 275 | for i=1,opt.batchSize do 276 | if prediction_sorted[i][1] == labelsCPU[i] then 277 | top1_epoch = top1_epoch + 1; 278 | top1 = top1 + 1 279 | end 280 | end 281 | 282 | end 283 | top1 = top1 * 100 / opt.batchSize; 284 | elseif opt.crit == 'sem' then 285 | top1, sim, median = w2v:eval_ranking(outputs[1]:float(), labelsCPU[1], labelsCPU[2],1, opt.neg_samples) -- eval_ranking returns (top-1 %, mean cosine, median rank) 286 | else 287 | top1, sim, median = w2v:eval_ranking(outputs:float(), labelsCPU[1], labelsCPU[2],1, opt.neg_samples) 288 | end 289 | -- Calculate top-1 error, and print information 290 | print(('Epoch: [%d][%d/%d]\tTime %.3f Err %.4f Top1 %.4f (Sim %.4f Med %.4f) LR %.0e DataLoadingTime %.3f'):format( 291 | epoch, batchNumber, opt.epochSize, timer:time().real, err, top1, sim, median, 292 | optimState.learningRate, dataLoadingTime)) 293 | 294 | dataTimer:reset() 295 | end 296 | -------------------------------------------------------------------------------- /dataset.lua: -------------------------------------------------------------------------------- 1 | require 'torch' 2 | torch.setdefaulttensortype('torch.FloatTensor') 3 | local ffi = require 'ffi' 4 | local class = require('pl.class') 5 | local dir = require 'pl.dir' 6 | local tablex = require 'pl.tablex' 7 | local argcheck = require 'argcheck' 8 | require 'word2vec' 9 | require 'sys' 10 | require 'xlua' 11 | require 'image' 12 | 13 | local dataset = torch.class('dataLoader') 
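--[[
Typical construction, as a sketch mirroring how donkey.lua and train.lua use this
class (the paths and sizes below are placeholders):

   local loader = dataLoader{
      paths = {'/path/to/train'},
      loadSize = {3, 256, 256},
      sampleSize = {3, 224, 224},
      split = 100,
      verbose = true,
      wvectors = 'word2vec', -- directory holding vectors.h5 and rows.json
      neg_samples = 1,
   }
   -- a 32-sample batch: 16 positive image/word-vector pairs, each followed by 1 negative
   local images, vectors, labels = loader:semanticsample(32, 1)
--]]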
14 | 15 | local initcheck = argcheck{ 16 | pack=true, 17 | help=[[ 18 | A dataset class for images in a flat folder structure (folder-name is class-name). 19 | Optimized for extremely large datasets (upwards of 14 million images). 20 | Tested only on Linux (as it uses command-line Linux utilities to scale up) 21 | ]], 22 | {check=function(paths) 23 | local out = true; 24 | for k,v in ipairs(paths) do 25 | if type(v) ~= 'string' then 26 | print('paths can only contain strings'); 27 | out = false 28 | end 29 | end 30 | return out 31 | end, 32 | name="paths", 33 | type="table", 34 | help="Multiple paths of directories with images"}, 35 | 36 | {name="sampleSize", 37 | type="table", 38 | help="a consistent sample size to resize the images"}, 39 | 40 | {name="split", 41 | type="number", 42 | help="Percentage of samples to go to the training split" 43 | }, 44 | 45 | 46 | {name="samplingMode", 47 | type="string", 48 | help="Sampling mode: random | balanced ", 49 | default = "balanced"}, 50 | 51 | {name="verbose", 52 | type="boolean", 53 | help="Verbose mode during initialization", 54 | default = false}, 55 | 56 | {name="neg_samples", 57 | type="number", 58 | help="number of negative samples" 59 | }, 60 | 61 | {name="loadSize", 62 | type="table", 63 | help="a size to load the images to, initially", 64 | opt = true}, 65 | 66 | {name = "wvectors", 67 | type = "string"}, 68 | 69 | {name="forceClasses", 70 | type="table", 71 | help="If you want this loader to map certain classes to certain indices, " 72 | .. "pass a classes table that has {classindex : classname} pairs." 73 | .. " For example: {3 : 'dog', 5 : 'cat'}." 74 | .. " This function is very useful when you want two loaders to have the same " 75 | .. "class indices (trainLoader/testLoader for example)", 76 | opt = true}, 77 | 78 | {name="sampleHookTrain", 79 | type="function", 80 | help="applied to sample during training (e.g. for lighting jitter). " 81 | .. "It takes the image path as input", 82 | opt = true}, 83 | 84 | {name="sampleHookTest", 85 | type="function", 86 | help="applied to sample during testing", 87 | opt = true}, 88 | } 89 | 90 | function dataset:__init(...) 91 | 92 | -- argcheck 93 | local args = initcheck(...) 
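-- initcheck was built with pack=true, so it validates the named arguments against
-- the spec above and returns them packed into a single table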
94 | print(args) 95 | for k,v in pairs(args) do self[k] = v print(k) end 96 | 97 | if not self.loadSize then self.loadSize = self.sampleSize; end 98 | 99 | if not self.sampleHookTrain then self.sampleHookTrain = self.defaultSampleHook end 100 | if not self.sampleHookTest then self.sampleHookTest = self.defaultSampleHook end 101 | 102 | -- find class names 103 | self.classes = {} 104 | local classPaths = {} 105 | if self.forceClasses then 106 | for k,v in pairs(self.forceClasses) do 107 | self.classes[k] = v 108 | classPaths[k] = {} 109 | end 110 | end 111 | local function tableFind(t, o) for k,v in pairs(t) do if v == o then return k end end end 112 | -- loop over each paths folder, get list of unique class names, 113 | -- also store the directory paths per class 114 | -- for each class, 115 | for k,path in ipairs(self.paths) do 116 | local dirs = dir.getdirectories(path); 117 | for k,dirpath in ipairs(dirs) do 118 | local class = paths.basename(dirpath) 119 | local idx = tableFind(self.classes, class) 120 | if not idx then 121 | table.insert(self.classes, class) 122 | idx = #self.classes 123 | classPaths[idx] = {} 124 | end 125 | if not tableFind(classPaths[idx], dirpath) then 126 | table.insert(classPaths[idx], dirpath); 127 | end 128 | end 129 | end 130 | 131 | self.classIndices = {} 132 | for k,v in ipairs(self.classes) do 133 | self.classIndices[v] = k 134 | end 135 | 136 | 137 | -- read in word vectors 138 | self.w2v = Word2vec(self.wvectors, self.classes) 139 | 140 | -- define command-line tools, try your best to maintain OSX compatibility 141 | local wc = 'wc' 142 | local cut = 'cut' 143 | local find = 'find' 144 | if ffi.os == 'OSX' then 145 | wc = 'gwc' 146 | cut = 'gcut' 147 | find = 'gfind' 148 | end 149 | ---------------------------------------------------------------------- 150 | -- Options for the GNU find command 151 | local extensionList = {'jpg', 'png','JPG','PNG','JPEG', 'ppm', 'PPM', 'bmp', 'BMP'} 152 | local findOptions = ' -iname "*.' .. extensionList[1] .. '"' 153 | for i=2,#extensionList do 154 | findOptions = findOptions .. ' -o -iname "*.' .. extensionList[i] .. '"' 155 | end 156 | 157 | -- find the image path names 158 | self.imagePath = torch.CharTensor() -- path to each image in dataset 159 | self.imageClass = torch.LongTensor() -- class index of each image (class index in self.classes) 160 | self.classList = {} -- index of imageList to each image of a particular class 161 | self.classListSample = self.classList -- the main list used when sampling data 162 | 163 | print('running "find" on each class directory, and concatenate all' 164 | .. ' those filenames into a single file containing all image paths for a given class') 165 | -- so, generates one file per class 166 | local classFindFiles = {} 167 | for i=1,#self.classes do 168 | classFindFiles[i] = os.tmpname() 169 | end 170 | local combinedFindList = os.tmpname(); 171 | 172 | local tmpfile = os.tmpname() 173 | local tmphandle = assert(io.open(tmpfile, 'w')) 174 | -- iterate over classes 175 | for i, class in ipairs(self.classes) do 176 | -- iterate over classPaths 177 | for j,path in ipairs(classPaths[i]) do 178 | local command = find .. ' "' .. path .. '" ' .. findOptions 179 | .. ' >>"' .. classFindFiles[i] .. '" \n' 180 | tmphandle:write(command) 181 | end 182 | end 183 | io.close(tmphandle) 184 | os.execute('bash ' .. tmpfile) 185 | os.execute('rm -f ' .. 
tmpfile) 186 | 187 | print('now combine all the files to a single large file') 188 | local tmpfile = os.tmpname() 189 | local tmphandle = assert(io.open(tmpfile, 'w')) 190 | -- concat all finds to a single large file in the order of self.classes 191 | for i=1,#self.classes do 192 | local command = 'cat "' .. classFindFiles[i] .. '" >>' .. combinedFindList .. ' \n' 193 | tmphandle:write(command) 194 | end 195 | io.close(tmphandle) 196 | os.execute('bash ' .. tmpfile) 197 | os.execute('rm -f ' .. tmpfile) 198 | 199 | --========================================================================== 200 | print('load the large concatenated list of sample paths to self.imagePath') 201 | local maxPathLength = tonumber(sys.fexecute(wc .. " -L '" 202 | .. combinedFindList .. "' |" 203 | .. cut .. " -f1 -d' '")) + 1 204 | local length = tonumber(sys.fexecute(wc .. " -l '" 205 | .. combinedFindList .. "' |" 206 | .. cut .. " -f1 -d' '")) 207 | assert(length > 0, "Could not find any image file in the given input paths") 208 | assert(maxPathLength > 0, "paths of files are length 0?") 209 | self.imagePath:resize(length, maxPathLength):fill(0) 210 | local s_data = self.imagePath:data() 211 | local count = 0 212 | for line in io.lines(combinedFindList) do 213 | ffi.copy(s_data, line) 214 | s_data = s_data + maxPathLength 215 | if self.verbose and count % 10000 == 0 then 216 | xlua.progress(count, length) 217 | end; 218 | count = count + 1 219 | end 220 | 221 | self.numSamples = self.imagePath:size(1) 222 | if self.verbose then print(self.numSamples .. ' samples found.') end 223 | --========================================================================== 224 | print('Updating classList and imageClass appropriately') 225 | self.imageClass:resize(self.numSamples) 226 | local runningIndex = 0 227 | for i=1,#self.classes do 228 | if self.verbose then xlua.progress(i, #(self.classes)) end 229 | local length = tonumber(sys.fexecute(wc .. " -l '" 230 | .. classFindFiles[i] .. "' |" 231 | .. cut .. " -f1 -d' '")) 232 | if length == 0 then 233 | error('Class has zero samples') 234 | else 235 | self.classList[i] = torch.linspace(runningIndex + 1, runningIndex + length, length):long() 236 | self.imageClass[{{runningIndex + 1, runningIndex + length}}]:fill(i) 237 | end 238 | runningIndex = runningIndex + length 239 | end 240 | 241 | --========================================================================== 242 | -- clean up temporary files 243 | print('Cleaning up temporary files') 244 | local tmpfilelistall = '' 245 | for i=1,#(classFindFiles) do 246 | tmpfilelistall = tmpfilelistall .. ' "' .. classFindFiles[i] .. '"' 247 | if i % 1000 == 0 then 248 | os.execute('rm -f ' .. tmpfilelistall) 249 | tmpfilelistall = '' 250 | end 251 | end 252 | os.execute('rm -f ' .. tmpfilelistall) 253 | os.execute('rm -f "' .. combinedFindList .. '"') 254 | --========================================================================== 255 | 256 | if self.split == 100 then 257 | self.testIndicesSize = 0 258 | else 259 | print('Splitting training and test sets to a ratio of ' 260 | .. self.split .. '/' .. 
(100-self.split)) 261 | self.classListTrain = {} 262 | self.classListTest = {} 263 | self.classListSample = self.classListTrain 264 | local totalTestSamples = 0 265 | -- split the classList into classListTrain and classListTest 266 | for i=1,#self.classes do 267 | local list = self.classList[i] 268 | local count = self.classList[i]:size(1) 269 | local splitidx = math.floor((count * self.split / 100) + 0.5) -- +round 270 | local perm = torch.randperm(count) 271 | self.classListTrain[i] = torch.LongTensor(splitidx) 272 | for j=1,splitidx do 273 | self.classListTrain[i][j] = list[perm[j]] 274 | end 275 | if splitidx == count then -- all samples were allocated to train set 276 | self.classListTest[i] = torch.LongTensor() 277 | else 278 | self.classListTest[i] = torch.LongTensor(count-splitidx) 279 | totalTestSamples = totalTestSamples + self.classListTest[i]:size(1) 280 | local idx = 1 281 | for j=splitidx+1,count do 282 | self.classListTest[i][idx] = list[perm[j]] 283 | idx = idx + 1 284 | end 285 | end 286 | end 287 | -- Now combine classListTest into a single tensor 288 | self.testIndices = torch.LongTensor(totalTestSamples) 289 | self.testIndicesSize = totalTestSamples 290 | local tdata = self.testIndices:data() 291 | local tidx = 0 292 | for i=1,#self.classes do 293 | local list = self.classListTest[i] 294 | if list:dim() ~= 0 then 295 | local ldata = list:data() 296 | for j=0,list:size(1)-1 do 297 | tdata[tidx] = ldata[j] 298 | tidx = tidx + 1 299 | end 300 | end 301 | end 302 | end 303 | end 304 | 305 | 306 | function dataset:get_w2v() 307 | return self.w2v 308 | end 309 | 310 | -- size(), size(class) 311 | function dataset:size(class, list) 312 | list = list or self.classList 313 | if not class then 314 | return self.numSamples 315 | elseif type(class) == 'string' then 316 | return list[self.classIndices[class]]:size(1) 317 | elseif type(class) == 'number' then 318 | return list[class]:size(1) 319 | end 320 | end 321 | 322 | -- getByClass: returns one randomly drawn sample of the given class 323 | function dataset:getByClass(class) 324 | local index = math.max(1, math.ceil(torch.uniform() * self.classListSample[class]:nElement())) 325 | local imgpath = ffi.string(torch.data(self.imagePath[self.classListSample[class][index]])) 326 | return self:sampleHookTrain(imgpath) 327 | end 328 | 329 | -- converts a table of samples (and corresponding labels) to a clean tensor 330 | local function tableToOutput(self, dataTable, scalarTable) 331 | local data, scalarLabels 332 | local quantity = #scalarTable 333 | assert(dataTable[1]:dim() == 3) 334 | data = torch.Tensor(quantity, 335 | self.sampleSize[1], self.sampleSize[2], self.sampleSize[3]) 336 | scalarLabels = torch.LongTensor(quantity):fill(-1111) 337 | for i=1,#dataTable do 338 | data[i]:copy(dataTable[i]) 339 | scalarLabels[i] = scalarTable[i] 340 | end 341 | return data, scalarLabels 342 | end 343 | 344 | -- converts a table of samples, word embeddings and two label tables to clean tensors 345 | local function tableToSemanticOutput(self, dataTable, embedTable, scalarTable_1, scalarTable_2) 346 | local data, vectors, scalarLabels_1, scalarLabels_2 347 | local quantity = #scalarTable_1 348 | assert(dataTable[1]:dim() == 3) 349 | local v_dim = embedTable[1]:size(1) 350 | data = torch.Tensor(quantity, 351 | self.sampleSize[1], self.sampleSize[2], self.sampleSize[3]) 352 | vectors = torch.Tensor(quantity, v_dim) 353 | scalarLabels_1 = torch.LongTensor(quantity):fill(0) 354 | scalarLabels_2 = torch.LongTensor(quantity):fill(-1) 355 | for i=1,#dataTable do 356 | data[i]:copy(dataTable[i]) 357 | scalarLabels_1[i] = scalarTable_1[i] 358 | scalarLabels_2[i] = scalarTable_2[i] 359 | vectors[i]:copy(embedTable[i]) 360 | end 361 | return data, vectors, {scalarLabels_1, scalarLabels_2} 362 | end 363 | 364 | 365 | 366 | -- sampler, samples from the training set. 367 | function dataset:sample(quantity) 368 | assert(quantity) 369 | local dataTable = {} 370 | local scalarTable = {} 371 | for i=1,quantity do 372 | local class = torch.random(1, #self.classes) 373 | local out = self:getByClass(class) 374 | table.insert(dataTable, out) 375 | table.insert(scalarTable, class) 376 | end 377 | local data, scalarLabels = tableToOutput(self, dataTable, scalarTable) 378 | return data, scalarLabels 379 | end 380 | 381 | 382 | -- Semantic sampler: samples from the training set and also returns word vectors. Each image is paired once with its own class vector (label 1) and with neg_samples mismatched class vectors (label -1), so quantity must be divisible by (1 + neg_samples). 383 | function dataset:semanticsample(quantity, neg_samples) 384 | local dataTable = {} 385 | local embedTable = {} 386 | local scalarTable_1 = {} 387 | local scalarTable_2 = {} 388 | 389 | local pos_samples = 0 390 | if neg_samples <= 0 then 391 | pos_samples = quantity 392 | else 393 | pos_samples = quantity / (neg_samples+1) 394 | end 395 | 396 | assert(quantity % (neg_samples+1) == 0, 'Choose a batchSize that is divisible by (1 + neg_samples)!') 397 | 398 | for i=1,pos_samples do 399 | local class = torch.random(1, #self.classes) 400 | local out = self:getByClass(class) 401 | table.insert(dataTable, out) 402 | local outEmb = self.w2v:getVector(class) 403 | table.insert(embedTable, outEmb) 404 | local label = 1 405 | table.insert(scalarTable_1, label) 406 | table.insert(scalarTable_2, class) 407 | 408 | for n=1,neg_samples do 409 | 410 | -- pick a random mismatched class 411 | local n_class = class 412 | while n_class == class do 413 | n_class = torch.random(1, #self.classes) 414 | end 415 | 416 | -- add a negative text sample: the same image paired with the mismatched class's word vector 417 | table.insert(dataTable, out) 418 | local n_outEmb = self.w2v:getVector(n_class) 419 | table.insert(embedTable, n_outEmb) 420 | local label = -1 421 | table.insert(scalarTable_1, label) 422 | table.insert(scalarTable_2, n_class) 423 | 424 | end 425 | end 426 | local data, vectors, scalarLabels = tableToSemanticOutput(self, dataTable, embedTable, scalarTable_1, scalarTable_2) 427 | return data, vectors, scalarLabels 428 | end 429 | 430 | 431 | 432 | function dataset:get(i1, i2) 433 | local indices = torch.range(i1, i2); 434 | local quantity = i2 - i1 + 1; 435 | assert(quantity > 0) 436 | -- now that indices has been initialized, get the samples 437 | local dataTable = {} 438 | local scalarTable = {} 439 | for i=1,quantity do 440 | -- load the sample 441 | local imgpath = ffi.string(torch.data(self.imagePath[indices[i]])) 442 | local out = self:sampleHookTest(imgpath) 443 | table.insert(dataTable, out) 444 | table.insert(scalarTable, self.imageClass[indices[i]]) 445 | end 446 | local data, scalarLabels = tableToOutput(self, dataTable, scalarTable) 447 | return data, scalarLabels 448 | end 449 | 450 | function dataset:getSemantic(i1, i2) 451 | local indices = torch.range(i1, i2); 452 | local quantity = i2 - i1 + 1; 453 | assert(quantity > 0) 454 | -- now that indices has been initialized, get the samples 455 | local dataTable = {} 456 | local scalarTable_1 = {} 457 | local scalarTable_2 = {} 458 | for i=1,quantity do 459 | -- load the sample 460 | local imgpath = ffi.string(torch.data(self.imagePath[indices[i]])) 461 | local out = self:sampleHookTest(imgpath) 462 | table.insert(dataTable, out) 463 | table.insert(scalarTable_1, 1) 464 | table.insert(scalarTable_2, self.imageClass[indices[i]]) 465 | 
end 466 | local data, vectors, scalarLabels = tableToSemanticOutput(self, dataTable, torch.rand(quantity,1), scalarTable_1, scalarTable_2) 467 | return data, scalarLabels 468 | end 469 | 470 | 471 | 472 | return dataset 473 | -------------------------------------------------------------------------------- /imagenet_labels/labels.txt: -------------------------------------------------------------------------------- 1 | n02119789 kit fox, Vulpes macrotis 2 | n02100735 English setter 3 | n02096294 Australian terrier 4 | n02066245 grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus 5 | n02509815 lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens 6 | n02124075 Egyptian cat 7 | n02417914 ibex, Capra ibex 8 | n02123394 Persian cat 9 | n02125311 cougar, puma, catamount, mountain lion, painter, panther, Felis concolor 10 | n02423022 gazelle 11 | n02346627 porcupine, hedgehog 12 | n02077923 sea lion 13 | n02447366 badger 14 | n02109047 Great Dane 15 | n02092002 Scottish deerhound, deerhound 16 | n02071294 killer whale, killer, orca, grampus, sea wolf, Orcinus orca 17 | n02442845 mink 18 | n02504458 African elephant, Loxodonta africana 19 | n02114712 red wolf, maned wolf, Canis rufus, Canis niger 20 | n02128925 jaguar, panther, Panthera onca, Felis onca 21 | n02117135 hyena, hyaena 22 | n02493509 titi, titi monkey 23 | n02457408 three-toed sloth, ai, Bradypus tridactylus 24 | n02389026 sorrel 25 | n02443484 black-footed ferret, ferret, Mustela nigripes 26 | n02110341 dalmatian, coach dog, carriage dog 27 | n02093256 Staffordshire bullterrier, Staffordshire bull terrier 28 | n02106382 Bouvier des Flandres, Bouviers des Flandres 29 | n02441942 weasel 30 | n02113712 miniature poodle 31 | n02415577 bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis 32 | n02356798 fox squirrel, eastern fox squirrel, Sciurus niger 33 | n02488702 colobus, colobus monkey 34 | n02123159 tiger cat 35 | n02422699 impala, Aepyceros melampus 36 | n02114855 coyote, prairie wolf, brush wolf, Canis latrans 37 | n02094433 Yorkshire terrier 38 | n02111277 Newfoundland, Newfoundland dog 39 | n02119022 red fox, Vulpes vulpes 40 | n02422106 hartebeest 41 | n02120505 grey fox, gray fox, Urocyon cinereoargenteus 42 | n02086079 Pekinese, Pekingese, Peke 43 | n02484975 guenon, guenon monkey 44 | n02137549 mongoose 45 | n02500267 indri, indris, Indri indri, Indri brevicaudatus 46 | n02129604 tiger, Panthera tigris 47 | n02396427 wild boar, boar, Sus scrofa 48 | n02391049 zebra 49 | n02412080 ram, tup 50 | n02480495 orangutan, orang, orangutang, Pongo pygmaeus 51 | n02110806 basenji 52 | n02128385 leopard, Panthera pardus 53 | n02100583 vizsla, Hungarian pointer 54 | n02494079 squirrel monkey, Saimiri sciureus 55 | n02123597 Siamese cat, Siamese 56 | n02481823 chimpanzee, chimp, Pan troglodytes 57 | n02105505 komondor 58 | n02489166 proboscis monkey, Nasalis larvatus 59 | n02364673 guinea pig, Cavia cobaya 60 | n02114548 white wolf, Arctic wolf, Canis lupus tundrarum 61 | n02134084 ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus 62 | n02480855 gorilla, Gorilla gorilla 63 | n02403003 ox 64 | n02108551 Tibetan mastiff 65 | n02493793 spider monkey, Ateles geoffroyi 66 | n02107142 Doberman, Doberman pinscher 67 | n02397096 warthog 68 | n02437312 Arabian camel, dromedary, Camelus dromedarius 69 | n02483708 siamang, Hylobates syndactylus, Symphalangus syndactylus 70 | n02099601 golden retriever 71 | n02106166 Border collie 72 | n02326432 hare 73 | 
n02108089 boxer 74 | n02486261 patas, hussar monkey, Erythrocebus patas 75 | n02486410 baboon 76 | n02487347 macaque 77 | n02492035 capuchin, ringtail, Cebus capucinus 78 | n02099267 flat-coated retriever 79 | n02395406 hog, pig, grunter, squealer, Sus scrofa 80 | n02109961 Eskimo dog, husky 81 | n02101388 Brittany spaniel 82 | n03187595 dial telephone, dial phone 83 | n03733281 maze, labyrinth 84 | n02101006 Gordon setter 85 | n02115641 dingo, warrigal, warragal, Canis dingo 86 | n02342885 hamster 87 | n02120079 Arctic fox, white fox, Alopex lagopus 88 | n02408429 water buffalo, water ox, Asiatic buffalo, Bubalus bubalis 89 | n02133161 American black bear, black bear, Ursus americanus, Euarctos americanus 90 | n02328150 Angora, Angora rabbit 91 | n02410509 bison 92 | n02492660 howler monkey, howler 93 | n02398521 hippopotamus, hippo, river horse, Hippopotamus amphibius 94 | n02510455 giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca 95 | n02123045 tabby, tabby cat 96 | n02490219 marmoset 97 | n02109525 Saint Bernard, St Bernard 98 | n02454379 armadillo 99 | n02090379 redbone 100 | n02443114 polecat, fitch, foulmart, foumart, Mustela putorius 101 | n02361337 marmot 102 | n02483362 gibbon, Hylobates lar 103 | n02437616 llama 104 | n02325366 wood rabbit, cottontail, cottontail rabbit 105 | n02129165 lion, king of beasts, Panthera leo 106 | n02100877 Irish setter, red setter 107 | n02074367 dugong, Dugong dugon 108 | n02504013 Indian elephant, Elephas maximus 109 | n02363005 beaver 110 | n02497673 Madagascar cat, ring-tailed lemur, Lemur catta 111 | n02087394 Rhodesian ridgeback 112 | n02127052 lynx, catamount 113 | n02116738 African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus 114 | n02488291 langur 115 | n02114367 timber wolf, grey wolf, gray wolf, Canis lupus 116 | n02130308 cheetah, chetah, Acinonyx jubatus 117 | n02134418 sloth bear, Melursus ursinus, Ursus ursinus 118 | n02106662 German shepherd, German shepherd dog, German police dog, alsatian 119 | n02444819 otter 120 | n01882714 koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus 121 | n01871265 tusker 122 | n01872401 echidna, spiny anteater, anteater 123 | n01877812 wallaby, brush kangaroo 124 | n01873310 platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus 125 | n01883070 wombat 126 | n04086273 revolver, six-gun, six-shooter 127 | n04507155 umbrella 128 | n04147183 schooner 129 | n04254680 soccer ball 130 | n02672831 accordion, piano accordion, squeeze box 131 | n02219486 ant, emmet, pismire 132 | n02317335 starfish, sea star 133 | n01968897 chambered nautilus, pearly nautilus, nautilus 134 | n03452741 grand piano, grand 135 | n03642806 laptop, laptop computer 136 | n07745940 strawberry 137 | n02690373 airliner 138 | n04552348 warplane, military plane 139 | n02692877 airship, dirigible 140 | n02782093 balloon 141 | n04266014 space shuttle 142 | n03344393 fireboat 143 | n03447447 gondola 144 | n04273569 speedboat 145 | n03662601 lifeboat 146 | n02951358 canoe 147 | n04612504 yawl 148 | n02981792 catamaran 149 | n04483307 trimaran 150 | n03095699 container ship, containership, container vessel 151 | n03673027 liner, ocean liner 152 | n03947888 pirate, pirate ship 153 | n02687172 aircraft carrier, carrier, flattop, attack aircraft carrier 154 | n04347754 submarine, pigboat, sub, U-boat 155 | n04606251 wreck 156 | n03478589 half track 157 | n04389033 tank, army tank, armored combat vehicle, armoured combat vehicle 158 | n03773504 missile 159 | n02860847 
bobsled, bobsleigh, bob 160 | n03218198 dogsled, dog sled, dog sleigh 161 | n02835271 bicycle-built-for-two, tandem bicycle, tandem 162 | n03792782 mountain bike, all-terrain bike, off-roader 163 | n03393912 freight car 164 | n03895866 passenger car, coach, carriage 165 | n02797295 barrow, garden cart, lawn cart, wheelbarrow 166 | n04204347 shopping cart 167 | n03791053 motor scooter, scooter 168 | n03384352 forklift 169 | n03272562 electric locomotive 170 | n04310018 steam locomotive 171 | n02704792 amphibian, amphibious vehicle 172 | n02701002 ambulance 173 | n02814533 beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon 174 | n02930766 cab, hack, taxi, taxicab 175 | n03100240 convertible 176 | n03594945 jeep, landrover 177 | n03670208 limousine, limo 178 | n03770679 minivan 179 | n03777568 Model T 180 | n04037443 racer, race car, racing car 181 | n04285008 sports car, sport car 182 | n03444034 go-kart 183 | n03445924 golfcart, golf cart 184 | n03785016 moped 185 | n04252225 snowplow, snowplough 186 | n03345487 fire engine, fire truck 187 | n03417042 garbage truck, dustcart 188 | n03930630 pickup, pickup truck 189 | n04461696 tow truck, tow car, wrecker 190 | n04467665 trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi 191 | n03796401 moving van 192 | n03977966 police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria 193 | n04065272 recreational vehicle, RV, R.V. 194 | n04335435 streetcar, tram, tramcar, trolley, trolley car 195 | n04252077 snowmobile 196 | n04465501 tractor 197 | n03776460 mobile home, manufactured home 198 | n04482393 tricycle, trike, velocipede 199 | n04509417 unicycle, monocycle 200 | n03538406 horse cart, horse-cart 201 | n03788365 mosquito net 202 | n03868242 oxcart 203 | n02804414 bassinet 204 | n03125729 cradle 205 | n03131574 crib, cot 206 | n03388549 four-poster 207 | n02870880 bookcase 208 | n03018349 china cabinet, china closet 209 | n03742115 medicine chest, medicine cabinet 210 | n03016953 chiffonier, commode 211 | n04380533 table lamp 212 | n03337140 file, file cabinet, filing cabinet 213 | n03902125 pay-phone, pay-station 214 | n03891251 park bench 215 | n02791124 barber chair 216 | n04429376 throne 217 | n03376595 folding chair 218 | n04099969 rocking chair, rocker 219 | n04344873 studio couch, day bed 220 | n04447861 toilet seat 221 | n03179701 desk 222 | n03982430 pool table, billiard table, snooker table 223 | n03201208 dining table, board 224 | n03290653 entertainment center 225 | n04550184 wardrobe, closet, press 226 | n07742313 Granny Smith 227 | n07747607 orange 228 | n07749582 lemon 229 | n07753113 fig 230 | n07753275 pineapple, ananas 231 | n07753592 banana 232 | n07754684 jackfruit, jak, jack 233 | n07760859 custard apple 234 | n07768694 pomegranate 235 | n12267677 acorn 236 | n12620546 hip, rose hip, rosehip 237 | n13133613 ear, spike, capitulum 238 | n11879895 rapeseed 239 | n12144580 corn 240 | n12768682 buckeye, horse chestnut, conker 241 | n03854065 organ, pipe organ 242 | n04515003 upright, upright piano 243 | n03017168 chime, bell, gong 244 | n03249569 drum, membranophone, tympan 245 | n03447721 gong, tam-tam 246 | n03720891 maraca 247 | n03721384 marimba, xylophone 248 | n04311174 steel drum 249 | n02787622 banjo 250 | n02992211 cello, violoncello 251 | n03637318 lampshade, lamp shade 252 | n03495258 harp 253 | n02676566 acoustic guitar 254 | n03272010 electric guitar 255 | n03110669 cornet, horn, trumpet, trump 256 | n03394916 French horn, horn 257 | n04487394 
trombone 258 | n03494278 harmonica, mouth organ, harp, mouth harp 259 | n03840681 ocarina, sweet potato 260 | n03884397 panpipe, pandean pipe, syrinx 261 | n02804610 bassoon 262 | n04141076 sax, saxophone 263 | n03372029 flute, transverse flute 264 | n11939491 daisy 265 | n12057211 yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum 266 | n09246464 cliff, drop, drop-off 267 | n09468604 valley, vale 268 | n09193705 alp 269 | n09472597 volcano 270 | n09399592 promontory, headland, head, foreland 271 | n09421951 sandbar, sand bar 272 | n09256479 coral reef 273 | n09332890 lakeside, lakeshore 274 | n09428293 seashore, coast, seacoast, sea-coast 275 | n09288635 geyser 276 | n03498962 hatchet 277 | n03041632 cleaver, meat cleaver, chopper 278 | n03658185 letter opener, paper knife, paperknife 279 | n03954731 plane, carpenter's plane, woodworking plane 280 | n03995372 power drill 281 | n03649909 lawn mower, mower 282 | n03481172 hammer 283 | n03109150 corkscrew, bottle screw 284 | n02951585 can opener, tin opener 285 | n03970156 plunger, plumber's helper 286 | n04154565 screwdriver 287 | n04208210 shovel 288 | n03967562 plow, plough 289 | n03000684 chain saw, chainsaw 290 | n01514668 cock 291 | n01514859 hen 292 | n01518878 ostrich, Struthio camelus 293 | n01530575 brambling, Fringilla montifringilla 294 | n01531178 goldfinch, Carduelis carduelis 295 | n01532829 house finch, linnet, Carpodacus mexicanus 296 | n01534433 junco, snowbird 297 | n01537544 indigo bunting, indigo finch, indigo bird, Passerina cyanea 298 | n01558993 robin, American robin, Turdus migratorius 299 | n01560419 bulbul 300 | n01580077 jay 301 | n01582220 magpie 302 | n01592084 chickadee 303 | n01601694 water ouzel, dipper 304 | n01608432 kite 305 | n01614925 bald eagle, American eagle, Haliaeetus leucocephalus 306 | n01616318 vulture 307 | n01622779 great grey owl, great gray owl, Strix nebulosa 308 | n01795545 black grouse 309 | n01796340 ptarmigan 310 | n01797886 ruffed grouse, partridge, Bonasa umbellus 311 | n01798484 prairie chicken, prairie grouse, prairie fowl 312 | n01806143 peacock 313 | n01806567 quail 314 | n01807496 partridge 315 | n01817953 African grey, African gray, Psittacus erithacus 316 | n01818515 macaw 317 | n01819313 sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita 318 | n01820546 lorikeet 319 | n01824575 coucal 320 | n01828970 bee eater 321 | n01829413 hornbill 322 | n01833805 hummingbird 323 | n01843065 jacamar 324 | n01843383 toucan 325 | n01847000 drake 326 | n01855032 red-breasted merganser, Mergus serrator 327 | n01855672 goose 328 | n01860187 black swan, Cygnus atratus 329 | n02002556 white stork, Ciconia ciconia 330 | n02002724 black stork, Ciconia nigra 331 | n02006656 spoonbill 332 | n02007558 flamingo 333 | n02009912 American egret, great white heron, Egretta albus 334 | n02009229 little blue heron, Egretta caerulea 335 | n02011460 bittern 336 | n02012849 crane 337 | n02013706 limpkin, Aramus pictus 338 | n02018207 American coot, marsh hen, mud hen, water hen, Fulica americana 339 | n02018795 bustard 340 | n02025239 ruddy turnstone, Arenaria interpres 341 | n02027492 red-backed sandpiper, dunlin, Erolia alpina 342 | n02028035 redshank, Tringa totanus 343 | n02033041 dowitcher 344 | n02037110 oystercatcher, oyster catcher 345 | n02017213 European gallinule, Porphyrio porphyrio 346 | n02051845 pelican 347 | n02056570 king penguin, Aptenodytes patagonica 348 | n02058221 albatross, mollymawk 349 | n01484850 great white shark, white shark, man-eater, 
man-eating shark, Carcharodon carcharias 350 | n01491361 tiger shark, Galeocerdo cuvieri 351 | n01494475 hammerhead, hammerhead shark 352 | n01496331 electric ray, crampfish, numbfish, torpedo 353 | n01498041 stingray 354 | n02514041 barracouta, snoek 355 | n02536864 coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch 356 | n01440764 tench, Tinca tinca 357 | n01443537 goldfish, Carassius auratus 358 | n02526121 eel 359 | n02606052 rock beauty, Holocanthus tricolor 360 | n02607072 anemone fish 361 | n02643566 lionfish 362 | n02655020 puffer, pufferfish, blowfish, globefish 363 | n02640242 sturgeon 364 | n02641379 gar, garfish, garpike, billfish, Lepisosteus osseus 365 | n01664065 loggerhead, loggerhead turtle, Caretta caretta 366 | n01667114 mud turtle 367 | n01667778 terrapin 368 | n01669191 box turtle, box tortoise 369 | n01675722 banded gecko 370 | n01677366 common iguana, iguana, Iguana iguana 371 | n01682714 American chameleon, anole, Anolis carolinensis 372 | n01685808 whiptail, whiptail lizard 373 | n01687978 agama 374 | n01688243 frilled lizard, Chlamydosaurus kingi 375 | n01689811 alligator lizard 376 | n01692333 Gila monster, Heloderma suspectum 377 | n01693334 green lizard, Lacerta viridis 378 | n01694178 African chameleon, Chamaeleo chamaeleon 379 | n01695060 Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis 380 | n01704323 triceratops 381 | n01697457 African crocodile, Nile crocodile, Crocodylus niloticus 382 | n01698640 American alligator, Alligator mississipiensis 383 | n01728572 thunder snake, worm snake, Carphophis amoenus 384 | n01728920 ringneck snake, ring-necked snake, ring snake 385 | n01729322 hognose snake, puff adder, sand viper 386 | n01729977 green snake, grass snake 387 | n01734418 king snake, kingsnake 388 | n01735189 garter snake, grass snake 389 | n01737021 water snake 390 | n01739381 vine snake 391 | n01740131 night snake, Hypsiglena torquata 392 | n01742172 boa constrictor, Constrictor constrictor 393 | n01744401 rock python, rock snake, Python sebae 394 | n01748264 Indian cobra, Naja naja 395 | n01749939 green mamba 396 | n01751748 sea snake 397 | n01753488 horned viper, cerastes, sand viper, horned asp, Cerastes cornutus 398 | n04326547 stone wall 399 | n01756291 sidewinder, horned rattlesnake, Crotalus cerastes 400 | n01629819 European fire salamander, Salamandra salamandra 401 | n01630670 common newt, Triturus vulgaris 402 | n01631663 eft 403 | n01632458 spotted salamander, Ambystoma maculatum 404 | n01632777 axolotl, mud puppy, Ambystoma mexicanum 405 | n01641577 bullfrog, Rana catesbeiana 406 | n01644373 tree frog, tree-frog 407 | n01644900 tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui 408 | n04579432 whistle 409 | n04592741 wing 410 | n03876231 paintbrush 411 | n03868863 oxygen mask 412 | n04251144 snorkel 413 | n03691459 loudspeaker, speaker, speaker unit, loudspeaker system, speaker system 414 | n03759954 microphone, mike 415 | n04152593 screen, CRT screen 416 | n03793489 mouse, computer mouse 417 | n03271574 electric fan, blower 418 | n03843555 oil filter 419 | n04332243 strainer 420 | n04265275 space heater 421 | n04330267 stove 422 | n03467068 guillotine 423 | n02794156 barometer 424 | n04118776 rule, ruler 425 | n03841143 odometer, hodometer, mileometer, milometer 426 | n04141975 scale, weighing machine 427 | n02708093 analog clock 428 | n03196217 digital clock 429 | n04548280 wall clock 430 | n03544143 hourglass 431 | n04355338 sundial 432 | n03891332 parking meter 433 | 
n04328186 stopwatch, stop watch 434 | n03197337 digital watch 435 | n04317175 stethoscope 436 | n04376876 syringe 437 | n03706229 magnetic compass 438 | n02841315 binoculars, field glasses, opera glasses 439 | n04009552 projector 440 | n04356056 sunglasses, dark glasses, shades 441 | n03692522 loupe, jeweler's loupe 442 | n04044716 radio telescope, radio reflector 443 | n02879718 bow 444 | n02950826 cannon 445 | n02749479 assault rifle, assault gun 446 | n04090263 rifle 447 | n04008634 projectile, missile 448 | n03085013 computer keyboard, keypad 449 | n04505470 typewriter keyboard 450 | n03126707 crane 451 | n03666591 lighter, light, igniter, ignitor 452 | n02666196 abacus 453 | n02977058 cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM 454 | n04238763 slide rule, slipstick 455 | n03180011 desktop computer 456 | n03485407 hand-held computer, hand-held microcomputer 457 | n03832673 notebook, notebook computer 458 | n03874599 padlock 459 | n03496892 harvester, reaper 460 | n04428191 thresher, thrasher, threshing machine 461 | n04004767 printer 462 | n04243546 slot, one-armed bandit 463 | n04525305 vending machine 464 | n04179913 sewing machine 465 | n03602883 joystick 466 | n04372370 switch, electric switch, electrical switch 467 | n03532672 hook, claw 468 | n02974003 car wheel 469 | n03874293 paddlewheel, paddle wheel 470 | n03944341 pinwheel 471 | n03992509 potter's wheel 472 | n03425413 gas pump, gasoline pump, petrol pump, island dispenser 473 | n02966193 carousel, carrousel, merry-go-round, roundabout, whirligig 474 | n04371774 swing 475 | n04067472 reel 476 | n04040759 radiator 477 | n04019541 puck, hockey puck 478 | n03492542 hard disc, hard disk, fixed disk 479 | n04355933 sunglass 480 | n03929660 pick, plectrum, plectron 481 | n02965783 car mirror 482 | n04258138 solar dish, solar collector, solar furnace 483 | n04074963 remote control, remote 484 | n03208938 disk brake, disc brake 485 | n02910353 buckle 486 | n03476684 hair slide 487 | n03627232 knot 488 | n03075370 combination lock 489 | n06359193 web site, website, internet site, site 490 | n03804744 nail 491 | n04127249 safety pin 492 | n04153751 screw 493 | n03803284 muzzle 494 | n04162706 seat belt, seatbelt 495 | n04228054 ski 496 | n02948072 candle, taper, wax light 497 | n03590841 jack-o'-lantern 498 | n04286575 spotlight, spot 499 | n04456115 torch 500 | n03814639 neck brace 501 | n03933933 pier 502 | n04485082 tripod 503 | n03733131 maypole 504 | n03483316 hand blower, blow dryer, blow drier, hair dryer, hair drier 505 | n03794056 mousetrap 506 | n04275548 spider web, spider's web 507 | n01768244 trilobite 508 | n01770081 harvestman, daddy longlegs, Phalangium opilio 509 | n01770393 scorpion 510 | n01773157 black and gold garden spider, Argiope aurantia 511 | n01773549 barn spider, Araneus cavaticus 512 | n01773797 garden spider, Aranea diademata 513 | n01774384 black widow, Latrodectus mactans 514 | n01774750 tarantula 515 | n01775062 wolf spider, hunting spider 516 | n01776313 tick 517 | n01784675 centipede 518 | n01990800 isopod 519 | n01978287 Dungeness crab, Cancer magister 520 | n01978455 rock crab, Cancer irroratus 521 | n01980166 fiddler crab 522 | n01981276 king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica 523 | n01983481 American lobster, Northern lobster, Maine lobster, Homarus americanus 524 | n01984695 spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish 525 | n01985128 crayfish, 
crawfish, crawdad, crawdaddy 526 | n01986214 hermit crab 527 | n02165105 tiger beetle 528 | n02165456 ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle 529 | n02167151 ground beetle, carabid beetle 530 | n02168699 long-horned beetle, longicorn, longicorn beetle 531 | n02169497 leaf beetle, chrysomelid 532 | n02172182 dung beetle 533 | n02174001 rhinoceros beetle 534 | n02177972 weevil 535 | n02190166 fly 536 | n02206856 bee 537 | n02226429 grasshopper, hopper 538 | n02229544 cricket 539 | n02231487 walking stick, walkingstick, stick insect 540 | n02233338 cockroach, roach 541 | n02236044 mantis, mantid 542 | n02256656 cicada, cicala 543 | n02259212 leafhopper 544 | n02264363 lacewing, lacewing fly 545 | n02268443 dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk 546 | n02268853 damselfly 547 | n02276258 admiral 548 | n02277742 ringlet, ringlet butterfly 549 | n02279972 monarch, monarch butterfly, milkweed butterfly, Danaus plexippus 550 | n02280649 cabbage butterfly 551 | n02281406 sulphur butterfly, sulfur butterfly 552 | n02281787 lycaenid, lycaenid butterfly 553 | n01910747 jellyfish 554 | n01914609 sea anemone, anemone 555 | n01917289 brain coral 556 | n01924916 flatworm, platyhelminth 557 | n01930112 nematode, nematode worm, roundworm 558 | n01943899 conch 559 | n01944390 snail 560 | n01945685 slug 561 | n01950731 sea slug, nudibranch 562 | n01955084 chiton, coat-of-mail shell, sea cradle, polyplacophore 563 | n02319095 sea urchin 564 | n02321529 sea cucumber, holothurian 565 | n03584829 iron, smoothing iron 566 | n03297495 espresso maker 567 | n03761084 microwave, microwave oven 568 | n03259280 Dutch oven 569 | n04111531 rotisserie 570 | n04442312 toaster 571 | n04542943 waffle iron 572 | n04517823 vacuum, vacuum cleaner 573 | n03207941 dishwasher, dish washer, dishwashing machine 574 | n04070727 refrigerator, icebox 575 | n04554684 washer, automatic washer, washing machine 576 | n03133878 Crock Pot 577 | n03400231 frying pan, frypan, skillet 578 | n04596742 wok 579 | n02939185 caldron, cauldron 580 | n03063689 coffeepot 581 | n04398044 teapot 582 | n04270147 spatula 583 | n02699494 altar 584 | n04486054 triumphal arch 585 | n03899768 patio, terrace 586 | n04311004 steel arch bridge 587 | n04366367 suspension bridge 588 | n04532670 viaduct 589 | n02793495 barn 590 | n03457902 greenhouse, nursery, glasshouse 591 | n03877845 palace 592 | n03781244 monastery 593 | n03661043 library 594 | n02727426 apiary, bee house 595 | n02859443 boathouse 596 | n03028079 church, church building 597 | n03788195 mosque 598 | n04346328 stupa, tope 599 | n03956157 planetarium 600 | n04081281 restaurant, eating house, eating place, eatery 601 | n03032252 cinema, movie theater, movie theatre, movie house, picture palace 602 | n03529860 home theater, home theatre 603 | n03697007 lumbermill, sawmill 604 | n03065424 coil, spiral, volute, whorl, helix 605 | n03837869 obelisk 606 | n04458633 totem pole 607 | n02980441 castle 608 | n04005630 prison, prison house 609 | n03461385 grocery store, grocery, food market, market 610 | n02776631 bakery, bakeshop, bakehouse 611 | n02791270 barbershop 612 | n02871525 bookshop, bookstore, bookstall 613 | n02927161 butcher shop, meat market 614 | n03089624 confectionery, confectionary, candy store 615 | n04200800 shoe shop, shoe-shop, shoe store 616 | n04443257 tobacco shop, tobacconist shop, tobacconist 617 | n04462240 toyshop 618 | n03388043 fountain 619 | n03042490 cliff dwelling 620 | 
n04613696 yurt 621 | n03216828 dock, dockage, docking facility 622 | n02892201 brass, memorial tablet, plaque 623 | n03743016 megalith, megalithic structure 624 | n02788148 bannister, banister, balustrade, balusters, handrail 625 | n02894605 breakwater, groin, groyne, mole, bulwark, seawall, jetty 626 | n03160309 dam, dike, dyke 627 | n03000134 chainlink fence 628 | n03930313 picket fence, paling 629 | n04604644 worm fence, snake fence, snake-rail fence, Virginia fence 630 | n01755581 diamondback, diamondback rattlesnake, Crotalus adamanteus 631 | n03459775 grille, radiator grille 632 | n04239074 sliding door 633 | n04501370 turnstile 634 | n03792972 mountain tent 635 | n04149813 scoreboard 636 | n03530642 honeycomb 637 | n03961711 plate rack 638 | n03903868 pedestal, plinth, footstall 639 | n02814860 beacon, lighthouse, beacon light, pharos 640 | n01665541 leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea 641 | n07711569 mashed potato 642 | n07720875 bell pepper 643 | n07714571 head cabbage 644 | n07714990 broccoli 645 | n07715103 cauliflower 646 | n07716358 zucchini, courgette 647 | n07716906 spaghetti squash 648 | n07717410 acorn squash 649 | n07717556 butternut squash 650 | n07718472 cucumber, cuke 651 | n07718747 artichoke, globe artichoke 652 | n07730033 cardoon 653 | n07734744 mushroom 654 | n04209239 shower curtain 655 | n03594734 jean, blue jean, denim 656 | n02971356 carton 657 | n03485794 handkerchief, hankie, hanky, hankey 658 | n04133789 sandal 659 | n02747177 ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin 660 | n04125021 safe 661 | n07579787 plate 662 | n03814906 necklace 663 | n03134739 croquet ball 664 | n03404251 fur coat 665 | n04423845 thimble 666 | n03877472 pajama, pyjama, pj's, jammies 667 | n04120489 running shoe 668 | n03838899 oboe, hautboy, hautbois 669 | n03062245 cocktail shaker 670 | n03014705 chest 671 | n03717622 manhole cover 672 | n03777754 modem 673 | n04493381 tub, vat 674 | n04476259 tray 675 | n02777292 balance beam, beam 676 | n07693725 bagel, beigel 677 | n04536866 violin, fiddle 678 | n03998194 prayer rug, prayer mat 679 | n03617480 kimono 680 | n07590611 hot pot, hotpot 681 | n04579145 whiskey jug 682 | n03623198 knee pad 683 | n07248320 book jacket, dust cover, dust jacket, dust wrapper 684 | n04277352 spindle 685 | n04229816 ski mask 686 | n02823428 beer bottle 687 | n03127747 crash helmet 688 | n02877765 bottlecap 689 | n04435653 tile roof 690 | n03724870 mask 691 | n03710637 maillot 692 | n03920288 Petri dish 693 | n03379051 football helmet 694 | n02807133 bathing cap, swimming cap 695 | n04399382 teddy, teddy bear 696 | n03527444 holster 697 | n03983396 pop bottle, soda bottle 698 | n03924679 photocopier 699 | n04532106 vestment 700 | n06785654 crossword puzzle, crossword 701 | n03445777 golf ball 702 | n07613480 trifle 703 | n04350905 suit, suit of clothes 704 | n04562935 water tower 705 | n03325584 feather boa, boa 706 | n03045698 cloak 707 | n07892512 red wine 708 | n03250847 drumstick 709 | n04192698 shield, buckler 710 | n03026506 Christmas stocking 711 | n03534580 hoopskirt, crinoline 712 | n07565083 menu 713 | n04296562 stage 714 | n02869837 bonnet, poke bonnet 715 | n07871810 meat loaf, meatloaf 716 | n02799071 baseball 717 | n03314780 face powder 718 | n04141327 scabbard 719 | n04357314 sunscreen, sunblock, sun blocker 720 | n02823750 beer glass 721 | n13052670 hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa 722 | n07583066 guacamole 
723 | n04599235 wool, woolen, woollen 724 | n07802026 hay 725 | n02883205 bow tie, bow-tie, bowtie 726 | n03709823 mailbag, postbag 727 | n04560804 water jug 728 | n02909870 bucket, pail 729 | n03207743 dishrag, dishcloth 730 | n04263257 soup bowl 731 | n07932039 eggnog 732 | n03786901 mortar 733 | n04479046 trench coat 734 | n03873416 paddle, boat paddle 735 | n02999410 chain 736 | n04367480 swab, swob, mop 737 | n03775546 mixing bowl 738 | n07875152 potpie 739 | n04591713 wine bottle 740 | n04201297 shoji 741 | n02916936 bulletproof vest 742 | n03240683 drilling platform, offshore rig 743 | n02840245 binder, ring-binder 744 | n02963159 cardigan 745 | n04370456 sweatshirt 746 | n03991062 pot, flowerpot 747 | n02843684 birdhouse 748 | n03599486 jinrikisha, ricksha, rickshaw 749 | n03482405 hamper 750 | n03942813 ping-pong ball 751 | n03908618 pencil box, pencil case 752 | n07584110 consomme 753 | n02730930 apron 754 | n04023962 punching bag, punch bag, punching ball, punchball 755 | n02769748 backpack, back pack, knapsack, packsack, rucksack, haversack 756 | n10148035 groom, bridegroom 757 | n02817516 bearskin, busby, shako 758 | n03908714 pencil sharpener 759 | n02906734 broom 760 | n02667093 abaya 761 | n03787032 mortarboard 762 | n03980874 poncho 763 | n03141823 crutch 764 | n03976467 Polaroid camera, Polaroid Land camera 765 | n04264628 space bar 766 | n07930864 cup 767 | n04039381 racket, racquet 768 | n06874185 traffic light, traffic signal, stoplight 769 | n04033901 quill, quill pen 770 | n04041544 radio, wireless 771 | n02128757 snow leopard, ounce, Panthera uncia 772 | n07860988 dough 773 | n03146219 cuirass 774 | n03763968 military uniform 775 | n03676483 lipstick, lip rouge 776 | n04209133 shower cap 777 | n03782006 monitor 778 | n03857828 oscilloscope, scope, cathode-ray oscilloscope, CRO 779 | n03775071 mitten 780 | n02892767 brassiere, bra, bandeau 781 | n07684084 French loaf 782 | n04522168 vase 783 | n03764736 milk can 784 | n04118538 rugby ball 785 | n03887697 paper towel 786 | n13044778 earthstar 787 | n03291819 envelope 788 | n03770439 miniskirt, mini 789 | n03124170 cowboy hat, ten-gallon hat 790 | n04487081 trolleybus, trolley coach, trackless trolley 791 | n03916031 perfume, essence 792 | n02808440 bathtub, bathing tub, bath, tub 793 | n07697537 hotdog, hot dog, red hot 794 | n12985857 coral fungus 795 | n02917067 bullet train, bullet 796 | n03938244 pillow 797 | n15075141 toilet tissue, toilet paper, bathroom tissue 798 | n02978881 cassette 799 | n02966687 carpenter's kit, tool kit 800 | n03633091 ladle 801 | n13040303 stinkhorn, carrion fungus 802 | n03690938 lotion 803 | n03476991 hair spray 804 | n02669723 academic gown, academic robe, judge's robe 805 | n03220513 dome 806 | n03127925 crate 807 | n04584207 wig 808 | n07880968 burrito 809 | n03937543 pill bottle 810 | n03000247 chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour 811 | n04418357 theater curtain, theatre curtain 812 | n04590129 window shade 813 | n02795169 barrel, cask 814 | n04553703 washbasin, handbasin, washbowl, lavabo, wash-hand basin 815 | n02783161 ballpoint, ballpoint pen, ballpen, Biro 816 | n02802426 basketball 817 | n02808304 bath towel 818 | n03124043 cowboy boot 819 | n03450230 gown 820 | n04589890 window screen 821 | n12998815 agaric 822 | n02113799 standard poodle 823 | n02992529 cellular telephone, cellular phone, cellphone, cell, mobile phone 824 | n03825788 nipple 825 | n02790996 barbell 826 | n03710193 mailbox, letter box 827 | n03630383 lab coat, 
laboratory coat 828 | n03347037 fire screen, fireguard 829 | n03769881 minibus 830 | n03871628 packet 831 | n02132136 brown bear, bruin, Ursus arctos 832 | n03976657 pole 833 | n03535780 horizontal bar, high bar 834 | n04259630 sombrero 835 | n03929855 pickelhaube 836 | n04049303 rain barrel 837 | n04548362 wallet, billfold, notecase, pocketbook 838 | n02979186 cassette player 839 | n06596364 comic book 840 | n03935335 piggy bank, penny bank 841 | n06794110 street sign 842 | n02825657 bell cote, bell cot 843 | n03388183 fountain pen 844 | n04591157 Windsor tie 845 | n04540053 volleyball 846 | n03866082 overskirt 847 | n04136333 sarong 848 | n04026417 purse 849 | n02865351 bolo tie, bolo, bola tie, bola 850 | n02834397 bib 851 | n03888257 parachute, chute 852 | n04235860 sleeping bag 853 | n04404412 television, television system 854 | n04371430 swimming trunks, bathing trunks 855 | n03733805 measuring cup 856 | n07920052 espresso 857 | n07873807 pizza, pizza pie 858 | n02895154 breastplate, aegis, egis 859 | n04204238 shopping basket 860 | n04597913 wooden spoon 861 | n04131690 saltshaker, salt shaker 862 | n07836838 chocolate sauce, chocolate syrup 863 | n09835506 ballplayer, baseball player 864 | n03443371 goblet 865 | n13037406 gyromitra 866 | n04336792 stretcher 867 | n04557648 water bottle 868 | n02445715 skunk, polecat, wood pussy 869 | n04254120 soap dispenser 870 | n03595614 jersey, T-shirt, tee shirt 871 | n04146614 school bus 872 | n03598930 jigsaw puzzle 873 | n03958227 plastic bag 874 | n04069434 reflex camera 875 | n03188531 diaper, nappy, napkin 876 | n02786058 Band Aid 877 | n07615774 ice lolly, lolly, lollipop, popsicle 878 | n04525038 velvet 879 | n04409515 tennis ball 880 | n03424325 gasmask, respirator, gas helmet 881 | n03223299 doormat, welcome mat 882 | n03680355 Loafer 883 | n07614500 ice cream, icecream 884 | n07695742 pretzel 885 | n04033995 quilt, comforter, comfort, puff 886 | n03710721 maillot, tank suit 887 | n04392985 tape player 888 | n03047690 clog, geta, patten, sabot 889 | n03584254 iPod 890 | n13054560 bolete 891 | n02138441 meerkat, mierkat 892 | n10565667 scuba diver 893 | n03950228 pitcher, ewer 894 | n03729826 matchstick 895 | n02837789 bikini, two-piece 896 | n04254777 sock 897 | n02988304 CD player 898 | n03657121 lens cap, lens cover 899 | n04417672 thatch, thatched roof 900 | n04523525 vault 901 | n02815834 beaker 902 | n09229709 bubble 903 | n07697313 cheeseburger 904 | n03888605 parallel bars, bars 905 | n03355925 flagpole, flagstaff 906 | n03063599 coffee mug 907 | n04116512 rubber eraser, rubber, pencil eraser 908 | n04325704 stole 909 | n07831146 carbonara 910 | n03255030 dumbbell 911 | n02110185 Siberian husky 912 | n02102040 English springer, English springer spaniel 913 | n02110063 malamute, malemute, Alaskan malamute 914 | n02089867 Walker hound, Walker foxhound 915 | n02102177 Welsh springer spaniel 916 | n02091134 whippet 917 | n02092339 Weimaraner 918 | n02098105 soft-coated wheaten terrier 919 | n02096437 Dandie Dinmont, Dandie Dinmont terrier 920 | n02105641 Old English sheepdog, bobtail 921 | n02091635 otterhound, otter hound 922 | n02088466 bloodhound, sleuthhound 923 | n02096051 Airedale, Airedale terrier 924 | n02097130 giant schnauzer 925 | n02089078 black-and-tan coonhound 926 | n02086910 papillon 927 | n02113978 Mexican hairless 928 | n02113186 Cardigan, Cardigan Welsh corgi 929 | n02105162 malinois 930 | n02098413 Lhasa, Lhasa apso 931 | n02091467 Norwegian elkhound, elkhound 932 | n02106550 Rottweiler 933 | n02091831 Saluki, 
gazelle hound 934 | n02104365 schipperke 935 | n02112706 Brabancon griffon 936 | n02098286 West Highland white terrier 937 | n02095889 Sealyham terrier, Sealyham 938 | n02090721 Irish wolfhound 939 | n02108000 EntleBucher 940 | n02108915 French bulldog 941 | n02107683 Bernese mountain dog 942 | n02085936 Maltese dog, Maltese terrier, Maltese 943 | n02094114 Norfolk terrier 944 | n02087046 toy terrier 945 | n02096177 cairn, cairn terrier 946 | n02105056 groenendael 947 | n02101556 clumber, clumber spaniel 948 | n02088094 Afghan hound, Afghan 949 | n02085782 Japanese spaniel 950 | n02090622 borzoi, Russian wolfhound 951 | n02113624 toy poodle 952 | n02093859 Kerry blue terrier 953 | n02097298 Scotch terrier, Scottish terrier, Scottie 954 | n02096585 Boston bull, Boston terrier 955 | n02107574 Greater Swiss Mountain dog 956 | n02107908 Appenzeller 957 | n02086240 Shih-Tzu 958 | n02102973 Irish water spaniel 959 | n02112018 Pomeranian 960 | n02093647 Bedlington terrier 961 | n02097047 miniature schnauzer 962 | n02106030 collie 963 | n02093991 Irish terrier 964 | n02110627 affenpinscher, monkey pinscher, monkey dog 965 | n02097658 silky terrier, Sydney silky 966 | n02088364 beagle 967 | n02111129 Leonberg 968 | n02100236 German short-haired pointer 969 | n02115913 dhole, Cuon alpinus 970 | n02099849 Chesapeake Bay retriever 971 | n02108422 bull mastiff 972 | n02104029 kuvasz 973 | n02110958 pug, pug-dog 974 | n02099429 curly-coated retriever 975 | n02094258 Norwich terrier 976 | n02112350 keeshond 977 | n02095570 Lakeland terrier 978 | n02097209 standard schnauzer 979 | n02097474 Tibetan terrier, chrysanthemum dog 980 | n02095314 wire-haired fox terrier 981 | n02088238 basset, basset hound 982 | n02112137 chow, chow chow 983 | n02093428 American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier 984 | n02105855 Shetland sheepdog, Shetland sheep dog, Shetland 985 | n02111500 Great Pyrenees 986 | n02085620 Chihuahua 987 | n02099712 Labrador retriever 988 | n02111889 Samoyed, Samoyede 989 | n02088632 bluetick 990 | n02105412 kelpie 991 | n02107312 miniature pinscher 992 | n02091032 Italian greyhound 993 | n02102318 cocker spaniel, English cocker spaniel, cocker 994 | n02102480 Sussex spaniel 995 | n02113023 Pembroke, Pembroke Welsh corgi 996 | n02086646 Blenheim spaniel 997 | n02091244 Ibizan hound, Ibizan Podenco 998 | n02089973 English foxhound 999 | n02105251 briard 1000 | n02093754 Border terrier 1001 | -------------------------------------------------------------------------------- /imagenet_labels/ILSVRC2012_mapping.txt: -------------------------------------------------------------------------------- 1 | 1 n02119789 2 | 2 n02100735 3 | 3 n02110185 4 | 4 n02096294 5 | 5 n02102040 6 | 6 n02066245 7 | 7 n02509815 8 | 8 n02124075 9 | 9 n02417914 10 | 10 n02123394 11 | 11 n02125311 12 | 12 n02423022 13 | 13 n02346627 14 | 14 n02077923 15 | 15 n02110063 16 | 16 n02447366 17 | 17 n02109047 18 | 18 n02089867 19 | 19 n02102177 20 | 20 n02091134 21 | 21 n02092002 22 | 22 n02071294 23 | 23 n02442845 24 | 24 n02504458 25 | 25 n02092339 26 | 26 n02098105 27 | 27 n02096437 28 | 28 n02114712 29 | 29 n02105641 30 | 30 n02128925 31 | 31 n02091635 32 | 32 n02088466 33 | 33 n02096051 34 | 34 n02117135 35 | 35 n02138441 36 | 36 n02097130 37 | 37 n02493509 38 | 38 n02457408 39 | 39 n02389026 40 | 40 n02443484 41 | 41 n02110341 42 | 42 n02089078 43 | 43 n02086910 44 | 44 n02445715 45 | 45 n02093256 46 | 46 n02113978 47 | 47 n02106382 48 | 48 n02441942 49 | 49 n02113712 50 | 
50 n02113186 51 | 51 n02105162 52 | 52 n02415577 53 | 53 n02356798 54 | 54 n02488702 55 | 55 n02123159 56 | 56 n02098413 57 | 57 n02422699 58 | 58 n02114855 59 | 59 n02094433 60 | 60 n02111277 61 | 61 n02132136 62 | 62 n02119022 63 | 63 n02091467 64 | 64 n02106550 65 | 65 n02422106 66 | 66 n02091831 67 | 67 n02120505 68 | 68 n02104365 69 | 69 n02086079 70 | 70 n02112706 71 | 71 n02098286 72 | 72 n02095889 73 | 73 n02484975 74 | 74 n02137549 75 | 75 n02500267 76 | 76 n02129604 77 | 77 n02090721 78 | 78 n02396427 79 | 79 n02108000 80 | 80 n02391049 81 | 81 n02412080 82 | 82 n02108915 83 | 83 n02480495 84 | 84 n02110806 85 | 85 n02128385 86 | 86 n02107683 87 | 87 n02085936 88 | 88 n02094114 89 | 89 n02087046 90 | 90 n02100583 91 | 91 n02096177 92 | 92 n02494079 93 | 93 n02105056 94 | 94 n02101556 95 | 95 n02123597 96 | 96 n02481823 97 | 97 n02105505 98 | 98 n02088094 99 | 99 n02085782 100 | 100 n02489166 101 | 101 n02364673 102 | 102 n02114548 103 | 103 n02134084 104 | 104 n02480855 105 | 105 n02090622 106 | 106 n02113624 107 | 107 n02093859 108 | 108 n02403003 109 | 109 n02097298 110 | 110 n02108551 111 | 111 n02493793 112 | 112 n02107142 113 | 113 n02096585 114 | 114 n02107574 115 | 115 n02107908 116 | 116 n02086240 117 | 117 n02102973 118 | 118 n02112018 119 | 119 n02093647 120 | 120 n02397096 121 | 121 n02437312 122 | 122 n02483708 123 | 123 n02097047 124 | 124 n02106030 125 | 125 n02099601 126 | 126 n02093991 127 | 127 n02110627 128 | 128 n02106166 129 | 129 n02326432 130 | 130 n02108089 131 | 131 n02097658 132 | 132 n02088364 133 | 133 n02111129 134 | 134 n02100236 135 | 135 n02486261 136 | 136 n02115913 137 | 137 n02486410 138 | 138 n02487347 139 | 139 n02099849 140 | 140 n02108422 141 | 141 n02104029 142 | 142 n02492035 143 | 143 n02110958 144 | 144 n02099429 145 | 145 n02094258 146 | 146 n02099267 147 | 147 n02395406 148 | 148 n02112350 149 | 149 n02109961 150 | 150 n02101388 151 | 151 n02113799 152 | 152 n02095570 153 | 153 n02128757 154 | 154 n02101006 155 | 155 n02115641 156 | 156 n02097209 157 | 157 n02342885 158 | 158 n02097474 159 | 159 n02120079 160 | 160 n02095314 161 | 161 n02088238 162 | 162 n02408429 163 | 163 n02133161 164 | 164 n02328150 165 | 165 n02410509 166 | 166 n02492660 167 | 167 n02398521 168 | 168 n02112137 169 | 169 n02510455 170 | 170 n02093428 171 | 171 n02105855 172 | 172 n02111500 173 | 173 n02085620 174 | 174 n02123045 175 | 175 n02490219 176 | 176 n02099712 177 | 177 n02109525 178 | 178 n02454379 179 | 179 n02111889 180 | 180 n02088632 181 | 181 n02090379 182 | 182 n02443114 183 | 183 n02361337 184 | 184 n02105412 185 | 185 n02483362 186 | 186 n02437616 187 | 187 n02107312 188 | 188 n02325366 189 | 189 n02091032 190 | 190 n02129165 191 | 191 n02102318 192 | 192 n02100877 193 | 193 n02074367 194 | 194 n02504013 195 | 195 n02363005 196 | 196 n02102480 197 | 197 n02113023 198 | 198 n02086646 199 | 199 n02497673 200 | 200 n02087394 201 | 201 n02127052 202 | 202 n02116738 203 | 203 n02488291 204 | 204 n02091244 205 | 205 n02114367 206 | 206 n02130308 207 | 207 n02089973 208 | 208 n02105251 209 | 209 n02134418 210 | 210 n02093754 211 | 211 n02106662 212 | 212 n02444819 213 | 213 n01882714 214 | 214 n01871265 215 | 215 n01872401 216 | 216 n01877812 217 | 217 n01873310 218 | 218 n01883070 219 | 219 n04086273 220 | 220 n04507155 221 | 221 n04147183 222 | 222 n04254680 223 | 223 n02672831 224 | 224 n02219486 225 | 225 n02317335 226 | 226 n01968897 227 | 227 n03452741 228 | 228 n03642806 229 | 229 n07745940 230 | 230 n02690373 231 | 231 n04552348 232 | 232 n02692877 
233 | 233 n02782093 234 | 234 n04266014 235 | 235 n03344393 236 | 236 n03447447 237 | 237 n04273569 238 | 238 n03662601 239 | 239 n02951358 240 | 240 n04612504 241 | 241 n02981792 242 | 242 n04483307 243 | 243 n03095699 244 | 244 n03673027 245 | 245 n03947888 246 | 246 n02687172 247 | 247 n04347754 248 | 248 n04606251 249 | 249 n03478589 250 | 250 n04389033 251 | 251 n03773504 252 | 252 n02860847 253 | 253 n03218198 254 | 254 n02835271 255 | 255 n03792782 256 | 256 n03393912 257 | 257 n03895866 258 | 258 n02797295 259 | 259 n04204347 260 | 260 n03791053 261 | 261 n03384352 262 | 262 n03272562 263 | 263 n04310018 264 | 264 n02704792 265 | 265 n02701002 266 | 266 n02814533 267 | 267 n02930766 268 | 268 n03100240 269 | 269 n03594945 270 | 270 n03670208 271 | 271 n03770679 272 | 272 n03777568 273 | 273 n04037443 274 | 274 n04285008 275 | 275 n03444034 276 | 276 n03445924 277 | 277 n03785016 278 | 278 n04252225 279 | 279 n03345487 280 | 280 n03417042 281 | 281 n03930630 282 | 282 n04461696 283 | 283 n04467665 284 | 284 n03796401 285 | 285 n03977966 286 | 286 n04065272 287 | 287 n04335435 288 | 288 n04252077 289 | 289 n04465501 290 | 290 n03776460 291 | 291 n04482393 292 | 292 n04509417 293 | 293 n03538406 294 | 294 n03599486 295 | 295 n03868242 296 | 296 n02804414 297 | 297 n03125729 298 | 298 n03131574 299 | 299 n03388549 300 | 300 n02870880 301 | 301 n03018349 302 | 302 n03742115 303 | 303 n03016953 304 | 304 n04380533 305 | 305 n03337140 306 | 306 n03891251 307 | 307 n02791124 308 | 308 n04429376 309 | 309 n03376595 310 | 310 n04099969 311 | 311 n04344873 312 | 312 n04447861 313 | 313 n03179701 314 | 314 n03982430 315 | 315 n03201208 316 | 316 n03290653 317 | 317 n04550184 318 | 318 n07742313 319 | 319 n07747607 320 | 320 n07749582 321 | 321 n07753113 322 | 322 n07753275 323 | 323 n07753592 324 | 324 n07754684 325 | 325 n07760859 326 | 326 n07768694 327 | 327 n12267677 328 | 328 n12620546 329 | 329 n13133613 330 | 330 n11879895 331 | 331 n12144580 332 | 332 n12768682 333 | 333 n03854065 334 | 334 n04515003 335 | 335 n03017168 336 | 336 n03249569 337 | 337 n03447721 338 | 338 n03720891 339 | 339 n03721384 340 | 340 n04311174 341 | 341 n02787622 342 | 342 n02992211 343 | 343 n04536866 344 | 344 n03495258 345 | 345 n02676566 346 | 346 n03272010 347 | 347 n03110669 348 | 348 n03394916 349 | 349 n04487394 350 | 350 n03494278 351 | 351 n03840681 352 | 352 n03884397 353 | 353 n02804610 354 | 354 n03838899 355 | 355 n04141076 356 | 356 n03372029 357 | 357 n11939491 358 | 358 n12057211 359 | 359 n09246464 360 | 360 n09468604 361 | 361 n09193705 362 | 362 n09472597 363 | 363 n09399592 364 | 364 n09421951 365 | 365 n09256479 366 | 366 n09332890 367 | 367 n09428293 368 | 368 n09288635 369 | 369 n03498962 370 | 370 n03041632 371 | 371 n03658185 372 | 372 n03954731 373 | 373 n03995372 374 | 374 n03649909 375 | 375 n03481172 376 | 376 n03109150 377 | 377 n02951585 378 | 378 n03970156 379 | 379 n04154565 380 | 380 n04208210 381 | 381 n03967562 382 | 382 n03000684 383 | 383 n01514668 384 | 384 n01514859 385 | 385 n01518878 386 | 386 n01530575 387 | 387 n01531178 388 | 388 n01532829 389 | 389 n01534433 390 | 390 n01537544 391 | 391 n01558993 392 | 392 n01560419 393 | 393 n01580077 394 | 394 n01582220 395 | 395 n01592084 396 | 396 n01601694 397 | 397 n01608432 398 | 398 n01614925 399 | 399 n01616318 400 | 400 n01622779 401 | 401 n01795545 402 | 402 n01796340 403 | 403 n01797886 404 | 404 n01798484 405 | 405 n01806143 406 | 406 n01806567 407 | 407 n01807496 408 | 408 n01817953 409 | 409 n01818515 410 | 410 
n01819313 411 | 411 n01820546
[… mapping entries 412–1859, one "<class index> <WordNet synset ID>" pair per line, continuing in the file's semantically grouped order …]
1860 | 1860 n13809207
1861 |
--------------------------------------------------------------------------------
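The listing above is the tail of the repository's class-index → WordNet-synset mapping (the tree lists imagenet_labels/ILSVRC2012_mapping.txt). For reference, here is a minimal loader sketch; it is not a file from this repo, and the filename and the whitespace-separated "<class index> <synset ID>" line format are assumptions read off the dump above:

# load_mapping.py -- illustrative sketch only, not part of the repo.
# Assumes each mapping line looks like "411 n01820546", whitespace-separated.

def load_mapping(path):
    """Return {class index (int) -> WordNet synset ID (str)}."""
    mapping = {}
    with open(path) as f:
        for line in f:
            parts = line.split()
            if len(parts) == 2:  # skip blank / trailing lines
                idx, synset = parts
                mapping[int(idx)] = synset
    return mapping

if __name__ == "__main__":
    m = load_mapping("ILSVRC2012_mapping.txt")  # assumed path
    print(m[411])  # -> "n01820546" per the listing above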