├── LICENSE ├── lstm_encoder.py ├── bbox.pyx ├── train_nce_distill_model.py ├── README.md ├── eval_model.py ├── dataset_utils.py ├── nce_distill_model.py └── oiv2_classes.py /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Jing Huang 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /lstm_encoder.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | 3 | class Encoder: 4 | """LSTM Encoder.""" 5 | 6 | def __init__(self, rnn_hidden_size, num_rnn_steps): 7 | self.rnn_hidden_size = rnn_hidden_size 8 | self.num_rnn_steps = num_rnn_steps 9 | self.rnn = tf.nn.rnn_cell.LSTMCell(self.rnn_hidden_size) 10 | 11 | def encode(self, phrases, lstm_mask): 12 | # phrases 16kb * 10 * 300 13 | # lstm_mask 16kb * 10 14 | batch_size = tf.shape(phrases)[0] 15 | with tf.variable_scope('encoder') as scope: 16 | output_embed = tf.zeros([batch_size, self.rnn_hidden_size]) # 16kb * 300 17 | state = self.rnn.zero_state(batch_size, tf.float32) 18 | with tf.variable_scope('rnn') as scope_rnn: 19 | for step in range(self.num_rnn_steps): 20 | if step > 0: 21 | scope_rnn.reuse_variables() 22 | hidden, state = self.rnn(phrases[:, step, :], state, scope=scope_rnn) # 16kb * 300 23 | # Max pool all the steps/tokens. 24 | # If mask is True for ith example in the batch, update the max hidden value. 25 | if step == 0: 26 | output_embed = output_embed + hidden 27 | else: 28 | output_embed = tf.where(lstm_mask[:, step], tf.maximum(output_embed, hidden), output_embed) 29 | return output_embed 30 | -------------------------------------------------------------------------------- /bbox.pyx: -------------------------------------------------------------------------------- 1 | # -------------------------------------------------------- 2 | # Fast R-CNN 3 | # Copyright (c) 2015 Microsoft 4 | # Licensed under The MIT License [see LICENSE for details] 5 | # Written by Sergey Karayev 6 | # -------------------------------------------------------- 7 | 8 | cimport cython 9 | import numpy as np 10 | cimport numpy as np 11 | 12 | DTYPE = np.float32 13 | ctypedef np.float32_t DTYPE_t 14 | 15 | def bbox_overlaps( 16 | np.ndarray[DTYPE_t, ndim=2] boxes, 17 | np.ndarray[DTYPE_t, ndim=2] query_boxes): 18 | """ 19 | Parameters 20 | ---------- 21 | boxes: (N, 4) ndarray of float 22 | query_boxes: (K, 4) ndarray of float 23 | Returns 24 | ------- 25 | overlaps: (N, K) ndarray of overlap between boxes and query_boxes 26 | """ 27 | cdef unsigned int N = boxes.shape[0] 28 | cdef unsigned int K = query_boxes.shape[0] 29 | cdef np.ndarray[DTYPE_t, ndim=2] overlaps = np.zeros((N, K), dtype=DTYPE) 30 | cdef DTYPE_t iw, ih, box_area 31 | cdef DTYPE_t ua 32 | cdef unsigned int k, n 33 | for k in range(K): 34 | box_area = ( 35 | (query_boxes[k, 2] - query_boxes[k, 0] + 0.001) * 36 | (query_boxes[k, 3] - query_boxes[k, 1] + 0.001) 37 | ) 38 | for n in range(N): 39 | iw = ( 40 | min(boxes[n, 2], query_boxes[k, 2]) - 41 | max(boxes[n, 0], query_boxes[k, 0]) + 0.001 42 | ) 43 | if iw > 0: 44 | ih = ( 45 | min(boxes[n, 3], query_boxes[k, 3]) - 46 | max(boxes[n, 1], query_boxes[k, 1]) + 0.001 47 | ) 48 | if ih > 0: 49 | ua = float( 50 | (boxes[n, 2] - boxes[n, 0] + 0.001) * 51 | (boxes[n, 3] - boxes[n, 1] + 0.001) + 52 | box_area - iw * ih 53 | ) 54 | overlaps[n, k] = iw * ih / ua 55 | return overlaps 56 | -------------------------------------------------------------------------------- /train_nce_distill_model.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import sys 3 | 4 | import numpy as np 5 | import tensorflow as tf 6 | # Disable deprecation warnings. 7 | tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR) 8 | 9 | from nce_distill_model import main 10 | 11 | 12 | FLAGS = None 13 | 14 | 15 | if __name__ == '__main__': 16 | np.random.seed(0) 17 | tf.set_random_seed(0) 18 | 19 | parser = argparse.ArgumentParser() 20 | # Dataset and checkpoints. 21 | parser.add_argument('--region_feat_path', type=str, 22 | help='Path to the region feature hdf5 file.') 23 | parser.add_argument('--phrase_feat_path', type=str, 24 | help='Path to the phrase feature hdf5 file.') 25 | parser.add_argument('--glove_path', type=str, 26 | help='Path to the glove embedding hdf5 file.') 27 | parser.add_argument('--phrase_to_label_json', type=str, 28 | help='Path to the phrase to label mapping JSON file.') 29 | parser.add_argument('--save_dir', type=str, 30 | default='checkpoints/nce-distill-model', 31 | help='Directory for saving checkpoints.') 32 | parser.add_argument('--restore_path', type=str, 33 | help='Path to the restoring checkpoint MetaGraph file.') 34 | # Training parameters. 35 | parser.add_argument('--batch_size', type=int, default=32, help='Batch size for training.') 36 | parser.add_argument('--sample_size', type=int, default=1, help='Number of positive pair to sample. Must be 1.') 37 | parser.add_argument('--max_num_epoch', type=int, default=6, help='Max number of epochs to train.') 38 | parser.add_argument('--num_neg_sample', type=int, default=16, help='Number of negative example to sample.') 39 | parser.add_argument('--num_region_proposals', type=int, default=30, 40 | help='Number of region proposals generated by detector.Default to 30 for VG features.') 41 | parser.add_argument('--init_learning_rate', type=float, default=1E-4, help='Initial learning rate.') 42 | parser.add_argument('--decay_steps', type=int, default=0) 43 | parser.add_argument('--decay_rate', type=float, default=1) 44 | FLAGS, unparsed = parser.parse_known_args() 45 | main(FLAGS) 46 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Improving Weakly Supervised Visual Grounding by Contrastive Knowledge Distillation 2 | 3 | This repository is the official implementation of CVPR 2021 paper: [Improving Weakly Supervised Visual Grounding by Contrastive Knowledge Distillation](https://arxiv.org/pdf/2007.01951.pdf). 4 | 5 | 6 | ## Requirements 7 | 8 | * Tensorflow-1-15 9 | 10 | ## Training 11 | 12 | To train the NCE model(s) in the paper, run this command: 13 | 14 | ```train 15 | python train_nce_distill_model.py \ 16 | --region_feat_path=region_features.hdf5 \ 17 | --phrase_feat_path=phrase_features.hdf5 \ 18 | --glove_path=glove.hdf5 19 | ``` 20 | 21 | To train the NCE+Distill model(s) in the paper, run this command: 22 | 23 | ```train 24 | python train_nce_distill_model.py \ 25 | --region_feat_path=region_features.hdf5 \ 26 | --phrase_feat_path=phrase_features.hdf5 \ 27 | --glove_path=glove.hdf5 \ 28 | --phrase_to_label_json=phrase_to_label.json 29 | ``` 30 | 31 | ## Evaluation 32 | 33 | To evaluate the model on Flickr30K, run: 34 | 35 | ```eval 36 | python eval_model.py \ 37 | --region_feat_path=region_features_test.hdf5 \ 38 | --phrase_feat_path=phrase_features_test.hdf5 \ 39 | --glove_path=glove.hdf5 \ 40 | --restore_path=checkpoint.meta 41 | ``` 42 | 43 | 44 | ## Pre-trained Models 45 | 46 | You can download pretrained models using `Res101 VG` features here: 47 | 48 | - [NCE+Distill](https://drive.google.com/drive/folders/1q8MCAdNOXaEHAIQBqw4dcd402xdZUqsU) 49 | - [NCE](https://drive.google.com/drive/folders/1VOuhMGeCGhfSpbKixCcnzX06MztEeMGA) 50 | 51 | You can also find the features on Flickr30K test split [here](https://drive.google.com/drive/folders/1pIF6K4Rs_0HJeAeN4q281SOBbqwnMuVv). 52 | 53 | The pretrained models achieve the following performance on Flickr30K test split: 54 | 55 | | Model Name | R@1 | R@5 | R@10 | 56 | |----------- | ---- | ---- | ---- | 57 | | NCE+Distill | 0.5310 | 0.7394 | 0.7875 | 58 | | NCE | 0.5135 | 0.7338 | 0.7833 | 59 | 60 | 61 | ## Citation 62 | 63 | If you use our implementation in your research or wish to refer to the results published in our paper, please use the following BibTeX entry. 64 | 65 | ``` 66 | @InProceedings{Wang_2021_CVPR, 67 | author = {Wang, Liwei and Huang, Jing and Li, Yin and Xu, Kun and Yang, Zhengyuan and Yu, Dong}, 68 | title = {Improving Weakly Supervised Visual Grounding by Contrastive Knowledge Distillation}, 69 | booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, 70 | month = {June}, 71 | year = {2021}, 72 | pages = {14090-14100} 73 | } 74 | 75 | ``` 76 | -------------------------------------------------------------------------------- /eval_model.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import os 3 | import sys 4 | 5 | import numpy as np 6 | import tensorflow as tf 7 | # Disable deprecation warnings. 8 | tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR) 9 | 10 | from dataset_utils import DatasetLoader 11 | from nce_distill_model import get_input_variables, get_batch, lstm_cpc_model 12 | 13 | FLAGS = None 14 | 15 | 16 | def eval_main(_): 17 | # Load data. 18 | data_loader = DatasetLoader(FLAGS.region_feat_path, FLAGS.phrase_feat_path, FLAGS.glove_path, split='eval') 19 | 20 | input_variables = get_input_variables(data_loader) 21 | for var_name in input_variables: 22 | input_variables[var_name] = tf.placeholder(*(input_variables[var_name])) 23 | input_variables['train_phase'] = tf.placeholder(tf.bool) 24 | FLAGS.num_region_proposals = data_loader.num_rp 25 | FLAGS.batch_size = 1 26 | input_variables['args'] = FLAGS 27 | # Override to suport variable #phrase per image. 28 | NUM_T = 10 29 | DIM_T = 300 30 | DIM_R = data_loader.dim_r 31 | NUM_R = data_loader.num_rp 32 | input_variables['region_feats'] = tf.placeholder(tf.float32, [None, DIM_R]) 33 | input_variables['token_feats'] = tf.placeholder(tf.float32, [None, NUM_T, DIM_T]) 34 | input_variables['lstm_mask'] = tf.placeholder(tf.bool) 35 | # Not used in eval. 36 | input_variables['distill_labels'] = tf.placeholder(tf.float32) 37 | input_variables['alignment_mask'] = tf.placeholder(tf.bool) 38 | input_variables['alignment_gt'] = tf.placeholder(tf.float32) 39 | 40 | # Setup testing operation. 41 | NUM_R = data_loader.num_rp 42 | _, (_, _, (value_embed, phrase_embed)) = lstm_cpc_model(**input_variables) 43 | value_embed = tf.reshape(value_embed, [NUM_R, 512]) 44 | phrase_embed = tf.reshape(phrase_embed, [-1, 512]) 45 | # Transpose to make it consistent with the similarity models. 46 | similarity = tf.matmul(value_embed, 47 | tf.transpose(phrase_embed)) # 200 * 16 48 | 49 | with tf.Session() as sess: 50 | # Restore latest checkpoint or the given MetaGraph. 51 | if FLAGS.restore_path.endswith('.meta'): 52 | ckpt_path = FLAGS.restore_path.replace('.meta', '') 53 | else: 54 | ckpt_path = tf.train.latest_checkpoint(FLAGS.restore_path) 55 | 56 | print('Restoring checkpoint', ckpt_path) 57 | tensors = tf.global_variables() 58 | saver = tf.train.Saver(tensors) 59 | saver.restore(sess, ckpt_path) 60 | print('Done') 61 | 62 | # For testing and validation,vthere will be multiple image-sentence batch, 63 | # but the result should be independent of batch size. 64 | NUM_PAIRS = len(data_loader.example_inds) 65 | print('Evaluating %d pairs' % NUM_PAIRS) 66 | assert(FLAGS.batch_size == 1) 67 | ks = [1, 5, 10] 68 | correct_count, total_count = np.zeros(len(ks), dtype=np.int32), 0 69 | 70 | for i in range(NUM_PAIRS): 71 | input_values = get_batch(data_loader, i) 72 | input_values['train_phase'] = False 73 | feed_dict = {input_variables[name] : input_values[name] 74 | for name in input_variables if name in input_values} 75 | [similarity_val] = sess.run([similarity], feed_dict = feed_dict) # 200 * 16 76 | sorted_region_index = np.argsort(similarity_val.T, axis=-1) # 16 * 200 77 | query_index = sorted_region_index[:, -ks[-1]:] # 16 * 10 78 | num_phrase = int(np.sum(input_values['gt_boxes'][0, :, -1] > 0)) 79 | if num_phrase == 0: 80 | continue 81 | total_count += num_phrase 82 | is_correct = np.array([input_values['alignment_gt'][0, query_index[k, :], k] 83 | for k in range(num_phrase)]) 84 | correct_count += np.array([np.sum(np.max(is_correct[:, -k:], axis=-1)) for k in ks]) 85 | for i in range(len(ks)): 86 | print('Recall@%d %f (%d/%d)' % (ks[i], correct_count[i] * 1.0 / total_count, correct_count[i], total_count)) 87 | 88 | 89 | if __name__ == '__main__': 90 | np.random.seed(0) 91 | tf.set_random_seed(0) 92 | 93 | parser = argparse.ArgumentParser() 94 | # Dataset and checkpoints. 95 | parser.add_argument('--region_feat_path', type=str, 96 | help='Path to the region feature hdf5 file.') 97 | parser.add_argument('--phrase_feat_path', type=str, 98 | help='Path to the phrase feature hdf5 file.') 99 | parser.add_argument('--glove_path', type=str, 100 | help='Path to the glove embedding hdf5 file.') 101 | parser.add_argument('--restore_path', type=str, 102 | help='Directory for restoring the newest checkpoint or \ 103 | path to a restoring checkpoint MetaGraph file.') 104 | # Training parameters. 105 | parser.add_argument('--batch_size', type=int, default=1, help='Batch size for evaluation.') 106 | FLAGS, unparsed = parser.parse_known_args() 107 | tf.app.run(main=eval_main, argv=[sys.argv[0]] + unparsed) 108 | -------------------------------------------------------------------------------- /dataset_utils.py: -------------------------------------------------------------------------------- 1 | import h5py 2 | import numpy as np 3 | np.set_printoptions(threshold=np.inf) 4 | import pyximport; pyximport.install(setup_args={"include_dirs":np.get_include()}) 5 | 6 | from bbox import bbox_overlaps 7 | 8 | 9 | _PASCAL_CLASSES = ['airplane', 'bicycle', 'bird', 'boat', 10 | 'bottle', 'bus', 'car', 'cat', 'chair', 11 | 'cow', 'table', 'dog', 'horse', 12 | 'motorbike', 'person', 'plant', 13 | 'sheep', 'sofa', 'train', 'monitor'] 14 | 15 | _COCO_CLASSES = [ 16 | 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 17 | 'boat', 'traffic light', 'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 18 | 'dog', 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', 19 | 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', 'skis', 'snowboard', 20 | 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard', 'tennis racket', 'bottle', 21 | 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', 22 | 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 23 | 'chair', 'couch', 'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 24 | 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave', 'oven', 'toaster', 'sink', 25 | 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear', 'hair drier', 'toothbrush'] 26 | 27 | 28 | CLASSES = _COCO_CLASSES 29 | 30 | _REGION_MEAN_AND_STD = { 31 | 'orig': (0.0, 1.0), 32 | # Flickr. 33 | 'flickr-vgg-coco' : (0.1454045, 0.068037**0.5), 34 | 'flickr-vgg-oidv2' : (0.1454045, 0.068037**0.5), 35 | 'flickr-res-coco-200' : (0.2981893, 0.2709116**0.5), 36 | 'flickr-vgg-pascal' : (0.1408328, 0.067438**0.5), 37 | 'flickr-res-coco' : (0.203574, 0.1843775**0.5), 38 | 'flickr-res-oidv2' : (0.203574, 0.1843775**0.5), 39 | 'flickr-irv2-oidv2' : (0.410128, 0.581294), 40 | # ReferIt. 41 | 'referit-vgg-pascal' : (0.15869775, 0.07185528**0.5), 42 | 'referit-irv2-oidv2' : (0.410128, 0.581294), # Based on flickr30k. 43 | 'referit-vgg-coco' : (0.158470, 0.069272**0.5), 44 | 'referit-res-coco' : (0.1984466, 0.20089**0.5), 45 | 'referit-res-oidv2' : (0.1984466, 0.20089**0.5), 46 | 'referit-vgg-oidv2' : (0.1454045, 0.068037**0.5), 47 | } 48 | 49 | class DatasetLoader: 50 | """ Loads batched region and phrase features.""" 51 | def __init__(self, region_feat_path, phrase_feat_path, glove_path, split='train'): 52 | self.f_feats = {} 53 | print('Loading region features from', region_feat_path) 54 | self.f_feats['region'] = h5py.File(region_feat_path, 'r') 55 | print('Found %d region features.' % len(self.f_feats['region'])) 56 | print('Loading phrase features from', phrase_feat_path) 57 | self.f_feats['phrase'] = h5py.File(phrase_feat_path, 'r') 58 | print('Found %d phrase features.' % len(self.f_feats['phrase'])) 59 | self.glove = h5py.File(glove_path, 'r') 60 | print('Found %d tokens in glove.' % len(self.glove)) 61 | self.split = split 62 | self.sent_im_ratio = 5 if 'flickr' in phrase_feat_path and 'merge' not in phrase_feat_path else 1 63 | print('Using %d sentence per image.' % self.sent_im_ratio) 64 | self.max_phrase_per_sentence = 16 65 | self.max_token_per_phrase = 10 66 | self.im_names = list(self.f_feats['region'].keys()) 67 | print('Found %d images.' % len(self.im_names)) 68 | self.example_inds = np.arange(len(self.im_names) * self.sent_im_ratio) 69 | # Shapes for variables returned. 70 | self.num_rp, self.full_dim = self.f_feats['region'][self.im_names[0]].shape 71 | print('Using %d region proposals per image.' % self.num_rp) 72 | arch = 'res' 73 | self.dim_r = 2048 74 | if 'vgg' in region_feat_path: 75 | self.dim_r = 4096 76 | arch = 'vgg' 77 | elif 'irv2' in region_feat_path: 78 | self.dim_r = 1536 79 | arch = 'irv2' 80 | det_ds = 'coco' 81 | if 'pascal' in region_feat_path: 82 | det_ds = 'pascal' 83 | elif 'oidv2' in region_feat_path: 84 | det_ds = 'oidv2' 85 | ds = 'flickr' if 'flickr' in region_feat_path else 'referit' 86 | print('Using region feature dimension %d.' % self.dim_r) 87 | self._region_mean, self._region_std = _REGION_MEAN_AND_STD['%s-%s-%s' % (ds, arch, det_ds)] 88 | print('Using region feature mean %f, std %f' % (self._region_mean, self._region_std)) 89 | self.sample_k = 5 90 | self.shape = {'region_feats' : (1 * self.num_rp, self.dim_r), 91 | 'token_feats' : (1 * 1 * 16, 10, 300), 92 | 'alignment_mask' : (1 * 1, 16), 93 | 'lstm_mask' : (1 * 1 * 16, 10), 94 | 'alignment_gt' : (1 * 1, self.num_rp, 16)} 95 | 96 | def get_input_variables(self, batch_size=1, sample_size=None): 97 | input_vars = {} 98 | for name in self.shape: 99 | var_shape = np.array(self.shape[name]) 100 | var_shape[0] = var_shape[0] * batch_size 101 | if name == 'image_sample_weights': 102 | var_shape = (None, None) 103 | dtype = np.float32 if not name.endswith('mask') else np.bool 104 | input_vars[name] = (dtype, var_shape) 105 | return input_vars 106 | 107 | def get_glove_embeds(self, sentences): 108 | S, P, T = sentences.shape 109 | # sentences s * 16 * 10 110 | embeds = np.zeros([S, P, T, 300], dtype=np.float32) 111 | lstm_mask = np.zeros([S, P, T], dtype=np.bool) 112 | for i in range(S): 113 | for j in range(P): 114 | if not sentences[i, j, 0]: # If empty string, no more phrase. 115 | break 116 | for k in range(T): 117 | if not sentences[i, j, k]: # If empty string, no more token. 118 | break 119 | if sentences[i, j, k] in self.glove: 120 | embeds[i, j, k, :] = self.glove[sentences[i, j, k]] 121 | lstm_mask[i, j, k] = True 122 | return embeds, lstm_mask 123 | 124 | def shuffle_inds(self): 125 | ''' 126 | shuffle the indices in training (run this once per epoch) 127 | nop for testing and validation 128 | ''' 129 | if self.split == 'train': 130 | np.random.shuffle(self.example_inds) 131 | 132 | def get_region_features(self, im_names): 133 | region_feat_b = np.zeros([len(im_names), self.num_rp, self.full_dim], dtype=np.float32) 134 | for i in range(len(im_names)): 135 | region_feat_raw = self.f_feats['region'][im_names[i]][:self.num_rp, :] 136 | # Add padding if less than NUM_R 137 | region_feat_b[i, :len(region_feat_raw), :] = region_feat_raw 138 | region_feat_b = np.reshape(region_feat_b, [-1, self.full_dim]) # 200n * (2048+x+4) 139 | # Normalize to zero-mean, unit-variance. 140 | normalized_region_feats = (region_feat_b[:, :self.dim_r] - self._region_mean) / self._region_std 141 | region_feat_b = np.c_[normalized_region_feats, region_feat_b[:, self.dim_r:]] 142 | return region_feat_b 143 | 144 | def sample_items(self, sample_inds, sample_size): 145 | '''Return region-phrase features and region phrases from memory module.''' 146 | region_feats_b, phrase_feats_b = [], [] 147 | lstm_mask_b, bbox_gt_b = [], [] 148 | phrase_b = [] 149 | for ind in sample_inds: # ind is an index for sentence 150 | im_ind = ind // self.sent_im_ratio 151 | im_name = self.im_names[im_ind] 152 | # positive sentence sampling 153 | sent_index = [] 154 | if sample_size > 1: 155 | sent_index = np.random.choice( 156 | [i for i in range(self.sent_im_ratio) if i != (ind % self.sent_im_ratio)], 157 | sample_size - 1, replace=False) 158 | sent_index = sorted(np.append(sent_index, ind % self.sent_im_ratio)) 159 | region_feats_b.append(self.get_region_features([im_name])) 160 | phrase = self.f_feats['phrase'][im_name][sent_index, :, :] 161 | num_phrase = phrase.shape[1] 162 | phrase_index = np.arange(num_phrase) 163 | # For referit dataset, the number of phrase per sentence can vary and may exceed 16. 164 | if self.split == 'train' and num_phrase > self.max_phrase_per_sentence: 165 | phrase_index = np.random.choice(num_phrase, self.max_phrase_per_sentence, replace=False) 166 | phrase = phrase[:, phrase_index, :] 167 | phrase = phrase.astype(np.str) # Python3 compatibility. 168 | # For off-diag negative image mask. 169 | phrase_b.append(phrase) 170 | embed, lstm_mask = self.get_glove_embeds(phrase) 171 | phrase_feats_b.append(embed) 172 | lstm_mask_b.append(lstm_mask) 173 | # Select bbox. Index sent and phrase separately as numpy only allows one indexing at a time 174 | bbox = self.f_feats['phrase'][im_name + '_gt'][sent_index][:, phrase_index, :].astype(np.float32) 175 | bbox_gt_b.append(bbox) 176 | region_feats_b = np.concatenate(region_feats_b, axis=0) # 200b * (2048+4) 177 | phrase_feats_b = np.concatenate(phrase_feats_b, axis=0) # kb * 16 * 10 * 300 178 | lstm_mask_b = np.concatenate(lstm_mask_b, axis=0) # kb * 16 * 10 179 | bbox_gt_b = np.concatenate(bbox_gt_b, axis=0) # kb * 16 * 4 180 | phrase_b = np.concatenate(phrase_b, axis=0) # kb * 16 * 10 181 | return (region_feats_b, phrase_feats_b, lstm_mask_b, bbox_gt_b, phrase_b) 182 | 183 | def get_groundtruth(self, gt_boxes, query_boxes, batch_size, sample_size): 184 | """Compute iou and return a mask with iou>0.5 region set to True.""" 185 | iou = np.stack([bbox_overlaps( 186 | gt_boxes[i], 187 | query_boxes[i // sample_size * self.num_rp : i // sample_size * self.num_rp + self.num_rp, :]) 188 | for i in range(batch_size * sample_size)]) # (b*s) * 16 * 200 189 | # In evaluation, count all regions with IoU > 0.5 as positive. 190 | alignment_gt = iou > 0.5 191 | return alignment_gt 192 | 193 | def get_batch(self, batch_index, batch_size, sample_size=1): 194 | input_values = {} 195 | start_ind = batch_index * batch_size 196 | end_ind = start_ind + batch_size 197 | sample_inds = self.example_inds[start_ind : end_ind] 198 | (region_feats, phrase_feats, lstm_mask, gt_boxes, phrases) = \ 199 | self.sample_items(sample_inds, sample_size) 200 | # Split logits and coordinates from region features. 201 | normalized_region_feats = region_feats[:, :self.shape['region_feats'][-1]] 202 | input_values['region_feats'] = normalized_region_feats 203 | input_values['region_logits'] = region_feats[:, self.shape['region_feats'][-1] : -4] # 200b * (20+1) 204 | query_boxes = region_feats[:, -4:] 205 | input_values['token_feats'] = phrase_feats.reshape([-1, 10, 300]) 206 | # Mask for padding tokens. 207 | lstm_mask = lstm_mask.reshape([-1, 10]) 208 | input_values['lstm_mask'] = lstm_mask 209 | # Bbox. 210 | input_values['query_boxes'] = query_boxes 211 | input_values['gt_boxes'] = gt_boxes 212 | input_values['phrases'] = phrases 213 | # Groundtruth is used for computing grounding metrics in training. 214 | alignment_gt = self.get_groundtruth(gt_boxes, query_boxes, batch_size, sample_size) 215 | alignment_gt = alignment_gt.transpose([0, 2, 1]) # b * 200 * 16 216 | input_values['alignment_gt'] = alignment_gt 217 | # Mask for padding phrases. 218 | if self.split == 'train': 219 | input_values['alignment_mask'] = np.any( 220 | lstm_mask.reshape([batch_size, self.max_phrase_per_sentence, 10]), axis=-1) # b * 16 221 | else: 222 | input_values['alignment_mask'] = gt_boxes[:, :, -1] > 0 # b * 16 223 | # For logging. 224 | input_values['image_names'] = np.array([self.im_names[i // self.sent_im_ratio] for i in sample_inds]) 225 | return input_values 226 | 227 | -------------------------------------------------------------------------------- /nce_distill_model.py: -------------------------------------------------------------------------------- 1 | # Compiled with TensorFlow 1.15. 2 | 3 | import argparse 4 | import json 5 | import sys 6 | 7 | import numpy as np 8 | import pyximport; pyximport.install(setup_args={"include_dirs":np.get_include()}) 9 | import tensorflow as tf 10 | 11 | from bbox import bbox_overlaps 12 | from dataset_utils import DatasetLoader 13 | from lstm_encoder import Encoder 14 | 15 | # For distillation only. 16 | # COCO 17 | # from dataset_utils import _COCO_CLASSES as CLASS_NAME 18 | # OI 19 | from oiv2_classes import CLASS_NAME 20 | 21 | 22 | # Constants for distillation. 23 | BG_AND_CLASSES = ['__background__'] + CLASS_NAME 24 | # Enable distillation with phrase to class label mapping computed offline. 25 | PHRASE_TO_CLASS_INDEX = {} 26 | GLOBAL_VARS = {} 27 | 28 | 29 | # Constants for command line flags. 30 | FLAGS = None 31 | 32 | 33 | def feedforward_net(features, layer_out_dims=[1024, 512], scope_in=''): 34 | """ 35 | Encodes features into lower dimensional embeddings. 36 | """ 37 | with tf.variable_scope(scope_in) as scope: 38 | outputs = features 39 | for i in range(len(layer_out_dims) - 1): 40 | outputs = tf.compat.v1.layers.dense( 41 | inputs=outputs, units=layer_out_dims[i], activation=tf.nn.relu, name='fc%d' % i) 42 | outputs = tf.compat.v1.layers.dense( 43 | inputs=outputs, units=layer_out_dims[-1], activation=None, 44 | name='fc%d' % (len(layer_out_dims) - 1)) 45 | return outputs 46 | 47 | 48 | def cpc_loss(logits, labels, weights=None): 49 | cpc_loss = tf.nn.softmax_cross_entropy_with_logits( 50 | labels=tf.stop_gradient(labels), logits=logits) 51 | if weights is not None: 52 | cpc_loss = weights * cpc_loss 53 | cpc_loss = tf.reduce_mean(cpc_loss) 54 | return cpc_loss 55 | 56 | 57 | def lstm_cpc_model(region_feats, token_feats, alignment_mask, lstm_mask, train_phase, 58 | alignment_gt, distill_labels, region_ious, args): 59 | fc_dim = 1024 60 | embed_dim = 512 61 | lstm_dim = 300 62 | B = args.batch_size 63 | NUM_R = args.num_region_proposals 64 | NUM_P = 16 65 | 66 | # Enocde phrase features. 67 | with tf.variable_scope('phrase'): 68 | lstm_encoder = Encoder(lstm_dim, 10) 69 | phrase_feats = lstm_encoder.encode(token_feats, lstm_mask) 70 | # Compute emebeddings. 71 | region_embed = feedforward_net(region_feats, layer_out_dims=[fc_dim, embed_dim], 72 | scope_in='region') 73 | phrase_embed = feedforward_net(phrase_feats, layer_out_dims=[fc_dim, embed_dim], 74 | scope_in='phrase') 75 | # L2 normalize for margin. 76 | region_embed = tf.nn.l2_normalize(region_embed, axis=-1) 77 | phrase_embed = tf.nn.l2_normalize(phrase_embed, axis=-1) 78 | if args.restore_path: 79 | # Return if evaluation. 80 | return (None, None), (None, None, (region_embed, phrase_embed)) 81 | 82 | # Loss. 83 | similarity = tf.matmul(region_embed, tf.transpose(phrase_embed)) # 200b * 16b 84 | phrase_region_similarity = tf.transpose(similarity) # 16b * 200b 85 | # Log index of the selected region for computing training gournding accuracy. 86 | _, region_indices = tf.nn.top_k(tf.reshape( 87 | phrase_region_similarity, [B*NUM_P, B, NUM_R]), k=1) # 16b * b * 1 88 | region_indices = tf.reshape(region_indices, [B*NUM_P, B]) 89 | 90 | # Remove paddings. 91 | alignment_mask = tf.reshape(alignment_mask, [B * NUM_P]) # 16b 92 | phrase_region_similarity = tf.boolean_mask( 93 | phrase_region_similarity, alignment_mask) # P * 200b 94 | phrase_region_similarity = tf.reshape( 95 | phrase_region_similarity, [-1, B, NUM_R]) 96 | # Estimate MI(I, p) as max{MI(r_i, p)}. 97 | phrase_region_similarity = tf.reduce_max( 98 | phrase_region_similarity, axis=-1) # P * b 99 | indices = tf.where(alignment_mask) # P * 1 100 | phrase_region_similarity = tf.scatter_nd( 101 | indices=indices, updates=phrase_region_similarity, 102 | shape=[NUM_P*B, B]) 103 | phrase_region_similarity = tf.reshape( 104 | phrase_region_similarity, [B, NUM_P, B]) 105 | phrase_region_similarity = phrase_region_similarity * 2.0 106 | logits = tf.reduce_sum(phrase_region_similarity, axis=1) # b * b 107 | labels = tf.eye(B) # b * b 108 | loss = cpc_loss(logits, labels) 109 | 110 | if len(PHRASE_TO_CLASS_INDEX) == 0: 111 | return (loss, tf.zeros(1)), (logits, region_indices, (region_embed, phrase_embed)) 112 | 113 | # Distillation. 114 | distill_mask = tf.reduce_any( 115 | ~tf.math.equal(distill_labels, tf.zeros_like(distill_labels)), axis=-1) # 16b 116 | K = 8 117 | _, nn_index = tf.nn.top_k(region_ious, k=K) # b * 200 * K 118 | pos_pair_mask = tf.reshape(tf.tile(tf.eye(B, dtype=tf.bool), [1, NUM_P]), [B * NUM_P, B]) # 16b * b 119 | pos_region_indices = tf.boolean_mask(region_indices, pos_pair_mask) # 16b 120 | pos_region_indices = tf.reshape(pos_region_indices, [B, NUM_P]) # b * 16 121 | nn_index = tf.concat( 122 | [tf.gather(nn_index[i], pos_region_indices[i]) for i in range(B)], axis=0) # 16b * K 123 | row_index = tf.tile(tf.reshape(tf.range(B * NUM_P), [-1, 1]), [1, K]) # 16b * K 124 | nn_index = tf.stack([row_index, nn_index], axis=-1) # 16b * K *2 125 | distill_logits = tf.reshape(tf.transpose(similarity), [B*NUM_P, B, NUM_R]) # 16b * b * 200 126 | distill_logits = tf.boolean_mask(distill_logits, pos_pair_mask) # 16b * 200 127 | distill_logits = tf.gather_nd(distill_logits, nn_index) # 16b * K 128 | distill_labels = tf.gather_nd(distill_labels, nn_index) # 16b * K 129 | distill_logits = tf.boolean_mask(distill_logits, distill_mask) # P * K 130 | distill_labels = tf.boolean_mask(distill_labels, distill_mask) # P * K 131 | 132 | # Normalizes detector class predictions by shifting or scaling. 133 | # Due to the different number of classes, the logits need to be normalized differently. 134 | if len(CLASS_NAME) == 80: # COCO 135 | distill_labels = distill_labels / 2.0 136 | elif len(CLASS_NAME) == 545: # OI 137 | distill_labels = distill_labels - tf.reduce_mean(distill_labels, axis=-1, keepdims=True) 138 | else: 139 | raise NotImplementedError 140 | distill_labels = tf.nn.softmax(distill_labels, axis=-1) 141 | distill_loss = cpc_loss(logits=distill_logits, labels=distill_labels) 142 | # Ramping factor to be tuned based on the distillation features used. 143 | ramping_factor = tf.minimum(0.001 + tf.cast(GLOBAL_VARS['global_step'] // 500, tf.float32), 3.0) 144 | # ramping_factor = tf.minimum(0.001 + tf.cast(GLOBAL_VARS['global_step'] // 200, tf.float32), 3.0) 145 | loss = 1.0 * loss + ramping_factor * distill_loss 146 | return (loss, distill_loss), (logits, region_indices, (region_embed, phrase_embed)) 147 | 148 | 149 | def setup_train_model(region_feats, token_feats, alignment_mask, lstm_mask, train_phase, 150 | alignment_gt, distill_labels, region_ious, args): 151 | (loss, distillation_loss), tensors = lstm_cpc_model( 152 | region_feats, token_feats, alignment_mask, lstm_mask, train_phase, 153 | alignment_gt, distill_labels, region_ious, args) 154 | metrics = [tf.constant(0.0) for _ in range(3)] 155 | # Evaluates training/eval objective. 156 | logits, region_indices, _ = tensors 157 | pos_mask = tf.eye(tf.shape(logits)[0], dtype=tf.bool) 158 | pos_logits = tf.reshape(tf.boolean_mask(logits, pos_mask), 159 | [tf.shape(logits)[0], 1]) # k 160 | neg_logits = tf.reshape(tf.boolean_mask(logits, ~pos_mask), 161 | [tf.shape(logits)[0], -1]) # k * (k-1) 162 | # Precision@1 163 | metrics[0] = tf.reduce_mean( 164 | tf.cast( 165 | pos_logits > tf.reduce_max(neg_logits, axis=-1, keepdims=True), 166 | tf.float32)) 167 | # Avg #(pos > neg). 168 | metrics[1] = tf.reduce_mean(tf.cast(pos_logits > neg_logits, tf.float32)) 169 | # Grounding accuracy. 170 | B = args.batch_size 171 | NUM_P = 16 172 | NUM_R = args.num_region_proposals 173 | pos_pair_mask = tf.reshape(tf.tile(tf.eye(B, dtype=tf.bool), [1, NUM_P]), [B * NUM_P, B]) # 16b * b 174 | pos_region_indices = tf.boolean_mask(region_indices, pos_pair_mask) # 16b 175 | phrase_indices = tf.range(B * NUM_P) # 16b 176 | pos_region_indices = tf.stack([phrase_indices, pos_region_indices], axis=-1) # 16b * 2 177 | alignment_gt = tf.reshape(tf.transpose(alignment_gt, [0, 2, 1]), [B*NUM_P, NUM_R]) # 16b * 200 178 | alignment_mask = tf.reshape(alignment_mask, [B * NUM_P]) # 16b 179 | metrics[2] = tf.reduce_mean( 180 | tf.boolean_mask(tf.gather_nd(alignment_gt, pos_region_indices), alignment_mask)) 181 | return (loss, distillation_loss) + tuple(metrics) 182 | 183 | 184 | # Functions for model training. 185 | 186 | 187 | def softmax(x, axis=-1): 188 | """Compute softmax values for each sets of scores in x.""" 189 | e_x = np.exp(x - np.max(x, axis=axis, keepdims=True)) 190 | return e_x / np.sum(e_x, axis=axis, keepdims=True) 191 | 192 | 193 | def get_input_variables(data_loader): 194 | B = FLAGS.batch_size if FLAGS is not None else 1 195 | DIM_R = data_loader.dim_r 196 | NUM_R = data_loader.num_rp 197 | NUM_P = data_loader.max_phrase_per_sentence 198 | input_variables = data_loader.get_input_variables(batch_size=B) 199 | input_variables['region_ious'] = (tf.float32, (B, NUM_R, NUM_R)) 200 | input_variables['distill_labels'] = (tf.float32, (B*NUM_P , NUM_R)) 201 | return input_variables 202 | 203 | 204 | def get_batch(data_loader, batch_index): 205 | DIM_R = data_loader.dim_r 206 | NUM_R = data_loader.num_rp 207 | B = FLAGS.batch_size if FLAGS is not None else 1 208 | NUM_P = data_loader.max_phrase_per_sentence 209 | C = len(CLASS_NAME) 210 | NUM_T = data_loader.max_token_per_phrase 211 | input_values = data_loader.get_batch(batch_index, B) 212 | region_feats = input_values['region_feats'] # (B * NUM_R) * DIM_R 213 | query_boxes = input_values['query_boxes'].reshape([B, NUM_R, 4]) 214 | 215 | enable_distill = len(PHRASE_TO_CLASS_INDEX) > 0 and data_loader.split == 'train' 216 | distill_labels = np.zeros([B, NUM_P, NUM_R]) 217 | if enable_distill: 218 | region_ious = np.stack([ 219 | bbox_overlaps(query_boxes[i].reshape([-1, 4]), 220 | query_boxes[i].reshape([-1, 4])) for i in range(B)]) # B * NUM_R * NUM_R 221 | input_values['region_ious'] = region_ious 222 | else: 223 | input_values['region_ious'] = np.zeros([B, NUM_R, NUM_R]) 224 | 225 | if enable_distill: 226 | # Compute region logits. 227 | # OID 228 | region_logits_oid = input_values['region_logits'] 229 | # Do NOT use the logits due to numerical stability issue. 230 | region_logits = softmax(region_logits_oid, axis=-1) 231 | # For each phrase, find a class label and copy over logits if exists. 232 | for i in range(B): 233 | for p in range(NUM_P): 234 | phrase = ' '.join(input_values['phrases'][i, p]).strip() 235 | if not phrase or phrase not in PHRASE_TO_CLASS_INDEX: 236 | continue 237 | cls_index = PHRASE_TO_CLASS_INDEX[phrase] 238 | if cls_index >= region_logits.shape[-1]: # For res101-oid combined features 239 | continue 240 | cls_logits = region_logits[i*NUM_R:(i+1)*NUM_R, cls_index] # 200 241 | distill_labels[i, p] = cls_logits 242 | input_values['distill_labels'] = np.reshape(distill_labels, [B*NUM_P, NUM_R]) # 16B * 200 243 | return input_values 244 | 245 | 246 | def main(args): 247 | global FLAGS 248 | FLAGS = args 249 | 250 | # Enable distillation. 251 | if FLAGS.phrase_to_label_json: 252 | phrase_to_label = json.load(open(FLAGS.phrase_to_label_json, 'r')) 253 | PHRASE_TO_CLASS_INDEX.update({ 254 | p : BG_AND_CLASSES.index(phrase_to_label[p]) for p in phrase_to_label}) 255 | print('Enable distillation: #mapped phrases=%d' % len(PHRASE_TO_CLASS_INDEX)) 256 | else: 257 | print('NO distillation.') 258 | 259 | 260 | # Load data. 261 | data_loader = DatasetLoader(FLAGS.region_feat_path, FLAGS.phrase_feat_path, FLAGS.glove_path) 262 | steps_per_epoch = data_loader.example_inds.size // FLAGS.batch_size 263 | num_steps = steps_per_epoch * FLAGS.max_num_epoch + 1 264 | print('#steps: %d' % num_steps, '#steps per epoch: %d' % steps_per_epoch) 265 | print('batch size: %d' % FLAGS.batch_size, 'smaple size: %d' % FLAGS.sample_size) 266 | print('learning rate %.6f' % FLAGS.init_learning_rate) 267 | 268 | # Setup placeholders for input variables. 269 | input_variables = get_input_variables(data_loader) 270 | for var_name in input_variables: 271 | input_variables[var_name] = tf.placeholder(*(input_variables[var_name])) 272 | input_variables['train_phase'] = tf.placeholder(tf.bool) 273 | input_variables['args'] = FLAGS 274 | 275 | # Setup training operation. 276 | global_step = tf.Variable(0, trainable=False) 277 | # GLOBAL_VARS are used in model initialization, hence need to be decleared 278 | # before calling setup_train_model. 279 | GLOBAL_VARS['global_step'] = global_step 280 | losses = setup_train_model(**input_variables) 281 | 282 | # Get model variables. 283 | tensors = tf.global_variables() 284 | print([t.name for t in tensors]) 285 | 286 | # Setup optimizer. 287 | decay_steps = FLAGS.decay_steps if FLAGS.decay_steps > 0 else steps_per_epoch 288 | learning_rate = tf.train.exponential_decay(FLAGS.init_learning_rate, global_step, 289 | decay_steps, FLAGS.decay_rate, staircase=True) 290 | optim = tf.train.AdamOptimizer(learning_rate) 291 | update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS) 292 | with tf.control_dependencies(update_ops): 293 | # ONLY optimize the first loss term. 294 | train_step = optim.minimize(losses[0], global_step=global_step) 295 | 296 | # Setup pretrained model saver. 297 | pretrained_vars = {t.name.replace('pretrained/', '').rstrip(':0') : t for t in tensors 298 | if 'pretrained' in t.name} 299 | pretrained_saver = tf.train.Saver(var_list=pretrained_vars) if pretrained_vars else None 300 | # Setup model saver. 301 | model_vars = [t for t in tensors if 'pretrained' not in t.name] 302 | print('#pretrained_vars=%d, #model_vars=%d' % (len(pretrained_vars), len(model_vars))) 303 | saver = tf.train.Saver(var_list=model_vars, max_to_keep=20) 304 | with tf.Session() as sess: 305 | sess.run(tf.global_variables_initializer()) 306 | if FLAGS.restore_path: 307 | if FLAGS.restore_path.endswith('.meta'): 308 | ckpt_path = FLAGS.restore_path.replace('.meta', '') 309 | else: 310 | ckpt_path = tf.train.latest_checkpoint(FLAGS.restore_path) 311 | print('Restoring checkpoint', ckpt_path) 312 | saver.restore(sess, ckpt_path) 313 | print('Done') 314 | if pretrained_saver and FLAGS.pretrained_model_path: 315 | if FLAGS.pretrained_model_path.endswith('.meta'): 316 | pretrained_ckpt_path = FLAGS.pretrained_model_path.replace('.meta', '') 317 | print('Restoring pretrained checkpoint', pretrained_ckpt_path) 318 | pretrained_saver.restore(sess, pretrained_ckpt_path) 319 | print('Done') 320 | print('#global_variables=', len(tf.global_variables())) 321 | avg_losses = np.zeros(len(losses)) 322 | for i in range(num_steps): 323 | if i % steps_per_epoch == 0: 324 | # shuffle the indices. 325 | data_loader.shuffle_inds() 326 | # Reset to 0. 327 | avg_losses = np.zeros(len(losses)) 328 | input_values = get_batch(data_loader, i % steps_per_epoch) 329 | input_values['train_phase'] = False # False to turn off dropout. 330 | feed_dict = {input_variables[name] : input_values[name] 331 | for name in input_variables if name in input_values} 332 | train_ops = (train_step,) + losses 333 | train_ops_val = sess.run(train_ops, feed_dict = feed_dict) 334 | losses_val = np.array(train_ops_val[1:]) # Exclude the first value which is returned by train_step. 335 | avg_losses = (losses_val + avg_losses * (i % steps_per_epoch)) / (i % steps_per_epoch + 1) 336 | if i % 50 == 0: 337 | print('Epoch: %d Step: %d Loss:' % (i // steps_per_epoch, i) + 338 | ' %.3f' * len(avg_losses) % tuple(avg_losses)) 339 | if (i % 500 == 0 or i % steps_per_epoch == 0) and i > 0: 340 | saver.save(sess, FLAGS.save_dir, global_step = global_step) 341 | print('Saved checkpoint at step %d' % i) 342 | -------------------------------------------------------------------------------- /oiv2_classes.py: -------------------------------------------------------------------------------- 1 | CLASSES_PBTXT = """ 2 | item { 3 | name: "/m/01g317" 4 | id: 1 5 | display_name: "Person" 6 | } 7 | item { 8 | name: "/m/09j2d" 9 | id: 2 10 | display_name: "Clothing" 11 | } 12 | item { 13 | name: "/m/04yx4" 14 | id: 3 15 | display_name: "Man" 16 | } 17 | item { 18 | name: "/m/0dzct" 19 | id: 4 20 | display_name: "Face" 21 | } 22 | item { 23 | name: "/m/07j7r" 24 | id: 5 25 | display_name: "Tree" 26 | } 27 | item { 28 | name: "/m/05s2s" 29 | id: 6 30 | display_name: "Plant" 31 | } 32 | item { 33 | name: "/m/03bt1vf" 34 | id: 7 35 | display_name: "Woman" 36 | } 37 | item { 38 | name: "/m/07yv9" 39 | id: 8 40 | display_name: "Vehicle" 41 | } 42 | item { 43 | name: "/m/0cgh4" 44 | id: 9 45 | display_name: "Building" 46 | } 47 | item { 48 | name: "/m/01prls" 49 | id: 10 50 | display_name: "Land vehicle" 51 | } 52 | item { 53 | name: "/m/09j5n" 54 | id: 11 55 | display_name: "Footwear" 56 | } 57 | item { 58 | name: "/m/05r655" 59 | id: 12 60 | display_name: "Girl" 61 | } 62 | item { 63 | name: "/m/0jbk" 64 | id: 13 65 | display_name: "Animal" 66 | } 67 | item { 68 | name: "/m/0k4j" 69 | id: 14 70 | display_name: "Car" 71 | } 72 | item { 73 | name: "/m/02wbm" 74 | id: 15 75 | display_name: "Food" 76 | } 77 | item { 78 | name: "/m/083wq" 79 | id: 16 80 | display_name: "Wheel" 81 | } 82 | item { 83 | name: "/m/0c9ph5" 84 | id: 17 85 | display_name: "Flower" 86 | } 87 | item { 88 | name: "/m/0c_jw" 89 | id: 18 90 | display_name: "Furniture" 91 | } 92 | item { 93 | name: "/m/0d4v4" 94 | id: 19 95 | display_name: "Window" 96 | } 97 | item { 98 | name: "/m/03jm5" 99 | id: 20 100 | display_name: "House" 101 | } 102 | item { 103 | name: "/m/01bl7v" 104 | id: 21 105 | display_name: "Boy" 106 | } 107 | item { 108 | name: "/m/0463sg" 109 | id: 22 110 | display_name: "Fashion accessory" 111 | } 112 | item { 113 | name: "/m/04bcr3" 114 | id: 23 115 | display_name: "Table" 116 | } 117 | item { 118 | name: "/m/0jyfg" 119 | id: 24 120 | display_name: "Glasses" 121 | } 122 | item { 123 | name: "/m/01xyhv" 124 | id: 25 125 | display_name: "Suit" 126 | } 127 | item { 128 | name: "/m/08dz3q" 129 | id: 26 130 | display_name: "Auto part" 131 | } 132 | item { 133 | name: "/m/015p6" 134 | id: 27 135 | display_name: "Bird" 136 | } 137 | item { 138 | name: "/m/05y5lj" 139 | id: 28 140 | display_name: "Sports equipment" 141 | } 142 | item { 143 | name: "/m/01d40f" 144 | id: 29 145 | display_name: "Dress" 146 | } 147 | item { 148 | name: "/m/0bt9lr" 149 | id: 30 150 | display_name: "Dog" 151 | } 152 | item { 153 | name: "/m/01lrl" 154 | id: 31 155 | display_name: "Carnivore" 156 | } 157 | item { 158 | name: "/m/02p0tk3" 159 | id: 32 160 | display_name: "Human body" 161 | } 162 | item { 163 | name: "/m/0fly7" 164 | id: 33 165 | display_name: "Jeans" 166 | } 167 | item { 168 | name: "/m/04szw" 169 | id: 34 170 | display_name: "Musical instrument" 171 | } 172 | item { 173 | name: "/m/0271t" 174 | id: 35 175 | display_name: "Drink" 176 | } 177 | item { 178 | name: "/m/019jd" 179 | id: 36 180 | display_name: "Boat" 181 | } 182 | item { 183 | name: "/m/03q69" 184 | id: 37 185 | display_name: "Hair" 186 | } 187 | item { 188 | name: "/m/0h9mv" 189 | id: 38 190 | display_name: "Tire" 191 | } 192 | item { 193 | name: "/m/04hgtk" 194 | id: 39 195 | display_name: "Head" 196 | } 197 | item { 198 | name: "/m/01yrx" 199 | id: 40 200 | display_name: "Cat" 201 | } 202 | item { 203 | name: "/m/01rzcn" 204 | id: 41 205 | display_name: "Watercraft" 206 | } 207 | item { 208 | name: "/m/01mzpv" 209 | id: 42 210 | display_name: "Chair" 211 | } 212 | item { 213 | name: "/m/0199g" 214 | id: 43 215 | display_name: "Bike" 216 | } 217 | item { 218 | name: "/m/01fdzj" 219 | id: 44 220 | display_name: "Tower" 221 | } 222 | item { 223 | name: "/m/04rky" 224 | id: 45 225 | display_name: "Mammal" 226 | } 227 | item { 228 | name: "/m/079cl" 229 | id: 46 230 | display_name: "Skyscraper" 231 | } 232 | item { 233 | name: "/m/0dzf4" 234 | id: 47 235 | display_name: "Arm" 236 | } 237 | item { 238 | name: "/m/0138tl" 239 | id: 48 240 | display_name: "Toy" 241 | } 242 | item { 243 | name: "/m/06msq" 244 | id: 49 245 | display_name: "Sculpture" 246 | } 247 | item { 248 | name: "/m/03xxp" 249 | id: 50 250 | display_name: "Invertebrate" 251 | } 252 | item { 253 | name: "/m/0hg7b" 254 | id: 51 255 | display_name: "Microphone" 256 | } 257 | item { 258 | name: "/m/01n5jq" 259 | id: 52 260 | display_name: "Poster" 261 | } 262 | item { 263 | name: "/m/03vt0" 264 | id: 53 265 | display_name: "Insect" 266 | } 267 | item { 268 | name: "/m/0342h" 269 | id: 54 270 | display_name: "Guitar" 271 | } 272 | item { 273 | name: "/m/0k0pj" 274 | id: 55 275 | display_name: "Nose" 276 | } 277 | item { 278 | name: "/m/02dl1y" 279 | id: 56 280 | display_name: "Hat" 281 | } 282 | item { 283 | name: "/m/04brg2" 284 | id: 57 285 | display_name: "Tableware" 286 | } 287 | item { 288 | name: "/m/02dgv" 289 | id: 58 290 | display_name: "Door" 291 | } 292 | item { 293 | name: "/m/01bqk0" 294 | id: 59 295 | display_name: "Bicycle wheel" 296 | } 297 | item { 298 | name: "/m/017ftj" 299 | id: 60 300 | display_name: "Sunglasses" 301 | } 302 | item { 303 | name: "/m/052lwg6" 304 | id: 61 305 | display_name: "Baked goods" 306 | } 307 | item { 308 | name: "/m/014sv8" 309 | id: 62 310 | display_name: "Eye" 311 | } 312 | item { 313 | name: "/m/0270h" 314 | id: 63 315 | display_name: "Dessert" 316 | } 317 | item { 318 | name: "/m/0283dt1" 319 | id: 64 320 | display_name: "Mouth" 321 | } 322 | item { 323 | name: "/m/0k5j" 324 | id: 65 325 | display_name: "Aircraft" 326 | } 327 | item { 328 | name: "/m/0cmf2" 329 | id: 66 330 | display_name: "Airplane" 331 | } 332 | item { 333 | name: "/m/07jdr" 334 | id: 67 335 | display_name: "Train" 336 | } 337 | item { 338 | name: "/m/032b3c" 339 | id: 68 340 | display_name: "Jacket" 341 | } 342 | item { 343 | name: "/m/033rq4" 344 | id: 69 345 | display_name: "Street light" 346 | } 347 | item { 348 | name: "/m/0k65p" 349 | id: 70 350 | display_name: "Hand" 351 | } 352 | item { 353 | name: "/m/01ww8y" 354 | id: 71 355 | display_name: "Snack" 356 | } 357 | item { 358 | name: "/m/0zvk5" 359 | id: 72 360 | display_name: "Helmet" 361 | } 362 | item { 363 | name: "/m/07mhn" 364 | id: 73 365 | display_name: "Trousers" 366 | } 367 | item { 368 | name: "/m/04dr76w" 369 | id: 74 370 | display_name: "Bottle" 371 | } 372 | item { 373 | name: "/m/03fp41" 374 | id: 75 375 | display_name: "Houseplant" 376 | } 377 | item { 378 | name: "/m/03k3r" 379 | id: 76 380 | display_name: "Horse" 381 | } 382 | item { 383 | name: "/m/01y9k5" 384 | id: 77 385 | display_name: "Desk" 386 | } 387 | item { 388 | name: "/m/0cdl1" 389 | id: 78 390 | display_name: "Palm tree" 391 | } 392 | item { 393 | name: "/m/0f4s2w" 394 | id: 79 395 | display_name: "Vegetable" 396 | } 397 | item { 398 | name: "/m/02xwb" 399 | id: 80 400 | display_name: "Fruit" 401 | } 402 | item { 403 | name: "/m/035r7c" 404 | id: 81 405 | display_name: "Leg" 406 | } 407 | item { 408 | name: "/m/0bt_c3" 409 | id: 82 410 | display_name: "Book" 411 | } 412 | item { 413 | name: "/m/01_bhs" 414 | id: 83 415 | display_name: "Fast food" 416 | } 417 | item { 418 | name: "/m/01599" 419 | id: 84 420 | display_name: "Beer" 421 | } 422 | item { 423 | name: "/m/03120" 424 | id: 85 425 | display_name: "Flag" 426 | } 427 | item { 428 | name: "/m/026t6" 429 | id: 86 430 | display_name: "Drum" 431 | } 432 | item { 433 | name: "/m/01bjv" 434 | id: 87 435 | display_name: "Bus" 436 | } 437 | item { 438 | name: "/m/07r04" 439 | id: 88 440 | display_name: "Truck" 441 | } 442 | item { 443 | name: "/m/018xm" 444 | id: 89 445 | display_name: "Ball" 446 | } 447 | item { 448 | name: "/m/01rkbr" 449 | id: 90 450 | display_name: "Tie" 451 | } 452 | item { 453 | name: "/m/0fm3zh" 454 | id: 91 455 | display_name: "Flowerpot" 456 | } 457 | item { 458 | name: "/m/02_n6y" 459 | id: 92 460 | display_name: "Goggles" 461 | } 462 | item { 463 | name: "/m/04_sv" 464 | id: 93 465 | display_name: "Motorcycle" 466 | } 467 | item { 468 | name: "/m/06z37_" 469 | id: 94 470 | display_name: "Picture frame" 471 | } 472 | item { 473 | name: "/m/01bfm9" 474 | id: 95 475 | display_name: "Shorts" 476 | } 477 | item { 478 | name: "/m/0h8mhzd" 479 | id: 96 480 | display_name: "Sports uniform" 481 | } 482 | item { 483 | name: "/m/0d_2m" 484 | id: 97 485 | display_name: "Moths and butterflies" 486 | } 487 | item { 488 | name: "/m/0gjbg72" 489 | id: 98 490 | display_name: "Shelf" 491 | } 492 | item { 493 | name: "/m/01n4qj" 494 | id: 99 495 | display_name: "Shirt" 496 | } 497 | item { 498 | name: "/m/0ch_cf" 499 | id: 100 500 | display_name: "Fish" 501 | } 502 | item { 503 | name: "/m/06m11" 504 | id: 101 505 | display_name: "Rose" 506 | } 507 | item { 508 | name: "/m/01jfm_" 509 | id: 102 510 | display_name: "Licence plate" 511 | } 512 | item { 513 | name: "/m/02crq1" 514 | id: 103 515 | display_name: "Couch" 516 | } 517 | item { 518 | name: "/m/083kb" 519 | id: 104 520 | display_name: "Weapon" 521 | } 522 | item { 523 | name: "/m/01c648" 524 | id: 105 525 | display_name: "Laptop" 526 | } 527 | item { 528 | name: "/m/09tvcd" 529 | id: 106 530 | display_name: "Wine glass" 531 | } 532 | item { 533 | name: "/m/0h2r6" 534 | id: 107 535 | display_name: "Van" 536 | } 537 | item { 538 | name: "/m/081qc" 539 | id: 108 540 | display_name: "Wine" 541 | } 542 | item { 543 | name: "/m/09ddx" 544 | id: 109 545 | display_name: "Duck" 546 | } 547 | item { 548 | name: "/m/03p3bw" 549 | id: 110 550 | display_name: "Bicycle helmet" 551 | } 552 | item { 553 | name: "/m/0cyf8" 554 | id: 111 555 | display_name: "Butterfly" 556 | } 557 | item { 558 | name: "/m/0b_rs" 559 | id: 112 560 | display_name: "Swimming pool" 561 | } 562 | item { 563 | name: "/m/039xj_" 564 | id: 113 565 | display_name: "Ear" 566 | } 567 | item { 568 | name: "/m/021sj1" 569 | id: 114 570 | display_name: "Office" 571 | } 572 | item { 573 | name: "/m/0dv5r" 574 | id: 115 575 | display_name: "Camera" 576 | } 577 | item { 578 | name: "/m/01lynh" 579 | id: 116 580 | display_name: "Stairs" 581 | } 582 | item { 583 | name: "/m/06bt6" 584 | id: 117 585 | display_name: "Reptile" 586 | } 587 | item { 588 | name: "/m/01226z" 589 | id: 118 590 | display_name: "Football" 591 | } 592 | item { 593 | name: "/m/0fszt" 594 | id: 119 595 | display_name: "Cake" 596 | } 597 | item { 598 | name: "/m/050k8" 599 | id: 120 600 | display_name: "Mobile phone" 601 | } 602 | item { 603 | name: "/m/02wbtzl" 604 | id: 121 605 | display_name: "Sun hat" 606 | } 607 | item { 608 | name: "/m/02p5f1q" 609 | id: 122 610 | display_name: "Coffee cup" 611 | } 612 | item { 613 | name: "/m/025nd" 614 | id: 123 615 | display_name: "Christmas tree" 616 | } 617 | item { 618 | name: "/m/02522" 619 | id: 124 620 | display_name: "Computer monitor" 621 | } 622 | item { 623 | name: "/m/09ct_" 624 | id: 125 625 | display_name: "Helicopter" 626 | } 627 | item { 628 | name: "/m/0cvnqh" 629 | id: 126 630 | display_name: "Bench" 631 | } 632 | item { 633 | name: "/m/0d5gx" 634 | id: 127 635 | display_name: "Castle" 636 | } 637 | item { 638 | name: "/m/01xygc" 639 | id: 128 640 | display_name: "Coat" 641 | } 642 | item { 643 | name: "/m/04m6gz" 644 | id: 129 645 | display_name: "Porch" 646 | } 647 | item { 648 | name: "/m/01gkx_" 649 | id: 130 650 | display_name: "Swimwear" 651 | } 652 | item { 653 | name: "/m/01s105" 654 | id: 131 655 | display_name: "Cabinetry" 656 | } 657 | item { 658 | name: "/m/01j61q" 659 | id: 132 660 | display_name: "Tent" 661 | } 662 | item { 663 | name: "/m/0hnnb" 664 | id: 133 665 | display_name: "Umbrella" 666 | } 667 | item { 668 | name: "/m/01j51" 669 | id: 134 670 | display_name: "Balloon" 671 | } 672 | item { 673 | name: "/m/01knjb" 674 | id: 135 675 | display_name: "Billboard" 676 | } 677 | item { 678 | name: "/m/03__z0" 679 | id: 136 680 | display_name: "Bookcase" 681 | } 682 | item { 683 | name: "/m/01m2v" 684 | id: 137 685 | display_name: "Computer keyboard" 686 | } 687 | item { 688 | name: "/m/0167gd" 689 | id: 138 690 | display_name: "Doll" 691 | } 692 | item { 693 | name: "/m/0284d" 694 | id: 139 695 | display_name: "Dairy" 696 | } 697 | item { 698 | name: "/m/03ssj5" 699 | id: 140 700 | display_name: "Bed" 701 | } 702 | item { 703 | name: "/m/02fq_6" 704 | id: 141 705 | display_name: "Fedora" 706 | } 707 | item { 708 | name: "/m/06nwz" 709 | id: 142 710 | display_name: "Seafood" 711 | } 712 | item { 713 | name: "/m/0220r2" 714 | id: 143 715 | display_name: "Fountain" 716 | } 717 | item { 718 | name: "/m/01mqdt" 719 | id: 144 720 | display_name: "Traffic sign" 721 | } 722 | item { 723 | name: "/m/0268lbt" 724 | id: 145 725 | display_name: "Hiking equipment" 726 | } 727 | item { 728 | name: "/m/07c52" 729 | id: 146 730 | display_name: "Television" 731 | } 732 | item { 733 | name: "/m/0grw1" 734 | id: 147 735 | display_name: "Salad" 736 | } 737 | item { 738 | name: "/m/01h3n" 739 | id: 148 740 | display_name: "Bee" 741 | } 742 | item { 743 | name: "/m/078n6m" 744 | id: 149 745 | display_name: "Coffee table" 746 | } 747 | item { 748 | name: "/m/01xq0k1" 749 | id: 150 750 | display_name: "Cattle" 751 | } 752 | item { 753 | name: "/m/0gd2v" 754 | id: 151 755 | display_name: "Marine mammal" 756 | } 757 | item { 758 | name: "/m/0dbvp" 759 | id: 152 760 | display_name: "Goose" 761 | } 762 | item { 763 | name: "/m/03rszm" 764 | id: 153 765 | display_name: "Curtain" 766 | } 767 | item { 768 | name: "/m/0h8n5zk" 769 | id: 154 770 | display_name: "Kitchen & dining room table" 771 | } 772 | item { 773 | name: "/m/019dx1" 774 | id: 155 775 | display_name: "Home appliance" 776 | } 777 | item { 778 | name: "/m/03hl4l9" 779 | id: 156 780 | display_name: "Marine invertebrates" 781 | } 782 | item { 783 | name: "/m/0b3fp9" 784 | id: 157 785 | display_name: "Countertop" 786 | } 787 | item { 788 | name: "/m/02rdsp" 789 | id: 158 790 | display_name: "Office supplies" 791 | } 792 | item { 793 | name: "/m/0hf58v5" 794 | id: 159 795 | display_name: "Luggage and bags" 796 | } 797 | item { 798 | name: "/m/04h7h" 799 | id: 160 800 | display_name: "Lighthouse" 801 | } 802 | item { 803 | name: "/m/024g6" 804 | id: 161 805 | display_name: "Cocktail" 806 | } 807 | item { 808 | name: "/m/0cffdh" 809 | id: 162 810 | display_name: "Maple" 811 | } 812 | item { 813 | name: "/m/03q5c7" 814 | id: 163 815 | display_name: "Saucer" 816 | } 817 | item { 818 | name: "/m/014y4n" 819 | id: 164 820 | display_name: "Paddle" 821 | } 822 | item { 823 | name: "/m/01yx86" 824 | id: 165 825 | display_name: "Bronze sculpture" 826 | } 827 | item { 828 | name: "/m/020jm" 829 | id: 166 830 | display_name: "Beetle" 831 | } 832 | item { 833 | name: "/m/025dyy" 834 | id: 167 835 | display_name: "Box" 836 | } 837 | item { 838 | name: "/m/01llwg" 839 | id: 168 840 | display_name: "Necklace" 841 | } 842 | item { 843 | name: "/m/08pbxl" 844 | id: 169 845 | display_name: "Monkey" 846 | } 847 | item { 848 | name: "/m/02d9qx" 849 | id: 170 850 | display_name: "Whiteboard" 851 | } 852 | item { 853 | name: "/m/02pkr5" 854 | id: 171 855 | display_name: "Plumbing fixture" 856 | } 857 | item { 858 | name: "/m/0h99cwc" 859 | id: 172 860 | display_name: "Kitchen appliance" 861 | } 862 | item { 863 | name: "/m/050gv4" 864 | id: 173 865 | display_name: "Plate" 866 | } 867 | item { 868 | name: "/m/02vqfm" 869 | id: 174 870 | display_name: "Coffee" 871 | } 872 | item { 873 | name: "/m/09kx5" 874 | id: 175 875 | display_name: "Deer" 876 | } 877 | item { 878 | name: "/m/019w40" 879 | id: 176 880 | display_name: "Surfboard" 881 | } 882 | item { 883 | name: "/m/09dzg" 884 | id: 177 885 | display_name: "Turtle" 886 | } 887 | item { 888 | name: "/m/07k1x" 889 | id: 178 890 | display_name: "Tool" 891 | } 892 | item { 893 | name: "/m/080hkjn" 894 | id: 179 895 | display_name: "Handbag" 896 | } 897 | item { 898 | name: "/m/07qxg_" 899 | id: 180 900 | display_name: "Football helmet" 901 | } 902 | item { 903 | name: "/m/0ph39" 904 | id: 181 905 | display_name: "Canoe" 906 | } 907 | item { 908 | name: "/m/018p4k" 909 | id: 182 910 | display_name: "Cart" 911 | } 912 | item { 913 | name: "/m/02h19r" 914 | id: 183 915 | display_name: "Scarf" 916 | } 917 | item { 918 | name: "/m/015h_t" 919 | id: 184 920 | display_name: "Beard" 921 | } 922 | item { 923 | name: "/m/0fqfqc" 924 | id: 185 925 | display_name: "Drawer" 926 | } 927 | item { 928 | name: "/m/025rp__" 929 | id: 186 930 | display_name: "Cowboy hat" 931 | } 932 | item { 933 | name: "/m/01x3z" 934 | id: 187 935 | display_name: "Clock" 936 | } 937 | item { 938 | name: "/m/0crjs" 939 | id: 188 940 | display_name: "Convenience store" 941 | } 942 | item { 943 | name: "/m/0l515" 944 | id: 189 945 | display_name: "Sandwich" 946 | } 947 | item { 948 | name: "/m/015qff" 949 | id: 190 950 | display_name: "Traffic light" 951 | } 952 | item { 953 | name: "/m/09kmb" 954 | id: 191 955 | display_name: "Spider" 956 | } 957 | item { 958 | name: "/m/09728" 959 | id: 192 960 | display_name: "Bread" 961 | } 962 | item { 963 | name: "/m/071qp" 964 | id: 193 965 | display_name: "Squirrel" 966 | } 967 | item { 968 | name: "/m/02s195" 969 | id: 194 970 | display_name: "Vase" 971 | } 972 | item { 973 | name: "/m/06c54" 974 | id: 195 975 | display_name: "Rifle" 976 | } 977 | item { 978 | name: "/m/01xqw" 979 | id: 196 980 | display_name: "Cello" 981 | } 982 | item { 983 | name: "/m/05zsy" 984 | id: 197 985 | display_name: "Pumpkin" 986 | } 987 | item { 988 | name: "/m/0bwd_0j" 989 | id: 198 990 | display_name: "Elephant" 991 | } 992 | item { 993 | name: "/m/04m9y" 994 | id: 199 995 | display_name: "Lizard" 996 | } 997 | item { 998 | name: "/m/052sf" 999 | id: 200 1000 | display_name: "Mushroom" 1001 | } 1002 | item { 1003 | name: "/m/03grzl" 1004 | id: 201 1005 | display_name: "Baseball glove" 1006 | } 1007 | item { 1008 | name: "/m/01z1kdw" 1009 | id: 202 1010 | display_name: "Juice" 1011 | } 1012 | item { 1013 | name: "/m/02wv6h6" 1014 | id: 203 1015 | display_name: "Skirt" 1016 | } 1017 | item { 1018 | name: "/m/016m2d" 1019 | id: 204 1020 | display_name: "Skull" 1021 | } 1022 | item { 1023 | name: "/m/0dtln" 1024 | id: 205 1025 | display_name: "Lamp" 1026 | } 1027 | item { 1028 | name: "/m/057cc" 1029 | id: 206 1030 | display_name: "Musical keyboard" 1031 | } 1032 | item { 1033 | name: "/m/06k2mb" 1034 | id: 207 1035 | display_name: "High heels" 1036 | } 1037 | item { 1038 | name: "/m/0f6wt" 1039 | id: 208 1040 | display_name: "Falcon" 1041 | } 1042 | item { 1043 | name: "/m/0cxn2" 1044 | id: 209 1045 | display_name: "Ice cream" 1046 | } 1047 | item { 1048 | name: "/m/02jvh9" 1049 | id: 210 1050 | display_name: "Mug" 1051 | } 1052 | item { 1053 | name: "/m/0gjkl" 1054 | id: 211 1055 | display_name: "Watch" 1056 | } 1057 | item { 1058 | name: "/m/01b638" 1059 | id: 212 1060 | display_name: "Boot" 1061 | } 1062 | item { 1063 | name: "/m/071p9" 1064 | id: 213 1065 | display_name: "Ski" 1066 | } 1067 | item { 1068 | name: "/m/0pg52" 1069 | id: 214 1070 | display_name: "Taxi" 1071 | } 1072 | item { 1073 | name: "/m/0ftb8" 1074 | id: 215 1075 | display_name: "Sunflower" 1076 | } 1077 | item { 1078 | name: "/m/0hnyx" 1079 | id: 216 1080 | display_name: "Pastry" 1081 | } 1082 | item { 1083 | name: "/m/02jz0l" 1084 | id: 217 1085 | display_name: "Tap" 1086 | } 1087 | item { 1088 | name: "/m/04kkgm" 1089 | id: 218 1090 | display_name: "Bowl" 1091 | } 1092 | item { 1093 | name: "/m/0174n1" 1094 | id: 219 1095 | display_name: "Glove" 1096 | } 1097 | item { 1098 | name: "/m/0gv1x" 1099 | id: 220 1100 | display_name: "Parrot" 1101 | } 1102 | item { 1103 | name: "/m/09csl" 1104 | id: 221 1105 | display_name: "Eagle" 1106 | } 1107 | item { 1108 | name: "/m/02jnhm" 1109 | id: 222 1110 | display_name: "Tin can" 1111 | } 1112 | item { 1113 | name: "/m/099ssp" 1114 | id: 223 1115 | display_name: "Platter" 1116 | } 1117 | item { 1118 | name: "/m/03nfch" 1119 | id: 224 1120 | display_name: "Sandal" 1121 | } 1122 | item { 1123 | name: "/m/07y_7" 1124 | id: 225 1125 | display_name: "Violin" 1126 | } 1127 | item { 1128 | name: "/m/05z6w" 1129 | id: 226 1130 | display_name: "Penguin" 1131 | } 1132 | item { 1133 | name: "/m/03m3pdh" 1134 | id: 227 1135 | display_name: "Sofa bed" 1136 | } 1137 | item { 1138 | name: "/m/09ld4" 1139 | id: 228 1140 | display_name: "Frog" 1141 | } 1142 | item { 1143 | name: "/m/09b5t" 1144 | id: 229 1145 | display_name: "Chicken" 1146 | } 1147 | item { 1148 | name: "/m/054xkw" 1149 | id: 230 1150 | display_name: "Lifejacket" 1151 | } 1152 | item { 1153 | name: "/m/0130jx" 1154 | id: 231 1155 | display_name: "Sink" 1156 | } 1157 | item { 1158 | name: "/m/07fbm7" 1159 | id: 232 1160 | display_name: "Strawberry" 1161 | } 1162 | item { 1163 | name: "/m/01dws" 1164 | id: 233 1165 | display_name: "Bear" 1166 | } 1167 | item { 1168 | name: "/m/01tcjp" 1169 | id: 234 1170 | display_name: "Muffin" 1171 | } 1172 | item { 1173 | name: "/m/0dftk" 1174 | id: 235 1175 | display_name: "Swan" 1176 | } 1177 | item { 1178 | name: "/m/0c06p" 1179 | id: 236 1180 | display_name: "Candle" 1181 | } 1182 | item { 1183 | name: "/m/034c16" 1184 | id: 237 1185 | display_name: "Pillow" 1186 | } 1187 | item { 1188 | name: "/m/09d5_" 1189 | id: 238 1190 | display_name: "Owl" 1191 | } 1192 | item { 1193 | name: "/m/03hlz0c" 1194 | id: 239 1195 | display_name: "Kitchen utensil" 1196 | } 1197 | item { 1198 | name: "/m/0ft9s" 1199 | id: 240 1200 | display_name: "Dragonfly" 1201 | } 1202 | item { 1203 | name: "/m/011k07" 1204 | id: 241 1205 | display_name: "Tortoise" 1206 | } 1207 | item { 1208 | name: "/m/054_l" 1209 | id: 242 1210 | display_name: "Mirror" 1211 | } 1212 | item { 1213 | name: "/m/0jqgx" 1214 | id: 243 1215 | display_name: "Lily" 1216 | } 1217 | item { 1218 | name: "/m/0663v" 1219 | id: 244 1220 | display_name: "Pizza" 1221 | } 1222 | item { 1223 | name: "/m/0242l" 1224 | id: 245 1225 | display_name: "Coin" 1226 | } 1227 | item { 1228 | name: "/m/014trl" 1229 | id: 246 1230 | display_name: "Cosmetics" 1231 | } 1232 | item { 1233 | name: "/m/05r5c" 1234 | id: 247 1235 | display_name: "Piano" 1236 | } 1237 | item { 1238 | name: "/m/07j87" 1239 | id: 248 1240 | display_name: "Tomato" 1241 | } 1242 | item { 1243 | name: "/m/05kyg_" 1244 | id: 249 1245 | display_name: "Chest of drawers" 1246 | } 1247 | item { 1248 | name: "/m/0kmg4" 1249 | id: 250 1250 | display_name: "Teddy bear" 1251 | } 1252 | item { 1253 | name: "/m/07cmd" 1254 | id: 251 1255 | display_name: "Tank" 1256 | } 1257 | item { 1258 | name: "/m/0dv77" 1259 | id: 252 1260 | display_name: "Squash" 1261 | } 1262 | item { 1263 | name: "/m/096mb" 1264 | id: 253 1265 | display_name: "Lion" 1266 | } 1267 | item { 1268 | name: "/m/01gmv2" 1269 | id: 254 1270 | display_name: "Brassiere" 1271 | } 1272 | item { 1273 | name: "/m/07bgp" 1274 | id: 255 1275 | display_name: "Sheep" 1276 | } 1277 | item { 1278 | name: "/m/0cmx8" 1279 | id: 256 1280 | display_name: "Spoon" 1281 | } 1282 | item { 1283 | name: "/m/029tx" 1284 | id: 257 1285 | display_name: "Dinosaur" 1286 | } 1287 | item { 1288 | name: "/m/073bxn" 1289 | id: 258 1290 | display_name: "Tripod" 1291 | } 1292 | item { 1293 | name: "/m/0bh9flk" 1294 | id: 259 1295 | display_name: "Tablet computer" 1296 | } 1297 | item { 1298 | name: "/m/06mf6" 1299 | id: 260 1300 | display_name: "Rabbit" 1301 | } 1302 | item { 1303 | name: "/m/06_fw" 1304 | id: 261 1305 | display_name: "Skateboard" 1306 | } 1307 | item { 1308 | name: "/m/078jl" 1309 | id: 262 1310 | display_name: "Snake" 1311 | } 1312 | item { 1313 | name: "/m/0fbdv" 1314 | id: 263 1315 | display_name: "Shellfish" 1316 | } 1317 | item { 1318 | name: "/m/0h23m" 1319 | id: 264 1320 | display_name: "Sparrow" 1321 | } 1322 | item { 1323 | name: "/m/014j1m" 1324 | id: 265 1325 | display_name: "Apple" 1326 | } 1327 | item { 1328 | name: "/m/03fwl" 1329 | id: 266 1330 | display_name: "Goat" 1331 | } 1332 | item { 1333 | name: "/m/02y6n" 1334 | id: 267 1335 | display_name: "French fries" 1336 | } 1337 | item { 1338 | name: "/m/06c7f7" 1339 | id: 268 1340 | display_name: "Lipstick" 1341 | } 1342 | item { 1343 | name: "/m/026qbn5" 1344 | id: 269 1345 | display_name: "studio couch" 1346 | } 1347 | item { 1348 | name: "/m/0cdn1" 1349 | id: 270 1350 | display_name: "Hamburger" 1351 | } 1352 | item { 1353 | name: "/m/07clx" 1354 | id: 271 1355 | display_name: "Tea" 1356 | } 1357 | item { 1358 | name: "/m/07cx4" 1359 | id: 272 1360 | display_name: "Telephone" 1361 | } 1362 | item { 1363 | name: "/m/03g8mr" 1364 | id: 273 1365 | display_name: "Baseball bat" 1366 | } 1367 | item { 1368 | name: "/m/0cnyhnx" 1369 | id: 274 1370 | display_name: "Bull" 1371 | } 1372 | item { 1373 | name: "/m/01b7fy" 1374 | id: 275 1375 | display_name: "Headphones" 1376 | } 1377 | item { 1378 | name: "/m/04gth" 1379 | id: 276 1380 | display_name: "Lavender" 1381 | } 1382 | item { 1383 | name: "/m/0cyfs" 1384 | id: 277 1385 | display_name: "Parachute" 1386 | } 1387 | item { 1388 | name: "/m/021mn" 1389 | id: 278 1390 | display_name: "Cookie" 1391 | } 1392 | item { 1393 | name: "/m/07dm6" 1394 | id: 279 1395 | display_name: "Tiger" 1396 | } 1397 | item { 1398 | name: "/m/0k1tl" 1399 | id: 280 1400 | display_name: "Pen" 1401 | } 1402 | item { 1403 | name: "/m/0dv9c" 1404 | id: 281 1405 | display_name: "Racket" 1406 | } 1407 | item { 1408 | name: "/m/0dt3t" 1409 | id: 282 1410 | display_name: "Fork" 1411 | } 1412 | item { 1413 | name: "/m/04yqq2" 1414 | id: 283 1415 | display_name: "Bust" 1416 | } 1417 | item { 1418 | name: "/m/01cmb2" 1419 | id: 284 1420 | display_name: "Miniskirt" 1421 | } 1422 | item { 1423 | name: "/m/0gd36" 1424 | id: 285 1425 | display_name: "Sea lion" 1426 | } 1427 | item { 1428 | name: "/m/033cnk" 1429 | id: 286 1430 | display_name: "Egg" 1431 | } 1432 | item { 1433 | name: "/m/06ncr" 1434 | id: 287 1435 | display_name: "Saxophone" 1436 | } 1437 | item { 1438 | name: "/m/03bk1" 1439 | id: 288 1440 | display_name: "Giraffe" 1441 | } 1442 | item { 1443 | name: "/m/0bjyj5" 1444 | id: 289 1445 | display_name: "Waste container" 1446 | } 1447 | item { 1448 | name: "/m/06__v" 1449 | id: 290 1450 | display_name: "Snowboard" 1451 | } 1452 | item { 1453 | name: "/m/0qmmr" 1454 | id: 291 1455 | display_name: "Wheelchair" 1456 | } 1457 | item { 1458 | name: "/m/01xgg_" 1459 | id: 292 1460 | display_name: "Medical equipment" 1461 | } 1462 | item { 1463 | name: "/m/0czz2" 1464 | id: 293 1465 | display_name: "Antelope" 1466 | } 1467 | item { 1468 | name: "/m/02l8p9" 1469 | id: 294 1470 | display_name: "Harbor seal" 1471 | } 1472 | item { 1473 | name: "/m/09g1w" 1474 | id: 295 1475 | display_name: "Toilet" 1476 | } 1477 | item { 1478 | name: "/m/0ll1f78" 1479 | id: 296 1480 | display_name: "Shrimp" 1481 | } 1482 | item { 1483 | name: "/m/0cyhj_" 1484 | id: 297 1485 | display_name: "Orange" 1486 | } 1487 | item { 1488 | name: "/m/0642b4" 1489 | id: 298 1490 | display_name: "Cupboard" 1491 | } 1492 | item { 1493 | name: "/m/0h8mzrc" 1494 | id: 299 1495 | display_name: "Wall clock" 1496 | } 1497 | item { 1498 | name: "/m/068zj" 1499 | id: 300 1500 | display_name: "Pig" 1501 | } 1502 | item { 1503 | name: "/m/02z51p" 1504 | id: 301 1505 | display_name: "Nightstand" 1506 | } 1507 | item { 1508 | name: "/m/0h8nr_l" 1509 | id: 302 1510 | display_name: "Bathroom accessory" 1511 | } 1512 | item { 1513 | name: "/m/0388q" 1514 | id: 303 1515 | display_name: "Grape" 1516 | } 1517 | item { 1518 | name: "/m/02hj4" 1519 | id: 304 1520 | display_name: "Dolphin" 1521 | } 1522 | item { 1523 | name: "/m/01jfsr" 1524 | id: 305 1525 | display_name: "Lantern" 1526 | } 1527 | item { 1528 | name: "/m/07gql" 1529 | id: 306 1530 | display_name: "Trumpet" 1531 | } 1532 | item { 1533 | name: "/m/0h8my_4" 1534 | id: 307 1535 | display_name: "Tennis racket" 1536 | } 1537 | item { 1538 | name: "/m/0n28_" 1539 | id: 308 1540 | display_name: "Crab" 1541 | } 1542 | item { 1543 | name: "/m/0120dh" 1544 | id: 309 1545 | display_name: "Sea turtle" 1546 | } 1547 | item { 1548 | name: "/m/020kz" 1549 | id: 310 1550 | display_name: "Cannon" 1551 | } 1552 | item { 1553 | name: "/m/0mkg" 1554 | id: 311 1555 | display_name: "Accordion" 1556 | } 1557 | item { 1558 | name: "/m/03c7gz" 1559 | id: 312 1560 | display_name: "Door handle" 1561 | } 1562 | item { 1563 | name: "/m/09k_b" 1564 | id: 313 1565 | display_name: "Lemon" 1566 | } 1567 | item { 1568 | name: "/m/031n1" 1569 | id: 314 1570 | display_name: "Foot" 1571 | } 1572 | item { 1573 | name: "/m/04rmv" 1574 | id: 315 1575 | display_name: "Mouse" 1576 | } 1577 | item { 1578 | name: "/m/084rd" 1579 | id: 316 1580 | display_name: "Wok" 1581 | } 1582 | item { 1583 | name: "/m/02rgn06" 1584 | id: 317 1585 | display_name: "Volleyball" 1586 | } 1587 | item { 1588 | name: "/m/05z55" 1589 | id: 318 1590 | display_name: "Pasta" 1591 | } 1592 | item { 1593 | name: "/m/01r546" 1594 | id: 319 1595 | display_name: "Earrings" 1596 | } 1597 | item { 1598 | name: "/m/09qck" 1599 | id: 320 1600 | display_name: "Banana" 1601 | } 1602 | item { 1603 | name: "/m/012w5l" 1604 | id: 321 1605 | display_name: "Ladder" 1606 | } 1607 | item { 1608 | name: "/m/01940j" 1609 | id: 322 1610 | display_name: "Backpack" 1611 | } 1612 | item { 1613 | name: "/m/09f_2" 1614 | id: 323 1615 | display_name: "Crocodile" 1616 | } 1617 | item { 1618 | name: "/m/02p3w7d" 1619 | id: 324 1620 | display_name: "Roller skates" 1621 | } 1622 | item { 1623 | name: "/m/057p5t" 1624 | id: 325 1625 | display_name: "Scoreboard" 1626 | } 1627 | item { 1628 | name: "/m/0d8zb" 1629 | id: 326 1630 | display_name: "Jellyfish" 1631 | } 1632 | item { 1633 | name: "/m/01nq26" 1634 | id: 327 1635 | display_name: "Sock" 1636 | } 1637 | item { 1638 | name: "/m/01x_v" 1639 | id: 328 1640 | display_name: "Camel" 1641 | } 1642 | item { 1643 | name: "/m/05gqfk" 1644 | id: 329 1645 | display_name: "Plastic bag" 1646 | } 1647 | item { 1648 | name: "/m/0cydv" 1649 | id: 330 1650 | display_name: "Caterpillar" 1651 | } 1652 | item { 1653 | name: "/m/07030" 1654 | id: 331 1655 | display_name: "Sushi" 1656 | } 1657 | item { 1658 | name: "/m/084zz" 1659 | id: 332 1660 | display_name: "Whale" 1661 | } 1662 | item { 1663 | name: "/m/0c29q" 1664 | id: 333 1665 | display_name: "Leopard" 1666 | } 1667 | item { 1668 | name: "/m/02zn6n" 1669 | id: 334 1670 | display_name: "Barrel" 1671 | } 1672 | item { 1673 | name: "/m/03tw93" 1674 | id: 335 1675 | display_name: "Fireplace" 1676 | } 1677 | item { 1678 | name: "/m/0fqt361" 1679 | id: 336 1680 | display_name: "Stool" 1681 | } 1682 | item { 1683 | name: "/m/0f9_l" 1684 | id: 337 1685 | display_name: "Snail" 1686 | } 1687 | item { 1688 | name: "/m/0gm28" 1689 | id: 338 1690 | display_name: "Candy" 1691 | } 1692 | item { 1693 | name: "/m/09rvcxw" 1694 | id: 339 1695 | display_name: "Rocket" 1696 | } 1697 | item { 1698 | name: "/m/01nkt" 1699 | id: 340 1700 | display_name: "Cheese" 1701 | } 1702 | item { 1703 | name: "/m/04p0qw" 1704 | id: 341 1705 | display_name: "Billiard table" 1706 | } 1707 | item { 1708 | name: "/m/03hj559" 1709 | id: 342 1710 | display_name: "Mixing bowl" 1711 | } 1712 | item { 1713 | name: "/m/07pj7bq" 1714 | id: 343 1715 | display_name: "Bowling equipment" 1716 | } 1717 | item { 1718 | name: "/m/04ctx" 1719 | id: 344 1720 | display_name: "Knife" 1721 | } 1722 | item { 1723 | name: "/m/0703r8" 1724 | id: 345 1725 | display_name: "Loveseat" 1726 | } 1727 | item { 1728 | name: "/m/03qrc" 1729 | id: 346 1730 | display_name: "Hamster" 1731 | } 1732 | item { 1733 | name: "/m/020lf" 1734 | id: 347 1735 | display_name: "Mouse" 1736 | } 1737 | item { 1738 | name: "/m/0by6g" 1739 | id: 348 1740 | display_name: "Shark" 1741 | } 1742 | item { 1743 | name: "/m/01fh4r" 1744 | id: 349 1745 | display_name: "Teapot" 1746 | } 1747 | item { 1748 | name: "/m/07c6l" 1749 | id: 350 1750 | display_name: "Trombone" 1751 | } 1752 | item { 1753 | name: "/m/03bj1" 1754 | id: 351 1755 | display_name: "Panda" 1756 | } 1757 | item { 1758 | name: "/m/0898b" 1759 | id: 352 1760 | display_name: "Zebra" 1761 | } 1762 | item { 1763 | name: "/m/02x984l" 1764 | id: 353 1765 | display_name: "Mechanical fan" 1766 | } 1767 | item { 1768 | name: "/m/0fj52s" 1769 | id: 354 1770 | display_name: "Carrot" 1771 | } 1772 | item { 1773 | name: "/m/0cd4d" 1774 | id: 355 1775 | display_name: "Cheetah" 1776 | } 1777 | item { 1778 | name: "/m/02068x" 1779 | id: 356 1780 | display_name: "Gondola" 1781 | } 1782 | item { 1783 | name: "/m/01vbnl" 1784 | id: 357 1785 | display_name: "Bidet" 1786 | } 1787 | item { 1788 | name: "/m/0449p" 1789 | id: 358 1790 | display_name: "Jaguar" 1791 | } 1792 | item { 1793 | name: "/m/0gj37" 1794 | id: 359 1795 | display_name: "Ladybug" 1796 | } 1797 | item { 1798 | name: "/m/0nl46" 1799 | id: 360 1800 | display_name: "Crown" 1801 | } 1802 | item { 1803 | name: "/m/0152hh" 1804 | id: 361 1805 | display_name: "Snowman" 1806 | } 1807 | item { 1808 | name: "/m/03dnzn" 1809 | id: 362 1810 | display_name: "Bathtub" 1811 | } 1812 | item { 1813 | name: "/m/05_5p_0" 1814 | id: 363 1815 | display_name: "Table tennis racket" 1816 | } 1817 | item { 1818 | name: "/m/02jfl0" 1819 | id: 364 1820 | display_name: "Sombrero" 1821 | } 1822 | item { 1823 | name: "/m/01dxs" 1824 | id: 365 1825 | display_name: "Brown bear" 1826 | } 1827 | item { 1828 | name: "/m/0cjq5" 1829 | id: 366 1830 | display_name: "Lobster" 1831 | } 1832 | item { 1833 | name: "/m/040b_t" 1834 | id: 367 1835 | display_name: "Refrigerator" 1836 | } 1837 | item { 1838 | name: "/m/0_cp5" 1839 | id: 368 1840 | display_name: "Oyster" 1841 | } 1842 | item { 1843 | name: "/m/0gxl3" 1844 | id: 369 1845 | display_name: "Handgun" 1846 | } 1847 | item { 1848 | name: "/m/029bxz" 1849 | id: 370 1850 | display_name: "Oven" 1851 | } 1852 | item { 1853 | name: "/m/02zt3" 1854 | id: 371 1855 | display_name: "Kite" 1856 | } 1857 | item { 1858 | name: "/m/03d443" 1859 | id: 372 1860 | display_name: "Rhinoceros" 1861 | } 1862 | item { 1863 | name: "/m/0306r" 1864 | id: 373 1865 | display_name: "Fox" 1866 | } 1867 | item { 1868 | name: "/m/0h8l4fh" 1869 | id: 374 1870 | display_name: "Light bulb" 1871 | } 1872 | item { 1873 | name: "/m/0633h" 1874 | id: 375 1875 | display_name: "Polar bear" 1876 | } 1877 | item { 1878 | name: "/m/01s55n" 1879 | id: 376 1880 | display_name: "Suitcase" 1881 | } 1882 | item { 1883 | name: "/m/0hkxq" 1884 | id: 377 1885 | display_name: "Broccoli" 1886 | } 1887 | item { 1888 | name: "/m/0cn6p" 1889 | id: 378 1890 | display_name: "Otter" 1891 | } 1892 | item { 1893 | name: "/m/0dbzx" 1894 | id: 379 1895 | display_name: "Mule" 1896 | } 1897 | item { 1898 | name: "/m/01dy8n" 1899 | id: 380 1900 | display_name: "Woodpecker" 1901 | } 1902 | item { 1903 | name: "/m/01h8tj" 1904 | id: 381 1905 | display_name: "Starfish" 1906 | } 1907 | item { 1908 | name: "/m/03s_tn" 1909 | id: 382 1910 | display_name: "Kettle" 1911 | } 1912 | item { 1913 | name: "/m/01xs3r" 1914 | id: 383 1915 | display_name: "Jet ski" 1916 | } 1917 | item { 1918 | name: "/m/031b6r" 1919 | id: 384 1920 | display_name: "Window blind" 1921 | } 1922 | item { 1923 | name: "/m/06j2d" 1924 | id: 385 1925 | display_name: "Raven" 1926 | } 1927 | item { 1928 | name: "/m/0hqkz" 1929 | id: 386 1930 | display_name: "Grapefruit" 1931 | } 1932 | item { 1933 | name: "/m/01_5g" 1934 | id: 387 1935 | display_name: "Chopsticks" 1936 | } 1937 | item { 1938 | name: "/m/02zvsm" 1939 | id: 388 1940 | display_name: "Tart" 1941 | } 1942 | item { 1943 | name: "/m/0kpqd" 1944 | id: 389 1945 | display_name: "Watermelon" 1946 | } 1947 | item { 1948 | name: "/m/015x4r" 1949 | id: 390 1950 | display_name: "Cucumber" 1951 | } 1952 | item { 1953 | name: "/m/061hd_" 1954 | id: 391 1955 | display_name: "Infant bed" 1956 | } 1957 | item { 1958 | name: "/m/04ylt" 1959 | id: 392 1960 | display_name: "Missile" 1961 | } 1962 | item { 1963 | name: "/m/02wv84t" 1964 | id: 393 1965 | display_name: "Gas stove" 1966 | } 1967 | item { 1968 | name: "/m/04y4h8h" 1969 | id: 394 1970 | display_name: "Bathroom cabinet" 1971 | } 1972 | item { 1973 | name: "/m/01gllr" 1974 | id: 395 1975 | display_name: "Beehive" 1976 | } 1977 | item { 1978 | name: "/m/0pcr" 1979 | id: 396 1980 | display_name: "Alpaca" 1981 | } 1982 | item { 1983 | name: "/m/0jy4k" 1984 | id: 397 1985 | display_name: "Doughnut" 1986 | } 1987 | item { 1988 | name: "/m/09f20" 1989 | id: 398 1990 | display_name: "Hippopotamus" 1991 | } 1992 | item { 1993 | name: "/m/0mcx2" 1994 | id: 399 1995 | display_name: "Ipod" 1996 | } 1997 | item { 1998 | name: "/m/04c0y" 1999 | id: 400 2000 | display_name: "Kangaroo" 2001 | } 2002 | item { 2003 | name: "/m/0_k2" 2004 | id: 401 2005 | display_name: "Ant" 2006 | } 2007 | item { 2008 | name: "/m/0jg57" 2009 | id: 402 2010 | display_name: "Bell pepper" 2011 | } 2012 | item { 2013 | name: "/m/03fj2" 2014 | id: 403 2015 | display_name: "Goldfish" 2016 | } 2017 | item { 2018 | name: "/m/03ldnb" 2019 | id: 404 2020 | display_name: "Ceiling fan" 2021 | } 2022 | item { 2023 | name: "/m/06nrc" 2024 | id: 405 2025 | display_name: "Shotgun" 2026 | } 2027 | item { 2028 | name: "/m/01btn" 2029 | id: 406 2030 | display_name: "Barge" 2031 | } 2032 | item { 2033 | name: "/m/05vtc" 2034 | id: 407 2035 | display_name: "Potato" 2036 | } 2037 | item { 2038 | name: "/m/08hvt4" 2039 | id: 408 2040 | display_name: "Jug" 2041 | } 2042 | item { 2043 | name: "/m/0fx9l" 2044 | id: 409 2045 | display_name: "Microwave oven" 2046 | } 2047 | item { 2048 | name: "/m/01h44" 2049 | id: 410 2050 | display_name: "Bat" 2051 | } 2052 | item { 2053 | name: "/m/05n4y" 2054 | id: 411 2055 | display_name: "Ostrich" 2056 | } 2057 | item { 2058 | name: "/m/0jly1" 2059 | id: 412 2060 | display_name: "Turkey" 2061 | } 2062 | item { 2063 | name: "/m/06y5r" 2064 | id: 413 2065 | display_name: "Sword" 2066 | } 2067 | item { 2068 | name: "/m/05ctyq" 2069 | id: 414 2070 | display_name: "Tennis ball" 2071 | } 2072 | item { 2073 | name: "/m/0fp6w" 2074 | id: 415 2075 | display_name: "Pineapple" 2076 | } 2077 | item { 2078 | name: "/m/0d4w1" 2079 | id: 416 2080 | display_name: "Closet" 2081 | } 2082 | item { 2083 | name: "/m/02pv19" 2084 | id: 417 2085 | display_name: "Stop sign" 2086 | } 2087 | item { 2088 | name: "/m/07crc" 2089 | id: 418 2090 | display_name: "Taco" 2091 | } 2092 | item { 2093 | name: "/m/01dwwc" 2094 | id: 419 2095 | display_name: "Pancake" 2096 | } 2097 | item { 2098 | name: "/m/01b9xk" 2099 | id: 420 2100 | display_name: "Hot dog" 2101 | } 2102 | item { 2103 | name: "/m/013y1f" 2104 | id: 421 2105 | display_name: "Organ" 2106 | } 2107 | item { 2108 | name: "/m/0m53l" 2109 | id: 422 2110 | display_name: "Rays and skates" 2111 | } 2112 | item { 2113 | name: "/m/0174k2" 2114 | id: 423 2115 | display_name: "Washing machine" 2116 | } 2117 | item { 2118 | name: "/m/01dwsz" 2119 | id: 424 2120 | display_name: "Waffle" 2121 | } 2122 | item { 2123 | name: "/m/04vv5k" 2124 | id: 425 2125 | display_name: "Snowplow" 2126 | } 2127 | item { 2128 | name: "/m/04cp_" 2129 | id: 426 2130 | display_name: "Koala" 2131 | } 2132 | item { 2133 | name: "/m/0fz0h" 2134 | id: 427 2135 | display_name: "Honeycomb" 2136 | } 2137 | item { 2138 | name: "/m/0llzx" 2139 | id: 428 2140 | display_name: "Sewing machine" 2141 | } 2142 | item { 2143 | name: "/m/0319l" 2144 | id: 429 2145 | display_name: "Horn" 2146 | } 2147 | item { 2148 | name: "/m/04v6l4" 2149 | id: 430 2150 | display_name: "Frying pan" 2151 | } 2152 | item { 2153 | name: "/m/0dkzw" 2154 | id: 431 2155 | display_name: "Seat belt" 2156 | } 2157 | item { 2158 | name: "/m/027pcv" 2159 | id: 432 2160 | display_name: "Zucchini" 2161 | } 2162 | item { 2163 | name: "/m/0323sq" 2164 | id: 433 2165 | display_name: "Golf cart" 2166 | } 2167 | item { 2168 | name: "/m/054fyh" 2169 | id: 434 2170 | display_name: "Pitcher" 2171 | } 2172 | item { 2173 | name: "/m/01pns0" 2174 | id: 435 2175 | display_name: "Fire hydrant" 2176 | } 2177 | item { 2178 | name: "/m/012n7d" 2179 | id: 436 2180 | display_name: "Ambulance" 2181 | } 2182 | item { 2183 | name: "/m/044r5d" 2184 | id: 437 2185 | display_name: "Golf ball" 2186 | } 2187 | item { 2188 | name: "/m/01krhy" 2189 | id: 438 2190 | display_name: "Tiara" 2191 | } 2192 | item { 2193 | name: "/m/0dq75" 2194 | id: 439 2195 | display_name: "Raccoon" 2196 | } 2197 | item { 2198 | name: "/m/0176mf" 2199 | id: 440 2200 | display_name: "Belt" 2201 | } 2202 | item { 2203 | name: "/m/0h8lkj8" 2204 | id: 441 2205 | display_name: "Corded phone" 2206 | } 2207 | item { 2208 | name: "/m/04tn4x" 2209 | id: 442 2210 | display_name: "Swim cap" 2211 | } 2212 | item { 2213 | name: "/m/06l9r" 2214 | id: 443 2215 | display_name: "Red panda" 2216 | } 2217 | item { 2218 | name: "/m/0cjs7" 2219 | id: 444 2220 | display_name: "Asparagus" 2221 | } 2222 | item { 2223 | name: "/m/01lsmm" 2224 | id: 445 2225 | display_name: "Scissors" 2226 | } 2227 | item { 2228 | name: "/m/01lcw4" 2229 | id: 446 2230 | display_name: "Limousine" 2231 | } 2232 | item { 2233 | name: "/m/047j0r" 2234 | id: 447 2235 | display_name: "Filing cabinet" 2236 | } 2237 | item { 2238 | name: "/m/01fb_0" 2239 | id: 448 2240 | display_name: "Bagel" 2241 | } 2242 | item { 2243 | name: "/m/04169hn" 2244 | id: 449 2245 | display_name: "Wood-burning stove" 2246 | } 2247 | item { 2248 | name: "/m/076bq" 2249 | id: 450 2250 | display_name: "Segway" 2251 | } 2252 | item { 2253 | name: "/m/0hdln" 2254 | id: 451 2255 | display_name: "Ruler" 2256 | } 2257 | item { 2258 | name: "/m/01g3x7" 2259 | id: 452 2260 | display_name: "Bow and arrow" 2261 | } 2262 | item { 2263 | name: "/m/0l3ms" 2264 | id: 453 2265 | display_name: "Balance beam" 2266 | } 2267 | item { 2268 | name: "/m/058qzx" 2269 | id: 454 2270 | display_name: "Kitchen knife" 2271 | } 2272 | item { 2273 | name: "/m/0h8n6ft" 2274 | id: 455 2275 | display_name: "Cake stand" 2276 | } 2277 | item { 2278 | name: "/m/018j2" 2279 | id: 456 2280 | display_name: "Banjo" 2281 | } 2282 | item { 2283 | name: "/m/0l14j_" 2284 | id: 457 2285 | display_name: "Flute" 2286 | } 2287 | item { 2288 | name: "/m/0wdt60w" 2289 | id: 458 2290 | display_name: "Rugby ball" 2291 | } 2292 | item { 2293 | name: "/m/02gzp" 2294 | id: 459 2295 | display_name: "Dagger" 2296 | } 2297 | item { 2298 | name: "/m/0h8n6f9" 2299 | id: 460 2300 | display_name: "Dog bed" 2301 | } 2302 | item { 2303 | name: "/m/0fbw6" 2304 | id: 461 2305 | display_name: "Cabbage" 2306 | } 2307 | item { 2308 | name: "/m/07kng9" 2309 | id: 462 2310 | display_name: "Picnic basket" 2311 | } 2312 | item { 2313 | name: "/m/0dj6p" 2314 | id: 463 2315 | display_name: "Peach" 2316 | } 2317 | item { 2318 | name: "/m/06pcq" 2319 | id: 464 2320 | display_name: "Submarine sandwich" 2321 | } 2322 | item { 2323 | name: "/m/061_f" 2324 | id: 465 2325 | display_name: "Pear" 2326 | } 2327 | item { 2328 | name: "/m/04g2r" 2329 | id: 466 2330 | display_name: "Lynx" 2331 | } 2332 | item { 2333 | name: "/m/0jwn_" 2334 | id: 467 2335 | display_name: "Pomegranate" 2336 | } 2337 | item { 2338 | name: "/m/02f9f_" 2339 | id: 468 2340 | display_name: "Shower" 2341 | } 2342 | item { 2343 | name: "/m/01f8m5" 2344 | id: 469 2345 | display_name: "Blue jay" 2346 | } 2347 | item { 2348 | name: "/m/01m4t" 2349 | id: 470 2350 | display_name: "Printer" 2351 | } 2352 | item { 2353 | name: "/m/0cl4p" 2354 | id: 471 2355 | display_name: "Hedgehog" 2356 | } 2357 | item { 2358 | name: "/m/07xyvk" 2359 | id: 472 2360 | display_name: "Coffeemaker" 2361 | } 2362 | item { 2363 | name: "/m/084hf" 2364 | id: 473 2365 | display_name: "Worm" 2366 | } 2367 | item { 2368 | name: "/m/03v5tg" 2369 | id: 474 2370 | display_name: "Drinking straw" 2371 | } 2372 | item { 2373 | name: "/m/0qjjc" 2374 | id: 475 2375 | display_name: "Remote control" 2376 | } 2377 | item { 2378 | name: "/m/015x5n" 2379 | id: 476 2380 | display_name: "Radish" 2381 | } 2382 | item { 2383 | name: "/m/0ccs93" 2384 | id: 477 2385 | display_name: "Canary" 2386 | } 2387 | item { 2388 | name: "/m/0nybt" 2389 | id: 478 2390 | display_name: "Seahorse" 2391 | } 2392 | item { 2393 | name: "/m/02vkqh8" 2394 | id: 479 2395 | display_name: "Wardrobe" 2396 | } 2397 | item { 2398 | name: "/m/09gtd" 2399 | id: 480 2400 | display_name: "Toilet paper" 2401 | } 2402 | item { 2403 | name: "/m/019h78" 2404 | id: 481 2405 | display_name: "Centipede" 2406 | } 2407 | item { 2408 | name: "/m/015wgc" 2409 | id: 482 2410 | display_name: "Croissant" 2411 | } 2412 | item { 2413 | name: "/m/01x3jk" 2414 | id: 483 2415 | display_name: "Snowmobile" 2416 | } 2417 | item { 2418 | name: "/m/01j3zr" 2419 | id: 484 2420 | display_name: "Burrito" 2421 | } 2422 | item { 2423 | name: "/m/0c568" 2424 | id: 485 2425 | display_name: "Porcupine" 2426 | } 2427 | item { 2428 | name: "/m/02pdsw" 2429 | id: 486 2430 | display_name: "Cutting board" 2431 | } 2432 | item { 2433 | name: "/m/029b3" 2434 | id: 487 2435 | display_name: "Dice" 2436 | } 2437 | item { 2438 | name: "/m/03q5t" 2439 | id: 488 2440 | display_name: "Harpsichord" 2441 | } 2442 | item { 2443 | name: "/m/0p833" 2444 | id: 489 2445 | display_name: "Perfume" 2446 | } 2447 | item { 2448 | name: "/m/01d380" 2449 | id: 490 2450 | display_name: "Drill" 2451 | } 2452 | item { 2453 | name: "/m/024d2" 2454 | id: 491 2455 | display_name: "Calculator" 2456 | } 2457 | item { 2458 | name: "/m/0mw_6" 2459 | id: 492 2460 | display_name: "Willow" 2461 | } 2462 | item { 2463 | name: "/m/01f91_" 2464 | id: 493 2465 | display_name: "Pretzel" 2466 | } 2467 | item { 2468 | name: "/m/02g30s" 2469 | id: 494 2470 | display_name: "Guacamole" 2471 | } 2472 | item { 2473 | name: "/m/01hrv5" 2474 | id: 495 2475 | display_name: "Popcorn" 2476 | } 2477 | item { 2478 | name: "/m/03m5k" 2479 | id: 496 2480 | display_name: "Harp" 2481 | } 2482 | item { 2483 | name: "/m/0162_1" 2484 | id: 497 2485 | display_name: "Towel" 2486 | } 2487 | item { 2488 | name: "/m/063rgb" 2489 | id: 498 2490 | display_name: "Mixer" 2491 | } 2492 | item { 2493 | name: "/m/06_72j" 2494 | id: 499 2495 | display_name: "Digital clock" 2496 | } 2497 | item { 2498 | name: "/m/046dlr" 2499 | id: 500 2500 | display_name: "Alarm clock" 2501 | } 2502 | item { 2503 | name: "/m/047v4b" 2504 | id: 501 2505 | display_name: "Artichoke" 2506 | } 2507 | item { 2508 | name: "/m/04zpv" 2509 | id: 502 2510 | display_name: "Milk" 2511 | } 2512 | item { 2513 | name: "/m/043nyj" 2514 | id: 503 2515 | display_name: "Common fig" 2516 | } 2517 | item { 2518 | name: "/m/03bbps" 2519 | id: 504 2520 | display_name: "Power plugs and sockets" 2521 | } 2522 | item { 2523 | name: "/m/02w3r3" 2524 | id: 505 2525 | display_name: "Paper towel" 2526 | } 2527 | item { 2528 | name: "/m/02pjr4" 2529 | id: 506 2530 | display_name: "Blender" 2531 | } 2532 | item { 2533 | name: "/m/0755b" 2534 | id: 507 2535 | display_name: "Scorpion" 2536 | } 2537 | item { 2538 | name: "/m/02lbcq" 2539 | id: 508 2540 | display_name: "Stretcher" 2541 | } 2542 | item { 2543 | name: "/m/0fldg" 2544 | id: 509 2545 | display_name: "Mango" 2546 | } 2547 | item { 2548 | name: "/m/012074" 2549 | id: 510 2550 | display_name: "Magpie" 2551 | } 2552 | item { 2553 | name: "/m/035vxb" 2554 | id: 511 2555 | display_name: "Isopod" 2556 | } 2557 | item { 2558 | name: "/m/02w3_ws" 2559 | id: 512 2560 | display_name: "Personal care" 2561 | } 2562 | item { 2563 | name: "/m/0f6nr" 2564 | id: 513 2565 | display_name: "Unicycle" 2566 | } 2567 | item { 2568 | name: "/m/0420v5" 2569 | id: 514 2570 | display_name: "Punching bag" 2571 | } 2572 | item { 2573 | name: "/m/0frqm" 2574 | id: 515 2575 | display_name: "Envelope" 2576 | } 2577 | item { 2578 | name: "/m/03txqz" 2579 | id: 516 2580 | display_name: "Scale" 2581 | } 2582 | item { 2583 | name: "/m/0271qf7" 2584 | id: 517 2585 | display_name: "Wine rack" 2586 | } 2587 | item { 2588 | name: "/m/074d1" 2589 | id: 518 2590 | display_name: "Submarine" 2591 | } 2592 | item { 2593 | name: "/m/08p92x" 2594 | id: 519 2595 | display_name: "Cream" 2596 | } 2597 | item { 2598 | name: "/m/01j4z9" 2599 | id: 520 2600 | display_name: "Chainsaw" 2601 | } 2602 | item { 2603 | name: "/m/0kpt_" 2604 | id: 521 2605 | display_name: "Cantaloupe" 2606 | } 2607 | item { 2608 | name: "/m/0h8n27j" 2609 | id: 522 2610 | display_name: "Serving tray" 2611 | } 2612 | item { 2613 | name: "/m/03y6mg" 2614 | id: 523 2615 | display_name: "Food processor" 2616 | } 2617 | item { 2618 | name: "/m/04h8sr" 2619 | id: 524 2620 | display_name: "Dumbbell" 2621 | } 2622 | item { 2623 | name: "/m/065h6l" 2624 | id: 525 2625 | display_name: "Jacuzzi" 2626 | } 2627 | item { 2628 | name: "/m/02tsc9" 2629 | id: 526 2630 | display_name: "Slow cooker" 2631 | } 2632 | item { 2633 | name: "/m/012ysf" 2634 | id: 527 2635 | display_name: "Syringe" 2636 | } 2637 | item { 2638 | name: "/m/0ky7b" 2639 | id: 528 2640 | display_name: "Dishwasher" 2641 | } 2642 | item { 2643 | name: "/m/02wg_p" 2644 | id: 529 2645 | display_name: "Tree house" 2646 | } 2647 | item { 2648 | name: "/m/0584n8" 2649 | id: 530 2650 | display_name: "Briefcase" 2651 | } 2652 | item { 2653 | name: "/m/03kt2w" 2654 | id: 531 2655 | display_name: "Stationary bicycle" 2656 | } 2657 | item { 2658 | name: "/m/05kms" 2659 | id: 532 2660 | display_name: "Oboe" 2661 | } 2662 | item { 2663 | name: "/m/030610" 2664 | id: 533 2665 | display_name: "Treadmill" 2666 | } 2667 | item { 2668 | name: "/m/0lt4_" 2669 | id: 534 2670 | display_name: "Binoculars" 2671 | } 2672 | item { 2673 | name: "/m/076lb9" 2674 | id: 535 2675 | display_name: "Bench" 2676 | } 2677 | item { 2678 | name: "/m/02ctlc" 2679 | id: 536 2680 | display_name: "Cricket ball" 2681 | } 2682 | item { 2683 | name: "/m/02x8cch" 2684 | id: 537 2685 | display_name: "Salt and pepper shakers" 2686 | } 2687 | item { 2688 | name: "/m/09gys" 2689 | id: 538 2690 | display_name: "Squid" 2691 | } 2692 | item { 2693 | name: "/m/03jbxj" 2694 | id: 539 2695 | display_name: "Light switch" 2696 | } 2697 | item { 2698 | name: "/m/012xff" 2699 | id: 540 2700 | display_name: "Toothbrush" 2701 | } 2702 | item { 2703 | name: "/m/0h8kx63" 2704 | id: 541 2705 | display_name: "Spice rack" 2706 | } 2707 | item { 2708 | name: "/m/073g6" 2709 | id: 542 2710 | display_name: "Stethoscope" 2711 | } 2712 | item { 2713 | name: "/m/02cvgx" 2714 | id: 543 2715 | display_name: "Winter melon" 2716 | } 2717 | item { 2718 | name: "/m/027rl48" 2719 | id: 544 2720 | display_name: "Ladle" 2721 | } 2722 | item { 2723 | name: "/m/01kb5b" 2724 | id: 545 2725 | display_name: "Flashlight" 2726 | } 2727 | """ 2728 | 2729 | 2730 | def parse_pbtxt(pbtxt): 2731 | class_names = [] 2732 | for item in pbtxt.split('item'): 2733 | for line in item.split('\n'): 2734 | if 'display_name' in line: 2735 | name = line.split('"')[1].lower() 2736 | class_names.append(name) 2737 | return class_names 2738 | 2739 | CLASS_NAME = parse_pbtxt(CLASSES_PBTXT) 2740 | --------------------------------------------------------------------------------