├── CNN
│   └── CaffeNet
│       ├── deploy.prototxt
│       ├── layers.py
│       ├── solver.prototxt
│       └── train_val.prototxt
├── LDA
│   ├── generate_train_dict.py
│   ├── generate_train_labels.py
│   ├── learn_LDA_model.py
│   ├── preprocess_text.py
│   └── train_dict_ImageCLEF_Wikipedia.json
├── README.md
├── data
│   ├── ImageCLEF_Wikipedia
│   │   └── README.md
│   ├── VOC2007
│   │   └── README.md
│   └── Wikipedia
│       ├── README.md
│       ├── testset_txt_img_cat.list
│       └── trainset_txt_img_cat.list
├── experiments
│   ├── multi_modal_retrieval.py
│   ├── preprocess_text.py
│   ├── utils.py
│   └── voc_2007_classification.py
├── texttopicnet.png
└── wikipedia_data
    ├── README.md
    ├── get_all_docs.py
    └── get_images.py
/CNN/CaffeNet/deploy.prototxt:
--------------------------------------------------------------------------------
1 | name: "TextTopicNet"
2 | layer {
3 | name: "data"
4 | type: "Input"
5 | top: "data"
6 | input_param { shape: { dim: 10 dim: 3 dim: 227 dim: 227 } }
7 | }
8 | layer {
9 | name: "conv1"
10 | type: "Convolution"
11 | bottom: "data"
12 | top: "conv1"
13 | convolution_param {
14 | num_output: 96
15 | kernel_size: 11
16 | stride: 4
17 | }
18 | }
19 | layer {
20 | name: "relu1"
21 | type: "ReLU"
22 | bottom: "conv1"
23 | top: "conv1"
24 | }
25 | layer {
26 | name: "pool1"
27 | type: "Pooling"
28 | bottom: "conv1"
29 | top: "pool1"
30 | pooling_param {
31 | pool: MAX
32 | kernel_size: 3
33 | stride: 2
34 | }
35 | }
36 | layer {
37 | name: "norm1"
38 | type: "LRN"
39 | bottom: "pool1"
40 | top: "norm1"
41 | lrn_param {
42 | local_size: 5
43 | alpha: 0.0001
44 | beta: 0.75
45 | }
46 | }
47 | layer {
48 | name: "conv2"
49 | type: "Convolution"
50 | bottom: "norm1"
51 | top: "conv2"
52 | convolution_param {
53 | num_output: 256
54 | pad: 2
55 | kernel_size: 5
56 | group: 2
57 | }
58 | }
59 | layer {
60 | name: "relu2"
61 | type: "ReLU"
62 | bottom: "conv2"
63 | top: "conv2"
64 | }
65 | layer {
66 | name: "pool2"
67 | type: "Pooling"
68 | bottom: "conv2"
69 | top: "pool2"
70 | pooling_param {
71 | pool: MAX
72 | kernel_size: 3
73 | stride: 2
74 | }
75 | }
76 | layer {
77 | name: "norm2"
78 | type: "LRN"
79 | bottom: "pool2"
80 | top: "norm2"
81 | lrn_param {
82 | local_size: 5
83 | alpha: 0.0001
84 | beta: 0.75
85 | }
86 | }
87 | layer {
88 | name: "conv3"
89 | type: "Convolution"
90 | bottom: "norm2"
91 | top: "conv3"
92 | convolution_param {
93 | num_output: 384
94 | pad: 1
95 | kernel_size: 3
96 | }
97 | }
98 | layer {
99 | name: "relu3"
100 | type: "ReLU"
101 | bottom: "conv3"
102 | top: "conv3"
103 | }
104 | layer {
105 | name: "conv4"
106 | type: "Convolution"
107 | bottom: "conv3"
108 | top: "conv4"
109 | convolution_param {
110 | num_output: 384
111 | pad: 1
112 | kernel_size: 3
113 | group: 2
114 | }
115 | }
116 | layer {
117 | name: "relu4"
118 | type: "ReLU"
119 | bottom: "conv4"
120 | top: "conv4"
121 | }
122 | layer {
123 | name: "conv5"
124 | type: "Convolution"
125 | bottom: "conv4"
126 | top: "conv5"
127 | convolution_param {
128 | num_output: 256
129 | pad: 1
130 | kernel_size: 3
131 | group: 2
132 | }
133 | }
134 | layer {
135 | name: "relu5"
136 | type: "ReLU"
137 | bottom: "conv5"
138 | top: "conv5"
139 | }
140 | layer {
141 | name: "pool5"
142 | type: "Pooling"
143 | bottom: "conv5"
144 | top: "pool5"
145 | pooling_param {
146 | pool: MAX
147 | kernel_size: 3
148 | stride: 2
149 | }
150 | }
151 | layer {
152 | name: "fc6"
153 | type: "InnerProduct"
154 | bottom: "pool5"
155 | top: "fc6"
156 | inner_product_param {
157 | num_output: 4096
158 | }
159 | }
160 | layer {
161 | name: "relu6"
162 | type: "ReLU"
163 | bottom: "fc6"
164 | top: "fc6"
165 | }
166 | layer {
167 | name: "drop6"
168 | type: "Dropout"
169 | bottom: "fc6"
170 | top: "fc6"
171 | dropout_param {
172 | dropout_ratio: 0.5
173 | }
174 | }
175 | layer {
176 | name: "fc7"
177 | type: "InnerProduct"
178 | bottom: "fc6"
179 | top: "fc7"
180 | inner_product_param {
181 | num_output: 4096
182 | }
183 | }
184 | layer {
185 | name: "relu7"
186 | type: "ReLU"
187 | bottom: "fc7"
188 | top: "fc7"
189 | }
190 | layer {
191 | name: "drop7"
192 | type: "Dropout"
193 | bottom: "fc7"
194 | top: "fc7"
195 | dropout_param {
196 | dropout_ratio: 0.5
197 | }
198 | }
199 | layer {
200 | name: "fc8"
201 | type: "InnerProduct"
202 | bottom: "fc7"
203 | top: "fc8"
204 | inner_product_param {
205 | num_output: 40
206 | }
207 | }
208 | layer {
209 | name: "prob"
210 | type: "Sigmoid"
211 | bottom: "fc8"
212 | top: "prob"
213 | }
214 |
--------------------------------------------------------------------------------
/CNN/CaffeNet/layers.py:
--------------------------------------------------------------------------------
1 | import caffe
2 | import numpy as np
3 | from PIL import Image
4 | import json
5 | import random
6 | import time
7 |
8 |
9 | class MultiLabelDataLayerWikiCLEF(caffe.Layer):
10 | """
11 | Load (input image, label image) pairs from ImageCLEF_Wikipedia
12 | """
13 |
14 | def setup(self, bottom, top):
15 | """
16 | Setup data layer according to parameters:
17 |
18 | - img_dir: path to ImageCLEF_Wikipedia images dir
19 | - mean: tuple of mean values to subtract
20 | - num_topics: dimensionality of LDA topic space, i.e. last fc layer dim
21 | - batch_size: ...
22 |
23 | example
24 |
25 | params = dict(img_dir="/media/DADES/datasets//images/",
 26 |                   mean=(104.00698793, 116.66876762, 122.67891434),
27 | batch_size=64, num_topics=40)
28 | """
29 | # config
30 | params = eval(self.param_str)
31 | self.img_dir = params['img_dir']
32 | self.mean = np.array(params['mean'])
33 | self.batch_size = params['batch_size']
34 | num_topics = params['num_topics']
35 |
36 | # input data placeholders
37 | self.data = np.zeros((self.batch_size,3,227,227), dtype=np.float32)
 38 |         self.label = np.zeros((self.batch_size,num_topics), dtype=np.float32)
39 |
40 | # load GT from json
 41 |         f = open('../../LDA/training_labels'+str(num_topics)+'.json')
 42 |         self.gt_train_dict = json.load(f)
 43 |         f.close()
44 |
45 | # two tops: data and label
46 | if len(top) != 2:
47 | raise Exception("Need to define two tops: data and label.")
48 | # data layers have no bottoms
49 | if len(bottom) != 0:
50 | raise Exception("Do not define a bottom.")
51 |
52 | # indices for images and labels
53 | self.idx = 0
54 |
55 | def reshape(self, bottom, top):
 56 |         # load a batch of input images and their topic-distribution labels
57 | self.load_images(self.gt_train_dict.keys()[self.idx:self.idx+self.batch_size])
58 | self.load_labels(self.gt_train_dict.values()[self.idx:self.idx+self.batch_size])
 59 |         # reshape tops to fit the current batch
60 | top[0].reshape(*self.data.shape)
61 | top[1].reshape(*self.label.shape)
62 |
63 |
64 | def forward(self, bottom, top):
65 | # assign output
66 | top[0].data[...] = self.data
67 | top[1].data[...] = self.label
68 |
69 | # pick next input
70 | self.idx += self.batch_size
71 | if self.idx+self.batch_size > len(self.gt_train_dict):
72 | self.idx = 0
73 |
74 |
75 | def backward(self, top, propagate_down, bottom):
76 | pass
77 |
78 |
79 | def load_images(self, idxs):
80 | """
81 | Load input image and preprocess for Caffe:
82 | - cast to float
83 | - switch channels RGB -> BGR
84 | - subtract mean
85 | - transpose to channel x height x width order
86 | """
87 | for i,idx in enumerate(idxs):
88 | im = Image.open(self.img_dir+idx)
89 | im = im.resize((256,256)) # resize to 256x256
90 | # data augmentation by random crops
91 | offset_x = random.randint(0,29)
92 | offset_y = random.randint(0,29)
93 | im = im.crop((offset_x,offset_y,227+offset_x,227+offset_y)) # crop of 227x227
94 | if random.randint(0,1) == 1:
95 | im = im.transpose(Image.FLIP_LEFT_RIGHT) # data augmentation by random mirror
 96 |             if im.mode != 'RGB':
 97 |                 im = im.convert('RGB') # grayscale/RGBA to RGB
98 | in_ = np.array(im, dtype=np.float32)
99 | in_ = in_[:,:,::-1] # switch channels RGB -> BGR
100 | in_ -= self.mean # subtract mean
101 | in_ = in_.transpose((2,0,1)) # transpose to channel x height x width order
102 | self.data[i,:,:,:] = in_
103 |
104 | def load_labels(self, idxs):
105 |         """Load LDA topic-probability vectors as regression targets."""
106 |         # idxs holds the per-image topic distributions taken from the GT dict
107 | self.label = np.array(idxs, dtype=np.float32)
108 |
--------------------------------------------------------------------------------
/CNN/CaffeNet/solver.prototxt:
--------------------------------------------------------------------------------
1 | net: "train_val.prototxt"
2 | test_initialization: false
3 | base_lr: 0.001
4 | lr_policy: "step"
5 | gamma: 0.1
6 | stepsize: 50000
7 | display: 20
8 | max_iter: 160000
9 | momentum: 0.9
10 | weight_decay: 0.0005
11 | snapshot: 10000
12 | snapshot_prefix: "textTopicNet_train_Wikipedia_ImageCLEF"
13 | solver_mode: GPU
14 |
--------------------------------------------------------------------------------
/CNN/CaffeNet/train_val.prototxt:
--------------------------------------------------------------------------------
1 | name: "TextTopicNet"
2 | layer {
3 | name: "data"
4 | type: "Python"
5 | top: "data"
6 | top: "label"
7 | python_param {
8 | module: "layers"
9 | layer: "MultiLabelDataLayerWikiCLEF"
10 | param_str: "{\'num_topics\': 40, \'batch_size\': 64, \'img_dir\': \'../../data/ImageCLEF_Wikipedia/\', \'mean\': (104.00699, 116.66877, 122.67892)}"
11 | }
12 | include { phase:TRAIN }
13 | }
14 | layer {
15 | name: "conv1"
16 | type: "Convolution"
17 | bottom: "data"
18 | top: "conv1"
19 | param {
20 | lr_mult: 1
21 | decay_mult: 1
22 | }
23 | param {
24 | lr_mult: 2
25 | decay_mult: 0
26 | }
27 | convolution_param {
28 | num_output: 96
29 | kernel_size: 11
30 | stride: 4
31 | weight_filler {
32 | type: "gaussian"
33 | std: 0.01
34 | }
35 | bias_filler {
36 | type: "constant"
37 | value: 0
38 | }
39 | }
40 | }
41 | layer {
42 | name: "relu1"
43 | type: "ReLU"
44 | bottom: "conv1"
45 | top: "conv1"
46 | }
47 | layer {
48 | name: "pool1"
49 | type: "Pooling"
50 | bottom: "conv1"
51 | top: "pool1"
52 | pooling_param {
53 | pool: MAX
54 | kernel_size: 3
55 | stride: 2
56 | }
57 | }
58 | layer {
59 | name: "norm1"
60 | type: "LRN"
61 | bottom: "pool1"
62 | top: "norm1"
63 | lrn_param {
64 | local_size: 5
65 | alpha: 0.0001
66 | beta: 0.75
67 | }
68 | }
69 | layer {
70 | name: "conv2"
71 | type: "Convolution"
72 | bottom: "norm1"
73 | top: "conv2"
74 | param {
75 | lr_mult: 1
76 | decay_mult: 1
77 | }
78 | param {
79 | lr_mult: 2
80 | decay_mult: 0
81 | }
82 | convolution_param {
83 | num_output: 256
84 | pad: 2
85 | kernel_size: 5
86 | group: 2
87 | weight_filler {
88 | type: "gaussian"
89 | std: 0.01
90 | }
91 | bias_filler {
92 | type: "constant"
93 | value: 1
94 | }
95 | }
96 | }
97 | layer {
98 | name: "relu2"
99 | type: "ReLU"
100 | bottom: "conv2"
101 | top: "conv2"
102 | }
103 | layer {
104 | name: "pool2"
105 | type: "Pooling"
106 | bottom: "conv2"
107 | top: "pool2"
108 | pooling_param {
109 | pool: MAX
110 | kernel_size: 3
111 | stride: 2
112 | }
113 | }
114 | layer {
115 | name: "norm2"
116 | type: "LRN"
117 | bottom: "pool2"
118 | top: "norm2"
119 | lrn_param {
120 | local_size: 5
121 | alpha: 0.0001
122 | beta: 0.75
123 | }
124 | }
125 | layer {
126 | name: "conv3"
127 | type: "Convolution"
128 | bottom: "norm2"
129 | top: "conv3"
130 | param {
131 | lr_mult: 1
132 | decay_mult: 1
133 | }
134 | param {
135 | lr_mult: 2
136 | decay_mult: 0
137 | }
138 | convolution_param {
139 | num_output: 384
140 | pad: 1
141 | kernel_size: 3
142 | weight_filler {
143 | type: "gaussian"
144 | std: 0.01
145 | }
146 | bias_filler {
147 | type: "constant"
148 | value: 0
149 | }
150 | }
151 | }
152 | layer {
153 | name: "relu3"
154 | type: "ReLU"
155 | bottom: "conv3"
156 | top: "conv3"
157 | }
158 | layer {
159 | name: "conv4"
160 | type: "Convolution"
161 | bottom: "conv3"
162 | top: "conv4"
163 | param {
164 | lr_mult: 1
165 | decay_mult: 1
166 | }
167 | param {
168 | lr_mult: 2
169 | decay_mult: 0
170 | }
171 | convolution_param {
172 | num_output: 384
173 | pad: 1
174 | kernel_size: 3
175 | group: 2
176 | weight_filler {
177 | type: "gaussian"
178 | std: 0.01
179 | }
180 | bias_filler {
181 | type: "constant"
182 | value: 1
183 | }
184 | }
185 | }
186 | layer {
187 | name: "relu4"
188 | type: "ReLU"
189 | bottom: "conv4"
190 | top: "conv4"
191 | }
192 | layer {
193 | name: "conv5"
194 | type: "Convolution"
195 | bottom: "conv4"
196 | top: "conv5"
197 | param {
198 | lr_mult: 1
199 | decay_mult: 1
200 | }
201 | param {
202 | lr_mult: 2
203 | decay_mult: 0
204 | }
205 | convolution_param {
206 | num_output: 256
207 | pad: 1
208 | kernel_size: 3
209 | group: 2
210 | weight_filler {
211 | type: "gaussian"
212 | std: 0.01
213 | }
214 | bias_filler {
215 | type: "constant"
216 | value: 1
217 | }
218 | }
219 | }
220 | layer {
221 | name: "relu5"
222 | type: "ReLU"
223 | bottom: "conv5"
224 | top: "conv5"
225 | }
226 | layer {
227 | name: "pool5"
228 | type: "Pooling"
229 | bottom: "conv5"
230 | top: "pool5"
231 | pooling_param {
232 | pool: MAX
233 | kernel_size: 3
234 | stride: 2
235 | }
236 | }
237 | layer {
238 | name: "fc6"
239 | type: "InnerProduct"
240 | bottom: "pool5"
241 | top: "fc6"
242 | param {
243 | lr_mult: 1
244 | decay_mult: 1
245 | }
246 | param {
247 | lr_mult: 2
248 | decay_mult: 0
249 | }
250 | inner_product_param {
251 | num_output: 4096
252 | weight_filler {
253 | type: "gaussian"
254 | std: 0.005
255 | }
256 | bias_filler {
257 | type: "constant"
258 | value: 1
259 | }
260 | }
261 | }
262 | layer {
263 | name: "relu6"
264 | type: "ReLU"
265 | bottom: "fc6"
266 | top: "fc6"
267 | }
268 | layer {
269 | name: "drop6"
270 | type: "Dropout"
271 | bottom: "fc6"
272 | top: "fc6"
273 | dropout_param {
274 | dropout_ratio: 0.5
275 | }
276 | }
277 | layer {
278 | name: "fc7"
279 | type: "InnerProduct"
280 | bottom: "fc6"
281 | top: "fc7"
282 | param {
283 | lr_mult: 1
284 | decay_mult: 1
285 | }
286 | param {
287 | lr_mult: 2
288 | decay_mult: 0
289 | }
290 | inner_product_param {
291 | num_output: 4096
292 | weight_filler {
293 | type: "gaussian"
294 | std: 0.005
295 | }
296 | bias_filler {
297 | type: "constant"
298 | value: 1
299 | }
300 | }
301 | }
302 | layer {
303 | name: "relu7"
304 | type: "ReLU"
305 | bottom: "fc7"
306 | top: "fc7"
307 | }
308 | layer {
309 | name: "drop7"
310 | type: "Dropout"
311 | bottom: "fc7"
312 | top: "fc7"
313 | dropout_param {
314 | dropout_ratio: 0.5
315 | }
316 | }
317 | layer {
318 | name: "fc8"
319 | type: "InnerProduct"
320 | bottom: "fc7"
321 | top: "fc8"
322 | param {
323 | lr_mult: 1
324 | decay_mult: 1
325 | }
326 | param {
327 | lr_mult: 2
328 | decay_mult: 0
329 | }
330 | inner_product_param {
331 | num_output: 40
332 | weight_filler {
333 | type: "gaussian"
334 | std: 0.01
335 | }
336 | bias_filler {
337 | type: "constant"
338 | value: 0
339 | }
340 | }
341 | }
342 | layer {
343 | name: "loss"
344 | type: "SigmoidCrossEntropyLoss"
345 | bottom: "fc8"
346 | bottom: "label"
347 | top: "loss"
348 | include { phase:TRAIN }
349 | }
350 |
--------------------------------------------------------------------------------
/LDA/generate_train_dict.py:
--------------------------------------------------------------------------------
1 | import sys,os
2 | from PIL import Image
3 | import xml.etree.ElementTree as ET
4 | import json
5 |
6 | print ' '+sys.argv[0]
7 | print ' Traverses the dataset images directory'
  8 | print ' builds a dictionary with image paths as keys and text article paths as values'
9 | print ' (...)'
10 |
11 | db_dir = '../data/ImageCLEF_Wikipedia/'
12 |
13 | img_dir = db_dir+'images/'
14 | xml_dir = db_dir+'metadata/'
15 |
16 | if not os.path.isdir(db_dir):
17 | sys.exit('ERR: Dataset folder '+db_dir+' not found!')
18 |
19 | if not os.path.isdir(img_dir):
20 | sys.exit('ERR: Dataset images folder '+img_dir+' not found!')
21 |
22 | if not os.path.isdir(xml_dir):
23 | sys.exit('ERR: Dataset metadata folder '+xml_dir+' not found!')
24 |
25 |
26 |
27 | def get_article(xml_file, lang='en'):
28 | tree = ET.parse(xml_file)
29 | root = tree.getroot()
30 | for child in root:
31 | if child.attrib == {'{http://www.w3.org/XML/1998/namespace}lang': lang}:
32 | for child2 in child:
33 | if child2.tag == 'caption':
34 | return child2.attrib
 35 |     return {}  # consistent with the empty-article check below
36 |
37 |
 38 | # Traverse the dataset images directory
 39 | # Build a dictionary with image paths as keys and text paths as values
 40 | # Discard non-JPEG files, images from non-English articles, and very small images (< 256 pixels)
41 | train_dict = {}
42 | for root, dirs, files in os.walk(img_dir):
43 | path = root.split('/')
44 | for file in files:
 45 |         name, ext = os.path.splitext(file)
 46 |         if ext.lower() in ('.jpg', '.jpeg'): # discard ~30k png files (usually diagrams, drawings, etc...)
 47 |             if not os.path.isfile(xml_dir+os.path.basename(root)+'/'+name+'.xml'):
 48 |                 continue
 49 |             article = get_article(xml_dir+os.path.basename(root)+'/'+name+'.xml','en')
 50 |             if article == {}: # discard images from non-English articles
 51 |                 continue
 52 |             if article['article'] == '': # discard images with no linked article text
 53 |                 continue
54 | im = Image.open(root+'/'+file)
55 | width, height = im.size
56 | if width < 256 or height < 256: # discard small images
57 | continue
58 |
 59 |             img_path = path[-2]+'/'+path[-1]+'/'+file
60 | train_dict[img_path] = article['article']
61 |
62 | with open('train_dict_ImageCLEF_Wikipedia.json', 'w') as fp:
63 | json.dump(train_dict, fp)
64 |
--------------------------------------------------------------------------------
/LDA/generate_train_labels.py:
--------------------------------------------------------------------------------
1 | import os,sys,re
2 | import json
3 |
4 | from nltk.tokenize import RegexpTokenizer
5 | from stop_words import get_stop_words
6 | from nltk.stem.porter import PorterStemmer
7 |
8 | import gensim
9 | from gensim import utils, corpora, models
10 | from gensim.corpora.wikicorpus import remove_markup
11 | from preprocess_text import preprocess
12 |
13 | print ' '+sys.argv[0]
 14 | print ' builds a dictionary with image paths as keys and LDA space probability distributions as values'
15 | print ' these probability distributions are then used as labels'
16 | print ' for training a CNN to predict the semantic context in which images appear'
17 | print ' (...)'
18 |
19 | NUM_TOPICS = 40
20 | db_dir = '../data/ImageCLEF_Wikipedia/'
21 | train_dict_path = 'train_dict_ImageCLEF_Wikipedia.json'
22 |
23 | if not os.path.isdir(db_dir):
24 | sys.exit('ERR: Dataset folder '+db_dir+' not found!')
25 |
26 | if not os.path.isfile(train_dict_path):
27 | sys.exit('ERR: Train dictionary file '+train_dict_path+' not found!')
28 |
29 | with open(train_dict_path) as f:
30 | train_dict = json.load(f)
31 |
32 | # load id <-> term dictionary
33 | if not os.path.isfile('./dictionary.dict'):
34 | sys.exit('ERR: ID <-> Term dictionary file ./dictionary.dict not found!')
35 |
36 | print 'Loading id <-> term dictionary from ./dictionary.dict ...',
37 | sys.stdout.flush()
38 | dictionary = corpora.Dictionary.load('./dictionary.dict')
39 | print ' Done!'
40 | # ignore words that appear in less than 20 documents or more than 50% documents
41 | dictionary.filter_extremes(no_below=20, no_above=0.5)
42 |
43 | # load document-term matrix
44 | if not os.path.isfile('./bow.mm'):
45 | sys.exit('ERR: Document-term matrix file ./bow.mm not found!')
46 |
47 | print 'Loading document-term matrix from ./bow.mm ...',
48 | sys.stdout.flush()
49 | corpus = gensim.corpora.MmCorpus('./bow.mm')
50 | print ' Done!'
51 |
52 | # load LDA model
53 | if not os.path.isfile('ldamodel'+str(NUM_TOPICS)+'.lda'):
54 | sys.exit('ERR: LDA model file ./ldamodel'+str(NUM_TOPICS)+'.lda not found!')
55 |
56 | print 'Loading LDA model from file ./ldamodel'+str(NUM_TOPICS)+'.lda ...',
57 | sys.stdout.flush()
58 | ldamodel = models.LdaModel.load('ldamodel'+str(NUM_TOPICS)+'.lda')
59 | print ' Done!'
60 |
61 | # transform ALL documents into LDA space
62 | target_labels = {}
63 | for img_path in train_dict.keys():
64 |
65 | with open(db_dir+train_dict[img_path]) as fp: raw = fp.read()
66 |
67 | tokens = preprocess(raw)
68 | bow_vector = dictionary.doc2bow(tokens)
69 | #lda_vector = ldamodel[bow_vector]
70 | lda_vector = ldamodel.get_document_topics(bow_vector, minimum_probability=None)
71 | lda_vector = sorted(lda_vector,key=lambda x:x[1],reverse=True)
72 | topic_prob = {}
73 | for instance in lda_vector:
74 | topic_prob[instance[0]] = instance[1]
75 | labels = []
76 | for topic_num in range(0,NUM_TOPICS):
77 | if topic_num in topic_prob.keys():
78 | labels.append(topic_prob[topic_num])
79 | else:
80 | labels.append(0)
81 | target_labels[img_path] = labels
82 | sys.stdout.write('\r%d/%d text documents processed...' % (len(target_labels),len(train_dict.keys())))
83 | sys.stdout.flush()
84 | sys.stdout.write(' Done!\n')
85 |
86 | # save key,labels pairs into json format file
87 | with open('./training_labels'+str(NUM_TOPICS)+'.json','w') as fp:
88 | json.dump(target_labels, fp)
89 |
--------------------------------------------------------------------------------
/LDA/learn_LDA_model.py:
--------------------------------------------------------------------------------
1 | import os,sys,re
2 | import json
3 |
4 | from nltk.tokenize import RegexpTokenizer
5 | from stop_words import get_stop_words
6 | from nltk.stem.porter import PorterStemmer
7 |
8 | import gensim
9 | from gensim import utils, corpora, models
10 | from gensim.corpora.wikicorpus import remove_markup
11 |
12 | from preprocess_text import preprocess
13 | import logging
14 |
15 | logging.basicConfig(stream=sys.stdout, level=logging.INFO)
16 |
17 | NUM_TOPICS = 40
18 | db_dir = '../data/ImageCLEF_Wikipedia/'
19 | train_dict_path = 'train_dict_ImageCLEF_Wikipedia.json'
20 |
21 | print ' '+sys.argv[0]
22 | print ' Learns LDA topic model with '+str(NUM_TOPICS)+' topics from corpora on '+train_dict_path
23 | print ' (...)'
24 |
25 | img_dir = db_dir+'images/'
26 | xml_dir = db_dir+'metadata/'
27 |
28 | if not os.path.isdir(db_dir):
29 | sys.exit('ERR: Dataset folder '+db_dir+' not found!')
30 |
31 | if not os.path.isdir(img_dir):
32 | sys.exit('ERR: Dataset images folder '+img_dir+' not found!')
33 |
34 | if not os.path.isdir(xml_dir):
35 | sys.exit('ERR: Dataset metadata folder '+xml_dir+' not found!')
36 |
37 | if not os.path.isfile(train_dict_path):
38 | sys.exit('ERR: Train dictionary file '+train_dict_path+' not found!')
39 |
40 | with open(train_dict_path) as f:
41 | train_dict = json.load(f)
42 |
43 | if not os.path.isfile('./dictionary.dict') or not os.path.isfile('./bow.mm'):
44 | # list for tokenized documents in loop
45 | texts = []
46 | for text_path in train_dict.values():
47 | with open(db_dir+text_path) as f: raw = f.read()
48 | # add tokens to corpus list
49 | texts.append(preprocess(raw))
50 | sys.stdout.write('\rCreating a list of tokenized documents: %d/%d documents processed...' % (len(texts),len(train_dict.values())))
51 | sys.stdout.flush()
52 | sys.stdout.write(' Done!\n')
53 |
 54 | # turn our tokenized documents into an id <-> term dictionary
 55 | if not os.path.isfile('./dictionary.dict'):
 56 |     print 'Turning tokenized documents into an id <-> term dictionary ...',
57 | sys.stdout.flush()
58 | dictionary = corpora.Dictionary(texts)
59 | dictionary.save('./dictionary.dict')
60 | else:
61 | print 'Loading id <-> term dictionary from ./dictionary.dict ...',
62 | sys.stdout.flush()
63 | dictionary = corpora.Dictionary.load('./dictionary.dict')
64 | print ' Done!'
65 |
66 | # ignore words that appear in less than 20 documents or more than 50% documents
67 | dictionary.filter_extremes(no_below=20, no_above=0.5)
68 |
69 | # convert tokenized documents into a document-term matrix
70 | if not os.path.isfile('./bow.mm'):
 71 |     print 'Converting tokenized documents into a document-term matrix ...',
72 | sys.stdout.flush()
73 | corpus = [dictionary.doc2bow(text) for text in texts]
74 | gensim.corpora.MmCorpus.serialize('./bow.mm', corpus)
75 | else:
76 | print 'Loading document-term matrix from ./bow.mm ...',
77 | sys.stdout.flush()
78 | corpus = gensim.corpora.MmCorpus('./bow.mm')
79 | print ' Done!'
80 |
81 | # Learn the LDA model
82 | print 'Learning the LDA model ...',
83 | sys.stdout.flush()
84 | #ldamodel = models.ldamodel.LdaModel(corpus, num_topics=NUM_TOPICS, id2word = dictionary, passes=20)
 85 | ldamodel = models.ldamulticore.LdaMulticore(corpus=corpus, id2word=dictionary, num_topics=NUM_TOPICS, workers=3)
86 | ldamodel.save('ldamodel'+str(NUM_TOPICS)+'.lda')
87 | print ' Done!'
88 |
--------------------------------------------------------------------------------
/LDA/preprocess_text.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import re
3 | from nltk.stem import WordNetLemmatizer
4 | from nltk.tokenize import RegexpTokenizer
5 | from stop_words import get_stop_words
6 | from nltk.stem.porter import PorterStemmer
7 | import gensim
8 | from gensim import utils, corpora, models
9 | from gensim.corpora.wikicorpus import filter_wiki
10 |
11 | filter_more = re.compile('(({\|)|(\|-)|(\|})|(\|)|(\!))(\s*\w+=((\".*?\")|([^ \t\n\r\f\v\|]+))\s*)+(({\|)|(\|-)|(\|})|(\|))?', re.UNICODE | re.DOTALL | re.MULTILINE)
12 |
13 | def preprocess(raw):
14 | # Initialize Tokenizer
15 | tokenizer = RegexpTokenizer(r'\w+')
16 |
17 | # Initialize Lemmatizer
18 | lemma = WordNetLemmatizer()
19 |
20 | # create English stop words list
21 | en_stop = get_stop_words('en')
22 |
23 | # Decode Wiki Markup entities and remove markup
24 | text = filter_wiki(raw)
25 | text = re.sub(filter_more, '', text)
26 |
27 | # clean and tokenize document string
28 | text = text.lower()
29 | tokens = tokenizer.tokenize(text)
30 |
31 | # remove stop words from tokens
32 | tokens = [i for i in tokens if not i in en_stop]
33 |
 34 |     # lemmatize tokens
35 | tokens = [lemma.lemmatize(i) for i in tokens]
36 |
37 | # remove non alphabetic characters
38 | tokens = [re.sub(r'[^a-z]', '', i) for i in tokens]
39 |
 40 |     # remove short tokens (fewer than 3 characters)
41 | tokens = [i for i in tokens if len(i)>2]
42 |
43 | return tokens
44 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # TextTopicNet
2 | **Self-supervised learning of visual features through embedding images into text topic spaces**, CVPR 2017.
3 |
4 | L. Gomez*, Y. Patel*, M. Rusiñol, D. Karatzas, C.V. Jawahar.
5 |
  6 | In this paper we present a method that takes advantage of freely available multi-modal content to train computer vision algorithms without human supervision. We put forward the idea of performing self-supervised learning of visual features by mining a large-scale corpus of multi-modal (text and image) documents. We show that discriminative visual features can be learnt efficiently by training a CNN to predict the semantic context in which a particular image is most likely to appear as an illustration. For this, we leverage the hidden semantic structures discovered by a well-known topic modeling technique over the text corpus.
7 |
8 | 
9 |
 10 | Our CNN learns to predict the semantic context in which images appear as illustrations. Given an illustrated article, we project its textual information into the topic-probability space provided by the LDA topic modeling framework. We then use this semantic-level representation as the supervisory signal for CNN training.
11 |
12 | If you make use of the code in this repository for scientific purposes, we appreciate it if you cite our paper:
13 |
14 | ```
15 | @inproceedings{gomez2017self,
16 | title={Self-supervised learning of visual features through embedding images into text topic spaces},
17 | author={Gomez, Lluis and Patel, Yash and Rusinol, Mar{\c{c}}al and Karatzas, Dimosthenis and Jawahar, CV},
18 | booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition},
19 | pages={4230--4239},
20 | year={2017}
21 | }
22 | ```
23 |
24 | ## Pretrained models
25 |
26 | [TextTopicNet_Wikipedia_ImageCLEF_40Topics.caffemodel](https://drive.google.com/open?id=0B52HmBFhEpdCTldNYjk5TnFTTDA)
27 |
28 | [TextTopicNet_finetuned_VOC2007.caffemodel](https://drive.google.com/open?id=0B52HmBFhEpdCUElJVVZKU1FibTA)
29 |
30 | ## Training from scratch
31 |
32 | ### Set up data
33 |
 34 | Download and uncompress the **Wikipedia Retrieval 2010 Collection** and **The PASCAL Visual Object Classes Challenge 2007** datasets into the ``data/ImageCLEF_Wikipedia/`` and ``data/VOC2007/`` folders, respectively. Follow the specific instructions provided in:
35 |
36 | [data/ImageCLEF_Wikipedia/README.md](data/ImageCLEF_Wikipedia/README.md)
37 |
38 | [data/VOC2007/README.md](data/VOC2007/README.md)
39 |
40 | ### Learn LDA model
41 |
42 | Run the following commands in order to process the data and learn the LDA model on the text corpus:
43 |
44 | ```
45 | cd LDA
46 | python generate_train_dict.py
47 | python learn_LDA_model.py
48 | python generate_train_labels.py
49 | ```
50 |
 51 | This will generate the LDA model files (``ldamodel40.lda`` and ``ldamodel40.lda.state``) that can be used to project new texts into the learned topic space, and a file ``training_labels40.json`` with the training labels (topic-space probabilities) for all images in our training dataset. If you want to skip this step and directly train the CNN model you can download the files from the following links: [ldamodel40.lda](https://drive.google.com/open?id=0B52HmBFhEpdCZUM1MXNoR1h3RFU) / [ldamodel40.lda.state](https://drive.google.com/open?id=0B52HmBFhEpdCWDV6SXZGaDlYcWs) / [dictionary.dict](https://drive.google.com/open?id=0B52HmBFhEpdCTVJKa3hnS3VRVkE) / [training_labels40.json](https://drive.google.com/open?id=0B52HmBFhEpdCTDZTSDFMVWswNEU)
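
As a quick check, these files can be used to project a new document into the learned topic space. A minimal sketch, run from the ``LDA/`` folder and reusing ``preprocess`` from ``preprocess_text.py`` (the input file name is a placeholder):

```
# Sketch: project a new text into the 40-topic LDA space.
from gensim import corpora, models
from preprocess_text import preprocess

dictionary = corpora.Dictionary.load('./dictionary.dict')
dictionary.filter_extremes(no_below=20, no_above=0.5)  # same filtering as training
ldamodel = models.LdaModel.load('ldamodel40.lda')

raw = open('some_article.txt').read()       # placeholder input document
bow = dictionary.doc2bow(preprocess(raw))
print(ldamodel.get_document_topics(bow))    # [(topic_id, probability), ...]
```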
52 |
53 | ### Train CNNs
54 |
55 | Run the following commands to train the CaffeNet model from scratch:
56 |
57 | ```
58 | cd CNN/CaffeNet
59 | export PYTHONPATH=./:$PYTHONPATH
60 | caffe train --solver solver.prototxt
61 | ```
62 |
63 | Once trained, the CNN model can be used for image classification in two different ways:
 64 | * as a feature extractor, i.e. extracting features from late layers (e.g. fc7, fc6, pool5) and training an SVM classifier on top of them (see the sketch below).
 65 | * by finetuning the weights for a classification task, e.g. using the PASCAL VOC 2007 training data.
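
A minimal pycaffe sketch of the feature-extractor option, assuming the pretrained weights were saved as ``TextTopicNet.caffemodel`` (placeholder filename) and that input images are already resized to 227x227, converted to BGR, and mean-subtracted as in ``CNN/CaffeNet/layers.py``:

```
# Sketch: TextTopicNet as a fixed feature extractor + linear SVM.
# File names and the train_images/train_labels arrays are placeholders.
import caffe
from sklearn.svm import LinearSVC

net = caffe.Net('deploy.prototxt', 'TextTopicNet.caffemodel', caffe.TEST)

def extract_fc7(batch):
    # batch: (N, 3, 227, 227) float32, BGR, mean-subtracted
    net.blobs['data'].reshape(*batch.shape)
    net.blobs['data'].data[...] = batch
    net.forward()
    return net.blobs['fc7'].data.copy()

X_train = extract_fc7(train_images)           # placeholder image batch
svm = LinearSVC().fit(X_train, train_labels)  # placeholder labels
```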
66 |
67 | The same model can be used for the following multi-modal retrieval tasks:
 68 | * Image query vs. text database: first project all text documents into the topic space using the learned LDA model; then project the image query into the topic space using the CNN model and find its nearest-neighbour texts in the database (see the sketch below).
 69 | * Text query vs. image database: the same as above, but the other way around.
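
A minimal sketch of the image-query case, assuming ``text_vecs`` holds the LDA topic distributions of the text database (shape ``(num_texts, 40)``) and ``img_vec`` is the CNN ``prob`` output for the query image (shape ``(40,)``); cosine similarity is used here purely for illustration:

```
# Sketch: rank database texts by cosine similarity to an image query
# in the shared 40-dimensional topic space. Inputs are placeholders.
import numpy as np

def rank_by_cosine(img_vec, text_vecs):
    q = img_vec / np.linalg.norm(img_vec)
    db = text_vecs / np.linalg.norm(text_vecs, axis=1, keepdims=True)
    return np.argsort(-db.dot(q))  # text indices, most similar first

ranking = rank_by_cosine(img_vec, text_vecs)
```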
70 |
71 |
72 | ### Experiments
73 |
74 | The scripts in the ``experiments/`` folder reproduce the results of the experiments in our paper.
 75 | * Run the script ``experiments/multi_modal_retrieval.py`` for the multi-modal retrieval experiment on the Wikipedia dataset.
 76 | * Run the script ``experiments/voc_2007_classification.py`` for the image classification experiment on PASCAL VOC 2007.
77 |
--------------------------------------------------------------------------------
/data/ImageCLEF_Wikipedia/README.md:
--------------------------------------------------------------------------------
1 |
2 | Download and uncompress here the **Wikipedia Retrieval 2010 Collection**.
3 |
4 | To download the original dataset (~24 GB) follow the instructions provided here: http://imageclef.org/wikidata
5 |
6 | A streamlined version of the dataset (~2.3 GB), with only JPEG images (resized to 256x256 pixels) and English articles, can be downloaded from https://goo.gl/jgQFGr
7 |
  8 | Our experiments use only the streamlined dataset, but the Python scripts in the ``LDA/`` folder are prepared to process the entire original dataset and produce the streamlined version (see ``LDA/generate_train_dict.py``).
9 |
 10 | After downloading and uncompressing the dataset, there must be three subdirectories here:
 11 | * ``images/``
 12 | * ``metadata/``
 13 | * ``_text/``
 14 |
15 |
--------------------------------------------------------------------------------
/data/VOC2007/README.md:
--------------------------------------------------------------------------------
1 | Download and uncompress train/validation and test data of **The PASCAL Visual Object Classes Challenge 2007**.
2 |
  3 | The original datasets can be downloaded from here: http://host.robots.ox.ac.uk/pascal/VOC/voc2007/index.html
4 |
5 | Here is a mirror in case the official site is down: https://pjreddie.com/projects/pascal-voc-dataset-mirror/
6 |
  7 | You'll need the VOC2007 Train/Validation Data (VOCtrainval_06-Nov-2007.tar, 439 MB) and the 2007 Test Data With Annotations (VOCtest_06-Nov-2007.tar, 431 MB).
8 |
9 | Download both files and uncompress them here. There will now be a ``VOCdevkit/`` subdirectory with all the VOC train/test data in it.
10 |
--------------------------------------------------------------------------------
/data/Wikipedia/README.md:
--------------------------------------------------------------------------------
1 | Download and uncompress train and test data proposed in **A New Approach to Cross-Modal Multimedia Retrieval**.
2 |
  3 | The original dataset can be downloaded from here: http://www.svcl.ucsd.edu/projects/crossmodal/wiki_top10cats.tar.gz
4 |
5 | Here is the data with re-sized images as used in our experiments: https://drive.google.com/open?id=0B49_-atDNGyUaDA5SHJMaVIxNkU
6 |
  7 | Here is the data with text files as used in our experiments: https://drive.google.com/open?id=0B49_-atDNGyUQ3dpQ0VBSGdtcWs
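
The ``trainset_txt_img_cat.list`` and ``testset_txt_img_cat.list`` files in this folder pair each text section with its illustrating image and a category id; judging from their contents, each line holds ``<text id> <image id> <category>``, whitespace-separated. A minimal parsing sketch:

```
# Sketch: parse the whitespace-separated text/image/category triples.
pairs = []
with open('testset_txt_img_cat.list') as f:
    for line in f:
        text_id, img_id, cat = line.split()
        pairs.append((text_id, img_id, int(cat)))
print(len(pairs), 'text-image pairs')
```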
8 |
--------------------------------------------------------------------------------
/data/Wikipedia/testset_txt_img_cat.list:
--------------------------------------------------------------------------------
1 | 6d6ead4cf7fd78eea820ac94d101f602-5 7e214fda4b30c95084e94fbec71ebde1 2
2 | ff106428f695e8509f1e2a6f047a9516-2.11 ff106428f695e8509f1e2a6f047a9516 10
3 | 317532540adceb07d5e2ec62d5eacac2-1.4 64a8268936f12f78c94bbe71d7cd173c 3
4 | 30ad99bdc962780e9fab9002f9a93241-1 386938a1b8c457b45f08cbab1c0d09b7 4
5 | 1f8c305889264e046100b0f069d5993a-1.2 108c8967acbdcc385d09503b817a3a29 6
6 | 3ff4a5fb4f787be0a3de2cc6ce99a42f-3 309db8877c48b845549ae6f2da8d841a 7
7 | fd54901826df6cb0260a8682c9e0686d-4.3 4c2189e6afcbbc4a111ee677574496c1 5
8 | f9983935d2abf59bc8bb63203f07f25f-4.13 adfa5f32eb4c2a853b07e25d3e2c5f31 10
9 | f1308daef06ed03b86e39e96db3fb6ec-3.3 fdd6980e09127beb4516dd0441b3a76c 2
10 | b3bbe6e0c76675357c84c22e04afe6ed-1.5 0d3bb65b6dcefb7a4f080f263bcd6421 8
11 | 161da9e3ff6d804e321511e8c6f390e7-1.2 147f221a5954823ba935853fc166bfe9 7
12 | f9983935d2abf59bc8bb63203f07f25f-1.2.4 4823460da2ebcf114e784d415643c3cc 10
13 | b72ccc56e1f97bad7e578426f363feb9-4 cb3ff460f2092262d69a9d32eeda1211 4
14 | b8808a8352429137916a3cf21c4f75a3-3.3 60ebf6806b7387aa28d8766b8da41ec2 2
15 | 040d81bab2d70218daac18a0e7bd61b2-7 ce124652274de7ba0e575e3e7565fe83 3
16 | 3e8dbc9c7700b34acdf3a0a80c48ae10-4.6 4e82c22d20a8dd9c3ece4fede5e5c841 4
17 | 20aaf5ee3075fd441fb3a679a8177f77-2.3 559d490d4df3b8d0650321112bd17538 6
18 | c4233b0929e7877777f33026748507b2-1.8 75fcbab0f71c5933fe13069c8b15340f 5
19 | bae0e6b899bcecb46fc76e673c1ca877-2.1 a759731a6aa24a0e7c0d97ad8dcba8c7 10
20 | 26ac7d591dacc94faaf50eda76daea7b-6.16 5cb6d0ea12ad161c5ad667eac1e034dd 10
21 | dce4e4fb7e394b49ac6c59ec7cecf5a0-4 37796382f2be63bfde064787bca6d603 10
22 | a7ec14a0bb348b84b4aea5d1488490a5-2 f25ae40aeea99d45b6fe0c16121f63c7 10
23 | 4a4566696cc81c6053ec708975767498-2.3 28eada0b5c4cb2f408d581c1b416857a 5
24 | 570b86ec6ec8e1e2051c6ab59d19adb5-8 93ec8e4a477b0e10c11660aa7da71189 8
25 | ad1900cb874496bdf4d5f34c4f75a2ef-2 26b682326bfb67195f97d497ce231575 6
26 | c16fc36728dff580d2af90b4fe0983f7-5.9 e59b655c354ca1db27b4c0b37d7444ec 3
27 | 19d63963dbc21d95845f4fa5bdafb89c-4.1 efefca33bfa8c64c9f1ca18311536ea9 2
28 | c39584729495496984371f0ec2f38974-3 d4cc67f999d59dff00646ca79ed901bf 3
29 | 5aac98936b970830510f1facd2f3d956-3 feb0e27cf51de7abc9a4215405579e7d 6
30 | 700f481e683a0be686bf2e4f02a549d7-2.2 538bcdb3bea81d00be0dd0e4daa9d167 9
31 | 8ddce5ac766087aa7b06de57e25956bd-2 3f5a2a9443f0dab4cfc96a37b4685683 2
32 | c9f6c6f233cc03b31460bf345008a707-5 9605833a95702bad6eabde581fd0737f 9
33 | 5f04e33f68bc6d78ba943eea3dbf9bf6-4.2 b194c1f68ff5834c8d7fcf182e1a2ceb 4
34 | f36a03cc474d62b5f3007c7ea33e0b5a-2.6 1aa2ac6a54482116ab43f0d43185eb24 6
35 | d03045a8b81d71de2f05d81050f43b40-3.4 1b7c1bbb4b1aa627248d511602eaab65 3
36 | d175b36beaf2ca66b7e25b9a05d3dca9-2.3.4 2588262c06bedd60aba06cfd2b2452ac 8
37 | 230899921affee3f12387edba09920d0-4.4 7c533d802a9647af85888d526fe51b1d 1
38 | 2ef56eabe062fd860c81a0e44ee15549-1.1 222b058220504cbfe9ad8ab1889b2204 2
39 | ebe418ae757b121b0d636041981fc1d1-4.13 d155807c17ebe7fcedef60b37550904b 2
40 | 9db5265677ab7fc67158baedecb1ed5d-4 2bacbd075c8c3c5fe9f0c1d43631c8a2 8
41 | 5e268f76fcf80f6183f4db6babb7f2e6-3 2af58a934acab7901201933a625052e1 6
42 | cc3471ba62dc677884b143b8ba0c66eb-3 bae0a6721329a692a720996dffcddd61 1
43 | 2a2a1ea79f558bbe82c678dfe622524c-1.3 fe1d4e3855d8dafca32a10f81894eda1 5
44 | eb9557384eb5a12ac4e40e7bcf7d16d4-7 13b78d297876699b28d91b920e569dcb 10
45 | ebe418ae757b121b0d636041981fc1d1-1.1 99658b9b576a214ce1e81da1bb42437a 2
46 | c529f9144e9e576ada84e6f63a8efce4-4 30ea8ba0709e2f177e2d8067eb7e0645 8
47 | 55c40b8c6964e2d03b4c5fa2f597487a-6.19 2db862936583f5eee0b936631f6c7dba 3
48 | c7bc3e0955055325f7442933b4118f5f-1.4 f58c6066c84fd9e8201bb01f1362d344 9
49 | 321aa4bbead977296644b8151df9b2f9-6 e3876adde5b72345451a12762a163727 10
50 | b98d8e4be963ca017ec9ccb1513abff3-5.6 f1f8ee53d679f7fc34253a17d5f115ed 3
51 | 9649610dbce420e3eddd1b58072aa8b2-7.1 f310e644e9d6e438abeddc59fc74bd95 5
52 | 0ee6db21c61448f4cafd1af01bce109c-3.5 29880a22b61063258741d245322fa371 5
53 | 396fd1a668d97e3699cbe3b4c9cd76d1-2.2 4bc66a7eb7efd1a771d955b608c7bc34 8
54 | 84951c943faa2db40140ee004db22a25-1.3 c77cec853cb661bf3d74178746ca1a8d 5
55 | a6d6097fd19c757ced0bf10147e2eadf-3.1 f99fecf5f831318fc474848ad3d3d8f8 2
56 | 361fcdbbab8779dedc0dd32afb70a48c-2.5 686059554284916b7aa3835d27720bc9 8
57 | fc38dc0d3e6f7baec17ea368cc7c900a-1 58cbdcdc0c0b4a6ca2baffc5b031e3e8 9
58 | 1bc99cb2f4153c2d0d8025ee5575b2a0-7 668628229d5e22bf4558189ad6bdb0ca 3
59 | 45009b4a16ef03ba8bb5b15665ef4469-3 e6b8b1b5c6a9a31c2f40c80d2be01c47 3
60 | b6d348d347abf6bf79ee669f12acea70-2.1 f60047c1da07ad392e844f7bee80210c 10
61 | 320900763d954fed0bd83182584a34b5-5 7cf021746cde94f18b3817cec0892c8e 2
62 | c16745ceb8da51bba7131d5d0290f2dd-6 94f66bbfd000d05233e3ce2b6e921945 2
63 | a6111ef1d184fb6f317803c64ccd35cd-8 751a2f2dd3e9e0805989beb474031cfe 3
64 | 160359a56c4d86f1e69e21dec758ece6-4.1 d48cc8ef11d460b8e31fff8d7bc45e7c 10
65 | 09b31fe42f598f8d7cde7252bf01e543-3 7ab12fb4602e60acd2428d7ea6672d56 6
66 | e784121d25e1519ea27ea45d5df0fbde-4.5 a59b366c8db934c6e1207ea933114ae1 10
67 | 12471fa7b776e267a9e2817faf032e24-6 062318218f9d923c3e56bed1e3039087 2
68 | ad37f5c5cef207a7528ce960ac174255-7.10 c642a0ba2b703db47737c1b650466bde 4
69 | aa927d147a4db1c3524f0bbde5125a40-2 9ee2702fb979b9ca49aa9ee62e179035 5
70 | 15624eb02bcb55f2b42a360c31562590-1.1 2e95727f5bda87ad2a06d9ec11584047 8
71 | c16745ceb8da51bba7131d5d0290f2dd-4.1 d6d180947eb91a1f4c7398b3cf9012d0 2
72 | 196d340a2d76263ff1f930c3934a40df-4 2ed7d1bd9b577826061bbb30a1f9e75a 6
73 | a66156f1a171a341488eb0cb694cd06b-5 4621e887dd5099b052d189d3d0a56e97 1
74 | cfc4979249ade1b95b13a8e577f32c3e-3.1 c518c89ba41a26ed57f6c0c3790b7a40 7
75 | 756fe9011ebbc81c00d59fcd846ef2ca-0.3 62ebe15e4c24c0d4a9964baab854d97b 2
76 | 7ec724230401f4a030b6a752ef5749db-2.7 fa643cc80e0c08641d6b6dc26ceb2e2e 9
77 | 471c77c08089f480489ac878217fa870-1.3 d600c2f15f3b61cfbcaf338b7d9994fd 1
78 | a62df44f3769dde1bbe94b7959674ddf-4.3 33c7af9cd8deb4c4bccdb717f2d73595 2
79 | e1f1255ac70522fc2ed8b396b7bb8371-3 f91bd820b16070de68565fe95cc0e253 4
80 | 350b059e0d998f5c160ef579ebabe8ae-7.7 3e3920549e9df7814916e6772b628eae 3
81 | 0811abf5a5bc74534bececf59628f85a-5 2193afe2a3135b96aa92a734f9c8e1ba 2
82 | 1c7f6cc931869cc7c1cfb9c056b471ef-4.9 63f4bd111d275d4e1e547ef5f575c942 4
83 | 6f2b4762201dbf3584513fa72ff84dc2-1.1 1c63ad9652631a006c9706ab19215da6 7
84 | 3811976a7fb0681079c00c96760525b0-5 195452ddcbcfaff42ca6a615b403d75e 7
85 | 41d6793b4ba381f946ac84795946147a-1.5.3 3378a89578ce6f8b1f32c4fb860d1abe 2
86 | 32858813e360308c6fab954ea09c91b8-4.1 bdc245251cfd7a0b8861610b1f2c8b5d 4
87 | 7e912013738ebe632e69811ea23a5438-4.9 c55128f8aa7738df39d8670fcb73fc9a 4
88 | e5206fe002d74e25bb73d4a92edc2e45-3 e331939c04f4fd77aaf8d3a5e99d84a1 10
89 | 94517872ca8756e01371963760040e5e-5.4 58d1a28e37ce5b81bdc7d74966cf6a32 4
90 | 72215be866ba18888af2efcead7c23ec-2 f50d8e5f39abeda5f5025fbe25818f31 4
91 | 937a8bc3a5df97d25c9aa61d2425c173-2.1 3ed9bc0fa79e8f82be653ba8e487d481 5
92 | e53eed464b8de4afa2c1316404780adc-1 71273a050b6337985c323db7e5aa51a8 4
93 | e0db2bab271606aefbf9610588076f62-1 290f805c13f40105ce260eeab450aab3 6
94 | e0656490bbbcf643a78f7ad882a90ee2-2.1.3 5e40dc8f4b6f80ecd898a0eb74518655 10
95 | a25b2dff7d13c650e6c7e6bfb3bba5a3-7 575516bf4bea4c6b752766f30a8a6470 3
96 | 3a1acd1fe8b9c3dad8fa395a7ed61d08-1.8 9f09274d5e08102dd1dd9ec846a53bb8 6
97 | 59313e4e34dd1e529f945179fc0adb84-5 a34a598c0d1e7fa299ef338549eba242 10
98 | 654855e02c249c77120d0e72159fed90-7 d1d519370313b0721a0027f2b692b5b0 9
99 | 1b122bc3d03b8dbe7b57a3b59c8f38cd-1 719624c2372757020e9cae15df17c821 4
100 | d12c994d7395d84aecdd859f28e17b4b-3 f5a8c773ecf5afa4e0185247e1fb7f03 10
101 | c868914ed31a10c967ca501fb61fad44-1.1 4633441c4af88b70c8be2704b28d90b2 7
102 | f5fdc33803f448197e795e5cd6de7eab-2.7 74a346fe7b352930841cb60f2cd130a7 3
103 | 44b439ba9975f8e802b456777c54a854-10 1b3ea383ee52a0f1e166195743096b77 3
104 | 82175cb2cc39a1bd4d1b8e20c79c4155-2.6.5 07eb4540c2aa683fdad3b3c43aee979d 5
105 | a9d9fe90d37d82ea843321c9440b67aa-4.5 0b4ec47cd3396768ce003fb901322b63 9
106 | d0a1b77240d99319d899b6e8f73bcf2f-2 aeaca49e6af13a560ca126548f8b8e9a 2
107 | 756fe9011ebbc81c00d59fcd846ef2ca-1.8 a238f4ee42c379e97d744331160e133d 2
108 | b02710d7581ae2770e07ec4949f1d256-2 b7fe4c11e31be75e6decd84efe26dbd3 10
109 | 3e8dbc9c7700b34acdf3a0a80c48ae10-7.9 71e26255ac7aa0f2480574f2be549da0 4
110 | 74625027de86d75fb7854b96140e90d8-10 57793a2ee2518a7e5618b366db0b1a49 10
111 | bff2b440c55cdbd089ee7077567e0603-2.1 8dc3df69377e5c09821a53dd5d0b5ea8 4
112 | 4a25a85c4830bcdb6731ee6bf3b1ceb4-1.1 da96147d69c261b2bcd7d0b33eb27477 4
113 | 73ac4cac6961fa8744ca2937a97b810b-2 f49b0e3b72ae02ad1214a554c56ef6a4 9
114 | 71f351984f1c1ed12d4db4c38e6d15f0-1.7.19 a0083dbbc3fbf4c37ddb72bb4e7f3a9f 4
115 | eef8328ece5ff33ca6cf0c6ee0c91c92-4.4 9d19f3fd02a3e0e664b6201806fc9be1 3
116 | ae407441fc460b133232a81b98fc1df4-3.6.2 f41f51a6a6966af038121013ef81aa71 3
117 | ce370f7c7e2457bf172d891f9407ee2d-3.3 2a5c0c859b13e0a9535a6bb8a4de6f63 8
118 | 0c8667ae0f48f6e6984ca47b005890de-2.9 4810c0031bb7388be4cc28b358681d76 10
119 | f8bdeb99131b2a79ab10023c83f80a7f-3.5 08f403a63a4e19adb1fe574ae5e8f677 10
120 | 1c174cc1abdbf7e474cce05a59465c2d-3.9 8c4b6d3071157e57fde51226097fb913 6
121 | 848a7fe4308208dcc81794c133f2c0cc-4.7 f1a8d308c5c21403f0fd3962f1feb0f1 10
122 | a6366153ccc386884404a48c882a5ad4-4 227d0ff584907541bf26b88a264edad2 3
123 | eccf75893dd661f16d44b15c8f65a0a3-3.4 d31979dc62b912934ed4cca009280d9e 1
124 | 507f3aa1e9bd0536fda39bb8e2e15118-5.8 dd14c81cf8ff13e5fa6f9648d996a684 10
125 | b37a101c0006eaca4d4a18520623e47e-4 a33564d14bdb6ce0382031a545b371ba 3
126 | 5188f17b561517f7d720197b074d4617-2.1.5 79478ccaf656bead7d7b162f6f027f0d 9
127 | 8ddce5ac766087aa7b06de57e25956bd-4.11 7d89bf2c8e21cfcfb926169157933319 2
128 | 3de4f35d8a989025d7611f8e57f2ea4a-3.4 9363e5aaa5d28105e543fd760bbf93b4 4
129 | 97396873b31037b0370923527843dd80-3.2.8 6a670d6d0280fd45e4af75e6d8602bbb 10
130 | 1372c6e051d95a90adeb2d31ff003554-3.4 251cbed562e4c0a6af2c35bb52a27dad 3
131 | 325dc920b2a0491e91aba4c256504c4f-3.5 017803ba7a477526d2db11c71998f127 9
132 | 16a968cdb906137a952161789b5c1ea3-1.6 3edfc9c502a4cbec61acad7a41916d4d 7
133 | de8dbff8cde950770af850c8e547de69-3 3f7077ce964db556178d7806b932d17a 9
134 | 70780355e478c7075444edba707c92b1-5 0a2840192e91f18a0fcbe1c5deeda57b 2
135 | a79133abe3132caf9491427873dc9560-4 f945e3aff1b2d137bc173ac68ee7da14 4
136 | 4d9405595edfacc475db88d5cbf58fb3-3.6 48258fda617d31bd49071f151852a083 1
137 | b2f45b3643284cccc13534a099ec0866-6.4 08382ea41628a19156fe89b600fff27a 5
138 | 3ff4a5fb4f787be0a3de2cc6ce99a42f-1 dc779c3ced297d8e527ad8e3cc0ea351 7
139 | f8ec59b71981dc5999c300e9395444bf-2.4 9f54319933988395ab6e241d70c77f09 5
140 | e2d6b5ad8b42586267b4c468fca715bd-5 3113a9e79edbb461bab392339ab3bbcd 4
141 | f98a8f5d28b9c13d11a48e424b7af28c-2 fcf79955418d284102394d8dfeafc7b6 10
142 | 45af5b1cface02ee25496df285e94ad5-1.2 d36cb073af9ea568181eb86199fdad1d 1
143 | 0ab876cd182527b839573064c70a8223-8 7b8c76ff55c662bbf638579d9f5938bb 10
144 | 756ea971c47ff2c6f7409444fcec1c09-3.4 7a1853e62fccaa0327432a3f42f624d9 2
145 | 56754c98f49c04325f506991a7bab3d6-2.5 edff5b5117f8682d52489b9584e0ba4d 5
146 | 1136c2ab5563b433db89dd8bc01418d1-5.11 6c9b6d6d566babc97a73f738d525bc3d 9
147 | 380587d895b823cd34d5396441aafe94-2 2063fc8f1f40049918d7ca072157ae83 10
148 | 5f0015ad7dbf64194a6e97e30ab83720-4 5c17689575fbf12c536cd1b3c4b5f724 8
149 | b846373e48cf002363110ff72973f24b-4 45b4cb2abb068096be25b6b54a90091c 10
150 | a2952754ac4942cf7ea5e22ac9fbe80c-2.7 9972db8ade003ed753e632e697d8086e 6
151 | 8a3d167afbb4e753445afa4e2b4cc7b8-2.5 7ace66aa7393055ac89e9575ee3d9e91 10
152 | 1a24a2fbbfbfc8a5cda39493a4405ac4-4.4 a9f0e519829be051e0092e0565b9114b 9
153 | a0bd4962d01f0c5a6338363a868b4eca-8.15 6d2c1c72c4fa12cf51222cc3ae5bc910 2
154 | bae0e6b899bcecb46fc76e673c1ca877-2.3 aeca48d62209b656d490a392423406a8 10
155 | 4f46dd0bedbc68c91be78f75a8be118c-5 d11e80b90c82624ee4e98017211a3289 8
156 | f6edb499d71fa9ef08d26469a49e4681-4.4 73b421ce565560769f73e31df774d430 2
157 | a14ea9e131acd1625d09958d147a0baf-2 4389aa6a6b47558a7c6b61b2ea514c6e 4
158 | 53b28fa81ed88da3ee7b47b7979bb9fb-7.4 61933019fbe41d75eb96b1b31a87bfce 6
159 | 254505d8f78808ba3bd75c96f7f717ac-6.11 382aa4feb91a84d20cb61446455af656 2
160 | 04f5e44fc8292749d066537fa2438b24-4.4 a27f183bc166cfabf1c88662e9f98a52 3
161 | 848a7fe4308208dcc81794c133f2c0cc-2.1 a5f23b7cf50c457506f94eda86d83e0c 10
162 | c0008d92a65249fa11a7bf1e8e758b85-2.4.1 79b1a30ccb7fef76fb3c9d143a8b5410 1
163 | 6d03078e24dc3247b2c0c64fe7a139f4-6 e67a44c677d796ee9b38190f64e3ef40 3
164 | 2e4489966c79adaa91cee52f59a7c50b-3.5 b349136aabb9c0238da2bf8437623eac 10
165 | 9ca6f5bdbe9e94435985bfd81d63e8b3-7 7c19e7d47f3960cb3770fb103d7f2b00 8
166 | e68d2e2ad132f935494408bbaea13273-3 404a67717de1036be5cf1680d528730f 7
167 | c4c11b3283ba22e4508789cd656d7b0d-6 e70f34aa92e41468d1f75729eae0a84c 4
168 | df8f6df05c6d12dc8e050c570f14eea8-1.4 35f13083023215602297e11016b5a1fc 7
169 | b8b909a7d6d91b8e1db6aa48220dc191-2 1f1821cae618e3a07d408d6db3d1646c 4
170 | aab8ba8d49dcab281f97186deaa805a4-1.6 dc2f9fa97992f99e03099eb5344432ae 7
171 | f82c7682b284ffa37fdcd1bc429a35bb-2.1 49f6103ea1f6e18753512c4a6b135946 9
172 | 0b70416f7bbf38c21234300ba2ff90ae-1.1 70755036fb392cf6da69f8072c2aa6d1 1
173 | 5f10ff39392fea90cd7a810a10c6e818-2.2 730c8cdcb6deac9ebe82e71bd222d39d 9
174 | 6e1b71c30173b94e73b522e42db5ee82-4.3 0d242835bf638881f011e6280fd50b8c 4
175 | 4c54c163f43d0ac8512df032b3b07bff-2.8 962aaedae6beff12f0e2de612713276f 3
176 | cdf5e094a57f4c677abf8ec5eb1dec4e-3 6948019ef00d5f16e48271d1ea300888 10
177 | 1f8c305889264e046100b0f069d5993a-1.1.2 d7b006186c2f28bc41c30eb8a4163a27 6
178 | 733300b3a45446bcac5ccee1836c2b26-3.6 7106ef0c57acfcd2f16922948ce5e3d3 9
179 | f9983935d2abf59bc8bb63203f07f25f-1.3.6 2eba5a598fcbc5cdd75b0a52d9da8613 10
180 | 1069c82d37b07064ccce1142c2c908d3-2.5 fec5960f04cdd46b8421924b3fcb20c4 3
181 | f1d8deb2f01716e709aba61388c2e7b7-5.4 11984bacc7f55bbbfdef5f6724376d36 2
182 | 5c5cc66c9e0cff64ac88b31c457b9809-6 45fc109d28d8c0473c5a6653e8a145de 3
183 | 75c3c4d04cdf63f677758a7637e65be1-8 47242cd5e9eb0046044f9b129ea27c3c 3
184 | f2c2350d0f017ab0074b4e50633af682-11 8731ddc7f3f8aa054f02163e97060d53 3
185 | 70dacdd695fa0f06c096655ac1a5ed35-7 e904b7d9f8a0f8f583a42d2bb45fee39 7
186 | c407db96a89986aa5f4a52a3e2f00686-5 701c5089e083a598667159f1133e152d 6
187 | 31ed719daa37b297341a98fdf3bf31ae-3.10 bfd42a5b6315b2462e9f036bbbb595e4 10
188 | 4b055af79e05167b41460443de65e574-3.4 14b6039c334a582e171f9f02a9db50c0 9
189 | 3ab1ef3067dc08e67d0dcd4273754f0f-8 ef3727bb78de14e2b242f6c31e75d938 6
190 | ebc99fb8d80a280b534450affa7086c6-4 15e9c503cae45311094c5ea03d06c11f 3
191 | 9c76a07ab62cceeb85527b63f8b09800-3.5 0f6d3e1bf806cd202ad5a65e13c747a7 4
192 | 8eb55bad0c2703126e6d4575db9f8f81-5.5 5101c4c141f74206d279dd093c7e70bd 3
193 | a8d02ad8c306be96bf8e882a813784ef-8 d4d0eed17b51dc385da619c70d1b8e80 10
194 | 970eed35cee1a20ccae4e529b5683276-1 824352908817b7343fd53c01fcdd5e1a 5
195 | 70dacdd695fa0f06c096655ac1a5ed35-3.10 b588187173c459328ccecbb660c89e25 7
196 | a09df49ba116b24b71300c107b642814-1.5 f75b0b113acbd5ae677431d09f31c42d 7
197 | 159de799c0a47c698b9416a93499e833-2.2 2e9fbd7a704050f21be65e1f77879f80 2
198 | 74625027de86d75fb7854b96140e90d8-7 88763abff633e64e2adbff94dd7367a8 10
199 | 985118344ef3adb3e3806ce6694da609-6 4629721531052c5ef6a5254670c6d60c 8
200 | c16745ceb8da51bba7131d5d0290f2dd-3 76a2a7285a33a95dc9deb3dd34da36fc 2
201 | fe895e20f843e10790adcf56e7138235-2.7 5fd45146fd163825bd1b54f5d7772f98 1
202 | e1b6d6708e1c202c3bb139fd88cf51e8-3 18003c7763d744a0db2fcaea80f0244e 5
203 | ed91d05e7bd45934c7ffeb8a2aa21065-4.2 52c85c28c55f09695fccadce6fb9c281 2
204 | 26014846a9bf8d6f8d62e0c74cb3a70b-1 7f3b6a8918e5db34f00910df197b8644 10
205 | 8be4d536659a057935da0a90e28e7dfc-1 39907eba37c7fdba9d8a94dd8792f52f 2
206 | d15d7ea67999319eb893420dbae4226e-2.6 82e0f54ab166927a83e166ebca489b72 8
207 | c868914ed31a10c967ca501fb61fad44-2.6 6af3b40c4fcfb279b166f5d1c513d418 7
208 | c4f1d24899f46e19ee21819f3e56b7c3-5.4 c5f33010f84f5533b1c7bf7970c800fc 3
209 | 180adc65eb45e63b890a6998d9b5af31-4 a1d03a2c884d0686db5c4c965c18e639 9
210 | 2693d447fb83aae6033fb4dbc155b01f-4 0b0a1a85c3b9ab6028c14d20e1101dc0 9
211 | 25496eac695f6264950d9d50df1ea270-4 05b4c017a59d4587719949e0c3a85a06 3
212 | f93e9c8b752ce4805336dc1bd5e4a52d-1 98dc58e46b882a0d1681925462ea4431 3
213 | db8eacdbea2bd08ff85e82015abf7465-2.2 a549772622a2d716deb04d1345308467 10
214 | e4c792a4b6681bdee4e4f940376294d0-6 af209f6bbd61d7406e2797019f73e29f 4
215 | 92416f269ed8cfdc68a4ba246c06b939-6.1 cb6ed68348bf8bfcc3337cd0d3a97983 3
216 | 6188d59e537ed86d9e2b0199c6abbd72-5.5 0b00f7b80fcb355cdd50cd721148adb8 2
217 | 784da86c783e62cd0d66306ceae713e0-4 282a7ce52861239657c9e4817e412db3 6
218 | c412e1ec8d2bc397f4fa0db5579a0368-6.3 b5df1c9c1a634ab96bdb5315ecadd9de 3
219 | 7068d3a40f59460ca6ecbe36750f169b-3 fe0e6584d1b09a5f48d53290332318d3 10
220 | ed251ee523d42bae9c4d3c887e75fc7a-4 354b0d001d1d8049cc8a5eace438d79d 9
221 | ff106428f695e8509f1e2a6f047a9516-2.10 1de9b70f928a295d2e510d0319dfd58f 10
222 | 733300b3a45446bcac5ccee1836c2b26-2.3 ece4ea20fc0c6ff70ecb559808676cbd 9
223 | ea8fa10412c5e778e8117bd577cd5544-3 9d95ebecf1ef279fa4d8f9bc1cec7f0f 2
224 | c4f1d24899f46e19ee21819f3e56b7c3-7 cef417d677c89b867897c36484c27baf 3
225 | 80b9b49b9ce9711acc51f6515426c9ed-2.6 a6e43d99bc60a6a780417ed36d8b5bc9 5
226 | 9098410a05683f8373815299ffd2889e-3.6 7e1210171d83cb0d283c1e8de1dd12e6 6
227 | 70dacdd695fa0f06c096655ac1a5ed35-4.13 c001d78f05039309b5a55b88f504557c 7
228 | d0a1b77240d99319d899b6e8f73bcf2f-4.1 e6210363d774e9f779b496371f8d5a39 2
229 | ce370f7c7e2457bf172d891f9407ee2d-2.2 fce73911f0afb7ee68f5d287bf5476c9 8
230 | 6a4eb1890a3ac1dc71cf1d48a0e7ac8a-1 640188b2d17b9d7ff775781013833e16 1
231 | 200b0491fda29647e3eeb0a0c0c0deec-2.8 2f900119ee94394dab357af7291f345d 4
232 | 1bc99cb2f4153c2d0d8025ee5575b2a0-10 f69a68194072520f1957b512b33763a3 3
233 | 4cb8d1c251d3436a4abbf1df9d300bde-3.5 485d3b29733cd999ae67dea0fa04068f 9
234 | 70dacdd695fa0f06c096655ac1a5ed35-5.18 61837b93a0070cc576e6bea7d389d591 7
235 | 4ba89b2fc68d6ab4f37feb53eae919d2-2 c44f3e2efa068f6df33dfdabf9c69aa5 4
236 | b611d063437ed2de37351e8ec76ee3e7-2 9f17e7fda2d9f3421580e53e741c04cc 8
237 | 55c40b8c6964e2d03b4c5fa2f597487a-3.8 7668c661cadb43afe604273e20b13c71 3
238 | 187859a255197631a4d518fd62315e4d-2.7 245fa7f20b6a9bd107c9f3137bc98dcc 8
239 | 3e806e6fc909d3dd16314489cd1cf542-2 b863388417112efc688ef6df688169fc 5
240 | 4fabd0ceb387d5ac14d3a1eefb2e7281-2.5 fababc22283ccd08d1a631e76dd01e9a 9
241 | f8b863bd0862467214aab9dfd55543ac-7.8 6fb5f5c33ec8e7b5a158b6bc786f5d13 6
242 | 640cc17f18dde329a9c8e037982aa344-2 47f1be527023541daaa0b270af9f5177 6
243 | b50559ebd1e407ddd42671bef4012e9a-2.2 f91c643c1ee39a421291ba05015a0c37 9
244 | f2c2350d0f017ab0074b4e50633af682-1 fea64d71f4fee5b4e7e7f6a1b910d45c 3
245 | d4ca4f87da296f410bc1405b724aa842-2.4 1d63db8cd5b96d0775a78361580ef9cb 4
246 | 681830061f34470d2a957dccaf39d154-2.6 3c224c9a8ad08c84f336ab93b4226645 7
247 | 3e5bcd062df2a216eefb2011a4c29d3a-6.7 27b9bca52c5f9078bfa8bd247c5f930b 5
248 | 0c0e6db1923bb73b6400a3d7ed6ace9a-1 1bc271c4fe060e8980258d7dc86085d3 9
249 | 54474bf87b0281e5b897518789fba114-8 84db20608ccdd190d1d54647570cf15a 8
250 | 23f1c01b206b2e887b9935b64df18240-4.2 23f1c01b206b2e887b9935b64df18240 8
251 | 56f5eaf7cc5b148b0dca3372588f0a98-4.7 28f80e439fa4881db542d2871a6d166f 2
252 | 09b31fe42f598f8d7cde7252bf01e543-1 835b97d34de702727d744e09044cbdda 6
253 | c6c0c0ebfe08377e850437fb2adf1351-5 caf2ac1b3ea76a7fe0c8dcc222a831f7 9
254 | 4c1c270666933dbd47ce3ac089c0d63a-4.3 aaeba354857312d8c6e417cf0f77ab24 2
255 | 784da86c783e62cd0d66306ceae713e0-3 5fc251627d830a911ef79ad3ae91d619 6
256 | ae18e1b5d3d98c3d561bcdd43dce6537-6.7 5c39eb62c22441fa5e3ca88e893088a5 5
257 | 2c8c4a9769e370c19143ea9e0d58a32d-1.1 4910519bb1f16cf9f11ea241afdd893d 10
258 | bde65360c546ebf6f662041f1584b3ab-6 0f5d3360702b4f57adbdf29d369e46d3 5
259 | d48813041376add08c9e7251d3b1547b-3 2ea97c8d07f6834e0d5eaa197d4ca231 4
260 | e0e615fb5f0cc926ce63debe16c82b30-2 4acfaa2a4698b3b267dcee0e8d9f781b 4
261 | d8b00929dec65d422303256336ada04f-1.3 41bae20e57727db4cd32a93b34fd3fba 3
262 | e8f64b142074f8d56456503a41caded3-2 4bef63e4deeab00e104771854b20770e 9
263 | b8808a8352429137916a3cf21c4f75a3-3.4 5aeff2af32e9bc25c82e385c193eb4ec 2
264 | 82175cb2cc39a1bd4d1b8e20c79c4155-1.4 ab5c6ccebc1280e354d8550082643dac 5
265 | 969730fc679ebe5aa50e263392331c94-5 8f00ef9228e4b6a28d1542992cc72656 8
266 | 0bea422cf6b12f623ca25749efc42c4e-3.2 768de4d7433b4067d923004a34fc8465 5
267 | c0008d92a65249fa11a7bf1e8e758b85-2.4.13 c9e4c39f818ec09d4d6aef0d18a61ec0 1
268 | fab4b82dcf65dbf68da99e994eecd5ce-3 234809111a0b0bc1cd40a588af8b9db9 3
269 | 5927bc35ce53b83946c55ee95b63da1f-5.11 d9ecc45683fc420be6c2dd5c76b1851b 8
270 | 681d9191dcea58f4c270203455cfe13a-6 e979c12bbeb963fef60c63b33fac1ace 9
271 | 800ffc07db0a675583c2f5cd13cef791-2 bd8221acba1d63f78084631d4b1eb876 8
272 | 3da1f195057fd715ec745a4010b519bd-5 ea4ff9b269858b0163b0d643581877a8 10
273 | 8a3d167afbb4e753445afa4e2b4cc7b8-2.6 7d4ecfb94d6e17272a72dd1ebe70d068 10
274 | 1b23df173de2bde33e6c445928329539-3 f22927f86060e28b3169df0dec650364 1
275 | 54474bf87b0281e5b897518789fba114-7 2f32dc0ab745701bf4fc9d6b486c22dc 8
276 | 4e1fb86d435ce0ddc629de9c36c00080-3.2 9dcfd30886d86b417bc10d9939f5c754 6
277 | e5f12b060fd818bdd45ab879af230690-1 2087269c5925ccc1355a5d2f32e7a229 4
278 | 79411a31129c20e7ee9eb6aa838cb664-1 c022dd5e2e302d0567f7e839e32b94d7 2
279 | e469235aac106888402bc8dc042e915e-5 4be3c49a3982045b6dc5d37954207efa 10
280 | 681b873f7f4353f8bcb0feb7d51111bc-3.6 ea33fe01c2e68d3c0a57f1845109b370 7
281 | 35f60409d861728f423fd78eea34dff4-6 b132c14508127d002ae71c5650356a47 8
282 | 22a491f82ae4ba10c4423a875ea2fd9a-4.5 8aac0a9a4f2b6af9fc620056f078006f 2
283 | 445d337b5cd5de476f99333df6b0c2a7-7 e0d0cb42e9ea7803c2f2dc3dcc7bfc97 3
284 | f615455cda36f8afe043d888634bb999-2 1f440edca652b83c525e89487845192e 7
285 | 3e5bcd062df2a216eefb2011a4c29d3a-5.3 d53ed22cd42bfca599d15c91ae12d91c 5
286 | 3f5c1f8ed20759bd1506e8b54e7d38e0-3.2 0f1088a4a678bd09eaf703fb698e4789 3
287 | ff106428f695e8509f1e2a6f047a9516-1.2 83cdb03238a658503ec538ae40110412 10
288 | f0e703d20bd18ec26ceb07a9ab1d986c-3 64d39d8f031312f2d5fa90ec9cca1f6d 4
289 | 30fb0e1bf86eb14bfe4ca00dd29c61d3-2 2d9520731d479c556fb915b1c9cd4c82 10
290 | 8ea76227a9cfa9cd95d9a57544ca4886-1 5b5d258c47643173b1e6e0ba06e24184 4
291 | 048a135264054d9c6829f8e4157397aa-10 c6146bfbb015b65ea2e69cc879e79ea2 6
292 | c1d96f851b917db3a34598927f632aac-3.13 006935571c4f0f8f1babc25d54fe4e21 9
293 | 6e432d09e024efd42e1dbe14b5cd795a-2.3 5e29c01c5a9a9048d30eb698b0035bd5 1
294 | 63db44496d352f4b59a88542e7fd2498-1.2 ed665a3eecd1542f2d798d9124325b94 5
295 | b698ff8744eba17a13e6e2b022fe38e2-1.1 ed533c3d8778c8c02b94ea9a2d882555 2
296 | 927ccb87605fdbc73a6898f423cbc96e-4 c5442585634cc81c2d9da63a164a5033 10
297 | b9b841cff6c543df5bed24222b316230-6 6fb3455f0582f7c1b2ce98fb442b4fa1 4
298 | 8bd84a5663be440d632d3ba1ff917cf1-2.4.1 58fee759b7366c2c051d4718902784f5 9
299 | c9c5ff5fde678f2fdd78b0da24ea3494-4.8 45209b81a27d952ab4b55605e07c057b 2
300 | 68fd6d945eeed458a81d50fb76119953-2.4 02d1194085b29a6b28c05054fc8c94ad 9
301 | 6a448be8ed7a0156939c90a4a1f7d559-3 c14ab07c75cf1719387a9d9483ab1d8d 4
302 | 058a209eb073b5ee7c24a5c99d3efb9d-4 6b462e56e64f8dc1bbe51f56de379c30 10
303 | c75d192902a965859f7f5dfba4e5396b-3.3 5ccacc6b69a7719b0b0076a2209e6ecf 5
304 | 74625027de86d75fb7854b96140e90d8-21.1 1fcbb5a68eb924c3401f6fd29945eb01 10
305 | e0656490bbbcf643a78f7ad882a90ee2-1 fe4d46d2c0a7b86f801be1cd2a0bb21f 10
306 | 71f351984f1c1ed12d4db4c38e6d15f0-1.5.13 93cc721864d399352f1e5f026c62a672 4
307 | 8918c5402b82d897801a43959bdbb02e-4 eb37fc48db534c0a917bef6e74a2b9ac 1
308 | 56754c98f49c04325f506991a7bab3d6-2.2 813aae6e5385ea4c58080259683dfb1a 5
309 | 9b80a63df52f88f7cc11b9b2649d3df6-1.2 c959dd9dbb2297e4dc20636abdafc879 9
310 | 75c3c4d04cdf63f677758a7637e65be1-7.8 36cb4576de48c4b8ce863046829b1c38 3
311 | 2871d9c0be319d4b8ff139a1fed8a12c-3 e0415ed8154443ed6bcfbbc3e26a6593 3
312 | 090cd4ad469e9af440f17e6e2a6a81e4-4 065eb5159f4f1e90c540e52d4e248b80 6
313 | 53120de70f8f70f5d6292dba67041d6b-1 c2b85c7c52e444858f0a4adac9d7c6cd 1
314 | df299223e933ac915334f550fbb40a8f-4 1993298a33db80acea95a81805d80c11 10
315 | 539c9363403bf9bb07256c67ae8f985f-8 244266277f9b703a6cee4b9673352e87 5
316 | 92fdd2ecb9e00982a766e42085dbe674-8 4cef52d4380ffabb3e4dfb94f4f746af 4
317 | 9b80a63df52f88f7cc11b9b2649d3df6-5 fdecd0cc221e7e70d04701c702183783 9
318 | 68fd6d945eeed458a81d50fb76119953-2.3 f9ac24371e9da45b031132b7048b70f1 9
319 | c0008d92a65249fa11a7bf1e8e758b85-2.9.30 aefe252926a0ef2962728ad71000ec94 1
320 | 0960de8471bcc9200d083c02e054a2c2-5 fefdd705656ced2467a7dd71f3036bbf 3
321 | f3128071d6d92c6b28a5d4cb4f8b4630-1.2 71782af66da0ee94c4762c79954e4969 3
322 | 2748a5cd290c2c660f84875646bc6e05-4 c65258d39b37ae531e820d410d8b9c61 2
323 | f09ad97b6a34bde31eed38267a3266d2-8 641c8b80eba8726aba19b04743125a45 3
324 | 0789d77e27962a5ce5f7f5017816aa63-2 7416c6254186311feda9f0b6378ee5ea 5
325 | 8580588096a57b352e094fbaf9152304-2.5 1d8864fb978ebb4caf2c008a59bc4e39 1
326 | de8c3e262753aae5376a535f08618cb6-4 5ac9019e7bd92f0755044125896a7f5b 7
327 | 9b6a2aa6838121f6b90e41886e91e976-4 ae88596600d9be519ff6bfc6db946c91 6
328 | 1f8c305889264e046100b0f069d5993a-4.7 a95b1a1c334039022b4c57d75d87d01b 6
329 | f4507409bcd05764e54e0a018453c7ec-2.2 c83ea8ae9168a63e13cd6a0d0a5e8655 6
330 | f3128071d6d92c6b28a5d4cb4f8b4630-5.9 f687193c40f3d5adaf416130e71b86f8 3
331 | c8b287075ce4f11c834d2a0ada967ddc-5 1bb66033e24e154aa26a493d3d252e4d 3
332 | fa774a3d87df390c60374735a831bd6f-5.4 3c38a87af792a8983ad306e20bc9c958 7
333 | 1f8c305889264e046100b0f069d5993a-1.3 e970f8411c2bad0ac16d0ddb1f33db28 6
334 | 98fbb43a04146f28f65e9a6b0f5a144f-2.1 de09cb9c6aee829e8ce67948a2392028 9
335 | 90edde4ecc403ea91eaa3443872111dc-10 5723e3d67c37fa749ba098775731b26b 3
336 | 86a8e2d1fc7d097075cca2b875e396c0-3.7 a0e1387d65a6581a8fabaf7c1def95e4 5
337 | de6ae888618d868f1a7e2c181a93f6e0-1.2 480f3fc92cdc9b7ad39f64acca4cb6d9 7
338 | 46557d9c44e732a65c24f05183e57ec9-5.4 9180dffd397a46890a095efa15851e5c 7
339 | b22ba8256eab5a211a56f3a6e8553469-1 4941cd0e307ac421e0664213789c690d 5
340 | 57acfa52b2616f13857f6e317ef8942f-1.3 f30625492cac58d61cc019465240405a 4
341 | a592286c072793b47ca673a015a2f520-1.4 ff1100b3890bfd70dae375c1d7558e59 9
342 | a9f3e1fea9bbfc91e7c43a24338ec8b8-2.5 ca803456ae8abbf7c38460a59a84e0c9 9
343 | 461a1a38011260404c8514de09399538-1 655fa44dfcb558f28621a0b458e5da39 10
344 | 4c1c270666933dbd47ce3ac089c0d63a-2.1 5677562fb642dc640e622d8215fe95ca 2
345 | 6162ee68a7a60104b110db6e3735ca19-3 f5abca882cb530f1e7349e4be7d9c559 7
346 | cf8f8c3c30d887816b55aec2d4be0a8b-5 98c55ad56b1af7d9ce208b5a631995fd 8
347 | 5f10ff39392fea90cd7a810a10c6e818-3 48ad5885acc353d8c5a35ffc279f6642 9
348 | ef0890b14dc2959ba0fb0ba3c136c7e4-1.1 bc7bb80fd00ea6dc5671b76ed0f1087d 2
349 | 63db44496d352f4b59a88542e7fd2498-1.1 3f80b9052bedfd562af59f5a6d55c50a 5
350 | ec03aeaa0ab42b260796428bc24ddb38-6.7 d7cb4940f717128a935cc19ab3b396e4 7
351 | eca8b646e80c5f6691f282fc428b7282-3 705652c3fa4fe08e388a0e2178ec9c78 7
352 | 7169640034220fa16e0584af65890169-4.2 07cacb3746fb8574f5d351f52d05798f 2
353 | 1372c6e051d95a90adeb2d31ff003554-3.5 c6fce0058d104a705c6b9059c3a2c5bd 3
354 | d4ca4f87da296f410bc1405b724aa842-7 854e30088b0ebe076af27b09e91ba9b3 4
355 | b2dc02e795f8d5054cf6ea61c8f3af4d-1.4 b6cdfcb398d106f7c0209c5ccbb7091d 6
356 | 75d3c3b4ddd19d9091e606fae5be444b-5 082970aa5254819fc35f80ed6f0bf461 8
357 | 3f5c1f8ed20759bd1506e8b54e7d38e0-3.3 38fa8c25a49cbd2c7e54b5103d846dfe 3
358 | 88bcdc4bda16b0c3779f2879b821d45f-5 f58d6a59d089d4c98801afb4a08cf937 10
359 | 344261ef9c7d9c910c350b8473bffbce-5.2 9e2ba30e9a13de4251d888276b2acd0d 2
360 | 1bfbdda26895557e3b76ef417d88fcb4-3.5 409ab872b9284d5ac21eeb4b71ca1b58 6
361 | f82c7682b284ffa37fdcd1bc429a35bb-4 8c067637186a0b93597ce579f4af65a3 9
362 | a0bd4962d01f0c5a6338363a868b4eca-6 804624733b280af49010f5ba2f820f22 2
363 | 98fbb43a04146f28f65e9a6b0f5a144f-2.2 259c8e02c658bed58dd9aa6c0b6a0b76 9
364 | 3e8dbc9c7700b34acdf3a0a80c48ae10-4.4 a0b8e4f8640ece3dcdae48ca011ea70e 4
365 | 5f0015ad7dbf64194a6e97e30ab83720-2 8c2d108fd23090878fa383b69410bcbc 8
366 | 6e63d85b7141b63b0411cd3ef1790b42-3.9 22c1e46c5b52fc2da50e66d42ded73e5 10
367 | 2d62e212e80cd97d781296c2d1fa7227-5.2 900de75a92e14d723288aa8c39435886 2
368 | c8b287075ce4f11c834d2a0ada967ddc-2.5 dea706af1b687fd95b5c2e872c6b9e48 3
369 | cbcef8bacac0863a097520edcc0dd9ed-2.2.4 4b37abe8403f7fd778d27e398273c083 4
370 | 55c40b8c6964e2d03b4c5fa2f597487a-3.10 f6ae04c3bc52a73e7f814a2ef63b73a5 3
371 | 9622129f4a5397c15998d296c4f9c269-2.4 10ccd78f6c0491f04f13d362e1a8393a 9
372 | e3f8085aa0f488853acaa00f7e5e8d17-7 fdc36434438863a59274f73635010842 2
373 | 5909e31b3075748c4c9710b18c198739-5 62e7ae4a36f52b550ab781b42391229c 2
374 | 2cb9f65d01c4221b75953e5be0ed2ff8-4 e3221d71f2ecdb1881d5274c75fcc441 2
375 | 2de1be2cdc7d6a0a469fc59a5d1a5588-6 f0d66315d46b4952c1c9f7bcd20d72b3 2
376 | d8b00929dec65d422303256336ada04f-8.15 d8691c171159e2cf2062f656a2a6b91e 3
377 | 8ef95695c1880fd9838f8ff05c01a68a-3.6 691a07c08c6ddff38ba2a917bdfaa14a 9
378 | 3a8aad3f81b9e8585d9a9b6cb074931f-6 ca1d28c8930897d208430344e1e567ec 5
379 | 2c7c7cfc6fda13471aba25999f4d9088-1.4 92a3899ce6f18b9b5e29e7e48d122205 2
380 | 374bb4d8045a454b7548419999879246-2 1957fd6b470f03e5ad61b132f4b35eab 2
381 | 21898ba7134f979e9eb31f17bee2a008-2.1 a16fd1e1f457f0ee293f4d24b249d80e 10
382 | 71f351984f1c1ed12d4db4c38e6d15f0-2.9.21 d810e816f221e608f819119dfa3018df 4
383 | eb4b24f717686b20ebb6ae0b58df0623-1 9696f4309fb479e2ab85173ea4f0e580 10
384 | e59eccfd308432a9168efb44d0f7a982-6 d3a330026d57d124f2c5e9741fd4e889 9
385 | e6d1160b2fec0c60dcf98fd57466c7c2-2 b305d93f7dbf48b74cadd5d1d207e65b 4
386 | 96a6d281f40487c4fda06ba271f24fb8-3 12082fb28c69e17398ca89cb003d9472 2
387 | 394bc2b3f118be28044d922c83b5acc5-2 394bc2b3f118be28044d922c83b5acc5 7
388 | f8ec59b71981dc5999c300e9395444bf-2.3.3 43c715c99374ffe00fa59da2857e0cca 5
389 | c9334d55b2014eff7ba9e0d1922d75ba-1 b549e5e7c550da30ca5d43b106af92a0 7
390 | 8dd7bd02936c100916c9b4cd4be3cbf0-5 b2a08bbfa15017f6f50d75e181df63cf 2
391 | c8b287075ce4f11c834d2a0ada967ddc-8 e1a9147be2c7c7d369e7fdf82e07a516 3
392 | 4b81674785998f48856b4680001df379-3.4 10e01f1ef744427799f89787716020f1 1
393 | 467b9c0e460a05beed2a77aae07d8209-2 bbde15d738083571737e9a51a2dccb01 7
394 | 6bb294e836c0b5f7fd797217b7b4bc7b-2 38e0b56605990a0cd178a393a71fd4fa 2
395 | 3a8aad3f81b9e8585d9a9b6cb074931f-5 334d05cfae279882c66ba2a887228c39 5
396 | 4647d00cf81f8fb0ab80f753320d0fc9-3 c00f922af88b6c90207c2fab00489fc8 3
397 | a25b2dff7d13c650e6c7e6bfb3bba5a3-5 69bfbdfbb3d1887593f589c737080a16 3
398 | 463695a603a57a387123dc5cbdf15418-1.1 50954ae35f56da33ee08c92590e4505c 5
399 | 27699cffb5032e807827cd5f9cbb6fb3-1 e08e596d213233ddbeaf51cf1f08a1f2 5
400 | 7312e6fd1b7c259f4fa4ad7fed2a35d3-9 edbbe1f63fe6dc87e7cf291b64d49a3f 3
401 | d15d7ea67999319eb893420dbae4226e-1.2 52cd851d3f89858631e1b717cffaf875 8
402 | 3f7c4c1408eb0f354e63f6a5073cd683-2.6 76df6b550e74b9b487e6855d41385ebb 9
403 | a42b88727b019cfe407e458b9f01c03c-2.3 14548f02e8202bdf7dc607d324d661c0 3
404 | 693cbd06ca2c7d132bb4b649f303903f-2.5 657a7dc7ed7aaf459ac2f23b79684bb0 4
405 | d16605c61403d8cf61834417cd95404a-5 f784998afe39dcc56201ff98f24c6bbf 9
406 | 6e26057fe568923166f5ee47d5432501-4 b7a49541ced699e9764afaa747417507 9
407 | 45c76ba14e8668b3d4ee674f4c6e26d9-2 4cd5b260b04851aae0597d492d0d8117 2
408 | 8be4d536659a057935da0a90e28e7dfc-5.3 b5aac07af19afa00090c2a16ef953200 2
409 | 652ccf6eb1863fd79c8c2c9337bea987-2.3 efde239f8e007d2fbc2e9c359806935c 4
410 | 92fdd2ecb9e00982a766e42085dbe674-1 b3259212f6431c750bcc66fcdbf86386 4
411 | 4a25a85c4830bcdb6731ee6bf3b1ceb4-3.5 a42ad7d8cbd485b9b2ccbc36c7b8ecf0 4
412 | 1bfbdda26895557e3b76ef417d88fcb4-3.3 0ef09f0f895e995f6c6a2e03ff283f29 6
413 | 4eabacda84430e6e562b966068f2ab2e-6 e15f5af3d877d17a0a45c34d1f7d16e3 3
414 | 0d59d6f5c28ce2f7517136922f8783d4-1.2 9e951590a5263e498c67622c13c2a619 5
415 | f9983935d2abf59bc8bb63203f07f25f-1.1.1 2cfbd45db29e5245c9fa8f9aa0726c14 10
416 | f2cddd116b495c6e31a23cf1340bfc3b-4 2eb3e1281dbd7461c0508edfce22c3de 9
417 | 5ed183ab17a0b19b8e6843124262aad7-1 400f2b6e696c703d1b3734b74908ce7e 6
418 | aebab2f72b1c7fe0660acc7e056941cd-4 48fcc034b6888e3005d71bf93532dbc1 4
419 | c1d255d33f1005688e0e084374c98ced-4 153058d373415241979d935375b49d14 5
420 | 0c9624e7f1fc9451e164183fcb453db0-6 67e9f77115c57c94cd3163044ec9e5ad 7
421 | 6b5ee0e06260a46b4d47e8843441c46f-5.7 aca6e26ceb5956f77ded7e8680d8de65 10
422 | 3a1acd1fe8b9c3dad8fa395a7ed61d08-1.5 1e8ade47f4b610b28ce4aea0482fa328 6
423 | ec871f96492f968f26e2f4c058e55452-5 10c2ac3860f030d6670131d133ac3a57 6
424 | 51912e2b3eaebf7f0d8a63e0319174a4-1 4fd1f26322948ed7fcd67a04c59efc6c 2
425 | 2de1be2cdc7d6a0a469fc59a5d1a5588-1.1 5db34a8a926b0b97a1791b7fd59fd437 2
426 | fc66f418ce7c7855daed259ca643fb0a-2.3 06036321a2c2f161efafa86984f44168 10
427 | 74feb790232e5a7d88fb3caf293672f2-9 f2bae101a122823531826dab0b93c40d 3
428 | 3811976a7fb0681079c00c96760525b0-3.2 9c88179a2f45e788803e2703b4193d6f 7
429 | 7169640034220fa16e0584af65890169-1 287f7402aa3ac53d1972af0e1bc61901 2
430 | 430e2856ee8488ce800bba5d1f4133de-2.5 f7e8899844567497ccb8b74c51469a8c 7
431 | f0a3a2b9f40487c74ff0bce9f820e993-6.5 d2d6c71f4f72b5fc279d6109190c119d 3
432 | ca6bebed5d63208da1f064de65b11f95-2 17aa8721080a629d0604a6ffd247bb22 4
433 | 919a312983a42b0b6a7d0f29ca09e757-1.2.5 9afc82f7ef3c54fb7950e39c62fe78cf 5
434 | b07f538d55c0927bd86da1d489c93f95-2 a59b96bb4619e69a77d51ccc7f94abc3 2
435 | 74feb790232e5a7d88fb3caf293672f2-7 1db1690f3ec690445898abfff62dc6aa 3
436 | 48520284c0ddf38c114c55184152bccc-4 8e6cfff9b5595bb6d49eb36b1da82458 5
437 | 109e21f9eacfb4c80acbed6f515f47ac-5 67ccda08767b3a40eb79c97e1b00e67f 8
438 | 3e8dbc9c7700b34acdf3a0a80c48ae10-8 a95351bfde85071890e9a9f8ba50b023 4
439 | 9bae22cc2ee5cd5bfd302a7d18694f31-7 b76b873ddc394109ce6449105d634f48 10
440 | 9ed3be384dfd4945dd94e3f14c0abe32-8 60d2579278748bb7884533ae7c1879c4 3
441 | f82c7682b284ffa37fdcd1bc429a35bb-2.4 56aa3d8db1fc9117eb02d6e9c7ed9058 9
442 | 9b80a63df52f88f7cc11b9b2649d3df6-1.5 f1bcdb815771cfd0bf6af33e0db1e341 9
443 | ed91d05e7bd45934c7ffeb8a2aa21065-1 a26586cc8d2742698061f0cde7bc4691 2
444 | 0ec5ef4ac923dcb81b1fe684bf59e90f-1.4 3c6032ed7ebb48cee8d0a7ddcdc4039e 5
445 | ca3a68dab06975c2cdcb787ceed44e6e-3 6f1616045670a91936ed3c32849d09a3 6
446 | c3b93f98a70472f1721e97638be04bed-2.8 196df32d7c1cba080cb6b2af38154caf 4
447 | c529f9144e9e576ada84e6f63a8efce4-1.2 1d639b67e0fc687ebe839c947428427e 8
448 | dd44b52e63503b680293577f5f306412-4.10 0db3de3bf41c9f8f61d6eec0d07d6f03 5
449 | 2ac42086a50a636063e8d2e1a10f70f4-2.1 120a4e0ad9d9c389cf46f282e9b64ba0 1
450 | 756ea971c47ff2c6f7409444fcec1c09-5.10.8 7587dbfb269122ef2d20447c0e530317 2
451 | fbdbd8e2ce3c1d124f61203169f91a77-1.1 a9deec24037e52e05c8837abc6307219 4
452 | c476619114d976603cad64beec1ce27c-1.4 6dcf4040b3d837f0c1c832fd5e228f10 7
453 | 2f0c85c03d5688756a28d56d9db7657a-1.5 ba7313d0aa963569ccdbaccaf5dd9769 6
454 | a60f626443630e8c42a3fe352b98dc17-4 0074446d41068be1492f6eb41384e5b6 10
455 | 23f4f580f1f1ae1a351c246e100d2da7-1.1 4a43e871931b2a1dac0f78e50e0a173a 3
456 | ced2af5f4d479613bc7ab0a2f46c8fdf-1 000023fbee1f2fafc7221590dbe496f6 2
457 | d8c4813e7639ad1062f30b5ed2240a50-2 5c2f3548d3506f72e68bb96afd901559 5
458 | ff9f3c57d48e940d59af453f3cc3e724-3 b54a18335703a7bfc00663886eff1f78 6
459 | 94b9bb9590d146e7807ea92e7234ce5f-2.4 b7fb7fbbd6d84cc40934e96aedafbfb7 3
460 | cf9f662fbdb8f6449021652c1a45b0cc-1.7 935412a31a6710adf528e351c0eb4a8d 5
461 | 35f1f29ad06bb759ef1537bd7b07dec6-2.3 621669d5840e0249da9ba26bddfda12b 4
462 | e53eed464b8de4afa2c1316404780adc-4 838340c074dee74cd1cd1bf49c0e1923 4
463 | fbdbd8e2ce3c1d124f61203169f91a77-1.2.4 194490bda3fbdfb4ae3091ddc6aaa3ae 4
464 | eebbd249f5cb214a731d364786cd3cbd-2 ad08c130e65e6bdae172b5bad1dd8c1f 1
465 | 4ba89b2fc68d6ab4f37feb53eae919d2-4.1 f044ee96a5fbe4236ba3e0a147bbd988 4
466 | e8307bfb8b09940b09f18a20759ea08b-7.3 a0e6e6da59e826b206df8d00c17d1fa7 5
467 | 26ac7d591dacc94faaf50eda76daea7b-5.12 9b4be82b38575636ddb8fd431fc40550 10
468 | 4b81674785998f48856b4680001df379-3.1 bd34a04b65348610cf02288eaf3815a5 1
469 | 185a5eb309e9ba14f65e6486f08b3032-7 7fce5b12ef95a1335ea71ef3a7ccae4f 3
470 | 24d5235e2ecb0c65bba650d63f3a0f56-1 f8afb2ff23ad5595b3a410f9448a884e 5
471 | a0d889e03873462f3d73f918cf85f0fa-5.1 8bdf0a7c1966e3354e455c914a3d0485 7
472 | 71f351984f1c1ed12d4db4c38e6d15f0-2.9.20 0aa21577437b378329bc3f8fb2430539 4
473 | 44db7fe40b3dc4854932bc1d8b36400e-3 876a8ce607124a95cbc8d386cc061e8f 4
474 | 82dcb3edb6f2118f21c73bdbfd020c70-4.5 9c05d37771c4b163b5ad46eaa0a9034b 2
475 | 2ce6e785b25db1fa3a3a26800e8d4cb7-6.1 55dac85a7856c66c30ab786d802477b1 8
476 | ebe418ae757b121b0d636041981fc1d1-6.17 25ca49547a537989610230b8c3bac15f 2
477 | 6295352bfbcdfdb03c74ab13e42e1544-8 347a34cd8488ea47da0148d659e5353d 9
478 | 2f0c85c03d5688756a28d56d9db7657a-1.2 7a183762acbeea02f6e3e8895abfc7d3 6
479 | 9153a0666f7172ff17b5a5f89d9d9e88-2 bfe832c1a5d5ef89145f7933ecfa890d 10
480 | 375a3ab409560e9ffd3a97600e85a88c-1 0774d5e5d453073f2efe8b706edc2007 7
481 | bfabf896c8b5778f10bad2cf75c93527-2.1 1c0b20bd6657330e8a29f2f5cf67f2cd 9
482 | 4e1fb86d435ce0ddc629de9c36c00080-3.1 30b0c3a09eee90849396dcae6634fff4 6
483 | 86a8e2d1fc7d097075cca2b875e396c0-3.7.1 f490b6c3a46a17bdbd37ff35ce16e5f7 5
484 | 0c8667ae0f48f6e6984ca47b005890de-2.12 eb27e33834c2d9779090f627500b2d8b 10
485 | e237e3c6308d0867edab8cd3155b5a90-4 48d2a61f18d6718fd78845e3ca69438b 4
486 | 985118344ef3adb3e3806ce6694da609-4 06071102f78422f117f8b865b971ce27 8
487 | ae10157e7e4d8155ae3f67a22e870333-1.2 cb40bacb3f6da84c1ce287cdc510001b 9
488 | c1d96f851b917db3a34598927f632aac-2.10 4e3655ff389678f7724082ea11687497 9
489 | 6bb294e836c0b5f7fd797217b7b4bc7b-3 32d118d300060dab97a620d39a87e0e2 2
490 | a8b1b10367a55b3c9316a6fbaa63baaa-6 3339cb9d47f991b85c126b1a2470274f 3
491 | 15fc7d9f85191fc4cc3e377a5178d502-2 1cb17c49495d288ba07064302dfa3a71 5
492 | 2a2a1ea79f558bbe82c678dfe622524c-1.2 89d7a71acd19232e7ac871d56ad6ff37 5
493 | 2d62e212e80cd97d781296c2d1fa7227-9.13 68ec40f25b699d68029ec31adbd0b5b8 2
494 | e4c792a4b6681bdee4e4f940376294d0-2 28487078456bb0ab721f7a135f0854a4 4
495 | 662669445dc1de9707a5a1bdd5cd8d11-1.6 953d474a409598c9c12ff10b36424ee3 5
496 | aa4f333e8a7c4cc2a34e232c5be41c3f-5.10 9dee04499785b6e930a298852c2d3dbe 2
497 | 462570d0fe2da2257700f7db3bc6b25c-4.7 b3b72c7526957e387f0e54d994855a45 2
498 | 200b0491fda29647e3eeb0a0c0c0deec-2.9 1d8d1e31d59d0145cd9a11c5a3893a2f 4
499 | be5bf577e4f25ca3525e0ddd4c3e3688-4 19726cd86d60ba47303cc70294c58104 3
500 | f947a9b1e73f31ed2d8d17e0fb6f7b6c-1.2 6e0caf02ef84ce32f6321bc73b808572 6
501 | 4f7dc4e838637de03530377d27ec092e-3 6c231d988d4b6635e087c050ad4494b3 4
502 | f81b65072205c55fb211f3a6a9e06345-1.3 a20f4b013ae102e95f6dc1601cdfb4d6 6
503 | 29b55b84322492cd3e4dd9530c2cb4ac-3.4 8c291f552f6f037c742a6774ca66eb80 10
504 | 1372c6e051d95a90adeb2d31ff003554-3.3 7b7a4cd0d5c161dc3babcbc482fd304e 3
505 | b4e54d900ed36f16a1457ca01981208f-3.10 5c66490150b63519d46f72509d59ee32 10
506 | 5c5397d543fd429dd9d4206263979723-2.2 1ecc8333be36b91aa91c65a48ec09497 1
507 | 49892ced776b6ef3376a37b50a6c07fb-1.1 f587d593209c0bf54e36806828fda7d9 10
508 | d3bce84257c6201f277530295099fbbb-4.8 8ee4f209bd633ae3884c41a264cb956e 7
509 | e784121d25e1519ea27ea45d5df0fbde-4.2 1ff52e844b49508077893ad2eaec1f55 10
510 | f81b65072205c55fb211f3a6a9e06345-1.5 243f7ea55f6c9425b527e7c96f105041 6
511 | 86152a2926dba76c312e7f4b365dbe77-3 e4dee8a3d92258f9d7f6b28ea9eadf24 7
512 | 0789d77e27962a5ce5f7f5017816aa63-7.5 dd673602e10c70912ef596be1cc25af8 5
513 | 71f351984f1c1ed12d4db4c38e6d15f0-3.13.27 b99d724ad182d0abd0f4d219ba1c13d7 4
514 | c86cb686ffb837f7299f7e670a84808c-3 279d2098d3ef989a19fcc1902a5daf64 6
515 | 0766665dbb750d978e903755f3e98c46-2.4.2 98295159aa6f2ebb4f13eb5bbe37d17a 2
516 | 9fcafe9f332d55c162a92732cd7e486f-1.4 13f2d4316ff2f69f26d55aa2668eef2b 3
517 | a018e47e5cea6ba27f33c3c67585a3ed-4.3 3604678e94cfcb64047192221ed6d182 1
518 | 90933baaab2c69edc1279eed1ea01db0-1 f7860a9c6b574413cf0c5c30c2c9c8d6 9
519 | bccded0a80fdf599444673d421515c66-3.8 e434c7683c7b08a7dad73c6632fec7bb 10
520 | 9d81c44af8fef8e928720bd2ee5bcb01-2 ce673bf2cbf7554c1605a2036da10bf9 6
521 | 5e07e6ac52ffe0731a09299867a27983-2 8643b5adddd60cda127225fad108a372 2
522 | 6fa1ff9dda249f094059315aeff058ab-6 147d22f706399a1ca2b25b746efc5a82 10
523 | 43d4be37e48cdd24d007c269b882b05f-2 0655bdd27ef87f00e7cdf93154c0be69 9
524 | ad1900cb874496bdf4d5f34c4f75a2ef-1 47511f39821ff852f75a2f428aa24dff 6
525 | 317532540adceb07d5e2ec62d5eacac2-10 a8ba8a54eb0a39b28ada232b29ffe34f 3
526 | 461a1a38011260404c8514de09399538-4 3502aa13199f24f163a1b1807d431bab 10
527 | dd083fdc5a4f9f82597d7b1f46a6c594-1.2 284e10cd107cf5b8f4bdeff1352e8458 5
528 | a374955b77313f8f418ecec96f70081c-3 52105f598910250244a3b5704c946bf0 9
529 | d03045a8b81d71de2f05d81050f43b40-4.5 e6674b49854e2d0cdb44ad7fb7f18146 3
530 | 53b28fa81ed88da3ee7b47b7979bb9fb-2 d795a1012c43575bc751f61438d27dd3 6
531 | 9f9f80c2dca34bfec4c75f0b8e96271d-4 c2404950472e9e925592c93c58db4f69 8
532 | 1d77d915e1e28f06309d402ba1ec7e87-4 88e63411ad7776e67ef075e39aee7c7c 4
533 | 652ccf6eb1863fd79c8c2c9337bea987-2.1 2296a8f5f841ba4d10895064353ec1ab 4
534 | af1df4d04666ed6239f885f313f37302-2.1 f5084daea5ffaf4f4a86d30610c5b67c 9
535 | 9bae22cc2ee5cd5bfd302a7d18694f31-6 dd7874c7ed3add9844b3a941beee434c 10
536 | 45009b4a16ef03ba8bb5b15665ef4469-8 8469a917158e7ed3e9858cc952bace73 3
537 | 88315ba1db2f60ceaa8e869dd3c935da-5 e371548dc98008d6f797e68a68a0c1f2 3
538 | 3cdd7f212d77dd690991abe3df14eb0e-3 361ae15ff62c35dfc48b7965a21259af 10
539 | d280e0f028346a2cc7012012c9a8fde3-4.9 fd90ff8f7a484d59d6c1cb4af73c0b17 10
540 | 46a806af69758583647ce238463c9852-3.5 5f243ab926d56f732091593aa23266bb 4
541 | df8f6df05c6d12dc8e050c570f14eea8-1.1 0bae68d9751ded60809b1c0b6b0257de 7
542 | 6cc3f5e25f62cce158a0b7af74077851-3 66e9589534fb669977feb00c5b807363 10
543 | 8fa52fc9e76962339d5fe4f985ced328-3 6411042ef729c50f2a75eb55528b15ad 10
544 | 51a6a166bfaf59dce23a10b13f17d6a7-3 a200a2fae60ceac5f08826b9dabd713a 4
545 | 0326a61f1522b13bde5c4cab2507430c-2 e46872721df8df3530cafadf58ce6087 2
546 | cff637b6145a244b89aa89f526c696da-3.1.4 26e918db593cda8e9ba12026508e9ec6 10
547 | 570b86ec6ec8e1e2051c6ab59d19adb5-6.10 ec9dea7d04bba26e5ce44ef41750f627 8
548 | ff106428f695e8509f1e2a6f047a9516-2.7 ae38f664ea5d2c299b2dbd8fa0d13e34 10
549 | 40ce2216a0309889cf1d46a2137d1897-4.6 f10c4685a37c7404c9bad890a8b89d17 4
550 | b02710d7581ae2770e07ec4949f1d256-1 c52d9c6880312d293102526b83c54b69 10
551 | 90edde4ecc403ea91eaa3443872111dc-8 be55c1037bdebe7847a618f13095949d 3
552 | 969e6042af3f44b1de5021f36996954b-5 c078f00a9b0a7690715c4edf334df376 2
553 | bdfc630666ac36a1b1ce38309983332d-3 873dd0632b82eaf662d90363be5fe3e8 2
554 | 1855ebb6505036646e82ea9b2533600d-7 bb66f2084d2f319ed4906b45346e77b2 3
555 | 7d31e0da1ab99fe8b08a22118e2f402b-4 19770153f35295b4341c9a385869bfde 3
556 | 44bef2ee415c62f0ce01fbe6c5566250-5 ba57ae587d2f8e2941875fc995ca06c4 3
557 | 7dbb4d7bdd41cb2f00b70479a0c12082-1.3 45aa0cce65bb11a40e39baed03404cf7 6
558 | 9f65236f3f0c18b8b7845acd72c0a6ec-5 5071e0bea3e483ca7a144e897f27c51b 2
559 | 3811976a7fb0681079c00c96760525b0-2 2c7ca04a43ee08a5f2c88a6e0221d2a2 7
560 | 469979eb5d434b2684b0c932f4454cae-2.4 08aac08eff9ede5b38a655a7c21f72ed 3
561 | 6f2b4762201dbf3584513fa72ff84dc2-1.2 828819a1c174d520eb21f01b54bd98d0 7
562 | 2073fc20368af309007bb4adef0f086b-8.10 dff1e4b015fe249bbc3ebbfa351dbc74 4
563 | 476a82c10b5830881fe3219715c29d06-2.1 b81ebfd85b4d048b1d1bf704f5a55704 10
564 | 0811abf5a5bc74534bececf59628f85a-4.10 06561acc6a6da75e4fbb880530057f57 2
565 | a1b7d77895dbd92fd780389171358b26-4.6 56123f566b663ed51d6fa75c41abdb1f 3
566 | 1136c2ab5563b433db89dd8bc01418d1-3 13ba2b27c7b6c536f0e986b5e2ba0942 9
567 | 74625027de86d75fb7854b96140e90d8-11 5e10c124cc50d94c1c7a12b3fa85832c 10
568 | 97c85e123d3e6bb5c4c2da98a1e3e0e5-2 724089e4b5e9003ea28d6ee4fcb9f0cf 7
569 | 1bfbdda26895557e3b76ef417d88fcb4-4 78a9c0c12561d15a40ce3146fd344436 6
570 | e784121d25e1519ea27ea45d5df0fbde-4.3 1e646ced9ccd04269a29c3bc46741777 10
571 | c952e888ff380b35d44f2a5da0068686-2.8 8efb9d0d207c1d8f83cd70bd4f3544ff 9
572 | 1136c2ab5563b433db89dd8bc01418d1-1.6 0301babd082a184be2134db749e6c1d9 9
573 | 0174e628c0d9a007c78819c9eb1029b2-4 8d1ec77ad9662bf81abd76217e82a366 10
574 | 9d28c804bde9a6d04b496c72095d65b5-2.3 c2431e8d2c5cc5762dec3afce459aa7a 10
575 | 6bd22968066a2b1c8ec009eb99809859-1 d395428b9d63fd7dfe89e85af59a54cc 9
576 | b50559ebd1e407ddd42671bef4012e9a-2.1 f1400d327018515f36e928a17087b283 9
577 | 0649339e3c133f746da0cc055013466d-2.2 38a82cee5b5f522d44fff9b0884f3629 9
578 | d175b36beaf2ca66b7e25b9a05d3dca9-2.3.3 dc054173f0f262541ab1aa6ae948a763 8
579 | af1df4d04666ed6239f885f313f37302-5 e694801872ad7b3d6785c2ec573d89f1 9
580 | 0d379f070fbd5492f0363ed8c8400c26-1 4dfce6ad67993f6386580584d120574f 4
581 | ed2a1d6fedc5c7e69ed3f79eabf0b997-3.2 2f5cf977b5b06f87e01e608c295b9d5c 9
582 | c394b20b377aabc3e30723773e015482-2.7 33a1c8dd3113c84cc16a03ba961c8322 3
583 | 241f041e0600065ff18039e0cc4c2d32-4 f0da808f451c59a64c0b393eb10f89bd 4
584 | f615455cda36f8afe043d888634bb999-1.6 a3004bb244e2cfa1d287944ca82077c9 7
585 | 6851fb450fd1e824b599b2a000360252-2.6 c771de9552ca20d92164197b704ab13d 9
586 | 970eed35cee1a20ccae4e529b5683276-3.2 bfb63c246d939b28bead4e635ac375d6 5
587 | ad95f3c2d7ba367b10f943d8969979da-2 cf0c7405ac053063a41778041c9aa337 4
588 | 60d28e7d879c0dc48b9a593468cf11e5-4.8 2f6ea6e2a2d227f1bb3118bcc73e2ff4 2
589 | df376caf7432a13d6d49e0641463dfcc-1 f83e2e59ca67a3bf8ce41037ecb03d8c 9
590 | aafac65ae06628be791f7d5faa1c4aa3-6 645e49fb3e91d3625c82c23d52b08eca 10
591 | 74f3d9106ffb4a5df72234e7c60e3d8b-1.8 75ba6d593e51a1009c072c7e6b318d01 2
592 | a79133abe3132caf9491427873dc9560-5 9452b311c054c041f51f8a88f680255d 4
593 | 048a135264054d9c6829f8e4157397aa-5 1b17b01220f43e440d015fe78ceff5d6 6
594 | 161da9e3ff6d804e321511e8c6f390e7-3 12478b105178de047494bd18f23e141d 7
595 | 35c5f60ea04deea79f6107fee80af0c0-7 e1a22f2e3c69588a1692fa65cabf3899 3
596 | 74f3d9106ffb4a5df72234e7c60e3d8b-1.5 505cf8be1fa9d7fd4f6251167cf865dd 2
597 | b4e54d900ed36f16a1457ca01981208f-2.4 3c3a59b4de20b27da4bcf1a58f4c550e 10
598 | 7309aa510e05face6e6eeb5e35880be0-6 e74d9ff2cf659ef1072d017d8a7745ed 8
599 | 720334cd658006fa4e2f9e33ff8afb62-4 e5cfb57ddfc9218f6264ff8c4b5de876 6
600 | ce3ebe94976062adda049cf60ac95edc-3.3 1961b221e302cdf287c62a621c9e58db 7
601 | 95350ee93b80523503810a779481247b-3 b95c21ce4c36153b8028433b32437b73 6
602 | 7c8419e9eba6b27378b7bfa131283673-7.9 f03d2282700296c04fa1712ea7e51764 10
603 | 53b28fa81ed88da3ee7b47b7979bb9fb-6 bf983d82cecaab2b515152bc6a775a1e 6
604 | a8d02ad8c306be96bf8e882a813784ef-5 168388aecccff12cdbdff873deef40e8 10
605 | a14ea9e131acd1625d09958d147a0baf-4.7 348687e2a24c6e357035eb96514f863d 4
606 | adcc21cd76c53e93b399336bcf4a4145-2.6 6b1a3e0b949f2d34e15e5b9abc0d3fdd 7
607 | 5b80b71611400db800202ca10cad4635-5.9 98b5ed1427370773e35316d5eff0c108 10
608 | 2bdf0a587f682a930d5a8767e4089c17-2.7.4 e72b6cc4c4c10dc65135a518aaee1f9b 4
609 | 26014846a9bf8d6f8d62e0c74cb3a70b-8 d9b10237be00f101e0d1be092d6c0812 10
610 | c86c9ef031ad63bced4b51cc4cf513f8-5 2e2d9f0842f6c75ae6092f281bb60301 3
611 | b07f538d55c0927bd86da1d489c93f95-4 913348eaf8442ebf5f194683692d08e3 2
612 | 0d56f03b5c80195a59a48fcf258f1c67-3 4107f53ff41f9134a4f3af4d1344e7d2 9
613 | 4143d621e742074887b0ecba0011d1da-9 b3ccb5e38e60bebb1a35dc5c621d0582 2
614 | bd390705dd1fc7fc3dd09e31b366d2e5-1 7c7b4a888125172ccdf73dc426f18074 2
615 | 078f3eb97ff6c365c2e645282a661bad-5.4 418a619ea506735103d12f8419f3a5c7 8
616 | cf9f662fbdb8f6449021652c1a45b0cc-2.13 98899a9d070ec8ba79a211b0cd0d8a45 5
617 | b846373e48cf002363110ff72973f24b-7 621e60a8cdb9451936404413156e2224 10
618 | a0379bab1b29d0217d15cff1354ad8be-7 eae394c42ae3eece9fc61a817d5359e9 8
619 | 469979eb5d434b2684b0c932f4454cae-1 db2fe1affbf31169c64e08825e6bdfac 3
620 | 0a86e2ad2b1828b0250b305984113e7a-6 225ec96db2f4ac904fd9d6b37c82e447 8
621 | 725552c5aee9b729a10ff8fdc80ed4fc-1.2 87f5789a3892ce79b5cc5dbd76096474 10
622 | aebaefbb0bc2b366650fb1f016a70841-1 cc1c2cb8b787d17ef64364032cda2844 2
623 | 44b439ba9975f8e802b456777c54a854-6.4 392c8751e5374e1681794006fcd9244b 3
624 | aba4eeb2c1bee81441c9e38045a65f6f-1 5e44679d6c1fa44760713ab68188d200 3
625 | 221cdfb73049678e244380b45872cbb2-9 9002bc82ace4a8106a439b012b30e3b6 3
626 | ce3ebe94976062adda049cf60ac95edc-5.12 0e6ed13a165a65cd1830e63d181fde35 7
627 | c39584729495496984371f0ec2f38974-2 fb6242a4af5f65f96eccdfcb4cd859f9 3
628 | 487f72ec246538202434ad1025ebbad4-2.1 9756af95710427a148e8d281e22af09c 10
629 | 4062a15c6c2f4b43904cb50d25b6cc78-5.6 4157557e1496b5f08fb4b5223db9992d 10
630 | 23648f8f4e0293ec6f157b2675a3efab-5 47e5d4b3c6a1e3537152f552690675bb 1
631 | 350b059e0d998f5c160ef579ebabe8ae-8 508958dd435b1a8bdf0c061ea5f13702 3
632 | 0b70416f7bbf38c21234300ba2ff90ae-1.3 6ff1e9da362874ef08190f722e3276cc 1
633 | 9a820165ebf67ce8e19ab5b503a276a3-5.11 58fe0ca33ab4716b5f1150805c9b494a 5
634 | 4b68493476c7b9b3e7a207677df788c6-3.2 ba02748f789b534f9b6b186d3d535656 6
635 | f8d3a31cdd07e6dd89cf77a7583b3a16-3 909cfcfcc5a695630446202fbdedac5d 6
636 | ae18e1b5d3d98c3d561bcdd43dce6537-8.10 ba62ffd6c081e54240df3d74a2244a79 5
637 | 40a7e794398bcbc755d379273793eca8-1.1 5428000956fe293e97545553614531cf 9
638 | 9649610dbce420e3eddd1b58072aa8b2-5 7d6fb018c2e26c85e369eb94dc3e39d6 5
639 | c252008ba58b2b5633a436a54965816f-1 fb282035c146ed17fb1071d588a40e9d 1
640 | 5febbff9a5e62ce653ef1499995b94a6-8 7b77e1582ae1bd65bda5513d1c01cacf 1
641 | b2e9f9a1f627ec881f82495153e6f852-2.6 a24f1ebd09e5ec5e640571ed995b3ffb 6
642 | 8a5e28d1b5f28bc5d1a88afbad5a96f5-6.8 25039e93a6bfad26f4baa64335b3c0f1 8
643 | b71ff51adfd06cdef61b317cadd67cd1-1 a2e5d9f401c31da82f2d9a51c5ef3e93 2
644 | 022b721531b70a95477f6ac1a9c17343-4 f55e8e62156df8aebf49410d720ee866 1
645 | 71f351984f1c1ed12d4db4c38e6d15f0-1.7.18 beafda31dfa62f0f94dad8729cbcb20c 4
646 | b2dc02e795f8d5054cf6ea61c8f3af4d-1.9 dd152b9c02bb725f7af5038f5b1649ac 6
647 | 31ed719daa37b297341a98fdf3bf31ae-3.9 bb41253b451677edf8e79c33e9c68ff9 10
648 | ca4b68d231c4389802524a464fc74ef5-2.4 8aceca9b8e9a35f93099aa6fd5fa8089 4
649 | cc726d8ba07859dc74addc6961852500-8 ed4fe1d221e9402b6cdf09e871db425d 8
650 | 5c5397d543fd429dd9d4206263979723-2.4 0f809a2939804729f10fae126846c70f 1
651 | 1f567cd562e7b6d83dba586f77cff6eb-3 578ccf30b0eb7ad2e965ad412a9c7836 10
652 | ee2c0a9a9257c740ea1269d084682ec6-3.6 03c8093b6585604239705546c3d3ae76 10
653 | e77e164489b673dcb8631c929bfae05b-1.6 00ab896e682993c3c5afa65f2752ba45 7
654 | 6e432d09e024efd42e1dbe14b5cd795a-3 6e432d09e024efd42e1dbe14b5cd795a 1
655 | 4f1411629c6e068c44d61b172149ce40-8.5 28beeb4babe6dd1acc8b848bf5c1c0a6 10
656 | 3a8aad3f81b9e8585d9a9b6cb074931f-4 923c82bb57d137da3b34709a274c7630 5
657 | 609b77f61aa56b2327326d2957dbb072-1 3cc816ab9dfa4948679b4b19ae394055 10
658 | ef0890b14dc2959ba0fb0ba3c136c7e4-1 e280ba7e7b5abbae26dad3b979367739 2
659 | 254505d8f78808ba3bd75c96f7f717ac-3 396a21e8f35f5e4f80df5a050ee04d7a 2
660 | 5188f17b561517f7d720197b074d4617-2.1.1 f27dff9a74ce58623f9c085454cfcf57 9
661 | 9515f8a169b7fb0d5d75718df2e9219d-3 3f9daa2f7d7084ce3b4ca2c0e715bfd9 5
662 | 917c8e96fbaa4c6999a8d530654bcb66-3.7 b3d393a7359abdb78f1a97209b7a4cc2 10
663 | 5dd8d46555667c0a2247d69c7e7347e8-1.2 f1a743863608c33f2a6bc92cfc8e4c8f 1
664 | 71e3008097be4eb3efe341fea257e953-5 9256b15f0ee02ac49e75933ddbfe1ef3 1
665 | ce3ebe94976062adda049cf60ac95edc-3.4 5fbbb25c58efb699a5116fe853dc7eb7 7
666 | ecbf1f7afc3cab9308c49ba3449fa9f0-4 83b1256c1f2830540fb5c8ed5a9e8b65 7
667 | 80b9b49b9ce9711acc51f6515426c9ed-2.5 2e0630bafd5d46d02db2466bc23bd655 5
668 | 81a67e13b761f333fd956e81141401e9-3 c5919bf7b39c302f5c79a9e00a05c313 4
669 | 90edde4ecc403ea91eaa3443872111dc-1 80f00f5a751976780a3db66daad5561c 3
670 | d66a42d932d8dabc87d600e34c85ed20-4 de4d6f54678bd80aa612d13cc07e9ede 5
671 | 6b5ee0e06260a46b4d47e8843441c46f-4.6 37cc5bd961cef424cf05618365497e7d 10
672 | 4c54c163f43d0ac8512df032b3b07bff-4.12 7fe62471ff1ec1072326a151fbffbc39 3
673 | 24d5235e2ecb0c65bba650d63f3a0f56-5.1 219066d5232e3da1b98e4d3454134bb8 5
674 | 4e9261acce01f7b573f08ceac213fe64-6 b1ae3ce1d6be0379845d373d69e42231 7
675 | 1481c5fdbe56eb71ea6c7d32bbe34185-2.4 7044d4c00fed7d0954d4cbcc297d7fe9 4
676 | c0008d92a65249fa11a7bf1e8e758b85-2.4.15 c797b88f10e365b929bf18f26395610c 1
677 | 0ec5ef4ac923dcb81b1fe684bf59e90f-1.7 c6a37579e45997f9a770731ebf5b60fd 5
678 | f0a3a2b9f40487c74ff0bce9f820e993-1.2 6f32068970f7872a8d035d4befa1555c 3
679 | 19099abc83b37014b75a2506ffa4c2d6-4.2 e505960b4372e7fa70659a026343f285 6
680 | 350b059e0d998f5c160ef579ebabe8ae-1 86058510aff109d21b7df0f2b04011c1 3
681 | 92aec0ba411203aa3a57aec94b108ed6-4.6 15deec203fa9eebaf14d3b8d51c495ad 3
682 | 9b785800f815749afd346fc4c5a7077d-3 b4ed112b9fafdd2a18be4f71a6355a65 4
683 | 63db44496d352f4b59a88542e7fd2498-1.5 6817652b11082f2065fced1f1b937169 5
684 | 9f61eb10a3b4a7be3a81a913f3f2b9da-5.4 629108ea68ab7d235e260dc657e4e6f4 10
685 | e52bbf7bf426bda9b65169b214e79b9d-3.4.7 b9b25715ebe36cbb934f8a2662443cef 4
686 | aebab2f72b1c7fe0660acc7e056941cd-2.3 8e4c8be455424582c97b8d6fd1c7ba54 4
687 | 4b055af79e05167b41460443de65e574-5 c62102715614b3259b1ce2473b8a674b 9
688 | 038a69f549d1470199e92700231bb34b-3.6 7ec7bef91a89402ea4a2ccedf7a8df84 10
689 | 088c774776e2cfdba1a27861c215a616-5 86cf96e37f93763c87f3396fe9042319 10
690 | 1c7f6cc931869cc7c1cfb9c056b471ef-3.5 ecab2ecd413f62da8fe765964a20c9a6 4
691 | 0b4ebd99673d910a6747df881d000dc1-7 2a8d984cc0a2aaf85b02f7e5de4d3633 5
692 | fbdbd8e2ce3c1d124f61203169f91a77-1.6.13 003a2e19de70cfc0a5cb9ff19306cf40 4
693 | 92aec0ba411203aa3a57aec94b108ed6-5.8 2c2dfccfadbd6e17a53234c969367ae8 3
694 |
--------------------------------------------------------------------------------
/experiments/multi_modal_retrieval.py:
--------------------------------------------------------------------------------
1 | from __future__ import division
2 |
3 | import sys,os
4 | import random
5 | import json
6 | import numpy as np
7 | import caffe
8 | from termcolor import colored
9 | from PIL import Image
10 | import gensim
11 | import matplotlib.pyplot as plt
12 | from sklearn import svm
13 | from sklearn.externals import joblib
14 | from sklearn.metrics import average_precision_score
15 | from sklearn import preprocessing
16 | import scipy.stats as sp
17 | from preprocess_text import preprocess_imageclef
18 |
19 | num_topics = 40
20 | layer = 'prob'
21 | type_data_list = ['test']
22 |
23 | # Function to compute average precision for text retrieval given image as input
24 | def get_AP_img2txt(sorted_scores, given_image, top_k):
25 | consider_top = sorted_scores[:top_k]
26 | top_text_classes = [GT_txt2img[i[0]][1] for i in consider_top]
27 | class_of_image = GT_img2txt[given_image][1]
28 | T = top_text_classes.count(class_of_image)
29 | R = top_k
30 | sum_term = 0
31 | for i in range(0,R):
32 | if top_text_classes[i] != class_of_image:
33 | pass
34 | else:
35 | p_r = top_text_classes[:i+1].count(class_of_image)
36 | sum_term = sum_term + float(p_r/len(top_text_classes[:i+1]))
37 | if T == 0:
38 | return 0
39 | else:
40 | return float(sum_term/T)
41 |
42 | # Function to compute average precision for image retrieval given text as input
43 | def get_AP_txt2img(sorted_scores, given_text, top_k):
44 | consider_top = sorted_scores[:top_k]
45 | top_image_classes = [GT_img2txt[i[0]][1] for i in consider_top]
46 | class_of_text = GT_txt2img[given_text][1]
47 | T = top_image_classes.count(class_of_text)
48 | R = top_k
49 | sum_term = 0
50 | for i in range(0,R):
51 | if top_image_classes[i] != class_of_text:
52 | pass
53 | else:
54 | p_r = top_image_classes[:i+1].count(class_of_text)
55 | sum_term = sum_term + float(p_r/len(top_image_classes[:i+1]))
56 | if T == 0:
57 | return 0
58 | else:
59 | return float(sum_term/T)
60 |
61 |
62 | if len(sys.argv) < 2:
63 | print 'Please enter the type of query, e.g. txt2img or img2txt'
64 | quit()
65 | query_type=sys.argv[1]
66 |
67 | ### Start : Generating image representations of wikipedia dataset for performing multi modal retrieval
68 |
69 | layer = 'prob' # Note : Since image and text have to be in the same space for retrieval, the CNN layer has to be 'prob'
70 | num_topics = 40 # Number of topics for the corresponding LDA model
71 | caffe.set_mode_gpu()
72 |
73 | # Specify path to model prototxt and model weights
74 | model_def = '../CNN/CaffeNet/deploy.prototxt'
75 | model_weights = '../CNN/CaffeNet/TextTopicNet_Wikipedia_ImageCLEF_40Topics.caffemodel'
76 | print colored('Model weights are loaded from : ' + model_weights, 'green')
77 |
78 | # Initialize caffe model instance with given weights and model prototxt
79 | net = caffe.Net(model_def,     # defines the structure of the model
80 |                 model_weights, # contains the trained weights
81 |                 caffe.TEST)    # use test mode (e.g., don't perform dropout)
82 |
83 | IMG_SIZE = 256
84 | MODEL_INPUT_SIZE = 227
85 | MEAN = np.array([104.00698793, 116.66876762, 122.67891434]) # BGR mean pixel values subtracted from inputs (standard Caffe convention)
86 |
87 | text_dir_wd = '../data/Wikipedia/texts_wd/' # Path to wikipedia dataset text files
88 | img_root = '../data/Wikipedia/images_wd_256/' # Path to wikipedia dataset image files
89 | image_categories = ['art', 'geography', 'literature', 'music', 'sport', 'biology', 'history', 'media', 'royalty', 'warfare'] # List of document (image-text) categories in wikipedia dataset
90 |
91 | # Generate representation for each image in wikipedia dataset
92 | for type_data in type_data_list:
93 | # Specify path to wikipedia dataset image folder and output folder to store features
94 | out_root = './generated_data/multi_modal_retrieval/image/' + str(layer) + '/' + str(num_topics) + '/' + str(type_data) + '/'
95 | if not os.path.exists(out_root):
96 | os.makedirs(out_root)
97 | im_txt_pair_wd = open('../data/Wikipedia/'+str(type_data)+'set_txt_img_cat.list', 'r').readlines() # Image-text pairs
98 | img_files = [i.split('\t')[1] + '.jpg' for i in im_txt_pair_wd] # List of image files in wikipedia dataset
99 | for sample in img_files:
100 | im_filename = img_root+sample
101 | print colored('Generating image representation for : ' + im_filename, 'green')
102 | im = Image.open(im_filename)
103 | im = im.resize((IMG_SIZE,IMG_SIZE)) # resize to IMG_SIZExIMG_SIZE
104 | im = im.crop((14,14,241,241)) # central crop of 227x227
105 | if len(np.array(im).shape) < 3:
106 | im = im.convert('RGB') # grayscale to RGB
107 | in_ = np.array(im, dtype=np.float32)
108 | in_ = in_[:,:,::-1] # switch channels RGB -> BGR
109 | in_ -= MEAN # subtract mean
110 | in_ = in_.transpose((2,0,1)) # transpose to channel x height x width order
111 | net.blobs['data'].data[...] = in_[np.newaxis,:,:,:]
112 | output = net.forward()
113 | output_prob = net.blobs[layer].data[0] # the output feature vector for the first image in the batch
114 | f = open(out_root+sample, 'w+')
115 | np.save(f, output_prob)
116 | f.close()
117 | print 'Finished generating representations for wikipedia dataset images'
118 | ### End : Generating image representations of wikipedia dataset for performing multi modal retrieval
119 |
120 | ### Start : Generating text representation of wikipedia dataset for performing multi modal retrieval
121 |
122 | choose_set_list = type_data_list
123 |
124 | # IMPORTANT ! Specify minimum probability for LDA
125 | min_prob_LDA = None
126 |
127 | # load id <-> term dictionary
128 | dictionary = gensim.corpora.Dictionary.load('../LDA/dictionary.dict')
129 |
130 | # load LDA model
131 | ldamodel = gensim.models.ldamulticore.LdaMulticore.load('../LDA/ldamodel'+str(num_topics)+'.lda', mmap='r')
132 |
133 | for choose_set in choose_set_list:
134 | # Read image-text document pair ids
135 | im_txt_pair_wd = open('../data/Wikipedia/'+str(choose_set)+'set_txt_img_cat.list', 'r').readlines()
136 | text_files_wd = [text_dir_wd + i.split('\t')[0] + '.xml' for i in im_txt_pair_wd]
137 | output_path_root = './generated_data/multi_modal_retrieval/text/'
138 | if not os.path.exists(output_path_root):
139 | os.makedirs(output_path_root)
140 | output_file_path = 'wd_txt_' + str(num_topics) + '_' + str(choose_set) + '.json'
141 | output_path = output_path_root + output_file_path
142 |
143 | # transform ALL documents into LDA space
144 | TARGET_LABELS = {}
145 |
146 | for i in text_files_wd:
147 | print colored('Generating text representation for document number : ' + str(len(TARGET_LABELS.keys())), 'green')
148 | raw = open(i,'r').read()
149 | process = preprocess_imageclef(raw)
150 | if process[1] != '':
151 | tokens = process[0]
152 | bow_vector = dictionary.doc2bow(tokens)
153 | lda_vector = ldamodel.get_document_topics(bow_vector, minimum_probability=min_prob_LDA)
154 | #lda_vector = ldamodel[bow_vector]
155 | lda_vector = sorted(lda_vector,key=lambda x:x[1],reverse=True)
156 | topic_prob = {}
157 | for instance in lda_vector:
158 | topic_prob[instance[0]] = instance[1]
159 | labels = []
160 | for topic_num in range(0,num_topics):
161 | if topic_num in topic_prob.keys():
162 | labels.append(topic_prob[topic_num])
163 | else:
164 | labels.append(0)
165 | list_name = i.split('/')
166 | TARGET_LABELS[list_name[-1].split('.xml')[0]] = labels
167 |
168 | # Save this dictionary as JSON.
169 | json.dump(TARGET_LABELS, open(output_path,'w'))
170 |
171 |
172 | ### End : Generating text representation of wikipedia dataset for performing multi modal retrieval
173 |
174 | ### Start : Perform multi-modal retrieval on wikipedia dataset.
175 |
176 | for type_data in type_data_list:
177 | # Wikipedia data paths
178 | im_txt_pair_wd = open('../data/Wikipedia/'+str(type_data)+'set_txt_img_cat.list', 'r').readlines()
179 | image_files_wd = [i.split('\t')[1] + '.jpg' for i in im_txt_pair_wd]
180 |
181 | # Read the required Ground Truth for the task.
182 | GT_img2txt = {} # While retrieving text, you need image as key.
183 | GT_txt2img = {} # While retrieving image, you need text as key.
184 | for i in im_txt_pair_wd:
185 | GT_img2txt[i.split('\t')[1]] = (i.split('\t')[0], i.split('\t')[2]) # (Corresponding text, class)
186 | GT_txt2img[i.split('\t')[0]] = (i.split('\t')[1], i.split('\t')[2]) # (Corresponding image, class)
187 |
188 | # Load image representation
189 | image_rep = './generated_data/multi_modal_retrieval/image/' + str(layer) + '/' + str(num_topics) + '/' + str(type_data) + '/'
190 |
191 | # Load text representation
192 | data_text = json.load(open('./generated_data/multi_modal_retrieval/text/wd_txt_' + str(num_topics) + '_' + str(type_data) + '.json','r'))
193 |
194 | image_ttp = {}
195 | for i in GT_img2txt.keys():
196 | sample = i
197 | value = np.load(image_rep + i + '.jpg')
198 | image_ttp[sample] = value
199 |
200 | # Convert text_rep to numpy format
201 | text_ttp = {}
202 | for i in data_text.keys():
203 | text_ttp[i] = np.asarray(data_text[i], dtype='f')
204 | # IMPORTANT NOTE : always sort the images and texts in lexicographic order !!
205 | # If Query type is input=image, output=text
206 | mAP = 0
207 | if query_type == 'img2txt':
208 | counter = 0
209 | order_of_images = sorted(image_ttp.keys())
210 | order_of_texts = sorted(text_ttp.keys())
211 | for given_image in order_of_images:
212 | print colored('Performing retrieval for document number : ' + str(counter), 'green')
213 | score_texts = []
214 | image_reps = image_ttp[given_image]
215 | for given_text in order_of_texts:
216 | text_reps = text_ttp[given_text]
217 | given_score = sp.entropy(text_reps, image_reps) # KL divergence D_KL(text || image); lower means more similar
218 | score_texts.append((given_text, given_score))
219 | sorted_scores = sorted(score_texts, key=lambda x:x[1],reverse=False)
220 | mAP = mAP + get_AP_img2txt(sorted_scores, given_image, top_k=len(order_of_texts))
221 | counter += 1
222 | print colored('MAP img2txt : ' + str(float(mAP/len(image_ttp.keys()))), 'red')
223 | if query_type == 'txt2img' :
224 | counter = 0
225 | order_of_images = sorted(image_ttp.keys())
226 | order_of_texts = sorted(text_ttp.keys())
227 | for given_text in order_of_texts:
228 | print colored('Performing retrieval for document number : ' + str(counter), 'green')
229 | score_images = []
230 | text_reps = text_ttp[given_text]
231 | for given_image in order_of_images:
232 | image_reps = image_ttp[given_image]
233 | given_score = sp.entropy(text_reps, image_reps) # KL divergence D_KL(text || image); lower means more similar
234 | score_images.append((given_image, given_score))
235 | sorted_scores = sorted(score_images, key=lambda x:x[1],reverse=False)
236 | mAP = mAP + get_AP_txt2img(sorted_scores, given_text, top_k=len(order_of_images))
237 | counter += 1
238 | print colored('MAP txt2img : ' + str(float(mAP/len(text_ttp.keys()))), 'red')
239 |
240 | ### End : Perform multi-modal retrieval on wikipedia dataset
241 |
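242 | # Note on the metric: get_AP_img2txt/get_AP_txt2img above compute average
243 | # precision as AP = (1/T) * sum of precision@r over each rank r that holds a
244 | # relevant item, where T is the number of relevant items among the top_k.
245 | # Toy example (illustrative only): for a query image of class 'art' and
246 | # retrieved text classes ranked as ['art', 'music', 'art', 'sport'], the
247 | # relevant ranks are 1 and 3, precision@1 = 1/1, precision@3 = 2/3, T = 2,
248 | # so AP = (1 + 2/3) / 2 = 0.8333...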
--------------------------------------------------------------------------------
/experiments/preprocess_text.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import re
3 | from nltk.stem import WordNetLemmatizer
4 | from nltk.tokenize import RegexpTokenizer
5 | from stop_words import get_stop_words
6 | from nltk.stem.porter import PorterStemmer
7 | import gensim
8 | from gensim import utils, corpora, models
9 | from gensim.corpora.wikicorpus import filter_wiki, remove_file
10 |
11 | filter_more = re.compile('(({\|)|(\|-)|(\|})|(\|)|(\!))(\s*\w+=((\".*?\")|([^ \t\n\r\f\v\|]+))\s*)+(({\|)|(\|-)|(\|})|(\|))?', re.UNICODE | re.DOTALL | re.MULTILINE)
12 | def preprocess_imageclef(raw):
13 | # Initialize Tokenizer
14 | tokenizer = RegexpTokenizer(r'\w+')
15 |
16 | # Initialize Lemmatizer
17 | lemma = WordNetLemmatizer()
18 |
19 | # create English stop words list
20 | en_stop = get_stop_words('en')
21 |
22 | # Decode Wiki Markup entities and remove markup
23 | text = filter_wiki(raw)
24 | text = re.sub(filter_more, '', text)
25 |
26 | # clean and tokenize document string
27 | text = text.lower()
28 | tokens = tokenizer.tokenize(text)
29 |
30 | # remove stop words from tokens
31 | tokens = [i for i in tokens if not i in en_stop]
32 |
33 | # lemmatize tokens
34 | tokens = [lemma.lemmatize(i) for i in tokens]
35 |
36 | # remove non alphabetic characters
37 | tokens = [re.sub(r'[^a-z]', '', i) for i in tokens]
38 |
39 | # remove tokens shorter than 3 characters
40 | tokens = [i for i in tokens if len(i)>2]
41 |
42 | return (tokens, text)
43 |
44 | def preprocess_wikidata(raw):
45 | # Initialize Tokenizer
46 | tokenizer = RegexpTokenizer(r'\w+')
47 |
48 | # Initialize Lemmatizer
49 | lemma = WordNetLemmatizer()
50 |
51 | # create English stop words list
52 | en_stop = get_stop_words('en')
53 |
54 | # Decode Wiki Markup entities and remove markup
55 | text = filter_wiki(raw)
56 | text = re.sub(filter_more, '', text)
57 |
58 | # clean and tokenize document string
59 | text = text.lower().split('../img/')[0]
60 | tokens = tokenizer.tokenize(text)
61 |
62 | # remove stop words from tokens
63 | tokens = [i for i in tokens if not i in en_stop]
64 |
65 | # lemmatize tokens
66 | tokens = [lemma.lemmatize(i) for i in tokens]
67 |
68 | # remove non alphabetic characters
69 | tokens = [re.sub(r'[^a-z]', '', i) for i in tokens]
70 |
71 | # remove tokens shorter than 3 characters
72 | tokens = [i for i in tokens if len(i)>2]
73 |
74 | return (tokens, text)
75 |
76 |
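77 | # Minimal usage sketch (illustrative; assumes NLTK's WordNet data is
78 | # available, e.g. after running nltk.download('wordnet')):
79 | #
80 | #   tokens, text = preprocess_imageclef(u'The cats are running in the gardens')
81 | #   print(tokens)  # roughly ['cat', 'running', 'garden']
82 | #
83 | # Stop words ('the', 'are', 'in') are dropped, plurals are lemmatized, and
84 | # tokens shorter than 3 characters are discarded.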
--------------------------------------------------------------------------------
/experiments/utils.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import re
3 | from nltk.stem import WordNetLemmatizer
4 | from nltk.tokenize import RegexpTokenizer
5 | from stop_words import get_stop_words
6 | from nltk.stem.porter import PorterStemmer
7 | import gensim
8 | from gensim import utils, corpora, models
9 | from gensim.corpora.wikicorpus import filter_wiki
10 |
11 | filter_more = re.compile('(({\|)|(\|-)|(\|})|(\|)|(\!))(\s*\w+=((\".*?\")|([^ \t\n\r\f\v\|]+))\s*)+(({\|)|(\|-)|(\|})|(\|))?', re.UNICODE | re.DOTALL | re.MULTILINE)
12 |
13 | def preprocess(raw):
14 | # Initialize Tokenizer
15 | tokenizer = RegexpTokenizer(r'\w+')
16 |
17 | # Initialize Lemmatizer
18 | lemma = WordNetLemmatizer()
19 |
20 | # create English stop words list
21 | en_stop = get_stop_words('en')
22 |
23 | # Decode Wiki Markup entities and remove markup
24 | text = filter_wiki(raw)
25 | text = re.sub(filter_more, '', text)
26 |
27 | # clean and tokenize document string
28 | text = text.lower()
29 | tokens = tokenizer.tokenize(text)
30 |
31 | # remove stop words from tokens
32 | tokens = [i for i in tokens if not i in en_stop]
33 |
34 | # lemmatize tokens
35 | tokens = [lemma.lemmatize(i) for i in tokens]
36 |
37 | # remove non alphabetic characters
38 | tokens = [re.sub(r'[^a-z]', '', i) for i in tokens]
39 |
40 | # remove tokens shorter than 3 characters
41 | tokens = [i for i in tokens if len(i)>2]
42 |
43 | return tokens
44 |
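45 | # Usage sketch (illustrative): unlike the preprocess_* helpers in
46 | # experiments/preprocess_text.py, preprocess() returns only the token list:
47 | #
48 | #   tokens = preprocess(u'Dogs are barking loudly')  # roughly ['dog', 'barking', 'loudly']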
--------------------------------------------------------------------------------
/experiments/voc_2007_classification.py:
--------------------------------------------------------------------------------
1 | import sys,os
2 | import random
3 | import json
4 | import numpy as np
5 | import caffe
6 | from termcolor import colored
7 | from PIL import Image
8 | import matplotlib.pyplot as plt
9 | from sklearn import svm
10 | from sklearn.externals import joblib
11 | from sklearn.metrics import average_precision_score
12 | from sklearn import preprocessing
13 |
14 | ### Start : Extract the representation from specified layer and save in generated_data directory ###
15 | if len(sys.argv)<2:
16 | print colored('You must provide the layer from which to extract features, e.g. fc7, fc6, pool5, ...', 'red')
17 | quit()
18 |
19 | layer = sys.argv[1]
20 |
21 | caffe.set_mode_gpu()
22 |
23 | # Specify paths to model prototxt and model weights
24 | model_def = '../CNN/CaffeNet/deploy.prototxt'
25 | model_weights = '../CNN/CaffeNet/TextTopicNet_Wikipedia_ImageCLEF_40Topics.caffemodel'
26 | print colored('Model weights are loaded from : ' + model_weights, 'green')
27 |
28 | # Initialize caffe model instance with given weights and model prototxt
29 | net = caffe.Net(model_def,     # defines the structure of the model
30 |                 model_weights, # contains the trained weights
31 |                 caffe.TEST)    # use test mode (e.g., don't perform dropout)
32 |
33 | IMG_SIZE = 256
34 | MODEL_INPUT_SIZE = 227
35 | MEAN = np.array([104.00698793, 116.66876762, 122.67891434]) # BGR mean pixel values subtracted from inputs (standard Caffe convention)
36 |
37 | # Specify path to directory containing PASCAL VOC2007 images
38 | img_root = '/media/DADES/datasets/VOC2007/VOCdevkit/VOC2007/JPEGImages/'
39 | out_root = './generated_data/voc_2007_classification/features_'+layer+'/'
40 | if not os.path.exists(out_root):
41 | os.makedirs(out_root)
42 |
43 | # Get list of all file (image) names for VOC2007
44 | onlyfiles = [f for f in os.listdir(img_root) if os.path.isfile(os.path.join(img_root, f))]
45 |
46 | print colored('Starting image representation generation', 'green')
47 | # For given layer and each given input image, generate corresponding representation
48 | for sample in onlyfiles:
49 | im_filename = img_root+sample
50 | print im_filename
51 |
52 | im = Image.open(im_filename)
53 | im = im.resize((IMG_SIZE,IMG_SIZE)) # resize to IMG_SIZExIMG_SIZE
54 | im = im.crop((14,14,241,241)) # central crop of 227x227
55 | if len(np.array(im).shape) < 3:
56 | im = im.convert('RGB') # grayscale to RGB
57 | in_ = np.array(im, dtype=np.float32)
58 | in_ = in_[:,:,::-1] # switch channels RGB -> BGR
59 | in_ -= MEAN # subtract mean
60 | in_ = in_.transpose((2,0,1)) # transpose to channel x height x width order
61 |
62 | net.blobs['data'].data[...] = in_[np.newaxis,:,:,:]
63 | output = net.forward()
64 | output_prob = net.blobs[layer].data[0] # the output feature vector for the first image in the batch
65 | f = open(out_root+sample, 'w+')
66 | np.save(f, output_prob)
67 | f.close()
68 |
69 | print colored('Completed image representation generation.', 'green')
70 | ### End : Generating image representations for all images ###
71 |
72 | ### Start : Learn one vs all SVMs for each target class ###
73 | features_root = out_root
74 | svm_out_path = './generated_data/voc_2007_classification/'+ layer + '_SVM'
75 | if not os.path.exists(svm_out_path):
76 | os.makedirs(svm_out_path)
77 | classes = ['aeroplane','bicycle','bird','boat','bottle','bus','car','cat','chair','cow','diningtable','dog','horse','motorbike','person','pottedplant','sheep','sofa','train','tvmonitor'] # List of classes in PASCAL VOC2007
78 | cs = [13,14,15,16,17,18] # List of exponents for the SVM regularization parameter (C = 0.5**c)
79 |
80 | # Specify ground truth paths for PASCAL VOC2007 dataset
81 | gt_root = '/home/yash/LDA_wikipedia_imageCLEF/wiki_large/TextTopicNet/nets/CaffeNet/SVMs/VOC2007/GT_labels/'
82 | gt_train_sufix = '_train.txt'
83 | gt_val_sufix = '_val.txt'
84 | gt_test_sufix = '_test.txt'
85 |
86 | mAP2 = 0
87 |
88 | for cl in classes:
89 |
90 | print colored("Do grid search for class "+cl, 'green')
91 | with open(gt_root+cl+gt_train_sufix) as f:
92 | content = f.readlines()
93 | aux = np.load(features_root+content[0].split(' ')[0]+'.jpg')
94 | X = np.zeros((len(content),(aux.flatten()).shape[0]), dtype=np.float32)
95 | y = np.zeros(len(content))
96 | idx = 0
97 | for sample in content:
98 | data = sample.split(' ')
99 | if data[1] == '': data[1] = '1'
100 | X[idx,:] = np.load(features_root+data[0]+'.jpg').flatten()
101 | y[idx] = max(0,int(data[1]))
102 | idx = idx+1
103 |
104 | with open(gt_root+cl+gt_val_sufix) as f:
105 | content = f.readlines()
106 | XX = np.zeros((len(content),(aux.flatten()).shape[0]), dtype=np.float32)
107 | yy = np.zeros(len(content))
108 | idx = 0
109 | for sample in content:
110 | data = sample.split(' ')
111 | if data[1] == '': data[1] = '1'
112 | XX[idx,:] = np.load(features_root+data[0]+'.jpg').flatten()
113 | yy[idx] = max(0,int(data[1]))
114 | idx = idx+1
115 |
116 | bestAP=0
117 | bestC=-1
118 |
119 | scaler = preprocessing.StandardScaler().fit(X)
120 | joblib.dump(scaler, './generated_data/voc_2007_classification/features_'+layer+'/scaler.pkl')
121 | X_scaled = scaler.transform(X)
122 | XX_scaled = scaler.transform(XX)
123 |
124 | for c in cs:
125 | clf = svm.LinearSVC(C=pow(0.5,c))
126 | clf.fit(X_scaled, y)
127 | #yy_ = clf.predict(XX)
128 | yy_ = clf.decision_function(XX_scaled)
129 | AP = average_precision_score(yy, yy_)
130 | if AP > bestAP:
131 | bestAP = AP
132 | bestC=pow(0.5,c)
133 | print " Best validation AP :"+str(bestAP)+" found for C="+str(bestC)
134 | mAP2=mAP2+bestAP
135 | X_all = np.concatenate((X, XX), axis=0)
136 | scaler = preprocessing.StandardScaler().fit(X_all)
137 | X_all = scaler.transform(X_all)
138 | joblib.dump(scaler, './generated_data/voc_2007_classification/features_'+layer+'/scaler.pkl')
139 | print X.shape, XX.shape, X_all.shape
140 | y_all = np.concatenate((y, yy))
141 | clf = svm.LinearSVC(C=bestC)
142 | clf.fit(X_all, y_all)
143 | joblib.dump(clf, svm_out_path + '/clf_'+cl+'_'+layer+'.pkl')
144 | print " ... model saved as "+svm_out_path+'/clf_'+cl+'_'+layer+'.pkl'
145 |
146 | print "\nValidation mAP: "+str(mAP2/float(len(classes)))+" (this is an underestimate, you must run VOC_eval.m for mAP taking into account don't care objects)"
147 |
148 | ### End : Learn one vs all SVMs for PASCAL VOC 2007 ###
149 |
150 | ### Start : Testing of learned SVMs ###
151 | res_root = './generated_data/voc_2007_classification/'+layer+'_SVM/RES_labels/'
152 | if not os.path.exists(res_root):
153 | os.makedirs(res_root)
154 |
155 | mAP2=0
156 |
157 | for cl in classes:
158 |
159 | with open(gt_root+cl+gt_test_sufix) as f:
160 | content = f.readlines()
161 | print "Testing one vs. rest SVC for class "+cl+" for "+str(len(content))+" test samples"
162 | aux = np.load(features_root+content[0].split(' ')[0]+'.jpg')
163 | X = np.zeros((len(content),(aux.flatten()).shape[0]), dtype=np.float32)
164 | y = np.zeros(len(content))
165 | idx = 0
166 | for sample in content:
167 | data = sample.split(' ')
168 | if data[1] == '': data[1] = '1'
169 | X[idx,:] = np.load(features_root+data[0]+'.jpg').flatten()
170 | y[idx] = max(0,int(data[1]))
171 | idx = idx+1
172 |
173 | print " ... loading model from "+svm_out_path+'clf_'+cl+'_'+layer+'.pkl'
174 | clf = joblib.load(svm_out_path+'/clf_'+cl+'_'+layer+'.pkl')
175 | scaler = joblib.load('./generated_data/voc_2007_classification/features_'+layer+'/scaler.pkl')
176 | X = scaler.transform(X)
177 |
178 | # y_ = clf.predict(X)
179 | y_ = clf.decision_function(X)
180 | AP = average_precision_score(y, y_)
181 | print " ... Test AP: "+str(AP)
182 | mAP2 = mAP2+AP
183 |
184 | fr = open(res_root+'RES_cls_test_'+cl+'.txt','w+')
185 | idx = 0
186 | for sample in content:
187 | data = sample.split(' ')
188 | fr.write(str(data[0])+' '+str(y_[idx])+'\n')
189 | idx = idx+1
190 | fr.close()
191 |
192 | print colored("\nTest mAP: "+str(mAP2/float(len(classes)))+" (this is an underestimate, you must run VOC_eval.m for mAP taking into account don't care objects)", 'green')
193 | ### End : Testing of learned SVMs ###
194 |
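195 | # Note: cs above holds exponents, not C values; the grid actually searched
196 | # is C = 0.5**c for c in [13..18], i.e. from about 1.2e-4 down to 3.8e-6.
197 | # A wider sweep (a hypothetical variation, not used in the experiments above)
198 | # could be:
199 | #
200 | #   cs = range(10, 22)  # C = 0.5**c from ~9.8e-4 down to ~4.8e-7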
--------------------------------------------------------------------------------
/texttopicnet.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lluisgomez/TextTopicNet/37a8341924e91331320085631c45d75fc9b4611d/texttopicnet.png
--------------------------------------------------------------------------------
/wikipedia_data/README.md:
--------------------------------------------------------------------------------
1 | **TextTopicNet - Self-Supervised Learning of Visual Features Through Embedding Images on Semantic Text Spaces**
2 |
3 | Y. Patel, L. Gomez, R. Gomez, M. Rusiñol, D. Karatzas, C.V. Jawahar.
4 | ```
5 | @article{patel2018texttopicnet,
6 | title={TextTopicNet-Self-Supervised Learning of Visual Features Through Embedding Images on Semantic Text Spaces},
7 | author={Patel, Yash and Gomez, Lluis and Gomez, Raul and Rusi{\~n}ol, Mar{\c{c}}al and Karatzas, Dimosthenis and Jawahar, CV},
8 | journal={arXiv preprint arXiv:1807.02110},
9 | year={2018}
10 | }
11 | ```
12 | ## Training Dataset
13 |
14 | Download the image-text article co-occurring dataset from [here](http://datasets.cvc.uab.es/rrc/wikipedia_data/)
15 |
16 | Make sure to download the following:
17 | * All the images [here](http://datasets.cvc.uab.es/rrc/wikipedia_data/images/)
18 | * Wikipedia text corpus dump [here](http://datasets.cvc.uab.es/rrc/wikipedia_data/text_dump/)
19 | * List of images [here](http://datasets.cvc.uab.es/rrc/wikipedia_data/list_of_images.json)
20 |
21 | ## Code Snippets
22 | Following are some utility functions to access the data:
23 | * The wikipedia text corpus dump associates each article with a unique id. To obtain the set of text documents used for training in our paper (only text articles with at least 50 words), run the following command:
24 | ``python get_all_docs.py <path to list_of_images.json>``
25 | * To obtain the list of images for a given text article number, run the following command:
26 | ``python get_images.py <path to list_of_images.json> <images root directory> <article number>``
27 |
28 | ``get_images.py`` also provides a snippet to generate a python dictionary with text article numbers as keys and lists of co-occurring images as values (see the sketch below).
29 |
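30 | For reference, a minimal sketch of that dictionary construction (an illustration; it assumes, as in the scripts in this directory, that the article number is the third path component of each entry in ``list_of_images.json``, before the ``#``):
31 | ```python
32 | import json
33 | 
34 | # Group image paths by the article number encoded in their filenames.
35 | images_by_article = {}
36 | for path in json.load(open('list_of_images.json')):
37 |     article_num = path.split('/')[2].split('#')[0]
38 |     images_by_article.setdefault(article_num, []).append(path)
39 | ```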
--------------------------------------------------------------------------------
/wikipedia_data/get_all_docs.py:
--------------------------------------------------------------------------------
1 | ### git @ yash0307 ###
2 |
3 | import logging
4 | import os.path
5 | import sys
6 | import gensim
7 | import bz2
8 | import json
9 | import sys,re
10 | from nltk.tokenize import RegexpTokenizer
11 | from stop_words import get_stop_words
12 | from nltk.stem.porter import PorterStemmer
13 | from gensim import utils, corpora, models
14 | from gensim.corpora.wikicorpus import remove_markup, process_article, remove_template, filter_wiki
15 | import logging
16 | import re
17 | from PIL import Image
18 |
19 | if len(sys.argv) < 2:
20 | print('Please specify the following:')
21 | print('\t path to list of all images (list_of_images.json)')
22 | print('An example of overall command is: python get_all_docs.py /home/yash/list_of_images.json')
23 | sys.exit(1)
24 |
25 | list_of_images = json.load(open(sys.argv[1], 'r'))
26 |
27 | # Obtain the list of article numbers referenced in the image paths
28 | def all_articles(list_of_images):
29 | list_articles = []
30 | for given_im in list_of_images:
31 | im_article_num = given_im.split('/')[2].split('#')[0] # article number assumed to be the third path component, before '#'
32 | list_articles.append(im_article_num)
33 | return list_articles
34 |
35 | # Get a list of all articles used in training
36 | list_articles = list(set(all_articles(list_of_images)))
37 |
38 | # Dump the list of all articles
39 | json.dump(list_articles, open('list_all_articles.json', 'w'))
40 |
--------------------------------------------------------------------------------
/wikipedia_data/get_images.py:
--------------------------------------------------------------------------------
1 | ### git @ yash0307 ###
2 |
3 | import logging
4 | import os.path
5 | import sys
6 | import gensim
7 | import bz2
8 | import json
9 | import sys,re
10 | from nltk.tokenize import RegexpTokenizer
11 | from stop_words import get_stop_words
12 | from nltk.stem.porter import PorterStemmer
13 | from gensim import utils, corpora, models
14 | from gensim.corpora.wikicorpus import remove_markup, process_article, remove_template, filter_wiki
15 | import logging
16 | import re
17 | from PIL import Image
18 |
19 | if len(sys.argv) < 4:
20 | print('Please specify the following (in same order):')
21 | print('\t path to list of all images (list_of_images.json)')
22 | print('\t path of root directory where images are located')
23 | print('\t article number in wikipedia corpus for which images are required')
24 | print('An example of overall command is: python get_images.py /home/yash/list_of_images.json /media/DADES/yash/ 25')
25 | sys.exit(1)
26 |
27 | list_of_images = json.load(open(sys.argv[1], 'r'))
28 | root_dir = sys.argv[2]
29 | text_article = sys.argv[3]
30 |
31 | # Obtain a dictionary of image paths with article number as key
32 | def article_im_dir(list_of_images, root_dir):
33 | article_im_dir = {}
34 | for given_im in list_of_images:
35 | im_article_num = given_im.split('/')[2].split('#')[0] # article number assumed to be the third path component, before '#'
36 | try:
37 | article_im_dir[im_article_num].append(root_dir+given_im)
38 | except KeyError:
39 | article_im_dir[im_article_num] = [root_dir+given_im]
40 | return article_im_dir
41 |
42 | # Get a dictionary with key = article number, value = list of image paths
43 | article_im_dir = article_im_dir(list_of_images, root_dir)
44 |
45 | try:
46 | print(article_im_dir[text_article])
47 | for given_im in article_im_dir[text_article]:
48 | im = Image.open(given_im)
49 | print('Successfully loaded: ' + str(given_im))
50 | except KeyError:
51 | print('No images found for the given article number. This can happen if the article number corresponds to a meta-file or the article is shorter than 50 words.')
52 |
--------------------------------------------------------------------------------