├── .DS_Store
├── .idea
├── encodings.xml
├── keras-yolo3-master.iml
├── misc.xml
├── modules.xml
└── workspace.xml
├── 1.py
├── 3.py
├── 4.py
├── LICENSE
├── README.md
├── convert.py
├── font
├── .DS_Store
└── font.ttf
├── kmeans.py
├── logs
├── .DS_Store
└── 000
│ └── .DS_Store
├── model_data
├── .DS_Store
├── 1.png
├── 2.png
├── 3.png
├── label.txt
├── my_anchors.txt
└── my_classes.txt
├── result
└── result.csv
├── sample
├── .DS_Store
├── 0fbf0dd31f2c1747f245e12c6486612c.jpeg
└── cctvnew3.jpg
├── train.py
├── xml_to_data.py
├── yolo.py
├── yolo3
├── .DS_Store
├── __init__.py
├── __pycache__
│ ├── __init__.cpython-36.pyc
│ ├── __init__.cpython-37.pyc
│ ├── model.cpython-36.pyc
│ ├── model.cpython-37.pyc
│ ├── utils.cpython-36.pyc
│ └── utils.cpython-37.pyc
├── model.py
└── utils.py
└── yolo_images.py
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/.DS_Store
--------------------------------------------------------------------------------
/.idea/encodings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/.idea/keras-yolo3-master.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/.idea/workspace.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
46 |
47 |
48 |
49 | ,39
50 | 0
51 | val_split
52 | yolo
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 |
226 |
227 |
228 |
229 |
230 |
231 |
232 |
233 |
234 |
235 |
236 |
237 |
238 |
239 |
240 |
241 |
242 |
243 |
244 |
245 |
246 |
247 |
248 |
249 |
250 |
251 |
252 |
253 |
254 |
255 |
256 |
257 |
258 |
259 |
260 |
261 |
262 |
263 |
264 |
265 |
266 | 1556275283262
267 |
268 |
269 | 1556275283262
270 |
271 |
272 |
273 |
274 |
275 |
276 |
277 |
278 |
279 |
280 |
281 |
282 |
283 |
284 |
285 |
286 |
287 |
288 |
289 |
290 |
291 |
292 |
293 |
294 |
295 |
296 |
297 |
298 |
299 |
300 |
301 |
302 |
303 |
304 |
305 |
306 |
307 |
308 |
309 |
310 |
311 |
312 |
313 |
314 |
315 |
316 |
317 |
318 |
319 |
320 |
321 |
322 |
323 |
324 |
325 |
326 |
327 |
328 |
329 |
330 |
331 |
332 |
333 |
334 |
335 |
336 |
337 |
338 |
339 |
340 |
341 |
342 |
343 |
344 |
345 |
346 |
347 |
348 |
349 |
350 |
351 |
352 |
353 |
354 |
355 |
356 |
357 |
358 |
359 |
360 |
361 |
362 |
363 |
364 |
365 |
366 |
367 |
368 |
369 |
370 |
371 |
372 |
373 |
374 |
375 |
376 |
377 |
378 |
379 |
380 |
381 |
382 |
383 |
384 |
385 |
386 |
387 |
388 |
389 |
390 |
391 |
392 |
393 |
394 |
395 |
396 |
397 |
398 |
399 |
400 |
401 |
402 |
403 |
404 |
405 |
406 |
407 |
408 |
409 |
410 |
411 |
412 |
413 |
414 |
415 |
416 |
417 |
418 |
419 |
420 |
421 |
422 |
423 |
424 |
425 |
426 |
427 |
428 |
429 |
430 |
431 |
432 |
433 |
434 |
435 |
436 |
437 |
438 |
439 |
440 |
441 |
442 |
443 |
444 |
445 |
446 |
447 |
448 |
449 |
450 |
451 |
452 |
453 |
454 |
455 |
456 |
457 |
458 |
459 |
460 |
461 |
462 |
463 |
464 |
465 |
466 |
467 |
468 |
469 |
470 |
471 |
472 |
473 |
474 |
475 |
476 |
477 |
478 |
479 |
480 |
481 |
482 |
483 |
484 |
485 |
486 |
487 |
488 |
489 |
490 |
491 |
--------------------------------------------------------------------------------
/1.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | import numpy as np
3 | import os, cv2
4 |
# Class names are the stringified ids "1" .. "50"; generated instead of
# hand-writing all fifty literals.
LABELS = [str(i) for i in range(1, 51)]
# Folders holding the training images and their VOC-style XML annotations.
train_image_folder = "model_data/train/"
train_annot_folder = "model_data/label_train/"
10 |
11 | import xml.etree.ElementTree as ET
12 |
13 |
def parse_annotation(ann_dir, img_dir, labels=()):
    '''Parse PASCAL-VOC style XML annotation files.

    Args:
        ann_dir: directory containing the .xml annotation files.
        img_dir: directory containing the corresponding image files.
        labels: optional collection of class names; objects whose name is
            not listed are skipped.  (Tuple default: the original used a
            mutable `[]` default, a classic Python pitfall.)

    Returns:
        all_imgs: list of dicts, one per image with at least one kept
            object, with keys 'filename', 'width', 'height' and 'object'
            (a list of {'name', 'xmin', 'ymin', 'xmax', 'ymax'} dicts).
        seen_labels: dict mapping class name -> number of objects seen.

    Raises:
        FileNotFoundError: if an annotated image file does not exist.
    '''
    all_imgs = []
    seen_labels = {}

    for ann in sorted(os.listdir(ann_dir)):
        if "xml" not in ann:
            continue
        img = {'object': []}

        # os.path.join works whether or not ann_dir has a trailing slash
        # (the original `ann_dir + ann` broke without one).
        tree = ET.parse(os.path.join(ann_dir, ann))

        for elem in tree.iter():
            if 'filename' in elem.tag:
                path_to_image = os.path.join(img_dir, elem.text)
                img['filename'] = path_to_image
                # Fail fast if the annotation points at a missing image;
                # a bare `assert False` would be stripped under `python -O`.
                if not os.path.exists(path_to_image):
                    raise FileNotFoundError(
                        "file does not exist!\n{}".format(path_to_image))
            if 'width' in elem.tag:
                img['width'] = int(elem.text)
            if 'height' in elem.tag:
                img['height'] = int(elem.text)
            if 'object' in elem.tag or 'part' in elem.tag:
                obj = {}

                for attr in list(elem):
                    if 'name' in attr.tag:
                        obj['name'] = attr.text

                        # Skip objects whose class is not whitelisted.
                        if len(labels) > 0 and obj['name'] not in labels:
                            break

                        img['object'] += [obj]
                        seen_labels[obj['name']] = seen_labels.get(obj['name'], 0) + 1

                    if 'bndbox' in attr.tag:
                        for dim in list(attr):
                            if 'xmin' in dim.tag:
                                obj['xmin'] = int(round(float(dim.text)))
                            if 'ymin' in dim.tag:
                                obj['ymin'] = int(round(float(dim.text)))
                            if 'xmax' in dim.tag:
                                obj['xmax'] = int(round(float(dim.text)))
                            if 'ymax' in dim.tag:
                                obj['ymax'] = int(round(float(dim.text)))

        if len(img['object']) > 0:
            all_imgs += [img]

    return all_imgs, seen_labels
75 |
76 |
## Parse annotations
train_image, seen_train_labels = parse_annotation(train_annot_folder, train_image_folder, labels=LABELS)
print("N train = {}".format(len(train_image)))

print(train_image[:2])
# Bar chart: number of annotated objects per class.
y_pos = np.arange(len(seen_train_labels))
fig = plt.figure(figsize=(13, 10))
ax = fig.add_subplot(1, 1, 1)
ax.barh(y_pos, list(seen_train_labels.values()))
ax.set_yticks(y_pos)
ax.set_yticklabels(list(seen_train_labels.keys()))
ax.set_title("The total number of objects = {} in {} images".format(
    np.sum(list(seen_train_labels.values())), len(train_image)
))
plt.show()

# Collect every box's width/height, normalised by its image size,
# as the feature set for anchor clustering below.
wh = []
for anno in train_image:
    aw = float(anno['width'])  # width of the original image
    ah = float(anno['height'])  # height of the original image
    for obj in anno["object"]:
        w = (obj["xmax"] - obj["xmin"]) / aw  # make the width range between [0,GRID_W)
        h = (obj["ymax"] - obj["ymin"]) / ah  # make the width range between [0,GRID_H)
        temp = [w, h]
        wh.append(temp)
wh = np.array(wh)
print("clustering feature data is ready. shape = (N object, width and height) = {}".format(wh.shape))

# Scatter plot of the normalised box sizes before clustering.
plt.figure(figsize=(10, 10))
plt.scatter(wh[:, 0], wh[:, 1], alpha=0.3)
plt.title("Clusters", fontsize=20)
plt.xlabel("normalized width", fontsize=20)
plt.ylabel("normalized height", fontsize=20)
plt.show()
111 |
112 |
def iou(box, clusters):
    '''IoU of one box against every cluster centre.

    :param box: np.array of shape (2,) containing w and h
    :param clusters: np.array of shape (N cluster, 2)
    '''
    # Boxes are treated as sharing a corner, so the overlap is just the
    # elementwise minimum of the two sizes.
    overlap_w = np.minimum(clusters[:, 0], box[0])
    overlap_h = np.minimum(clusters[:, 1], box[1])
    intersection = overlap_w * overlap_h

    union = box[0] * box[1] + clusters[:, 0] * clusters[:, 1] - intersection
    return intersection / union
128 |
129 |
def kmeans(boxes, k, dist=np.median, seed=1):
    """
    k-means over box sizes using 1 - IoU as the distance metric.

    :param boxes: numpy array of shape (r, 2), where r is the number of rows
    :param k: number of clusters
    :param dist: aggregation used to recompute centres (median by default)
    :param seed: RNG seed for the initial centre choice
    :return: (clusters, nearest_clusters, distances) — the (k, 2) centres,
        the per-box cluster index, and the final (r, k) distance matrix
    """
    n = boxes.shape[0]
    distances = np.empty((n, k))  # one row per box, one column per centre
    assignment = np.zeros((n,))

    np.random.seed(seed)

    # Initialise the centres from k distinct randomly chosen boxes.
    clusters = boxes[np.random.choice(n, k, replace=False)]

    while True:
        # Distance of every box to every current centre.
        for col in range(k):
            distances[:, col] = 1 - iou(clusters[col], boxes)

        nearest = np.argmin(distances, axis=1)

        # Converged once no box switches cluster.
        if (assignment == nearest).all():
            return clusters, nearest, distances

        # Move each centre to the aggregate of its member boxes.
        for col in range(k):
            clusters[col] = dist(boxes[nearest == col], axis=0)

        assignment = nearest
165 |
166 |
# Sweep k = 2..9 and record the clustering quality for each value of k.
kmax = 10
dist = np.mean
results = {}

for k in range(2, kmax):
    clusters, nearest_clusters, distances = kmeans(wh, k, seed=2, dist=dist)
    # Mean distance (1 - IoU) of every box to its own cluster centre.
    WithinClusterMeanDist = np.mean(distances[np.arange(distances.shape[0]), nearest_clusters])
    result = {"clusters": clusters,
              "nearest_clusters": nearest_clusters,
              "distances": distances,
              "WithinClusterMeanDist": WithinClusterMeanDist}
    print("{:2.0f} clusters: mean IoU = {:5.4f}".format(k, 1 - result["WithinClusterMeanDist"]))
    results[k] = result
180 |
181 |
def plot_cluster_result(plt, clusters, nearest_clusters, WithinClusterSumDist, wh, k):
    """Scatter each cluster's boxes in its own colour and mark the centres."""
    for cid in np.unique(nearest_clusters):
        members = nearest_clusters == cid
        colour = current_palette[cid]  # module-level seaborn palette
        plt.rc('font', size=8)
        plt.plot(wh[members, 0], wh[members, 1], "p",
                 color=colour,
                 alpha=0.5, label="cluster = {}, N = {:6.0f}".format(cid, np.sum(members)))
        # Label the cluster centre on top of its points.
        plt.text(clusters[cid, 0],
                 clusters[cid, 1],
                 "c{}".format(cid),
                 fontsize=20, color="red")
    plt.title("Clusters=%d" % k)
    plt.xlabel("width")
    plt.ylabel("height")
    plt.legend(title="Mean IoU = {:5.4f}".format(WithinClusterSumDist))
198 |
199 |
import seaborn as sns

# One colour per cluster id, drawn from seaborn's named-colour palette.
current_palette = list(sns.xkcd_rgb.values())

# Plot the cluster assignments for k = 5..8 in a 2-column grid.
figsize = (15, 35)
count = 1
fig = plt.figure(figsize=figsize)
for k in range(5, 9):
    result = results[k]
    clusters = result["clusters"]
    nearest_clusters = result["nearest_clusters"]
    WithinClusterSumDist = result["WithinClusterMeanDist"]

    # add_subplot requires integer grid dimensions; `kmax / 2` is a float
    # in Python 3 and raises on current matplotlib.
    ax = fig.add_subplot(kmax // 2, 2, count)
    plot_cluster_result(plt, clusters, nearest_clusters, 1 - WithinClusterSumDist, wh, k)
    count += 1
plt.show()
217 |
--------------------------------------------------------------------------------
/3.py:
--------------------------------------------------------------------------------
# Get the paths of the files inside a folder
2 | import os
def getFilePathList(dirPath, partOfFileName=''):
    """Return paths of files directly inside dirPath whose name contains
    partOfFileName (substring match, so '.jpg' also matches 'a.jpg.bak')."""
    # os.listdir only inspects the top level; the original
    # list(os.walk(dirPath))[0][2] walked the entire tree just to take
    # the first entry.  isfile() keeps the original files-only behaviour.
    fileName_list = [k for k in os.listdir(dirPath)
                     if os.path.isfile(os.path.join(dirPath, k)) and partOfFileName in k]
    return [os.path.join(dirPath, k) for k in fileName_list]
8 |
# This block checks whether any image in the labelled folder was left unlabelled
def check_1(dirPath):
    """Report every .jpg in dirPath that has no matching .xml annotation."""
    missing = False
    for jpgFilePath in getFilePathList(dirPath, '.jpg'):
        # The annotation shares the basename, with .xml instead of .jpg.
        xmlFilePath = jpgFilePath[:-4] + '.xml'
        if not os.path.exists(xmlFilePath):
            print('%s this picture is not marked.' % jpgFilePath)
            missing = True
    if not missing:
        print('congratulation! it is been verified that all jpg file are marked.')
20 |
# This block checks the labelled xml files for misspelled object class names
22 | import xml.etree.ElementTree as ET
def check_2(dirPath, className_list):
    """Report every annotated object whose <name> is not in className_list."""
    valid_names = set(className_list)
    clean = True
    for xmlFilePath in getFilePathList(dirPath, '.xml'):
        with open(xmlFilePath) as file:
            root = ET.XML(file.read())
        for object_item in root.findall('object'):
            className = object_item.find('name').text
            if className not in valid_names:
                print('%s this xml file has wrong class name "%s" ' % (xmlFilePath, className))
                clean = False
    if clean:
        print('congratulation! it is been verified that all xml file are correct.')
40 |
if __name__ == '__main__':
    # Directory holding both the .jpg images and their .xml annotations.
    dirPath = '/Users/zx/Desktop/game/train/'
    # Valid class names are the stringified ids "0" .. "50".
    className_list = []
    for i in range(51):
        className_list.append(str(i))
    check_1(dirPath)
    check_2(dirPath, className_list)
--------------------------------------------------------------------------------
/4.py:
--------------------------------------------------------------------------------
# Get the paths of the files inside a folder
2 | import os
3 |
4 |
def getFilePathList(dirPath, partOfFileName=''):
    """Return paths of files directly inside dirPath whose name contains
    partOfFileName (substring match, so '.jpg' also matches 'a.jpg.bak')."""
    # os.listdir only inspects the top level; the original
    # list(os.walk(dirPath))[0][2] walked the entire tree just to take
    # the first entry.  isfile() keeps the original files-only behaviour.
    fileName_list = [k for k in os.listdir(dirPath)
                     if os.path.isfile(os.path.join(dirPath, k)) and partOfFileName in k]
    return [os.path.join(dirPath, k) for k in fileName_list]
10 |
11 |
# Generate rescaled xml annotation files
13 | import xml.etree.ElementTree as ET
14 |
15 |
def generateNewXmlFile(old_xmlFilePath, new_xmlFilePath, new_size):
    """Rescale one VOC annotation to new_size and write it to new_xmlFilePath.

    Any failure while rescaling (e.g. a zero or malformed size field) is
    caught, reported, and no output file is written.
    """
    new_width, new_height = new_size
    with open(old_xmlFilePath) as file:
        fileContent = file.read()
    root = ET.XML(fileContent)
    # x scale factor comes from the <size><width> node.
    width = root.find('size').find('width')
    old_width = int(width.text)
    try:
        width_times = new_width / old_width
        width.text = str(new_width)

        # y scale factor comes from the <size><height> node.
        height = root.find('size').find('height')
        height_times = new_height / int(height.text)
        height.text = str(new_height)

        # Rescale the four corner coordinates of every annotated object:
        # x coordinates by width_times, y coordinates by height_times.
        for object_item in root.findall('object'):
            bndbox = object_item.find('bndbox')
            for tag, factor in (('xmin', width_times), ('ymin', height_times),
                                ('xmax', width_times), ('ymax', height_times)):
                node = bndbox.find(tag)
                node.text = str(int(int(node.text) * factor))

        ET.ElementTree(root).write(new_xmlFilePath)
    except Exception as e:
        print("错误:"+old_xmlFilePath)
53 |
54 |
# Rescale all the xml files in a folder
def batch_modify_xml(old_dirPath, new_dirPath, new_size):
    """Rescale every .xml annotation in old_dirPath into new_dirPath."""
    for old_path in getFilePathList(old_dirPath, '.xml'):
        # Keep the original file name; only the directory changes.
        target = os.path.join(new_dirPath, os.path.split(old_path)[1])
        generateNewXmlFile(old_path, target, new_size)
62 |
63 |
# Generate resized jpg files
65 | from PIL import Image
66 |
67 |
def generateNewJpgFile(old_jpgFilePath, new_jpgFilePath, new_size):
    """Resize one image to new_size and save it to new_jpgFilePath.

    Images in a mode JPEG cannot store (e.g. RGBA, P) are converted to
    RGB and saved again.
    """
    old_image = Image.open(old_jpgFilePath)
    # Image.ANTIALIAS was removed in Pillow 10; LANCZOS is the same filter
    # under its proper name and exists in every Pillow version.
    new_image = old_image.resize(new_size, Image.LANCZOS)
    try:
        new_image.save(new_jpgFilePath)
    except Exception as e:
        # JPEG cannot encode alpha/palette modes; retry after converting.
        captcha = new_image.convert('RGB')
        captcha.save(new_jpgFilePath)
76 |
77 |
78 |
79 |
# Resize all the jpg files in a folder
def batch_modify_jpg(old_dirPath, new_dirPath, new_size):
    """Resize the .jpg counterpart of every annotated (.xml) image."""
    if not os.path.isdir(new_dirPath):
        os.makedirs(new_dirPath)
    # Only images that actually have an annotation are converted, hence
    # the listing is driven by the .xml files.
    for xmlFilePath in getFilePathList(old_dirPath, '.xml'):
        jpg_src = xmlFilePath[:-4] + '.jpg'
        jpg_dst = os.path.join(new_dirPath, os.path.split(jpg_src)[1])
        generateNewJpgFile(jpg_src, jpg_dst, new_size)
90 |
91 |
if __name__ == '__main__':
    # Source folder with the original images and annotations.
    old_dirPath = '/Users/zx/Desktop/game/train/'
    # Target size expected by the YOLO network input (416x416).
    new_width = 416
    new_height = 416
    new_size = (new_width, new_height)
    new_dirPath = 'ximages_%sx%s' % (str(new_width), str(new_height))
    batch_modify_jpg(old_dirPath, new_dirPath, new_size)
    batch_modify_xml(old_dirPath, new_dirPath, new_size)
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 ZzzzzZXxxX
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
### 部分实验效果(图片来自百度图片爬取)
2 | 
3 | 
4 | ##### 本项目里有30类logo
5 | 
6 | ## 快速开始
7 | #### 1. 下载本项目预训练 权重
8 | - 权重1
9 | ```
10 | 链接:https://pan.baidu.com/s/1sanx0wELCMmektdHNHxkhQ 密码:6rzz
11 | ```
12 | - 权重2
13 | ```
14 | 链接:https://pan.baidu.com/s/1N9cTopyEcB-sqdw-FLs4Rw 密码:5cij
15 | ```
16 | #### 2. 修改yolo.py中第24行权重路径
17 | #### 3. 将需要检测图片放入sample文件夹中
18 | #### 4. 运行检测
19 | ```
20 | python yolo_images.py
21 | ```
22 | ## 训练
23 | #### 训练自己的数据 无需使用 预训练的权重 (此方法适用于各类数据)
24 | #### step 1
25 | - 使用labelImg对数据进行标记
26 | - 得到xml文件,放置于./model_data/label_train/将图片数据放在于./model_data/train/
27 | (建议图片宽高大于416,不然影响训练)
28 | - 将数据类别写入my_classes.txt中(本项目中name_classes.txt为自定义文件,因为数据标记时,标记的为类别id,为了方便检测时直接输出类别,自己数据预测时将yolo.py中的classes_path修改为自己的)
29 | #### step 2
30 | - 执行xml_to_data.py 生成 kitti_simple_label.txt
31 | ```
32 | python xml_to_data.py
33 | ```
34 | #### step 3
35 | - k-means 聚类算法生成对应自己样本的 anchor box 尺寸 生成 my_anchors.txt
36 | ```
37 | python kmeans.py
38 | ```
39 | #### step 4
40 | - 开始训练(建议epochs大于500,如果内存溢出可减小batch_size。其他参数,按照自己数据,自行修改。)
41 | ```
42 | python train.py
43 | ```
44 |
45 |
46 | ## 项目环境依赖
47 | ```
48 | tensorflow 1.13.1
49 | Keras 2.2.4
50 | h5py 2.8.0
51 | opencv-python 4.0.0.21
52 | numpy 1.16.2
53 | Anaconda3
54 | ```
55 |
--------------------------------------------------------------------------------
/convert.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | """
3 | Reads Darknet config and weights and creates Keras model with TF backend.
4 |
5 | """
6 |
7 | import argparse
8 | import configparser
9 | import io
10 | import os
11 | from collections import defaultdict
12 |
13 | import numpy as np
14 | from keras import backend as K
15 | from keras.layers import (Conv2D, Input, ZeroPadding2D, Add,
16 | UpSampling2D, MaxPooling2D, Concatenate)
17 | from keras.layers.advanced_activations import LeakyReLU
18 | from keras.layers.normalization import BatchNormalization
19 | from keras.models import Model
20 | from keras.regularizers import l2
21 | from keras.utils.vis_utils import plot_model as plot
22 |
23 |
# Command-line interface: positional paths to the Darknet cfg/weights pair
# and the Keras output file, plus optional flags for plotting the model
# and for exporting weights only.
parser = argparse.ArgumentParser(description='Darknet To Keras Converter.')
parser.add_argument('config_path', help='Path to Darknet cfg file.')
parser.add_argument('weights_path', help='Path to Darknet weights file.')
parser.add_argument('output_path', help='Path to output Keras model file.')
parser.add_argument(
    '-p',
    '--plot_model',
    help='Plot generated Keras model and save as image.',
    action='store_true')
parser.add_argument(
    '-w',
    '--weights_only',
    help='Save as Keras weights file instead of model file.',
    action='store_true')
38 |
def unique_config_sections(config_file):
    """Convert all config sections to have unique names.

    Darknet cfg files repeat section headers (e.g. many [convolutional]);
    configparser requires unique names, so each occurrence gets a numeric
    suffix.  Returns a rewound StringIO of the rewritten config.
    """
    counters = defaultdict(int)
    stream = io.StringIO()
    with open(config_file) as fin:
        for raw in fin:
            if raw.startswith('['):
                section = raw.strip().strip('[]')
                renamed = '{}_{}'.format(section, counters[section])
                counters[section] += 1
                raw = raw.replace(section, renamed)
            stream.write(raw)
    stream.seek(0)
    return stream
56 |
57 | # %%
def _main(args):
    """Convert a Darknet cfg + .weights pair into a Keras .h5 model.

    Walks the cfg section by section, builds the matching Keras layer
    graph, and copies the serialized Darknet weight stream into it in
    order.  The weight stream and the cfg must therefore stay in sync.
    """
    config_path = os.path.expanduser(args.config_path)
    weights_path = os.path.expanduser(args.weights_path)
    assert config_path.endswith('.cfg'), '{} is not a .cfg file'.format(
        config_path)
    assert weights_path.endswith(
        '.weights'), '{} is not a .weights file'.format(weights_path)

    output_path = os.path.expanduser(args.output_path)
    assert output_path.endswith(
        '.h5'), 'output path {} is not a .h5 file'.format(output_path)
    output_root = os.path.splitext(output_path)[0]

    # Load weights and config.
    print('Loading weights.')
    weights_file = open(weights_path, 'rb')
    # Darknet weight files begin with a 3 x int32 version header.
    major, minor, revision = np.ndarray(
        shape=(3, ), dtype='int32', buffer=weights_file.read(12))
    # Newer Darknet versions store the "images seen" counter as int64.
    if (major*10+minor)>=2 and major<1000 and minor<1000:
        seen = np.ndarray(shape=(1,), dtype='int64', buffer=weights_file.read(8))
    else:
        seen = np.ndarray(shape=(1,), dtype='int32', buffer=weights_file.read(4))
    print('Weights Header: ', major, minor, revision, seen)

    print('Parsing Darknet config.')
    unique_config_file = unique_config_sections(config_path)
    cfg_parser = configparser.ConfigParser()
    cfg_parser.read_file(unique_config_file)

    print('Creating Keras model.')
    # Fully convolutional network: spatial dims are left unspecified.
    input_layer = Input(shape=(None, None, 3))
    prev_layer = input_layer
    all_layers = []

    weight_decay = float(cfg_parser['net_0']['decay']
                         ) if 'net_0' in cfg_parser.sections() else 5e-4
    count = 0  # number of weight scalars consumed from the stream so far
    out_index = []  # indices of the layers that feed the [yolo] heads
    for section in cfg_parser.sections():
        print('Parsing section {}'.format(section))
        if section.startswith('convolutional'):
            filters = int(cfg_parser[section]['filters'])
            size = int(cfg_parser[section]['size'])
            stride = int(cfg_parser[section]['stride'])
            pad = int(cfg_parser[section]['pad'])
            activation = cfg_parser[section]['activation']
            batch_normalize = 'batch_normalize' in cfg_parser[section]

            padding = 'same' if pad == 1 and stride == 1 else 'valid'

            # Setting weights.
            # Darknet serializes convolutional weights as:
            # [bias/beta, [gamma, mean, variance], conv_weights]
            prev_layer_shape = K.int_shape(prev_layer)

            weights_shape = (size, size, prev_layer_shape[-1], filters)
            darknet_w_shape = (filters, weights_shape[2], size, size)
            weights_size = np.product(weights_shape)

            print('conv2d', 'bn'
                  if batch_normalize else ' ', activation, weights_shape)

            # With batch norm, this vector is the BN beta; otherwise it
            # is the conv bias proper.
            conv_bias = np.ndarray(
                shape=(filters, ),
                dtype='float32',
                buffer=weights_file.read(filters * 4))
            count += filters

            if batch_normalize:
                bn_weights = np.ndarray(
                    shape=(3, filters),
                    dtype='float32',
                    buffer=weights_file.read(filters * 12))
                count += 3 * filters

                bn_weight_list = [
                    bn_weights[0],  # scale gamma
                    conv_bias,  # shift beta
                    bn_weights[1],  # running mean
                    bn_weights[2]  # running var
                ]

            conv_weights = np.ndarray(
                shape=darknet_w_shape,
                dtype='float32',
                buffer=weights_file.read(weights_size * 4))
            count += weights_size

            # DarkNet conv_weights are serialized Caffe-style:
            # (out_dim, in_dim, height, width)
            # We would like to set these to Tensorflow order:
            # (height, width, in_dim, out_dim)
            conv_weights = np.transpose(conv_weights, [2, 3, 1, 0])
            conv_weights = [conv_weights] if batch_normalize else [
                conv_weights, conv_bias
            ]

            # Handle activation.
            act_fn = None
            if activation == 'leaky':
                pass  # Add advanced activation later.
            elif activation != 'linear':
                raise ValueError(
                    'Unknown activation function `{}` in section {}'.format(
                        activation, section))

            # Create Conv2D layer
            if stride > 1:
                # Darknet uses left and top padding instead of 'same' mode
                prev_layer = ZeroPadding2D(((1, 0), (1, 0)))(prev_layer)
            conv_layer = (Conv2D(
                filters, (size, size),
                strides=(stride, stride),
                kernel_regularizer=l2(weight_decay),
                use_bias=not batch_normalize,
                weights=conv_weights,
                activation=act_fn,
                padding=padding))(prev_layer)

            if batch_normalize:
                conv_layer = (BatchNormalization(
                    weights=bn_weight_list))(conv_layer)
            prev_layer = conv_layer

            if activation == 'linear':
                all_layers.append(prev_layer)
            elif activation == 'leaky':
                # Darknet 'leaky' uses a fixed 0.1 negative slope.
                act_layer = LeakyReLU(alpha=0.1)(prev_layer)
                prev_layer = act_layer
                all_layers.append(act_layer)

        elif section.startswith('route'):
            # Route concatenates (or forwards) earlier layers by index.
            ids = [int(i) for i in cfg_parser[section]['layers'].split(',')]
            layers = [all_layers[i] for i in ids]
            if len(layers) > 1:
                print('Concatenating route layers:', layers)
                concatenate_layer = Concatenate()(layers)
                all_layers.append(concatenate_layer)
                prev_layer = concatenate_layer
            else:
                skip_layer = layers[0]  # only one layer to route
                all_layers.append(skip_layer)
                prev_layer = skip_layer

        elif section.startswith('maxpool'):
            size = int(cfg_parser[section]['size'])
            stride = int(cfg_parser[section]['stride'])
            all_layers.append(
                MaxPooling2D(
                    pool_size=(size, size),
                    strides=(stride, stride),
                    padding='same')(prev_layer))
            prev_layer = all_layers[-1]

        elif section.startswith('shortcut'):
            # Residual connection: add an earlier layer to the current one.
            index = int(cfg_parser[section]['from'])
            activation = cfg_parser[section]['activation']
            assert activation == 'linear', 'Only linear activation supported.'
            all_layers.append(Add()([all_layers[index], prev_layer]))
            prev_layer = all_layers[-1]

        elif section.startswith('upsample'):
            stride = int(cfg_parser[section]['stride'])
            assert stride == 2, 'Only stride=2 supported.'
            all_layers.append(UpSampling2D(stride)(prev_layer))
            prev_layer = all_layers[-1]

        elif section.startswith('yolo'):
            # Record the previous layer as a detection output head.
            out_index.append(len(all_layers) - 1)
            all_layers.append(None)
            prev_layer = all_layers[-1]

        elif section.startswith('net'):
            pass

        else:
            raise ValueError(
                'Unsupported section header type: {}'.format(section))

    # Create and save model.
    if len(out_index) == 0: out_index.append(len(all_layers) - 1)
    model = Model(inputs=input_layer, outputs=[all_layers[i] for i in out_index])
    print(model.summary())
    if args.weights_only:
        model.save_weights('{}'.format(output_path))
        print('Saved Keras weights to {}'.format(output_path))
    else:
        model.save('{}'.format(output_path))
        print('Saved Keras model to {}'.format(output_path))

    # Check to see if all weights have been read.
    remaining_weights = len(weights_file.read()) / 4
    weights_file.close()
    print('Read {} of {} from Darknet weights.'.format(count, count +
                                                       remaining_weights))
    if remaining_weights > 0:
        print('Warning: {} unused weights'.format(remaining_weights))

    if args.plot_model:
        plot(model, to_file='{}.png'.format(output_root), show_shapes=True)
        print('Saved model plot to {}.png'.format(output_root))
259 |
260 |
if __name__ == '__main__':
    # Parse CLI arguments and run the conversion.
    _main(parser.parse_args())
263 |
--------------------------------------------------------------------------------
/font/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/font/.DS_Store
--------------------------------------------------------------------------------
/font/font.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/font/font.ttf
--------------------------------------------------------------------------------
/kmeans.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
class YOLO_Kmeans:
    """K-means clustering of bounding-box sizes to generate YOLO anchors.

    Boxes are (width, height) pairs extracted from an annotation file and
    clustered with 1 - IoU as the distance metric, following the
    YOLOv2/YOLOv3 anchor-generation procedure.
    """

    def __init__(self, cluster_number, filename):
        # k: number of anchor clusters to produce.
        self.cluster_number = cluster_number
        # Annotation file: one image per line, then boxes as
        # "x1,y1,x2,y2,class" separated by spaces.
        # Bug fix: the passed-in path was previously ignored in favour of a
        # hard-coded "model_data/label.txt".
        self.filename = filename

    def iou(self, boxes, clusters):  # 1 box -> k clusters
        """Return the (n, k) IoU matrix between every box and every cluster.

        Boxes and clusters are (w, h) pairs assumed to share a common
        top-left corner, so the intersection is min(w) * min(h).
        """
        n = boxes.shape[0]
        k = self.cluster_number

        # Areas broadcast to an (n, k) grid: rows = boxes, cols = clusters.
        box_area = np.reshape((boxes[:, 0] * boxes[:, 1]).repeat(k), (n, k))
        cluster_area = np.reshape(
            np.tile(clusters[:, 0] * clusters[:, 1], [1, n]), (n, k))

        box_w_matrix = np.reshape(boxes[:, 0].repeat(k), (n, k))
        cluster_w_matrix = np.reshape(np.tile(clusters[:, 0], (1, n)), (n, k))
        min_w_matrix = np.minimum(cluster_w_matrix, box_w_matrix)

        box_h_matrix = np.reshape(boxes[:, 1].repeat(k), (n, k))
        cluster_h_matrix = np.reshape(np.tile(clusters[:, 1], (1, n)), (n, k))
        min_h_matrix = np.minimum(cluster_h_matrix, box_h_matrix)

        inter_area = np.multiply(min_w_matrix, min_h_matrix)
        return inter_area / (box_area + cluster_area - inter_area)

    def avg_iou(self, boxes, clusters):
        """Mean best-IoU: average, over boxes, of each box's highest IoU
        against any cluster. Used as the clustering "accuracy" metric."""
        return np.mean(np.max(self.iou(boxes, clusters), axis=1))

    def kmeans(self, boxes, k, dist=np.median):
        """Cluster (w, h) boxes into k anchors using 1 - IoU distance.

        Clusters are initialised from k distinct rows of `boxes` and
        updated with `dist` (median by default) until assignments stop
        changing. Returns the (k, 2) cluster array.
        """
        box_number = boxes.shape[0]
        last_nearest = np.zeros((box_number,))
        np.random.seed()
        # Initialise k clusters from random distinct boxes.
        clusters = boxes[np.random.choice(box_number, k, replace=False)]
        while True:
            distances = 1 - self.iou(boxes, clusters)
            current_nearest = np.argmin(distances, axis=1)
            if (last_nearest == current_nearest).all():
                break  # assignments are stable -> converged
            for cluster in range(k):
                members = boxes[current_nearest == cluster]
                # Bug fix: skip empty clusters — np.median over an empty
                # slice would produce nan and poison the cluster.
                if len(members):
                    clusters[cluster] = dist(members, axis=0)
            last_nearest = current_nearest

        return clusters

    def result2txt(self, data):
        """Write anchors to model_data/my_anchors.txt as
        "w,h, w,h, ..." (same format the training code parses)."""
        anchors = ", ".join("%d,%d" % (box[0], box[1]) for box in data)
        # `with` guarantees the file is closed even on write errors.
        with open("model_data/my_anchors.txt", 'w') as f:
            f.write(anchors)

    def txt2boxes(self):
        """Parse self.filename and return an (m, 2) array of box
        (width, height) pairs from all "x1,y1,x2,y2,class" entries."""
        dataSet = []
        with open(self.filename, 'r') as f:
            for line in f:
                infos = line.split(" ")
                # infos[0] is the image path; the rest are boxes.
                for info in infos[1:]:
                    coords = info.split(",")  # split once, not four times
                    width = int(coords[2]) - int(coords[0])
                    height = int(coords[3]) - int(coords[1])
                    dataSet.append([width, height])
        return np.array(dataSet)

    def txt2clusters(self):
        """End-to-end: read boxes, cluster, sort anchors by width,
        persist them, and print the anchors plus mean best-IoU."""
        all_boxes = self.txt2boxes()
        result = self.kmeans(all_boxes, k=self.cluster_number)
        result = result[np.lexsort(result.T[0, None])]  # sort by width
        self.result2txt(result)
        print("K anchors:\n {}".format(result))
        print("Accuracy: {:.2f}%".format(
            self.avg_iou(all_boxes, result) * 100))


if __name__ == "__main__":
    cluster_number = 9
    # Bug fix: point at the annotation file that actually exists in this
    # repo (was "model_data/kitti_simple_label.txt", which is absent).
    # Combined with the __init__ fix above, behavior is unchanged: the
    # old code silently read model_data/label.txt anyway.
    filename = "model_data/label.txt"
    kmeans = YOLO_Kmeans(cluster_number, filename)
    kmeans.txt2clusters()
102 |
--------------------------------------------------------------------------------
/logs/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/logs/.DS_Store
--------------------------------------------------------------------------------
/logs/000/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/logs/000/.DS_Store
--------------------------------------------------------------------------------
/model_data/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/model_data/.DS_Store
--------------------------------------------------------------------------------
/model_data/1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/model_data/1.png
--------------------------------------------------------------------------------
/model_data/2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/model_data/2.png
--------------------------------------------------------------------------------
/model_data/3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/model_data/3.png
--------------------------------------------------------------------------------
/model_data/label.txt:
--------------------------------------------------------------------------------
1 | model_data/train/162.jpg 20,23,56,54,3
2 | model_data/train/a618bd89042aca9fbede30d7e67c7628.jpg 30,53,104,98,47 376,220,409,233,47
3 | model_data/train/604.jpg 165,11,204,24,14
4 | model_data/train/88.jpg 17,251,110,329,1
5 | model_data/train/610.jpg 143,79,282,104,14
6 | model_data/train/1353.jpg 205,181,239,203,40
7 | model_data/train/1347.jpg 158,158,196,176,40
8 | model_data/train/suppply4.jpg 176,147,215,176,8 197,147,215,176,34
9 | model_data/train/77.jpg 276,183,289,202,1 59,99,71,124,1
10 | model_data/train/4e9b2fc7ac1f860d01d1353ab1c17494.jpg 125,64,222,103,10 227,66,301,105,39
11 | model_data/train/2308dfda6a55b911b0ff25bd5aea2eb8.jpg 69,81,223,131,10 236,78,344,130,39
12 | model_data/train/406.jpg 189,117,341,303,38 69,63,176,353,37
13 | model_data/train/1186.jpg 150,39,265,301,29
14 | model_data/train/cctvnew26.jpg 70,126,125,152,49
15 | model_data/train/1151.jpg 197,301,229,317,28
16 | model_data/train/suppply144.jpg 17,170,133,201,40 52,85,105,162,11
17 | model_data/train/suppply150.jpg 189,153,209,182,11
18 | model_data/train/cctvnew32.jpg 166,85,246,111,49
19 | model_data/train/1145.jpg 157,171,174,198,28
20 | model_data/train/suppply178.jpg 165,121,250,295,37
21 | model_data/train/1179.jpg 206,39,259,98,28 35,184,44,201,28
22 | model_data/train/214.jpg 92,70,268,336,5
23 | model_data/train/200.jpg 7,12,25,39,4
24 | model_data/train/566.jpg 20,271,43,298,13 19,302,44,310,41
25 | model_data/train/228.jpg 13,66,400,377,5
26 | model_data/train/389f5e8d6edf1fc67a2039f0073af0d4.jpg 61,72,324,315,20 81,163,305,210,47
27 | model_data/train/1019.jpg 256,38,267,56,24
28 | model_data/train/suppplies34.jpg 85,156,256,220,44
29 | model_data/train/cctv27.jpg 172,233,235,263,0
30 | model_data/train/suppplies20.jpg 212,296,253,327,18 257,292,276,311,46 227,208,262,223,18 353,34,384,52,18 384,39,403,52,46
31 | model_data/train/bd6a7784efbcc5ce991985e162a49351.jpg 109,274,265,316,41 135,59,272,236,13
32 | model_data/train/980.jpg 40,0,312,318,23
33 | model_data/train/c4c770bf277cd7a0392e00c647b271c0.jpg 13,18,289,85,17
34 | model_data/train/suppply11.jpg 222,189,260,234,20 228,208,252,213,47
35 | model_data/train/suppply39.jpg 86,146,137,161,30 277,156,317,169,30
36 | model_data/train/957.jpg 199,200,239,226,23
37 | model_data/train/942.jpg 51,38,87,77,23 179,207,219,234,23 193,109,204,123,23
38 | model_data/train/suppply38.jpg 37,191,94,253,6 37,253,92,275,30 87,318,96,328,6
39 | model_data/train/956.jpg 123,102,160,137,23 254,214,295,239,23
40 | model_data/train/suppply10.jpg 275,227,302,266,20 279,244,298,251,47
41 | model_data/train/1226.jpg 44,158,359,210,44
42 | model_data/train/981.jpg 268,103,298,137,24 58,84,264,340,24
43 | model_data/train/suppplies21.jpg 73,13,111,31,46 8,6,71,33,18
44 | model_data/train/598.jpg 104,381,141,389,14
45 | model_data/train/cctv26.jpg 179,275,199,311,0
46 | model_data/train/suppplies35.jpg 62,55,261,109,35 262,55,332,109,36
47 | model_data/train/1018.jpg 260,277,279,301,24
48 | model_data/train/1024.jpg 293,104,355,179,25 183,104,284,134,48
49 | model_data/train/215.jpg 20,40,384,385,5
50 | model_data/train/1178.jpg 307,335,328,357,28
51 | model_data/train/suppply179.jpg 75,34,340,323,37
52 | model_data/train/suppply151.jpg 107,193,121,221,11
53 | model_data/train/cctvnew33.jpg 45,170,371,245,49
54 | model_data/train/1150.jpg 194,213,221,243,28
55 | model_data/train/cctvnew27.jpg 170,217,254,255,49
56 | model_data/train/suppply145.jpg 94,197,106,223,11
57 | model_data/train/375.jpg 146,136,277,266,9
58 | model_data/train/1187.jpg 188,70,227,189,29
59 | model_data/train/1193.jpg 168,29,254,350,29
60 | model_data/train/407.jpg 101,74,319,329,37
61 | model_data/train/suppply186.jpg 229,17,279,50,37 243,56,265,64,38
62 | model_data/train/836.jpg 332,223,348,256,20
63 | model_data/train/188.jpg 26,7,77,34,4
64 | model_data/train/suppply5.jpg 91,253,130,290,8 112,253,130,290,34
65 | model_data/train/1346.jpg 123,340,156,362,40
66 | model_data/train/1352.jpg 166,322,226,363,40
67 | model_data/train/611.jpg 67,183,252,209,14 135,118,163,125,14
68 | model_data/train/89.jpg 71,121,96,137,1
69 | model_data/train/177.jpg 1,74,37,117,4
70 | model_data/train/175.jpg 9,8,36,52,4
71 | model_data/train/613.jpg 159,19,253,38,14
72 | model_data/train/607.jpg 24,30,104,45,14
73 | model_data/train/161.jpg 198,244,235,286,3
74 | model_data/train/d81174f763b17d26b6ec436051d527b4.jpg 33,104,191,146,10 201,112,322,150,39
75 | model_data/train/149.jpg 26,18,65,40,3
76 | model_data/train/808.jpg 184,204,217,247,19
77 | model_data/train/1350.jpg 223,115,269,132,40 141,128,159,135,40
78 | model_data/train/74.jpg 261,284,277,316,1 281,275,300,298,1
79 | model_data/train/suppply7.jpg 165,118,192,152,22
80 | model_data/train/1185.jpg 192,102,224,209,29
81 | model_data/train/377.jpg 106,182,137,217,9 178,161,288,215,36 178,217,231,243,38
82 | model_data/train/363.jpg 138,111,168,133,8 153,110,169,133,34
83 | model_data/train/onehalf20.jpg 22,27,42,63,2
84 | model_data/train/405.jpg 147,83,195,158,9 209,99,269,144,38
85 | model_data/train/1191.jpg 174,104,244,289,29
86 | model_data/train/cctvnew31.jpg 36,75,173,109,49
87 | model_data/train/1146.jpg 82,247,99,268,28
88 | model_data/train/suppply153.jpg 158,183,173,207,11
89 | model_data/train/3a49a6e8f206f0dd3852505b95260a71.jpg 119,34,246,119,18 253,54,324,117,46
90 | model_data/train/1152.jpg 49,294,63,309,28
91 | model_data/train/cctvnew19.jpg 286,110,370,143,49 295,188,319,199,49
92 | model_data/train/203.jpg 165,70,189,96,4 172,15,217,40,4 260,337,269,356,4
93 | model_data/train/217.jpg 110,72,319,299,5
94 | model_data/train/559.jpg 34,50,390,405,13
95 | model_data/train/1032.jpg 30,30,386,385,25
96 | model_data/train/1026.jpg 98,112,153,196,25 157,132,314,238,48
97 | model_data/train/cctv18.jpg 114,151,181,185,0
98 | model_data/train/suppplies23.jpg 58,36,183,113,35 180,90,217,128,36
99 | model_data/train/cctv30.jpg 156,264,229,316,0
100 | model_data/train/suppplies37.jpg 36,14,47,28,36 6,12,36,29,35
101 | model_data/train/cctv24.jpg 24,28,60,50,0
102 | model_data/train/983.jpg 160,138,199,182,24
103 | model_data/train/suppply12.jpg 174,233,192,263,20
104 | model_data/train/1224.jpg 26,175,377,239,44
105 | model_data/train/940.jpg 73,136,182,280,22
106 | model_data/train/954.jpg 143,49,192,98,23
107 | model_data/train/1219.jpg 202,208,210,245,29
108 | model_data/train/955.jpg 196,128,223,157,23 186,223,231,261,23
109 | model_data/train/799.jpg 83,108,326,204,19
110 | model_data/train/941.jpg 240,160,292,197,23 76,5,118,40,23
111 | model_data/train/969.jpg 193,200,252,228,23 211,114,231,127,23 203,270,236,292,23
112 | model_data/train/suppply13.jpg 16,26,43,69,4
113 | model_data/train/1231.jpg 85,159,258,221,44
114 | model_data/train/996.jpg 327,203,347,223,24 76,155,88,171,24
115 | model_data/train/982.jpg 155,43,203,89,24
116 | model_data/train/cctv25.jpg 67,194,221,316,0
117 | model_data/train/suppplies36.jpg 68,180,296,265,35 298,208,353,264,36 119,45,198,114,36
118 | model_data/train/226d3d9a0dc0d03cc84c9ab001e03ec8.jpg 31,113,384,197,10 85,234,342,315,39
119 | model_data/train/suppplies22.jpg 337,27,370,45,18 371,32,390,47,46
120 | model_data/train/cctv19.jpg 20,139,300,247,0
121 | model_data/train/1027.jpg 172,70,282,146,25 128,136,323,194,48 160,284,239,304,48 68,310,94,330,25
122 | model_data/train/558.jpg 127,129,153,171,13
123 | model_data/train/69d73f63709a9b581637ed7ea038ae2a.jpg 87,116,264,182,17
124 | model_data/train/216.jpg 11,1,399,413,5
125 | model_data/train/570.jpg 214,295,237,317,13 62,0,111,52,13 64,65,120,76,41
126 | model_data/train/564.jpg 200,144,276,272,13
127 | model_data/train/70c6d2e862e7c4fbcc0bfcaac138d323.jpg 16,119,152,153,10 163,119,260,151,39
128 | model_data/train/6d806497f50c4c9b604778c4d17f090b.jpg 160,116,273,238,13 154,264,278,295,41
129 | model_data/train/389.jpg 287,56,366,83,35
130 | model_data/train/suppply146.jpg 27,0,355,125,21
131 | model_data/train/1153.jpg 62,193,80,220,28
132 | model_data/train/cctvnew24.jpg 227,116,267,137,49
133 | model_data/train/1147.jpg 200,212,223,232,28
134 | model_data/train/cctvnew30.jpg 281,259,319,277,49
135 | model_data/train/suppply152.jpg 276,182,294,209,11
136 | model_data/train/85ee606f15d1fdfe041c1363ba5f99a6.jpg 65,111,256,186,18 258,139,352,188,46
137 | model_data/train/suppply185.jpg 7,13,408,352,37
138 | model_data/train/1190.jpg 188,71,226,186,29
139 | model_data/train/onehalf21.jpg 44,178,266,246,35 269,178,360,246,36
140 | model_data/train/410.jpg 83,64,325,343,9
141 | model_data/train/1184.jpg 236,259,244,287,29
142 | model_data/train/376.jpg 25,16,52,28,35 52,18,62,29,36 358,33,382,46,35 382,36,391,44,36 25,250,307,329,35 308,253,403,329,36
143 | model_data/train/61.jpg 99,67,119,94,1
144 | model_data/train/suppply6.jpg 262,184,282,221,20
145 | model_data/train/1351.jpg 162,100,206,133,40 138,215,237,299,40 335,93,351,102,40
146 | model_data/train/809.jpg 179,200,228,224,19
147 | model_data/train/1345.jpg 162,228,250,296,40 199,141,233,163,40
148 | model_data/train/148.jpg 309,140,341,151,3
149 | model_data/train/606.jpg 132,117,218,256,14
150 | model_data/train/160.jpg 1,0,26,19,3
151 | model_data/train/174.jpg 24,26,50,65,4
152 | model_data/train/612.jpg 163,101,242,111,14
153 | model_data/train/616.jpg 56,263,122,327,14
154 | model_data/train/602.jpg 39,71,385,216,14
155 | model_data/train/suppply2.jpg 92,33,240,130,21
156 | model_data/train/65.jpg 132,127,177,175,1 360,105,389,134,1
157 | model_data/train/1341.jpg 200,105,241,135,40
158 | model_data/train/1355.jpg 227,161,247,190,40
159 | model_data/train/59.jpg 101,80,329,235,1
160 | model_data/train/819.jpg 170,71,196,108,19
161 | model_data/train/onehalf19.jpg 16,21,31,33,2
162 | model_data/train/372.jpg 160,149,258,263,9
163 | model_data/train/366.jpg 133,143,251,260,8 209,143,251,257,34
164 | model_data/train/suppply181.jpg 33,150,88,186,37 129,150,189,187,37 146,195,172,204,38 47,195,74,203,38 228,195,255,203,38
165 | model_data/train/suppply156.jpg 322,146,341,173,11
166 | model_data/train/cctvnew34.jpg 237,156,270,171,49
167 | model_data/train/1143.jpg 348,192,363,226,28
168 | model_data/train/cctvnew20.jpg 237,154,270,173,49
169 | model_data/train/suppply142.jpg 78,292,289,365,40 99,81,267,277,11
170 | model_data/train/560.jpg 199,160,216,184,13
171 | model_data/train/212.jpg 0,1,416,413,5
172 | model_data/train/suppplies26.jpg 7,2,89,33,18 93,9,143,32,46
173 | model_data/train/suppplies32.jpg 91,64,261,116,44
174 | model_data/train/eb22259ea5add73722b7747297dbbd88.jpg 144,87,364,118,17
175 | model_data/train/cctv21.jpg 212,227,247,246,0
176 | model_data/train/986.jpg 123,35,301,200,24
177 | model_data/train/3ee1f19baaf6bb20d945803e05faea54.jpg 118,3,160,62,45 164,6,274,53,15
178 | model_data/train/ad45e8f37e0f3618ef7ea2efca337d44.jpg 103,110,276,184,17
179 | model_data/train/945.jpg 175,80,204,113,23 273,176,299,194,23
180 | model_data/train/1209.jpg 214,222,224,255,29
181 | model_data/train/cctv9.jpg 108,221,190,273,0
182 | model_data/train/aa800309ad993b62b014fdc7010d50ce.jpg 99,217,208,247,10 124,91,188,105,10 216,214,293,244,39 191,89,238,104,39
183 | model_data/train/onehalf8.jpg 14,163,252,234,35 253,143,410,239,36
184 | model_data/train/979.jpg 135,42,287,238,23
185 | model_data/train/1235.jpg 93,152,343,255,44
186 | model_data/train/suppply17.jpg 262,184,283,224,20
187 | model_data/train/1220.jpg 240,236,258,298,29
188 | model_data/train/1234.jpg 48,16,201,183,44
189 | model_data/train/978.jpg 114,11,268,195,23
190 | model_data/train/suppply16.jpg 192,218,221,259,20
191 | model_data/train/onehalf9.jpg 13,157,251,232,35 252,145,408,239,36
192 | model_data/train/cctv8.jpg 80,99,154,156,0
193 | model_data/train/1208.jpg 84,197,97,244,29
194 | model_data/train/944.jpg 166,197,236,227,23 172,89,187,106,23
195 | model_data/train/cb59bfdda80db3b04dc8f16194d717fa.jpg 206,55,305,121,47 86,183,133,210,47
196 | model_data/train/987.jpg 132,212,223,296,24
197 | model_data/train/cctv20.jpg 235,257,254,295,0
198 | model_data/train/suppplies33.jpg 42,158,361,207,44
199 | model_data/train/7a29aa0b22f873dbf1fe205e344bd6a1.jpg 11,13,107,38,10 114,13,184,36,39 182,200,204,211,10
200 | model_data/train/suppplies27.jpg 187,205,243,234,35 243,203,266,228,36
201 | model_data/train/549.jpg 232,247,251,280,13
202 | model_data/train/1156.jpg 214,251,231,275,28
203 | model_data/train/cctvnew21.jpg 165,178,205,199,49
204 | model_data/train/suppply143.jpg 29,128,387,177,40
205 | model_data/train/suppply157.jpg 35,33,159,59,21
206 | model_data/train/1142.jpg 262,230,275,244,28
207 | model_data/train/cctvnew35.jpg 200,108,342,173,49
208 | model_data/train/398.jpg 131,93,223,126,35 317,260,327,271,38 119,269,129,277,38 224,93,257,118,36
209 | model_data/train/1195.jpg 173,83,250,305,29
210 | model_data/train/401.jpg 92,224,154,251,35 153,231,170,250,36
211 | model_data/train/367.jpg 98,109,196,194,8 153,109,195,192,34
212 | model_data/train/suppply180.jpg 355,159,392,215,37 7,288,75,385,37
213 | model_data/train/1181.jpg 291,168,305,192,29
214 | model_data/train/onehalf18.jpg 22,27,41,63,2
215 | model_data/train/818.jpg 314,323,412,399,19
216 | model_data/train/58.jpg 101,79,328,237,1
217 | model_data/train/suppply3.jpg 55,108,261,274,8 159,108,261,274,34
218 | model_data/train/824.jpg 310,185,333,231,20
219 | model_data/train/64.jpg 84,168,141,197,1
220 | model_data/train/165.jpg 12,10,53,22,3 56,406,107,416,3 317,411,332,416,3
221 | model_data/train/603.jpg 276,92,320,159,14
222 | model_data/train/617.jpg 84,251,134,284,14
223 | model_data/train/171.jpg 45,128,201,301,4
224 | model_data/train/654417c661566f0b58033b82d7b4c1d4.jpg 24,119,92,137,47 143,203,150,212,20 114,224,125,227,47 122,104,147,114,47
225 | model_data/train/601.jpg 5,25,85,43,14
226 | model_data/train/167.jpg 13,12,42,23,3 52,362,69,369,3
227 | model_data/train/173.jpg 152,89,264,217,4
228 | model_data/train/615.jpg 198,187,254,201,14
229 | model_data/train/suppply1.jpg 20,31,84,67,4
230 | model_data/train/72.jpg 28,31,54,66,1
231 | model_data/train/64235928d3fb760d7262485fd485a5eb.jpg 31,51,379,268,45 16,295,398,368,15
232 | model_data/train/1356.jpg 161,68,215,92,40
233 | model_data/train/1342.jpg 216,114,257,134,40
234 | model_data/train/359.jpg 91,308,241,352,8 168,307,243,352,34
235 | model_data/train/6d291328313723fec030328428048c98.jpg 296,5,352,88,20 303,42,339,57,47 82,271,98,301,20 84,285,93,290,47
236 | model_data/train/suppply182.jpg 109,114,306,224,37
237 | model_data/train/1197.jpg 337,134,358,212,29 302,303,313,351,29
238 | model_data/train/1183.jpg 195,0,212,85,29 68,262,76,295,29
239 | model_data/train/371.jpg 44,180,268,247,35 271,181,359,246,36
240 | model_data/train/1168.jpg 91,230,104,247,28
241 | model_data/train/suppply169.jpg 8,60,67,137,25 67,47,296,126,48
242 | model_data/train/suppply141.jpg 28,31,54,64,1
243 | model_data/train/cctvnew37.jpg 16,192,150,240,49
244 | model_data/train/1140.jpg 236,111,248,147,27
245 | model_data/train/suppply155.jpg 291,188,316,221,11
246 | model_data/train/239.jpg 62,72,361,369,5
247 | model_data/train/211.jpg 1,1,416,416,5
248 | model_data/train/577.jpg 114,163,298,253,13
249 | model_data/train/563.jpg 294,145,308,163,13
250 | model_data/train/205.jpg 29,353,80,382,4
251 | model_data/train/588.jpg 117,106,272,150,14 217,279,258,291,14
252 | model_data/train/suppplies31.jpg 89,65,261,118,44
253 | model_data/train/cctv22.jpg 349,20,383,44,0
254 | model_data/train/1008.jpg 42,25,57,43,24 95,61,105,74,24
255 | model_data/train/suppplies25.jpg 253,135,389,200,46 10,109,240,200,18
256 | model_data/train/4aab34eaeaf11fdb0804a7ff11ad4f13.jpg 64,87,263,186,17
257 | model_data/train/1020.jpg 198,305,222,322,24
258 | model_data/train/suppplies19.jpg 14,9,92,39,18 96,19,141,39,46
259 | model_data/train/985.jpg 105,45,181,119,24
260 | model_data/train/952.jpg 196,98,227,128,23 36,195,59,213,23 151,199,172,217,23 254,198,277,215,23 356,193,378,214,23
261 | model_data/train/suppply28.jpg 61,60,146,147,6 159,93,326,138,30
262 | model_data/train/946.jpg 197,124,239,166,23 326,230,351,249,23 328,173,347,192,23
263 | model_data/train/suppply14.jpg 153,223,167,251,20
264 | model_data/train/suppply15.jpg 170,217,192,252,20
265 | model_data/train/1223.jpg 37,163,366,246,44
266 | model_data/train/suppply29.jpg 174,20,302,61,30 102,10,163,74,6 353,273,362,282,6 365,274,393,281,30
267 | model_data/train/953.jpg 179,230,226,271,23
268 | model_data/train/timg.jpg 57,12,349,130,17
269 | model_data/train/984.jpg 158,105,249,220,24
270 | model_data/train/990.jpg 106,177,195,238,24
271 | model_data/train/1035.jpg 167,62,261,205,25 137,216,292,279,48
272 | model_data/train/suppplies18.jpg 70,11,103,27,46 13,5,69,29,18
273 | model_data/train/suppplies24.jpg 42,48,334,186,18 58,212,344,376,46
274 | model_data/train/cctv23.jpg 112,305,222,346,0
275 | model_data/train/589.jpg 161,146,261,167,14
276 | model_data/train/210.jpg 10,5,61,29,4
277 | model_data/train/576.jpg 81,32,345,293,13 63,343,353,385,41
278 | model_data/train/238.jpg 206,267,275,363,5
279 | model_data/train/1141.jpg 153,224,176,253,28
280 | model_data/train/cctvnew36.jpg 45,170,371,246,49
281 | model_data/train/suppply154.jpg 95,236,110,263,11
282 | model_data/train/suppply140.jpg 29,33,53,65,1 353,301,384,317,19
283 | model_data/train/1155.jpg 294,104,308,129,28
284 | model_data/train/cctvnew22.jpg 286,111,369,143,49 292,188,320,200,49
285 | model_data/train/1169.jpg 275,240,304,256,28
286 | model_data/train/1182.jpg 291,166,305,190,29
287 | model_data/train/370.jpg 26,38,67,68,8 48,37,66,68,34
288 | model_data/train/suppply183.jpg 160,137,242,225,37
289 | model_data/train/358.jpg 28,30,255,130,8 146,30,253,127,34
290 | model_data/train/1357.jpg 185,314,222,341,40
291 | model_data/train/73.jpg 243,323,268,351,1
292 | model_data/train/199.jpg 49,77,82,105,4
293 | model_data/train/614.jpg 47,113,113,134,14
294 | model_data/train/98.jpg 22,24,43,65,2
295 | model_data/train/600.jpg 60,247,85,299,14 221,73,285,92,14
296 | model_data/train/166.jpg 11,21,48,37,3
297 | model_data/train/673.jpg 60,128,111,179,16 274,268,294,286,16 273,99,314,123,16 10,181,159,213,43 219,115,332,146,43 257,281,312,304,43 232,134,297,155,42 48,214,126,235,42 269,296,300,310,42
298 | model_data/train/1324.jpg 208,97,382,179,21
299 | model_data/train/868.jpg 25,128,108,183,44
300 | model_data/train/840.jpg 224,204,244,234,20
301 | model_data/train/1318.jpg 0,197,253,241,33 53,258,251,309,32 101,73,329,184,7
302 | model_data/train/854.jpg 221,278,241,308,20
303 | model_data/train/465.jpg 108,0,292,274,11 1,318,405,416,40
304 | model_data/train/471.jpg 116,0,300,133,11 0,353,82,415,11 4,158,412,206,40 97,367,416,405,40
305 | model_data/train/317.jpg 103,70,328,186,7 0,198,254,242,33 55,259,251,309,32
306 | model_data/train/suppply127.jpg 82,232,114,279,19 308,270,373,296,19 257,0,403,43,19
307 | model_data/train/1132.jpg 255,27,351,52,27
308 | model_data/train/1126.jpg 69,158,119,202,27
309 | model_data/train/suppply133.jpg 10,56,129,163,1 295,29,397,141,4
310 | model_data/train/suppplies7.jpg 11,14,113,49,18 196,232,211,239,18 118,23,178,48,46
311 | model_data/train/511.jpg 252,262,291,284,12
312 | model_data/train/263.jpg 74,100,344,374,6
313 | model_data/train/043d75d3bd5d885f875eb70ae04888d5.jpg 115,69,158,102,47
314 | model_data/train/1052.jpg 253,1,330,99,25 137,15,246,59,48
315 | model_data/train/1293.jpg 95,33,320,389,37
316 | model_data/train/908.jpg 152,46,290,225,22
317 | model_data/train/suppply66.jpg 25,26,50,65,4 318,322,329,337,28
318 | model_data/train/suppply72.jpg 226,215,278,252,20
319 | model_data/train/1250.jpg 81,263,281,335,44
320 | model_data/train/934.jpg 264,142,300,189,22 272,383,287,401,22
321 | model_data/train/1278.jpg 54,60,189,380,37 206,119,394,316,38
322 | model_data/train/920.jpg 45,167,97,249,22 317,162,352,219,22
323 | model_data/train/002_01.jpg 219,47,343,384,45
324 | model_data/train/921.jpg 132,117,155,159,22
325 | model_data/train/1279.jpg 75,29,137,71,38 224,71,247,87,38 72,135,136,178,38 74,239,136,281,38 75,346,137,385,38 223,280,247,298,38 224,386,247,401,38 224,177,247,192,38 215,14,254,67,37 26,18,71,82,37 346,17,391,83,37 345,122,391,191,9 217,119,254,172,9 28,125,70,189,9 26,228,69,293,9 215,221,255,277,9 346,227,390,291,9 345,332,388,398,9 217,328,254,381,9 26,333,71,397,9
326 | model_data/train/935.jpg 128,59,169,113,22 41,246,90,309,22 273,381,287,403,22
327 | model_data/train/suppply73.jpg 103,49,321,136,40 0,19,89,80,11
328 | model_data/train/909.jpg 132,201,144,222,22
329 | model_data/train/suppply67.jpg 153,55,179,91,22
330 | model_data/train/suppply98.jpg 133,181,295,210,33 134,215,238,246,32 20,181,135,246,7
331 | model_data/train/1292.jpg 60,200,86,219,38 177,198,203,220,38 309,199,335,220,38 35,67,109,189,37 108,321,154,394,37
332 | model_data/train/289.jpg 91,87,156,165,6 133,325,163,363,6 165,117,291,153,30 134,364,163,375,30
333 | model_data/train/1047.jpg 272,108,352,212,25 225,228,406,296,48
334 | model_data/train/504.jpg 101,171,293,276,12
335 | model_data/train/262.jpg 147,110,310,236,30 68,182,135,288,6
336 | model_data/train/276.jpg 167,85,249,113,30 124,61,163,118,6
337 | model_data/train/cctvnew6.jpg 93,60,149,94,49
338 | model_data/train/suppplies6.jpg 168,121,313,227,44
339 | model_data/train/1127.jpg 69,155,118,197,27
340 | model_data/train/suppply126.jpg 353,308,366,317,19 24,26,49,65,4
341 | model_data/train/1133.jpg 125,293,166,304,27
342 | model_data/train/470.jpg 152,83,262,239,11 93,258,321,311,40
343 | model_data/train/316.jpg 169,38,238,87,7 137,91,215,113,33
344 | model_data/train/464.jpg 108,43,303,234,11 4,256,411,322,40
345 | model_data/train/855.jpg 139,205,160,230,20
346 | model_data/train/699.jpg 171,135,245,209,16 156,259,260,289,42 130,209,283,241,43
347 | model_data/train/80545abcd6ca2792ec9d360ce4eb090f.jpg 174,36,236,53,47 171,1,243,94,20 4,155,32,190,20 207,309,217,325,20 7,171,29,178,47
348 | model_data/train/896.jpg 8,2,149,48,21
349 | model_data/train/114.jpg 3,14,28,54,2
350 | model_data/train/672.jpg 157,57,258,178,16 68,189,350,254,43 98,296,313,358,42
351 | model_data/train/666.jpg 169,74,231,138,16 170,139,230,152,43 137,151,265,188,42
352 | model_data/train/100.jpg 69,41,165,121,2 242,41,338,121,2 203,236,298,314,2
353 | model_data/train/e6b51cec36b05716e697cba9a34815db.jpg 251,84,286,105,41
354 | model_data/train/670.jpg 61,128,111,178,16 271,97,312,122,16 274,264,294,287,16 14,182,159,215,43 221,115,331,145,43 255,282,313,303,43 46,215,125,237,42 231,135,297,156,42 268,296,299,313,42 361,184,392,235,42
355 | model_data/train/116.jpg 13,21,34,60,2 252,227,267,265,2
356 | model_data/train/aca9d89b7b1eaa092ebfc45766eb39b8.jpg 229,159,328,275,13 49,172,88,219,13 44,224,88,239,41 78,74,320,141,41
357 | model_data/train/664.jpg 126,209,186,271,16 132,302,181,313,43 114,279,193,300,42
358 | model_data/train/1708ea2cfd76d6924ff818fe2ffd9ade.jpg 73,120,283,155,17
359 | model_data/train/83692eac922beda6aa8c79443e414913.jpg 34,19,197,95,10 211,11,338,102,39
360 | model_data/train/843.jpg 165,335,221,391,20
361 | model_data/train/314.jpg 13,66,100,141,7 81,82,184,132,33 241,138,277,163,33 240,107,286,137,32
362 | model_data/train/4bdb82778b61384c2d9f47902403ceee.jpg 127,136,231,225,18 232,175,275,236,46
363 | model_data/train/472.jpg 48,166,109,264,11 121,188,360,249,40
364 | model_data/train/328.jpg 41,163,204,254,7 191,166,376,218,32
365 | model_data/train/suppply130.jpg 117,127,283,193,19
366 | model_data/train/1125.jpg 37,200,73,212,27 249,291,276,300,27
367 | model_data/train/1131.jpg 73,196,122,222,27
368 | model_data/train/suppply124.jpg 125,147,305,190,44
369 | model_data/train/499.jpg 116,170,135,200,11 227,300,373,366,40
370 | model_data/train/suppplies4.jpg 321,124,356,143,35
371 | model_data/train/suppply118.jpg 147,159,208,202,21
372 | model_data/train/260.jpg 137,267,278,297,30 138,119,279,254,6
373 | model_data/train/1092.jpg 79,50,123,69,31 32,47,76,86,26
374 | model_data/train/506.jpg 196,324,215,333,12
375 | model_data/train/1086.jpg 13,106,407,309,26
376 | model_data/train/cctvnew4.jpg 281,261,321,278,49
377 | model_data/train/c69c55eafaa56d77e101af0a02cc8e8c.jpg 52,97,383,138,17
378 | model_data/train/248.jpg 38,45,382,367,5
379 | model_data/train/1051.jpg 0,122,79,197,25
380 | model_data/train/1045.jpg 9,128,53,161,25
381 | model_data/train/1079.jpg 158,80,333,134,31 55,70,135,125,26
382 | model_data/train/1290.jpg 39,269,145,391,9 279,273,390,390,9
383 | model_data/train/1253.jpg 80,170,125,225,9 168,224,234,261,38 172,143,305,222,36
384 | model_data/train/suppply65.jpg 7,9,25,37,4
385 | model_data/train/suppply59.jpg 235,167,260,182,15 237,142,256,168,45 86,29,113,59,45 79,54,117,77,15 92,90,116,104,15
386 | model_data/train/937.jpg 278,131,319,186,22 272,382,288,402,22
387 | model_data/train/suppply58.jpg 226,177,245,201,45 224,203,248,213,15 32,23,45,42,45
388 | model_data/train/suppply64.jpg 161,156,198,173,15 176,177,202,187,15 165,125,193,155,45
389 | model_data/train/1246.jpg 113,157,232,238,44
390 | model_data/train/1291.jpg 176,84,293,182,38
391 | model_data/train/1078.jpg 28,278,87,304,26 89,280,145,292,31
392 | model_data/train/1044.jpg 127,23,198,111,25 200,71,320,175,48
393 | model_data/train/1050.jpg 128,86,161,136,25
394 | model_data/train/8f1ce59b35a81dada0cd36fd2b0a5181.jpg 74,38,221,84,10 232,29,362,73,39
395 | model_data/train/249.jpg 90,11,374,404,5
396 | model_data/train/9234bf63dc4558558cb3c5fce6596314.jpg 27,156,141,199,10 146,171,230,211,39
397 | model_data/train/1087.jpg 39,124,211,287,26 225,148,379,216,31
398 | model_data/train/275.jpg 340,18,395,86,6 340,94,396,113,30
399 | model_data/train/cctvnew5.jpg 100,92,164,111,49 196,47,211,54,49
400 | model_data/train/507.jpg 0,231,216,387,12
401 | model_data/train/1093.jpg 2,110,213,263,26 225,129,410,195,31
402 | model_data/train/498.jpg 59,273,75,295,11 172,109,227,131,40 86,208,138,233,40 30,322,87,360,40 114,198,136,215,11 220,74,272,92,11 165,107,173,116,11
403 | model_data/train/suppply119.jpg 186,112,211,146,22
404 | model_data/train/suppplies5.jpg 92,153,343,256,44
405 | model_data/train/1118.jpg 263,302,347,327,27
406 | model_data/train/1130.jpg 60,38,264,92,27
407 | model_data/train/suppply125.jpg 187,241,234,259,19
408 | model_data/train/suppply131.jpg 226,190,267,204,19
409 | model_data/train/1124.jpg 127,73,172,111,27 285,373,325,402,27
410 | model_data/train/467.jpg 103,30,288,270,11
411 | model_data/train/473.jpg 1,0,416,268,11 31,303,388,331,40
412 | model_data/train/895.jpg 5,0,180,41,21
413 | model_data/train/665.jpg 318,105,372,166,16 118,69,259,179,42 306,198,387,235,42 292,147,408,202,43 110,0,292,135,43
414 | model_data/train/671.jpg 157,55,258,178,16 98,295,315,362,42 66,187,351,253,43
415 | model_data/train/117.jpg 134,162,158,204,2
416 | model_data/train/b95261e03a2ec59bb870ae9b237ed664.jpg 196,113,294,179,10 78,73,120,138,10 119,104,162,171,39 296,91,354,144,39
417 | model_data/train/891.jpg 66,112,347,233,21
418 | model_data/train/885.jpg 117,73,302,131,21 125,195,184,217,1
419 | model_data/train/661.jpg 296,199,347,249,16 157,227,186,262,16 260,251,383,280,43 280,285,361,308,42 62,189,115,203,42 119,241,183,290,43 120,271,178,306,42 195,195,237,209,42 177,169,250,187,42 34,195,58,201,43
420 | model_data/train/107.jpg 208,292,224,327,2
421 | model_data/train/852.jpg 188,243,212,278,20
422 | model_data/train/1322.jpg 79,46,125,69,31 29,42,72,79,26
423 | model_data/train/339.jpg 8,2,40,33,8 25,3,39,31,34
424 | model_data/train/ced7d04748f6df5c319add3fde3bbb8d.jpg 49,147,169,192,18 173,145,261,179,46
425 | model_data/train/311.jpg 149,196,409,328,33
426 | model_data/train/305.jpg 194,224,314,258,33 172,150,378,221,32 31,150,187,271,7
427 | model_data/train/suppplies1.jpg 44,180,269,245,35 269,181,361,246,36
428 | model_data/train/1108.jpg 119,159,170,177,27 296,123,336,169,27
429 | model_data/train/8b7e337ddd1fac07ea3ac974fb02c5bf.jpg 82,97,127,136,41
430 | model_data/train/1120.jpg 237,139,309,158,27 193,285,228,298,27
431 | model_data/train/suppply135.jpg 39,196,159,264,3
432 | model_data/train/suppply121.jpg 93,59,115,103,22
433 | model_data/train/1097.jpg 27,4,71,45,26 83,9,130,33,31
434 | model_data/train/503.jpg 63,170,187,245,12
435 | model_data/train/265.jpg 52,86,368,303,6 66,312,352,353,30
436 | model_data/train/cctvnew1.jpg 101,135,145,155,49 218,134,261,155,49
437 | model_data/train/517.jpg 172,112,197,125,12
438 | model_data/train/1068.jpg 86,94,181,171,26 190,106,329,159,31
439 | model_data/train/1054.jpg 139,48,187,110,25 189,56,339,112,48
440 | model_data/train/1040.jpg 112,104,131,135,25 129,89,202,132,48
441 | model_data/train/1295.jpg 354,159,392,216,37 8,287,76,388,37 233,12,284,57,38
442 | model_data/train/e55ac12d8cb0f134c0493a496ee9e433.jpg 58,106,362,144,17
443 | model_data/train/1281.jpg 121,53,283,339,9
444 | model_data/train/suppply48.jpg 72,18,325,245,45 23,272,379,371,15
445 | model_data/train/926.jpg 118,80,160,136,22
446 | model_data/train/932.jpg 258,118,303,177,22 151,147,178,187,22
447 | model_data/train/fe263f5c0bbdb701a86bb3807ed02197.jpg 46,31,185,74,10 196,33,295,73,39
448 | model_data/train/1256.jpg 147,191,269,314,9 44,10,286,55,35 288,10,372,53,36 355,385,383,398,36 378,402,393,412,38
449 | model_data/train/suppply60.jpg 367,219,406,236,15 370,192,400,219,45
450 | model_data/train/suppply61.jpg 240,83,270,119,45 235,121,277,134,15
451 | model_data/train/8d892a39dab1085301300e6b937b0112.jpg 118,86,227,166,18 232,83,322,143,46
452 | model_data/train/suppply75.jpg 115,265,144,286,20
453 | model_data/train/933.jpg 250,149,300,216,22
454 | model_data/train/suppply49.jpg 36,135,378,202,15
455 | model_data/train/927.jpg 167,198,188,237,22
456 | model_data/train/1294.jpg 63,62,172,350,37 237,69,350,355,37
457 | model_data/train/700.jpg 173,47,256,185,16 105,190,325,260,43 131,305,298,372,42
458 | model_data/train/1041.jpg 37,24,78,54,25 79,26,178,48,48 26,363,60,392,25 60,366,150,385,48
459 | model_data/train/516.jpg 35,283,69,302,12
460 | model_data/train/1096.jpg 44,47,85,86,26 95,52,141,74,31
461 | model_data/train/264.jpg 172,134,245,233,6 171,238,244,265,30
462 | model_data/train/98fbedaee881a63a0335e148d63122c8.jpg 59,3,390,181,17
463 | model_data/train/1c4af5c5bd021aa1525b52d5d95d8193.jpg 135,59,260,138,15 105,106,135,155,45
464 | model_data/train/suppply120.jpg 258,105,273,138,22 274,248,324,277,21
465 | model_data/train/1121.jpg 127,166,287,220,27
466 | model_data/train/suppply134.jpg 40,54,376,361,1
467 | model_data/train/1109.jpg 208,118,274,146,27
468 | model_data/train/489.jpg 201,172,217,187,11
469 | model_data/train/669a6a0c7910a9cd8f21542958706fc0.jpg 165,255,287,383,20 171,312,242,328,47
470 | model_data/train/suppply108.jpg 151,158,212,184,32 169,104,232,153,7
471 | model_data/train/304.jpg 98,176,204,214,32 29,175,98,222,7
472 | model_data/train/462.jpg 114,51,292,234,11 6,263,402,328,40
473 | model_data/train/476.jpg 116,227,130,244,11
474 | model_data/train/310.jpg 56,161,204,250,7 222,217,317,239,33 195,164,361,215,32
475 | model_data/train/338.jpg 170,314,242,392,34
476 | model_data/train/a2abc56153ea5a594b39f08b1ad4ae48.jpg 36,133,133,193,10 133,84,252,158,39
477 | model_data/train/59fd4192b20372198316a5e3a5daa9ee.jpg 83,102,175,129,10 179,91,258,120,39
478 | model_data/train/112.jpg 192,68,247,143,2
479 | model_data/train/674.jpg 172,126,243,199,16 147,243,262,276,42 123,195,293,238,43
480 | model_data/train/104.jpg 144,245,186,272,2 282,367,325,394,2
481 | model_data/train/676.jpg 193,307,252,360,16 115,321,170,341,43
482 | model_data/train/110.jpg 241,251,256,275,2
483 | model_data/train/845.jpg 49,258,64,299,20
484 | model_data/train/689.jpg 67,195,133,244,16 333,201,361,224,16 87,58,113,75,16 305,58,329,77,16 61,277,137,296,42 326,237,365,246,42 41,241,159,264,43 315,220,372,234,43 293,77,343,88,43 75,75,123,85,43
485 | model_data/train/851.jpg 234,145,296,253,20
486 | model_data/train/1321.jpg 243,112,303,153,26 303,126,356,155,31
487 | model_data/train/312.jpg 213,14,287,106,7 186,95,261,151,33 197,123,258,180,32
488 | model_data/train/cctvnew40.jpg 224,118,267,137,49
489 | model_data/train/1137.jpg 169,319,292,343,27
490 | model_data/train/suppply136.jpg 48,16,355,253,1
491 | model_data/train/1123.jpg 193,56,240,68,27
492 | model_data/train/514.jpg 90,40,395,243,12
493 | model_data/train/cctvnew2.jpg 307,117,345,138,49
494 | model_data/train/272.jpg 167,114,257,206,6 167,212,258,236,30
495 | model_data/train/266.jpg 97,280,314,381,30 108,19,313,288,6
496 | model_data/train/1094.jpg 57,120,164,256,26 185,133,350,213,26
497 | model_data/train/500.jpg 216,127,232,153,11
498 | model_data/train/suppply88.jpg 201,99,342,176,21
499 | model_data/train/925.jpg 58,187,81,216,22
500 | model_data/train/suppply63.jpg 206,145,225,168,45 203,167,228,183,15
501 | model_data/train/919.jpg 43,281,68,295,22
502 | model_data/train/1255.jpg 134,144,281,237,37 174,256,245,280,38
503 | model_data/train/suppply77.jpg 55,112,106,141,21 159,119,202,147,21
504 | model_data/train/918.jpg 157,47,176,72,22
505 | model_data/train/74f3017b371c9f4834b1ac4601a9b016.jpg 71,201,341,257,15 93,35,320,188,45
506 | model_data/train/suppply76.jpg 146,239,179,265,20
507 | model_data/train/suppply62.jpg 149,84,179,95,15 152,60,174,83,45
508 | model_data/train/1297.jpg 130,295,204,357,38
509 | model_data/train/1283.jpg 166,146,244,255,9
510 | model_data/train/suppply89.jpg 132,161,253,273,21
511 | model_data/train/1056.jpg 203,36,287,109,25 193,61,379,164,48
512 | model_data/train/1042.jpg 271,108,351,212,25 225,229,407,295,48
513 | model_data/train/35b9493e4e62a8e43cd048444609ea5a.jpg 69,26,198,91,18 205,40,278,93,46
514 | model_data/train/267.jpg 84,121,163,220,6 175,158,334,202,30
515 | model_data/train/1095.jpg 38,23,165,89,26 133,244,259,309,26 169,32,282,62,31 267,253,378,281,31
516 | model_data/train/515.jpg 153,235,296,269,12 87,353,117,360,12 161,352,191,360,12 235,352,267,360,12 308,352,339,361,12 308,402,340,411,12 234,403,266,411,12 161,403,192,412,12 86,402,117,412,12 1,1,24,9,12
517 | model_data/train/cctvnew3.jpg 166,102,208,133,49 25,30,60,51,0
518 | model_data/train/99f04483e207e9694785b3f5fca4cbc8.jpg 34,3,128,108,13 11,130,124,172,41 318,243,345,291,13
519 | model_data/train/529.jpg 155,221,208,248,12 202,341,218,353,12
520 | model_data/train/suppply137.jpg 210,137,295,181,19
521 | model_data/train/1136.jpg 217,277,327,308,27
522 | model_data/train/cctvnew41.jpg 198,109,344,173,49
523 | model_data/train/suppply123.jpg 229,238,293,269,19
524 | model_data/train/9e7564be22f8e3f86c1a74e00d3932cc.jpg 33,26,258,162,18 272,62,394,156,46
525 | model_data/train/suppplies3.jpg 86,61,238,172,44
526 | model_data/train/313.jpg 143,287,288,327,33
527 | model_data/train/0b50c0716c5ee81cddd7e94cb95f4ddd.jpg 27,160,312,239,17
528 | model_data/train/1320.jpg 53,158,204,253,7 193,162,362,215,32 224,219,317,239,33
529 | model_data/train/850.jpg 202,194,385,414,20
530 | model_data/train/844.jpg 106,0,135,49,20
531 | model_data/train/1308.jpg 155,250,270,280,32
532 | model_data/train/677.jpg 158,94,194,148,16 202,307,297,341,43 217,102,312,139,43 183,261,272,302,42
533 | model_data/train/111.jpg 133,213,149,236,2
534 | model_data/train/105.jpg 11,18,32,59,2
535 | model_data/train/663.jpg 201,120,274,191,16 32,229,59,256,16 393,117,403,157,16 149,189,317,241,42 176,250,296,280,43 23,280,70,293,43 19,256,75,276,42
536 | model_data/train/893.jpg 93,59,316,106,21
537 | model_data/train/120.jpg 230,268,267,305,2
538 | model_data/train/134.jpg 0,1,416,231,3
539 | model_data/train/685.jpg 232,59,294,152,16 203,133,318,189,43 222,206,293,239,42
540 | model_data/train/1305.jpg 321,124,356,142,35
541 | model_data/train/691.jpg 139,1,276,174,16 25,185,390,270,43 69,334,345,416,42
542 | model_data/train/861.jpg 115,72,307,133,21 123,198,185,216,21
543 | model_data/train/336.jpg 329,322,387,387,34
544 | model_data/train/suppply106.jpg 191,70,362,114,32 212,116,286,134,33 51,66,202,145,7
545 | model_data/train/1113.jpg 68,180,94,189,27
546 | model_data/train/1107.jpg 7,25,108,50,27 46,258,71,269,27
547 | model_data/train/suppply112.jpg 193,100,284,176,21 252,250,283,264,21
548 | model_data/train/256.jpg 2,0,416,323,6 1,357,414,416,30
549 | model_data/train/7d8fda15ce4faaa3e55dc7e51c5e8dd7.jpg 130,87,181,118,18 286,87,341,121,18 343,95,377,118,46
550 | model_data/train/530.jpg 29,272,62,289,12
551 | model_data/train/1098.jpg 38,22,82,62,26 93,26,140,50,31
552 | model_data/train/518.jpg 11,328,49,345,12
553 | model_data/train/84a3237a9fe19f16553d97744df40f8f.jpg 91,132,116,178,13 80,192,168,302,41
554 | model_data/train/295.jpg 120,16,318,199,7 44,211,282,315,33 40,261,388,395,32
555 | model_data/train/1067.jpg 265,153,321,175,31 212,152,263,189,26
556 | model_data/train/1073.jpg 149,76,294,118,31 11,78,94,136,26 350,80,416,137,26
557 | model_data/train/281.jpg 179,126,222,156,30 189,98,235,143,6
558 | model_data/train/suppply84.jpg 23,14,201,42,21
559 | model_data/train/suppply90.jpg 30,131,396,249,21
560 | model_data/train/a70ab94c4309d3a8272de85d3bb4e455.jpg 193,136,242,169,47
561 | model_data/train/suppply47.jpg 119,14,195,41,15 90,17,117,45,45
562 | model_data/train/suppply53.jpg 139,182,279,220,15 149,137,261,190,45
563 | model_data/train/f133aa8cec41ccd90bcf9bfb84f87b70.jpg 95,65,146,116,45 153,69,292,110,15 157,169,183,195,45
564 | model_data/train/8c0c7a9c8ab2663dcdf4f6d49d52d53e.jpeg 188,54,248,156,13 186,170,252,189,41
565 | model_data/train/901.jpg 162,89,173,109,22
566 | model_data/train/1258.jpg 3,223,277,352,36 163,62,254,155,9 1,359,133,415,38
567 | model_data/train/914.jpg 181,47,202,76,22
568 | model_data/train/suppply52.jpg 46,38,357,241,45 3,257,416,359,15
569 | model_data/train/1270.jpg 95,33,320,386,37
570 | model_data/train/suppply46.jpg 131,116,156,157,22
571 | model_data/train/suppply91.jpg 100,53,277,86,44
572 | model_data/train/suppply85.jpg 16,4,168,31,21
573 | model_data/train/280.jpg 153,76,266,207,6 141,215,277,252,30
574 | model_data/train/525.jpg 198,39,225,54,12
575 | model_data/train/243.jpg 133,106,253,323,5
576 | model_data/train/18bbfcdd24e3a871017d3e0572a54843.jpg 0,50,57,110,13 97,71,217,91,41 301,295,323,316,13
577 | model_data/train/22ce18b4dce8aa14bda1e7b2033b6d9b.jpg 135,74,229,134,18 235,88,290,134,46
578 | model_data/train/531.jpg 191,194,324,246,12
579 | model_data/train/1106.jpg 128,105,183,131,27
580 | model_data/train/492.jpg 251,211,259,230,11
581 | model_data/train/suppply113.jpg 31,90,258,163,21
582 | model_data/train/486.jpg 82,198,98,224,11
583 | model_data/train/1112.jpg 21,84,386,260,27
584 | model_data/train/337.jpg 179,36,213,79,8 197,36,214,78,34
585 | model_data/train/323.jpg 130,120,278,227,7 72,245,250,289,33
586 | model_data/train/860.jpg 146,93,270,167,20
587 | model_data/train/690.jpg 186,76,232,120,16 162,120,251,139,43 178,151,241,171,42
588 | model_data/train/684.jpg 103,75,165,136,16 276,85,335,143,16 72,284,189,314,42 239,274,352,308,42
589 | model_data/train/135.jpg 0,0,416,228,3
590 | model_data/train/889.jpg 247,242,316,275,21
591 | model_data/train/87aa340315e3702905719bd6efd4efdd.jpg 253,73,333,99,15
592 | model_data/train/1306.jpg 55,171,172,269,7 186,178,360,229,32 186,235,307,264,33
593 | model_data/train/692.jpg 157,57,259,178,16 66,189,349,253,43 101,297,314,356,42
594 | model_data/train/686.jpg 242,104,290,175,16 218,170,310,204,43 237,223,294,249,42
595 | model_data/train/335.jpg 310,204,343,256,8 328,209,343,254,34
596 | model_data/train/321.jpg 18,114,197,287,7 203,223,322,265,33 180,121,379,216,32
597 | model_data/train/suppply111.jpg 1,54,169,173,7 193,127,326,181,32 139,61,362,128,33
598 | model_data/train/490.jpg 199,221,221,252,11
599 | model_data/train/1110.jpg 137,70,193,94,27
600 | model_data/train/484.jpg 190,153,209,184,11
601 | model_data/train/suppply105.jpg 17,13,53,24,3 162,372,181,377,3 106,372,128,379,3
602 | model_data/train/8d7f72a1e8c81cbd66df37b0f446c84a.jpg 152,69,232,85,47 357,213,378,226,47 46,220,58,229,47 5,236,13,243,47
603 | model_data/train/suppply139.jpg 29,32,53,67,1 354,299,384,318,19
604 | model_data/train/b4f8847009577bfd1b7f8c28b356395b.jpg 256,19,355,50,47
605 | model_data/train/533.jpg 204,311,232,320,12
606 | model_data/train/255.jpg 158,103,258,266,6 162,269,253,305,30
607 | model_data/train/08f5fe23a79a8225bf4cde93cfa97e7d.jpg 19,23,99,45,10 105,23,164,44,39 33,123,58,131,10
608 | model_data/train/0d319caa194790b2907c74b52c410100.jpg 115,127,225,165,10 235,128,316,160,39
609 | model_data/train/282.jpg 164,273,181,295,6 159,282,174,299,30
610 | model_data/train/1070.jpg 203,59,390,115,26 20,60,190,168,26
611 | model_data/train/296.jpg 201,222,322,265,33 180,122,379,216,32 20,115,198,289,7
612 | model_data/train/01d8a1c3d88f62c30d945f4d45ba7eba.jpg 111,164,217,219,18 225,175,289,216,46
613 | model_data/train/suppply93.jpg 41,178,166,304,7 182,185,367,252,32 183,259,309,299,33
614 | model_data/train/suppply87.jpg 119,105,313,205,44
615 | model_data/train/1299.jpg 97,60,178,151,37 97,162,178,252,37 97,267,176,354,37
616 | model_data/train/6217272bccf8947089e1c25790848600.jpg 114,153,263,241,17
617 | model_data/train/1272.jpg 149,135,252,266,9
618 | model_data/train/ffc7f5bf9aafa0b0293560c764a814c7.jpg 27,162,380,198,17
619 | model_data/train/suppply50.jpg 78,69,338,240,45 80,253,334,299,15
620 | model_data/train/suppply44.jpg 59,92,117,130,15 71,43,112,98,45
621 | model_data/train/902.jpg 81,117,105,160,22
622 | model_data/train/suppply78.jpg 93,33,240,129,21
623 | model_data/train/3ae7020d4521c511a5874f0b9b4899e2.jpg 34,115,346,185,17
624 | model_data/train/suppply79.jpg 193,101,283,176,21 252,250,283,265,21
625 | model_data/train/suppply45.jpg 244,336,267,363,45 237,139,270,166,45 173,77,229,121,45 235,84,377,118,15 271,338,325,358,15
626 | model_data/train/1267.jpg 166,47,262,110,36 165,116,261,144,35
627 | model_data/train/suppply51.jpg 91,307,322,366,15 59,24,356,296,45
628 | model_data/train/1298.jpg 176,85,293,183,38
629 | model_data/train/suppply86.jpg 5,24,34,66,4
630 | model_data/train/edc72e7f83cd613d618bf84c53f182d5.jpg 7,6,85,90,20 17,44,70,54,47
631 | model_data/train/suppply92.jpg 56,80,81,111,4
632 | model_data/train/1065.jpg 86,24,145,39,31 24,19,84,44,26
633 | model_data/train/283.jpg 203,153,217,159,30 203,135,216,153,6
634 | model_data/train/1071.jpg 77,68,216,126,26 227,80,358,107,31
635 | model_data/train/532.jpg 180,200,199,210,12
636 | model_data/train/254.jpg 124,266,297,304,30 116,70,304,259,6
637 | model_data/train/240.jpg 4,8,399,409,5
638 | model_data/train/526.jpg 305,71,334,93,12
639 | model_data/train/dc63f7017bad402727fbfae778c7b1e3.jpg 23,233,131,275,10 140,235,222,274,39
640 | model_data/train/suppply138.jpg 33,24,58,56,1
641 | model_data/train/1139.jpg 101,190,178,221,27
642 | model_data/train/suppply104.jpg 177,216,217,231,32
643 | model_data/train/suppply110.jpg 98,33,197,67,32 16,30,105,94,7
644 | model_data/train/1105.jpg 22,157,285,208,27
645 | model_data/train/491.jpg 274,183,293,210,11
646 | model_data/train/308.jpg 107,27,182,197,7 50,195,169,288,33
647 | model_data/train/320.jpg 144,113,290,220,7 83,238,253,282,33
648 | model_data/train/334.jpg 166,86,219,135,8 194,86,219,134,34 5,1,11,12,34
649 | model_data/train/1313.jpg 129,74,330,178,7 24,197,248,236,33 87,245,243,281,32
650 | model_data/train/687.jpg 89,197,167,246,16 113,58,143,75,16 51,245,202,269,43 77,281,178,298,42 94,75,158,86,43
651 | model_data/train/693.jpg 144,34,256,177,16 48,184,349,253,43 88,307,317,380,42
652 | model_data/train/122.jpg 230,152,248,175,2
653 | model_data/train/851b82a9ad7581280cdc76c25cfc8b3b.jpg 130,305,211,336,47
654 | model_data/train/136.jpg 16,171,202,268,3
655 | model_data/train/668.jpg 177,122,246,194,16 123,191,296,234,43 152,239,266,271,42
656 | model_data/train/132.jpg 38,196,158,263,3
657 | model_data/train/26759be146474fe2a05b7fdb0da1d244.jpg 46,0,248,101,10 257,46,362,112,39
658 | model_data/train/898.jpg 20,250,394,374,21 19,85,381,155,44
659 | model_data/train/867.jpg 110,155,236,237,44
660 | model_data/train/697.jpg 119,0,297,253,16 22,266,390,372,43
661 | model_data/train/683.jpg 186,86,245,167,16 300,216,323,246,16 136,193,222,265,42 290,265,328,279,42 283,245,337,263,43 130,133,256,236,43
662 | model_data/train/a54d7376abb3e32633c0a0bfcdc6127d.jpg 258,82,337,169,13 255,186,344,206,41
663 | model_data/train/318.jpg 103,73,326,184,7 0,200,252,242,33 54,258,251,308,32
664 | model_data/train/324.jpg 163,74,244,149,7 124,156,217,187,33 144,196,218,227,32
665 | model_data/train/b3fd727d3bd53721ec6f69c72816772e.jpg 80,203,346,364,15 110,79,317,257,45
666 | model_data/train/suppply128.jpg 175,180,208,215,19
667 | model_data/train/1129.jpg 126,205,172,248,27
668 | model_data/train/suppply114.jpg 15,176,41,197,8 28,176,40,196,34 1,2,25,21,8 13,2,25,21,34
669 | model_data/train/suppplies8.jpg 64,13,100,30,46 9,9,62,30,18
670 | model_data/train/suppply100.jpg 186,139,374,213,32 206,218,317,248,33 36,137,199,265,7
671 | model_data/train/1115.jpg 147,171,259,205,27
672 | model_data/train/cctvnew8.jpg 34,71,175,110,49
673 | model_data/train/522.jpg 79,237,104,259,12 181,336,222,349,12
674 | model_data/train/244.jpg 26,24,396,402,5
675 | model_data/train/250.jpg 88,1,359,412,5
676 | model_data/train/1049.jpg 127,21,213,203,25 214,74,339,213,48
677 | model_data/train/1075.jpg 86,56,173,101,26 197,169,232,189,26 30,310,45,327,26 124,172,135,180,26 300,169,310,177,26 175,72,241,94,31 194,190,238,204,31 45,312,67,330,31
678 | model_data/train/293.jpg 62,239,377,307,33
679 | model_data/train/1288.jpg 241,175,303,226,38 132,197,163,237,9
680 | model_data/train/cf29efa8b53283be13f98c8af8ef407d.jpg 99,82,297,179,47
681 | model_data/train/suppply96.jpg 37,83,122,137,7 38,233,186,325,7 174,237,344,289,32 204,290,299,313,33 116,85,214,118,32
682 | model_data/train/b09a38b6deb015fef99e11168faff5bd.jpg 65,28,150,81,18 155,53,197,87,46 110,152,150,179,18 320,85,362,118,18 306,140,348,172,18 154,160,185,184,46 358,99,385,124,46
683 | model_data/train/suppply82.jpg 158,124,236,162,21
684 | model_data/train/suppply69.jpg 183,184,225,209,20
685 | model_data/train/907.jpg 259,104,274,138,22 272,248,326,275,21
686 | model_data/train/suppply55.jpg 34,101,279,189,19
687 | model_data/train/1277.jpg 63,63,171,351,37 238,67,350,353,37
688 | model_data/train/suppply41.jpg 130,30,177,83,6 201,249,218,268,6 218,254,254,265,30 183,45,288,75,30
689 | model_data/train/803963f13505d48ebf80251b730d033b.jpg 170,200,189,222,13 146,240,215,259,41
690 | model_data/train/suppply40.jpg 276,167,316,214,6 277,216,319,229,30
691 | model_data/train/suppply54.jpg 90,76,327,239,45 44,280,371,340,15
692 | model_data/train/1276.jpg 112,122,305,293,37
693 | model_data/train/suppply68.jpg 57,127,92,171,22
694 | model_data/train/906.jpg 144,124,168,173,22
695 | model_data/train/suppply83.jpg 17,7,184,34,21
696 | model_data/train/072f735c0dc9cd46854fb7b788cef8e1.jpg 136,62,281,274,13 133,313,286,356,41
697 | model_data/train/suppply97.jpg 45,11,91,27,32 11,11,49,37,7 51,27,77,35,33
698 | model_data/train/1289.jpg 119,26,282,160,9 119,247,282,382,9
699 | model_data/train/292.jpg 54,171,173,268,7 187,179,359,227,32 187,235,305,263,33
700 | model_data/train/1074.jpg 259,207,329,257,26 249,257,344,308,31
701 | model_data/train/286.jpg 215,68,295,91,30 170,52,211,95,6
702 | model_data/train/1048.jpg 35,67,62,101,25 65,64,149,97,48
703 | model_data/train/251.jpg 127,10,277,238,6 127,260,278,312,30
704 | model_data/train/537.jpg 267,217,314,243,12
705 | model_data/train/523.jpg 186,74,225,89,12
706 | model_data/train/245.jpg 93,100,273,294,5
707 | model_data/train/cctvnew9.jpg 270,120,312,149,49
708 | model_data/train/279.jpg 170,217,316,330,30 42,136,159,250,6
709 | model_data/train/suppply101.jpg 6,12,30,45,4
710 | model_data/train/480.jpg 116,218,132,246,11
711 | model_data/train/1114.jpg 149,192,192,212,27 285,70,324,110,27
712 | model_data/train/1100.jpg 13,15,61,56,26 69,19,121,43,31
713 | model_data/train/494.jpg 212,244,228,268,11
714 | model_data/train/suppplies9.jpg 16,20,86,44,18 89,26,130,45,46 192,368,217,378,46
715 | model_data/train/suppply115.jpg 73,163,292,214,44
716 | model_data/train/1128.jpg 33,141,63,161,27
717 | model_data/train/suppply129.jpg 264,324,292,337,19
718 | model_data/train/eb9d943ba7a1885660215fb0ab90cebc.jpg 203,132,238,184,13 201,198,238,215,41
719 | model_data/train/58923005ce42e6cddbf6514154206a19.jpg 101,47,305,137,17
720 | model_data/train/682.jpg 154,59,254,131,16 148,137,264,155,43 35,164,383,240,42
721 | model_data/train/eac037800fd673c87c268187b72f6acf.jpg 64,90,356,132,17
722 | model_data/train/696.jpg 140,61,274,154,16 71,155,345,197,43 117,221,301,257,42
723 | model_data/train/1302.jpg 44,9,286,56,35 289,13,374,55,36 146,189,269,313,9
724 | model_data/train/866.jpg 0,5,408,358,21
725 | model_data/train/669.jpg 187,74,231,120,16 178,150,241,169,42 163,118,252,142,42
726 | model_data/train/64053b6c2f0f0a6b55da77c9c6cc1dd3.jpg 139,46,218,62,41 219,295,230,311,13
727 | model_data/train/7651c0e94968e205d7508ca474881a79.jpg 55,82,211,175,10 221,117,310,188,39
728 | model_data/train/131.jpg 16,171,201,267,3
729 | model_data/train/864.jpg 191,112,333,226,21
730 | model_data/train/a399acf4e698b4152fe9a34f8d27fd08.jpg 22,30,124,92,10 160,22,316,91,39
731 | model_data/train/870.jpg 72,11,293,147,21
732 | model_data/train/680.jpg 66,112,115,162,16 267,227,300,257,16 28,166,155,203,42 226,237,307,300,42 59,210,125,226,43 236,268,276,299,43
733 | model_data/train/1314.jpg 186,177,359,227,32 187,235,305,262,33 55,171,172,270,7
734 | model_data/train/858.jpg 312,206,327,232,20
735 | model_data/train/694.jpg 143,0,275,220,16 28,230,389,343,43
736 | model_data/train/333.jpg 17,28,313,326,8 203,31,310,328,34
737 | model_data/train/1116.jpg 107,316,154,330,27
738 | model_data/train/482.jpg 208,201,221,221,11
739 | model_data/train/suppply103.jpg 13,197,142,236,33 42,252,142,296,32 65,86,178,185,7
740 | model_data/train/suppply117.jpg 123,216,142,246,20
741 | model_data/train/509.jpg 269,86,287,97,12
742 | model_data/train/1089.jpg 283,140,322,169,26
743 | model_data/train/535.jpg 174,6,247,44,12
744 | model_data/train/247.jpg 127,131,266,237,5
745 | model_data/train/521.jpg 297,329,347,362,12
746 | model_data/train/1062.jpg 14,146,220,279,26 229,168,412,222,31
747 | model_data/train/284.jpg 185,290,246,330,30 184,208,245,299,6
748 | model_data/train/214975a47aeed9f59b0ebea719fe6993.jpg 187,184,359,312,17
749 | model_data/train/suppply81.jpg 83,140,209,190,21
750 | model_data/train/suppply95.jpg 86,244,244,282,32 24,195,247,235,33 127,74,331,176,7
751 | model_data/train/910.jpg 123,305,148,322,22 91,87,101,101,22 235,147,247,155,22
752 | model_data/train/1248.jpg 79,264,284,333,44
753 | model_data/train/suppply42.jpg 90,222,159,250,15 57,216,88,242,45
754 | model_data/train/1260.jpg 62,64,172,350,37 237,69,349,350,37
755 | model_data/train/938.jpg 286,126,324,200,22
756 | model_data/train/1274.jpg 131,141,280,238,37 171,256,246,280,38
757 | model_data/train/suppply56.jpg 42,50,121,104,15 37,123,115,167,15 357,231,375,242,15 173,328,186,338,15 58,12,116,67,45 359,214,373,233,45
758 | model_data/train/939.jpg 19,90,106,306,22
759 | model_data/train/suppply57.jpg 59,185,82,215,22
760 | model_data/train/suppply43.jpg 175,115,244,153,19
761 | model_data/train/905.jpg 31,3,69,52,22
762 | model_data/train/911.jpg 117,80,298,259,22
763 | model_data/train/suppply80.jpg 107,54,231,147,21 194,161,266,199,21
764 | model_data/train/1063.jpg 219,184,387,225,31 19,172,208,267,26 18,371,65,397,8
765 | model_data/train/246.jpg 101,36,328,359,5
766 | model_data/train/534.jpg 140,104,160,118,12 31,33,91,54,12
767 | model_data/train/252.jpg 12,79,403,302,6 30,312,384,355,30
768 | model_data/train/1088.jpg 194,122,307,210,26 123,216,310,277,31
769 | model_data/train/suppply116.jpg 91,33,239,129,21
770 | model_data/train/483.jpg 117,172,132,193,11
771 | model_data/train/1117.jpg 22,305,63,319,27
772 | model_data/train/suppply102.jpg 159,13,244,129,21 262,170,393,231,17
773 | model_data/train/332.jpg 161,108,218,172,8 190,107,218,172,34
774 | model_data/train/15302c49192aaedadb749964cbc6ad73.jpg 20,64,253,126,18 261,94,361,131,46
775 | model_data/train/468.jpg 54,189,97,289,11 107,210,278,274,40 0,1,416,149,40
776 | model_data/train/695.jpg 158,0,214,97,16 128,98,240,136,43 148,165,225,205,42
777 | model_data/train/1315.jpg 55,171,173,271,7 186,235,306,263,33 186,176,360,229,32
778 | model_data/train/871.jpg 18,71,381,150,44 20,262,395,405,21
779 | model_data/train/865.jpg 41,101,360,258,21
780 | model_data/train/1329.jpg 43,177,384,290,21
781 | model_data/train/130.jpg 101,50,320,267,2
782 | model_data/train/124.jpg 199,27,237,53,2
783 | model_data/train/118.jpg 166,246,188,284,2
784 | model_data/train/625.jpg 115,175,137,203,45
785 | model_data/train/631.jpg 34,173,49,196,45
786 | model_data/train/194.jpg 27,17,100,63,4
787 | model_data/train/56.jpg 29,76,55,101,1
788 | model_data/train/816.jpg 199,184,214,241,19
789 | model_data/train/341.jpg 91,310,242,353,8 170,310,241,352,34 312,388,364,404,8 338,388,364,404,34
790 | model_data/train/onehalf16.jpg 41,319,247,397,35 248,284,362,352,36
791 | model_data/train/1170.jpg 39,265,51,281,28
792 | model_data/train/382.jpg 41,14,91,37,35 107,273,160,324,36 189,359,204,393,37 92,15,111,36,36 85,72,95,91,37 232,220,268,263,38 262,371,269,384,9
793 | model_data/train/suppply165.jpg 168,250,233,385,34 113,250,234,386,8 239,114,266,157,8 252,113,266,157,34
794 | model_data/train/suppply171.jpg 139,50,186,120,25 190,59,337,119,48
795 | model_data/train/396.jpg 81,152,227,226,35 232,157,318,218,36
796 | model_data/train/cctvnew13.jpg 70,127,125,150,49
797 | model_data/train/1164.jpg 211,280,238,314,28
798 | model_data/train/suppply159.jpg 104,102,239,139,8 174,100,236,140,34
799 | model_data/train/1158.jpg 103,172,122,203,28
800 | model_data/train/553.jpg 153,129,268,273,13 146,95,272,124,41
801 | model_data/train/235.jpg 160,134,222,224,5
802 | model_data/train/221.jpg 132,115,281,317,5
803 | model_data/train/547.jpg 112,125,293,352,13 105,73,299,116,41
804 | model_data/train/timg-5.jpg 144,88,365,118,17
805 | model_data/train/209.jpg 66,11,354,252,4
806 | model_data/train/584.jpg 124,74,175,87,14
807 | model_data/train/1010.jpg 88,59,131,81,24
808 | model_data/train/suppplies15.jpg 337,324,369,344,18 370,328,389,344,46
809 | model_data/train/e0e8723c7971e28e02c086abf8204dc9.jpg 39,24,182,78,10 193,31,291,86,39 244,218,274,234,10 246,230,270,244,39
810 | model_data/train/cctv12.jpg 41,10,95,43,0
811 | model_data/train/989.jpg 348,238,401,298,24
812 | model_data/train/suppply24.jpg 28,88,260,163,21
813 | model_data/train/1206.jpg 159,263,165,289,29
814 | model_data/train/onehalf7.jpg 12,7,28,15,2 103,400,112,405,2
815 | model_data/train/1212.jpg 195,196,230,288,29
816 | model_data/train/786.jpg 189,302,197,354,19
817 | model_data/train/suppply30.jpg 177,61,297,95,30 110,41,170,103,6
818 | model_data/train/suppply18.jpg 102,221,117,249,20
819 | model_data/train/962.jpg 169,87,201,122,23 281,207,310,236,23
820 | model_data/train/963.jpg 184,10,229,39,23 191,193,226,207,23
821 | model_data/train/e8a3ab9cb69a0e7406864081ec81d229.jpg 45,71,73,110,13 131,145,332,193,41
822 | model_data/train/suppply19.jpg 99,143,253,232,19
823 | model_data/train/977.jpg 55,60,359,357,23
824 | model_data/train/1213.jpg 194,1,213,85,29 67,264,76,294,29
825 | model_data/train/onehalf6.jpg 22,24,43,64,2
826 | model_data/train/suppply31.jpg 105,189,165,215,19
827 | model_data/train/suppply25.jpg 74,290,231,346,30 73,37,230,267,6
828 | model_data/train/cctv7.jpg 20,13,73,35,0
829 | model_data/train/1207.jpg 203,170,214,219,29
830 | model_data/train/suppplies14.jpg 349,317,387,356,46 284,329,349,397,18
831 | model_data/train/1011.jpg 225,298,266,340,24
832 | model_data/train/585.jpg 153,86,208,99,14
833 | model_data/train/timg-4.jpg 118,49,332,111,17
834 | model_data/train/d53fe5fe5b6b874ffc872f6439384855.jpg 104,98,252,145,17
835 | model_data/train/208.jpg 305,6,334,22,4
836 | model_data/train/220.jpg 67,8,347,401,5
837 | model_data/train/552.jpg 176,157,191,175,13 168,192,178,208,13
838 | model_data/train/234.jpg 184,32,231,103,5 80,317,106,353,5
839 | model_data/train/1159.jpg 329,235,345,256,28
840 | model_data/train/suppply158.jpg 305,193,317,219,11
841 | model_data/train/suppply170.jpg 176,231,241,256,48 246,238,285,290,25
842 | model_data/train/cctvnew12.jpg 167,85,247,111,49
843 | model_data/train/suppply164.jpg 16,368,45,393,8 31,368,45,394,34
844 | model_data/train/354.jpg 2,7,270,214,8 140,6,269,212,34
845 | model_data/train/7e1dbd38528ac121193039881853bf23.jpg 194,105,411,161,17
846 | model_data/train/onehalf17.jpg 189,320,359,346,35
847 | model_data/train/817.jpg 192,226,239,283,19
848 | model_data/train/195.jpg 149,254,165,274,4
849 | model_data/train/94.jpg 22,25,43,64,2
850 | model_data/train/618.jpg 49,139,100,174,14
851 | model_data/train/630.jpg 263,224,286,249,45
852 | model_data/train/624.jpg 228,223,244,241,45
853 | model_data/train/626.jpg 115,153,132,171,45
854 | model_data/train/140.jpg 25,28,54,57,3
855 | model_data/train/168.jpg 11,7,50,19,3 257,400,286,406,3
856 | model_data/train/96.jpg 21,24,42,62,2
857 | model_data/train/82.jpg 206,267,252,312,1
858 | model_data/train/197.jpg 153,89,265,216,4
859 | model_data/train/69.jpg 71,120,97,137,1
860 | model_data/train/1359.jpg 33,228,74,244,40
861 | model_data/train/801.jpg 196,185,213,232,19 96,184,111,235,19 226,46,256,58,19 299,319,323,333,19
862 | model_data/train/356.jpg 19,119,255,241,8 141,120,255,241,34
863 | model_data/train/342.jpg 166,112,241,149,8 175,287,217,308,8 205,111,241,147,34 198,289,217,307,34
864 | model_data/train/1198.jpg 136,155,156,235,29
865 | model_data/train/cctvnew10.jpg 170,190,196,207,49
866 | model_data/train/suppply172.jpg 159,91,239,118,48 245,99,290,163,25
867 | model_data/train/suppply166.jpg 48,66,124,133,8 89,66,124,133,34
868 | model_data/train/381.jpg 291,59,368,87,35
869 | model_data/train/1173.jpg 94,221,112,243,28
870 | model_data/train/0e99d0eec490c973228ec92ea4f030c7.jpg 64,38,221,93,47 229,121,269,176,47
871 | model_data/train/cctvnew38.jpg 26,33,60,52,0 166,104,207,130,49
872 | model_data/train/544.jpg 143,313,160,329,13
873 | model_data/train/222.jpg 99,26,316,380,5
874 | model_data/train/f13d9bfcb8a31f61b2d7f21c0fe2f192.jpg 133,158,155,190,45 157,160,217,198,15
875 | model_data/train/236.jpg 250,52,376,306,5
876 | model_data/train/550.jpg 200,230,216,252,13
877 | model_data/train/fa0900cc01d660323e42feaf9f60f93c.jpg 144,147,278,241,13 141,256,278,277,41
878 | model_data/train/587.jpg 37,188,95,202,14
879 | model_data/train/593.jpg 99,157,146,180,14
880 | model_data/train/cctv11.jpg 30,27,62,48,0 144,57,237,113,0
881 | model_data/train/suppplies16.jpg 11,10,85,38,18 88,17,131,36,46 196,225,237,248,18
882 | model_data/train/suppply33.jpg 62,62,146,148,6 158,94,326,135,30
883 | model_data/train/onehalf4.jpg 14,16,123,32,35 137,262,262,284,35 262,264,306,283,36 126,15,165,32,36
884 | model_data/train/1211.jpg 291,167,307,190,29
885 | model_data/train/785.jpg 10,12,58,41,19
886 | model_data/train/949.jpg 173,114,241,147,23 166,215,259,249,23
887 | model_data/train/cctv5.jpg 153,279,182,314,0
888 | model_data/train/suppply27.jpg 169,106,244,137,30 170,5,243,101,6
889 | model_data/train/975.jpg 112,0,327,265,23
890 | model_data/train/1239.jpg 25,174,377,241,44
891 | model_data/train/1238.jpg 37,163,369,247,44
892 | model_data/train/6d56625b8659130d2a3ab0bfcbc0e8b4.jpg 54,96,161,169,10 159,52,290,146,39
893 | model_data/train/1204.jpg 191,74,223,185,29
894 | model_data/train/cctv4.jpg 26,29,66,54,0
895 | model_data/train/948.jpg 188,95,216,133,23
896 | model_data/train/suppply26.jpg 209,197,259,262,6 265,203,378,247,30
897 | model_data/train/784.jpg 10,12,60,39,19 68,174,73,190,19
898 | model_data/train/1210.jpg 177,183,187,211,29
899 | model_data/train/onehalf5.jpg 5,27,25,63,2
900 | model_data/train/suppplies17.jpg 337,9,369,23,18 369,13,388,24,46
901 | model_data/train/cctv10.jpg 26,16,62,37,0
902 | model_data/train/592.jpg 200,241,268,299,14
903 | model_data/train/1006.jpg 20,58,45,76,24
904 | model_data/train/1012.jpg 273,241,285,263,24
905 | model_data/train/586.jpg 144,155,256,189,14
906 | model_data/train/21dafcaf97235e4da09ed69f46afe056.jpg 42,86,181,288,18 173,53,295,223,46
907 | model_data/train/223.jpg 25,61,382,310,5
908 | model_data/train/cctvnew39.jpg 101,137,144,154,49 218,135,260,156,49
909 | model_data/train/suppply167.jpg 192,16,265,127,34 107,15,265,128,8
910 | model_data/train/cctvnew11.jpg 86,117,330,210,49
911 | model_data/train/suppply173.jpg 191,219,219,263,25 142,219,187,235,48
912 | model_data/train/1199.jpg 348,150,361,202,29
913 | model_data/train/onehalf14.jpg 20,12,37,27,2
914 | model_data/train/800.jpg 23,104,393,318,19
915 | model_data/train/4c3542ec4382936c34f97c59dcd17e92.jpg 159,114,271,242,13 154,264,278,301,41
916 | model_data/train/1358.jpg 205,102,244,151,40
917 | model_data/train/54.jpg 29,31,52,66,1
918 | model_data/train/196.jpg 135,8,160,34,4
919 | model_data/train/68.jpg 287,129,381,231,1
920 | model_data/train/83.jpg 345,14,361,36,1 33,65,49,90,1
921 | model_data/train/169.jpg 16,170,200,269,3
922 | model_data/train/eeae35ff5a7ce3302642f078c086fa82.jpg 38,53,221,139,18 229,64,338,126,46
923 | model_data/train/627.jpg 100,201,112,215,45
924 | model_data/train/df04c1a4a05189d489a3db5eb5f35694.jpg 150,52,274,125,10 278,82,348,138,39
925 | model_data/train/141.jpg 59,5,352,98,3
926 | model_data/train/633.jpg 85,205,103,224,45
927 | model_data/train/330e63b094d61368f4ac4988c6c0ea77.jpg 104,49,303,136,17
928 | model_data/train/179.jpg 12,2,34,14,4
929 | model_data/train/151.jpg 20,19,73,31,3
930 | model_data/train/145.jpg 60,33,91,59,3
931 | model_data/train/623.jpg 93,108,319,244,45 87,261,328,307,15
932 | model_data/train/1348.jpg 205,334,253,360,40
933 | model_data/train/804.jpg 146,45,171,57,19
934 | model_data/train/1360.jpg 109,225,131,283,40 288,309,297,351,40
935 | model_data/train/186.jpg 12,5,61,30,4
936 | model_data/train/78.jpg 51,84,78,112,1
937 | model_data/train/409.jpg 22,11,122,177,9 183,265,230,344,9 323,288,350,310,36
938 | model_data/train/1189.jpg 190,76,216,196,29
939 | model_data/train/80946e00d6e27a62cb8e28ea7f83fd9c.jpg 181,70,331,116,15 189,229,329,272,15 117,57,173,115,45 130,228,183,280,45
940 | model_data/train/onehalf10.jpg 15,36,39,81,2
941 | model_data/train/cctvnew29.jpg 94,62,148,96,49
942 | model_data/train/suppply177.jpg 191,116,342,306,38 68,61,177,359,37
943 | model_data/train/390.jpg 138,196,158,226,9 175,182,245,238,36
944 | model_data/train/cctvnew15.jpg 172,215,255,257,49
945 | model_data/train/1162.jpg 334,220,349,253,28
946 | model_data/train/384.jpg 193,318,358,346,35 360,317,416,343,36
947 | model_data/train/suppply163.jpg 12,11,42,45,8 28,11,42,45,34
948 | model_data/train/timg-3.jpg 66,99,375,149,17
949 | model_data/train/8ab03165b0857ed0c883ff1791fbfcdf.jpg 357,182,410,306,13 356,324,412,373,41
950 | model_data/train/541.jpg 55,62,361,372,13
951 | model_data/train/233.jpg 172,111,241,216,5
952 | model_data/train/cctv14.jpg 37,106,326,277,0
953 | model_data/train/suppplies13.jpg 10,13,88,42,18 91,20,138,40,46
954 | model_data/train/d0c5e23073127bb25386a0b793e84753.jpg 206,69,252,96,47
955 | model_data/train/582.jpg 130,65,203,78,14
956 | model_data/train/1016.jpg 125,310,150,328,24
957 | model_data/train/7e5d4c3d0a4a6726896b9b373b3ae4b5.jpg 277,96,321,125,15
958 | model_data/train/cctv28.jpg 250,126,332,179,0
959 | model_data/train/1002.jpg 147,46,162,58,24 304,290,324,307,24
960 | model_data/train/596.jpg 193,80,312,162,14
961 | model_data/train/964.jpg 187,220,228,246,23 201,131,215,146,23
962 | model_data/train/970.jpg 263,227,316,252,23
963 | model_data/train/onehalf1.jpg 12,5,29,15,2
964 | model_data/train/958.jpg 244,80,268,107,23 102,256,126,279,23
965 | model_data/train/suppply36.jpg 170,72,296,108,30 107,55,162,117,6
966 | model_data/train/suppply22.jpg 169,71,296,108,30 105,54,164,119,6
967 | model_data/train/1200.jpg 47,199,56,229,29 196,207,205,256,29
968 | model_data/train/suppply23.jpg 346,302,386,341,6 346,337,386,357,30
969 | model_data/train/cctv1.jpg 42,22,192,95,0
970 | model_data/train/781.jpg 11,20,57,47,19 166,313,179,328,19
971 | model_data/train/94931ab3daf4ba9b675e1cc3ba37be4d.jpg 52,123,106,138,47 41,78,121,185,20
972 | model_data/train/1215.jpg 202,163,215,213,29
973 | model_data/train/959.jpg 193,122,222,151,23
974 | model_data/train/suppply37.jpg 146,12,256,124,6 141,133,258,164,30 186,249,212,257,30 186,225,211,249,6
975 | model_data/train/971.jpg 97,50,336,239,23
976 | model_data/train/1229.jpg 167,88,311,138,44
977 | model_data/train/d14cbc705347c4f8f9678266e43c15da.jpg 102,110,276,167,17
978 | model_data/train/597.jpg 23,38,121,57,14 95,344,111,349,14
979 | model_data/train/1003.jpg 397,57,410,73,24
980 | model_data/train/cb268f78f934ced7f91610a798bc1af7.jpg 1,46,225,109,41
981 | model_data/train/cctv29.jpg 128,146,236,209,0
982 | model_data/train/1017.jpg 47,41,57,55,24 157,17,169,31,24 291,23,302,39,24
983 | model_data/train/583.jpg 34,71,232,98,14 216,223,267,247,14 284,128,334,139,14
984 | model_data/train/2c3db1ec35d4b24d5ecf698cfc81acbc.jpg 323,108,400,181,20 338,141,384,151,47
985 | model_data/train/suppplies12.jpg 35,157,367,247,44
986 | model_data/train/cctv15.jpg 74,308,189,380,0
987 | model_data/train/554.jpg 193,266,225,296,13
988 | model_data/train/232.jpg 285,226,414,394,5
989 | model_data/train/226.jpg 75,69,339,332,5
990 | model_data/train/540.jpg 85,168,223,220,12
991 | model_data/train/568.jpg 113,97,298,262,13 104,57,302,88,41
992 | model_data/train/timg-2.jpg 117,85,299,175,17
993 | model_data/train/suppply162.jpg 161,94,216,129,8 191,94,216,128,34
994 | model_data/train/suppply176.jpg 35,64,109,181,37 109,311,154,381,37 62,194,87,213,38 178,193,204,212,38 309,194,334,214,38 37,278,48,287,38 37,40,48,49,38
995 | model_data/train/391.jpg 125,118,298,296,9
996 | model_data/train/1163.jpg 289,210,313,231,28
997 | model_data/train/cctvnew28.jpg 271,120,311,146,49
998 | model_data/train/352.jpg 185,232,289,323,8 230,232,288,324,34
999 | model_data/train/onehalf11.jpg 25,51,352,259,35 354,200,396,275,36
1000 | model_data/train/408.jpg 156,117,262,241,37
1001 | model_data/train/187.jpg 25,24,51,64,4
1002 | model_data/train/1e4d63d972488bbd8d7b7972650e3494.jpg 124,18,196,38,41
1003 | model_data/train/193.jpg 235,218,260,252,4
1004 | model_data/train/805.jpg 92,40,117,52,19 262,178,279,236,19
1005 | model_data/train/144.jpg 23,29,53,58,3
1006 | model_data/train/622.jpg 86,155,110,189,45
1007 | model_data/train/636.jpg 122,175,147,200,45
1008 | model_data/train/178.jpg 57,31,367,288,4
1009 | model_data/train/86.jpg 24,29,49,63,1
1010 | model_data/train/92.jpg 93,25,326,252,2
1011 | model_data/train/84.jpg 91,94,111,117,1
1012 | model_data/train/90.jpg 182,90,241,155,1
1013 | model_data/train/146.jpg 17,7,62,26,3
1014 | model_data/train/152.jpg 23,32,60,57,3
1015 | model_data/train/634.jpg 264,189,282,211,45
1016 | model_data/train/53.jpg 30,129,141,240,1
1017 | model_data/train/185.jpg 8,13,25,41,4
1018 | model_data/train/suppply8.jpg 102,219,117,249,20
1019 | model_data/train/9fd24d9e08b8d359fdd53c3e77b65667.jpg 9,91,402,180,10 24,195,409,284,39
1020 | model_data/train/24e534f2820fc23c9bc97cb2d131c578.jpg 150,52,286,82,41
1021 | model_data/train/onehalf13.jpg 21,23,42,64,2
1022 | model_data/train/1149.jpg 357,143,367,159,28
1023 | model_data/train/4fcccf2949560d5ec00d287b7d028ddb.jpg 2,64,178,131,18 43,230,112,255,18 189,81,289,131,46 115,237,153,255,46
1024 | model_data/train/suppply148.jpg 203,231,223,257,11
1025 | model_data/train/suppply160.jpg 181,96,215,128,8 200,96,215,127,34
1026 | model_data/train/b99cae56d5141f36cc691407ae1809b6.jpg 198,180,216,200,13 182,221,234,236,41
1027 | model_data/train/cctvnew16.jpg 99,93,165,111,49
1028 | model_data/train/1161.jpg 209,283,226,299,28
1029 | model_data/train/393.jpg 146,138,277,265,9
1030 | model_data/train/suppply174.jpg 155,37,327,64,48 80,35,150,76,25
1031 | model_data/train/218.jpg 55,6,350,400,5
1032 | model_data/train/230.jpg 134,94,274,309,5
1033 | model_data/train/542.jpg 59,263,69,278,13
1034 | model_data/train/224.jpg 19,29,394,401,5
1035 | model_data/train/suppplies10.jpg 18,19,85,44,18 87,27,130,43,46 56,94,81,103,18
1036 | model_data/train/1029.jpg 180,65,267,137,48 274,117,317,216,25
1037 | model_data/train/cctv17.jpg 25,30,61,51,0 286,298,355,344,0
1038 | model_data/train/1001.jpg 185,155,220,174,24
1039 | model_data/train/suppplies38.jpg 128,31,305,66,44
1040 | model_data/train/1015.jpg 379,115,393,132,24 243,263,267,290,24
1041 | model_data/train/998.jpg 116,66,156,97,24
1042 | model_data/train/967.jpg 206,256,230,288,23
1043 | model_data/train/1203.jpg 176,136,197,209,29
1044 | model_data/train/cctv3.jpg 39,27,94,49,0
1045 | model_data/train/suppply21.jpg 251,187,401,269,30 23,68,238,318,6
1046 | model_data/train/suppply35.jpg 151,87,177,125,22
1047 | model_data/train/suppply34.jpg 160,77,332,174,30 48,8,149,149,6
1048 | model_data/train/782.jpg 11,16,56,42,19
1049 | model_data/train/onehalf3.jpg 135,183,152,217,2
1050 | model_data/train/cctv2.jpg 302,96,391,133,0 13,145,49,165,0
1051 | model_data/train/suppply20.jpg 15,141,137,302,6 155,138,396,252,30
1052 | model_data/train/a6fc1cf80a40fd06a8c3c06fa9809ec8.jpg 174,133,238,156,41 178,34,234,116,13
1053 | model_data/train/972.jpg 143,0,274,194,23
1054 | model_data/train/1014.jpg 111,303,123,320,24
1055 | model_data/train/580.jpg 217,26,381,276,13 205,334,387,379,41
1056 | model_data/train/suppplies39.jpg 129,30,305,66,44
1057 | model_data/train/594.jpg 114,102,209,119,14
1058 | model_data/train/1000.jpg 264,198,285,214,24
1059 | model_data/train/cctv16.jpg 149,171,220,203,0
1060 | model_data/train/1028.jpg 25,115,76,199,25 76,84,302,184,48 269,225,322,244,48
1061 | model_data/train/suppplies11.jpg 16,13,95,45,18 99,20,148,43,46
1062 | model_data/train/543.jpg 136,61,279,274,13 134,316,283,349,41
1063 | model_data/train/225.jpg 92,73,308,403,5
1064 | model_data/train/231.jpg 59,26,326,370,5
1065 | model_data/train/557.jpg 169,237,244,287,13
1066 | model_data/train/219.jpg 114,75,290,260,5
1067 | model_data/train/45628360d0d08cd1eb228738e2bdfc19.jpg 33,117,216,218,18 227,118,346,191,46
1068 | model_data/train/timg-1.jpg 170,136,288,166,17
1069 | model_data/train/1160.jpg 116,251,136,269,28
1070 | model_data/train/cctvnew17.jpg 15,190,152,242,49
1071 | model_data/train/b7a1b627032a6d97c0cfca4d44eac723.jpg 59,60,279,99,17
1072 | model_data/train/392.jpg 148,190,268,313,9 46,10,288,54,35 286,10,374,55,36
1073 | model_data/train/suppply175.jpg 141,155,223,194,48 240,136,312,229,25
1074 | model_data/train/suppply161.jpg 81,12,298,98,19
1075 | model_data/train/suppply149.jpg 35,100,376,254,19
1076 | model_data/train/1148.jpg 83,212,97,235,28
1077 | model_data/train/e68b69cb536800b1e777ff29a05bbe09.jpg 100,49,362,125,10 110,126,335,204,39
1078 | model_data/train/onehalf12.jpg 27,51,352,262,35 353,198,396,277,36
1079 | model_data/train/351.jpg 4,15,65,58,8 36,13,65,57,34
1080 | model_data/train/deac052a298670af2e76372c73b74ad2.jpg 25,131,240,248,17
1081 | model_data/train/190.jpg 91,389,125,408,4
1082 | model_data/train/suppply9.jpg 102,219,134,277,20
1083 | model_data/train/184.jpg 370,349,396,379,4
1084 | model_data/train/e1cf7510391a3ea407669d5cfea81abf.jpg 64,2,263,32,15 0,0,54,35,45
1085 | model_data/train/153.jpg 281,158,330,204,3
1086 | model_data/train/621.jpg 306,216,319,232,45
1087 | model_data/train/147.jpg 281,129,325,144,3
1088 | model_data/train/370cd2a10f23da9f14130b40981f2f42.jpg 200,95,245,131,47
1089 | model_data/train/609.jpg 251,131,323,175,14
1090 |
--------------------------------------------------------------------------------
/model_data/my_anchors.txt:
--------------------------------------------------------------------------------
1 | 15,17, 23,34, 33,16, 50,55, 56,26, 107,97, 120,37, 222,67, 227,217
--------------------------------------------------------------------------------
/model_data/my_classes.txt:
--------------------------------------------------------------------------------
1 | 1
2 | 2
3 | 3
4 | 4
5 | 5
6 | 6
7 | 7
8 | 8
9 | 9
10 | 10
11 | 11
12 | 12
13 | 13
14 | 14
15 | 15
16 | 16
17 | 17
18 | 18
19 | 19
20 | 20
21 | 21
22 | 22
23 | 23
24 | 24
25 | 25
26 | 26
27 | 27
28 | 28
29 | 29
30 | 30
31 | 7
32 | 27
33 | 8
34 | 8
35 | 9
36 | 10
37 | 10
38 | 10
39 | 10
40 | 11
41 | 12
42 | 14
43 | 17
44 | 17
45 | 22
46 | 16
47 | 19
48 | 21
49 | 26
50 | 1
51 |
--------------------------------------------------------------------------------
/result/result.csv:
--------------------------------------------------------------------------------
1 | 0fbf0dd31f2c1747f245e12c6486612c.jpeg (394,520,352,113) 14 (450,104,247,363) 14
2 | cctvnew3.jpg (426,150,112,30) 1 (61,38,99,43) 1
3 |
--------------------------------------------------------------------------------
/sample/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/sample/.DS_Store
--------------------------------------------------------------------------------
/sample/0fbf0dd31f2c1747f245e12c6486612c.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/sample/0fbf0dd31f2c1747f245e12c6486612c.jpeg
--------------------------------------------------------------------------------
/sample/cctvnew3.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/sample/cctvnew3.jpg
--------------------------------------------------------------------------------
/train.py:
--------------------------------------------------------------------------------
1 | """
2 | Retrain the YOLO model for your own dataset.
3 | """
4 | import numpy as np
5 | import keras.backend as K
6 | from keras.layers import Input, Lambda
7 | from keras.models import Model
8 |
9 | from keras.optimizers import Adam
10 | from keras.callbacks import TensorBoard, ModelCheckpoint, EarlyStopping, ReduceLROnPlateau
11 |
12 | from yolo3.model import preprocess_true_boxes, yolo_body, tiny_yolo_body, yolo_loss
13 | from yolo3.utils import get_random_data
14 |
15 |
def _main():
    """Entry point: load class/anchor metadata, build the model, run training."""
    annotation_path = 'model_data/label.txt'
    log_dir = 'logs/000/'
    classes_path = 'model_data/my_classes.txt'
    anchors_path = 'model_data/my_anchors.txt'
    class_names = get_classes(classes_path)
    anchors = get_anchors(anchors_path)
    num_classes = len(class_names)
    input_shape = (416, 416)  # (h, w); each side must be a multiple of 32
    model = create_model(input_shape, anchors, num_classes,
                         weights_path='model_data/best_weights.h5')
    train(model, annotation_path, input_shape, anchors, num_classes, log_dir=log_dir)
26 |
27 |
def train(model, annotation_path, input_shape, anchors, num_classes, log_dir='logs/'):
    """Compile the YOLO training graph and fit it on the annotation file.

    Args:
        model: model built by create_model (outputs the loss tensor directly).
        annotation_path: text file, one image per line with box annotations.
        input_shape: (h, w) network input size, multiples of 32.
        anchors: (N, 2) anchor array.
        num_classes: number of object classes.
        log_dir: directory for TensorBoard logs and checkpoints.
    """
    # The model's output already IS the loss tensor, so the Keras "loss"
    # is just the identity on y_pred.
    model.compile(optimizer=Adam(lr=1e-4), loss={
        'yolo_loss': lambda y_true, y_pred: y_pred})
    # Write training/validation metrics for TensorBoard.
    logging = TensorBoard(log_dir=log_dir)
    # Save the best weights (by val_loss) after each epoch.
    checkpoint = ModelCheckpoint(log_dir + "best_weights.h5",
                                 verbose=1,
                                 save_weights_only=True, save_best_only=True, mode='auto', period=1)
    # Halve the learning rate when val_loss plateaus.
    reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=15, verbose=1)
    # Stop training once val_loss stops improving.
    early_stopping = EarlyStopping(monitor='val_loss', min_delta=0, patience=10, verbose=1)

    # Fix: reduce_lr and early_stopping were constructed (and described by the
    # comments above) but never registered, so they silently did nothing.
    callbacks = [checkpoint, logging, reduce_lr, early_stopping]
    batch_size = 1
    val_split = 0.1
    with open(annotation_path) as f:
        lines = f.readlines()
    np.random.shuffle(lines)  # randomize the train/val split
    num_val = int(len(lines) * val_split)
    num_train = len(lines) - num_val  # hold out 10% for validation
    print('Train on {} samples, val on {} samples, with batch size {}.'.format(num_train, num_val, batch_size))

    model.fit_generator(data_generator_wrap(lines[:num_train], batch_size, input_shape, anchors, num_classes),
                        steps_per_epoch=max(1, num_train // batch_size),
                        validation_data=data_generator_wrap(lines[num_train:], batch_size, input_shape, anchors,
                                                            num_classes),
                        validation_steps=max(1, num_val // batch_size),
                        epochs=500,
                        initial_epoch=0, callbacks=callbacks)
    model.save_weights(log_dir + 'trained_weights.h5')
    model.save(log_dir + 'model.h5')
61 |
62 |
def get_classes(classes_path):
    """Read class names from *classes_path*, one name per line, stripped."""
    with open(classes_path) as handle:
        return [line.strip() for line in handle.readlines()]
68 |
69 |
def get_anchors(anchors_path):
    """Parse the first line of *anchors_path* (comma-separated w,h pairs) into an (N, 2) array."""
    with open(anchors_path) as handle:
        first_line = handle.readline()
    values = [float(token) for token in first_line.split(',')]
    return np.array(values).reshape(-1, 2)
75 |
76 |
def create_model(input_shape, anchors, num_classes, load_pretrained=False, freeze_body=False,
                 weights_path='model_data/yolo_weights.h5'):
    """Build the YOLOv3 training model: body network plus a loss Lambda layer.

    Args:
        input_shape: (h, w) of the network input, multiples of 32.
        anchors: (N, 2) anchor array; N must be divisible by 3.
        num_classes: number of object classes.
        load_pretrained: if True, load weights from weights_path by name.
        freeze_body: if True (and load_pretrained), freeze all but the last
            7 layers so only the output heads are trained.
        weights_path: .h5 weight file used when load_pretrained is True.

    Returns:
        A Model whose single output is the scalar yolo_loss tensor; inputs
        are [image, y_true_l0, y_true_l1, y_true_l2].
    """
    K.clear_session()  # get a new session
    image_input = Input(shape=(None, None, 3))
    h, w = input_shape
    num_anchors = len(anchors)
    # One ground-truth tensor per detection scale; the three scales downsample
    # the input by 32, 16 and 8 respectively. Last dim is 4 box coords +
    # 1 objectness + num_classes one-hot scores.
    y_true = [Input(shape=(h // {0: 32, 1: 16, 2: 8}[l], w // {0: 32, 1: 16, 2: 8}[l], \
                           num_anchors // 3, num_classes + 5)) for l in range(3)]

    model_body = yolo_body(image_input, num_anchors // 3, num_classes)
    print('Create YOLOv3 model with {} anchors and {} classes.'.format(num_anchors, num_classes))

    if load_pretrained:
        model_body.load_weights(weights_path, by_name=True, skip_mismatch=True)
        print('Load weights {}.'.format(weights_path))
        if freeze_body:
            # Do not freeze 3 output layers.
            num = len(model_body.layers) - 7
            for i in range(num): model_body.layers[i].trainable = False
            print('Freeze the first {} layers of total {} layers.'.format(num, len(model_body.layers)))

    # Wrap yolo_loss in a Lambda so the loss is computed inside the graph;
    # the compiled Keras "loss" is then just the identity on this output.
    model_loss = Lambda(yolo_loss, output_shape=(1,), name='yolo_loss',
                        arguments={'anchors': anchors, 'num_classes': num_classes, 'ignore_thresh': 0.5})(
        [*model_body.output, *y_true])
    model = Model([model_body.input, *y_true], model_loss)
    return model
103 |
104 |
def data_generator(annotation_lines, batch_size, input_shape, anchors, num_classes):
    """Infinite generator yielding ([image_batch, *y_true], dummy_targets).

    Images/boxes come from get_random_data with augmentation enabled; the
    dummy zeros satisfy Keras, since the real loss is computed in-graph.
    """
    n = len(annotation_lines)
    i = 0
    while True:
        image_data = []
        box_data = []
        for b in range(batch_size):
            if i == 0:
                # Fix: reshuffle at the start of EVERY pass over the data.
                # Previously the list was shuffled only once, so every epoch
                # after the first iterated in the identical order.
                np.random.shuffle(annotation_lines)
            image, box = get_random_data(annotation_lines[i], input_shape, random=True)
            image_data.append(image)
            box_data.append(box)
            i = (i + 1) % n
        image_data = np.array(image_data)
        box_data = np.array(box_data)
        y_true = preprocess_true_boxes(box_data, input_shape, anchors, num_classes)
        yield [image_data, *y_true], np.zeros(batch_size)
122 |
123 |
def data_generator_wrap(annotation_lines, batch_size, input_shape, anchors, num_classes):
    """Return a batch generator, or None for an empty dataset / non-positive batch size."""
    if not annotation_lines or batch_size <= 0:
        return None
    return data_generator(annotation_lines, batch_size, input_shape, anchors, num_classes)
128 |
129 |
# Script entry point: train only when executed directly, not on import.
if __name__ == '__main__':
    _main()
132 |
--------------------------------------------------------------------------------
/xml_to_data.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import os
3 | from xml.etree import ElementTree
4 |
5 |
class XML_preprocessor(object):
    """Parse Pascal-VOC style XML annotations into YOLO label lines.

    After construction, ``self.data`` holds one list per image: the first
    element is the image path under ``model_data/train/`` and each following
    element is an ``"xmin,ymin,xmax,ymax,class_id"`` string, with the class
    id shifted from the XML's 1-based labels to 0-based.
    """

    def __init__(self, data_path):
        # data_path: directory containing the .xml annotation files.
        self.path_prefix = data_path
        self.data = []
        self._preprocess_XML()

    def _preprocess_XML(self):
        """Read every annotation file under path_prefix and fill self.data."""
        filenames = os.listdir(self.path_prefix)
        for filename in filenames:
            # Skip macOS Finder metadata, which is not XML.
            if filename == '.DS_Store':
                continue
            temp = []
            # Fix: os.path.join instead of raw concatenation, so the prefix
            # works with or without a trailing separator.
            tree = ElementTree.parse(os.path.join(self.path_prefix, filename))
            root = tree.getroot()
            image_name = root.find('filename').text
            temp.append('model_data/train/' + image_name)
            for object_tree in root.findall('object'):
                class_name = object_tree.find('name').text
                for bounding_box in object_tree.iter('bndbox'):
                    xmin = float(bounding_box.find('xmin').text)
                    ymin = float(bounding_box.find('ymin').text)
                    xmax = float(bounding_box.find('xmax').text)
                    ymax = float(bounding_box.find('ymax').text)
                    # XML labels are 1-based; training expects 0-based ids.
                    temp.append('%d,%d,%d,%d,%d' % (
                        int(xmin), int(ymin), int(xmax), int(ymax),
                        int(class_name) - 1))
            self.data.append(temp)
43 |
44 |
45 | # ## example on how to use it
46 | # import pickle
47 | #
48 | #
# NOTE(review): this runs at import time — parsing every XML under
# model_data/label_train/ and rewriting model_data/label.txt. Consider a
# __main__ guard if this module is ever imported elsewhere.
data = XML_preprocessor("model_data/label_train/").data
# pickle.dump(data, open('data/train.pkl', 'wb'))


# Write one space-separated line per image: "<image_path> <box1> <box2> ...".
with open('model_data/label.txt', 'w')as file:
    for cutWords in data:
        file.write(' '.join(cutWords) + '\n')
56 |
--------------------------------------------------------------------------------
/yolo.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Class definition of YOLO_v3 style detection model on image and video
4 | """
5 |
6 | import colorsys
7 | import os
8 | from timeit import default_timer as timer
9 |
10 | import numpy as np
11 | from keras import backend as K
12 | from keras.models import load_model
13 | from keras.layers import Input
14 | from PIL import Image, ImageFont, ImageDraw
15 |
16 | from yolo3.model import yolo_eval, yolo_body, tiny_yolo_body
17 | from yolo3.utils import letterbox_image
18 | import os
19 | from keras.utils import multi_gpu_model
20 |
21 |
class YOLO(object):
    """YOLOv3 detector: loads a trained Keras model and runs inference on images."""

    # Default configuration; any key may be overridden via **kwargs to __init__.
    _defaults = {
        "model_path": './model_data/2.h5',
        "anchors_path": './model_data/my_anchors.txt',
        "classes_path": './model_data/my_classes.txt',
        "score": 0.3,   # minimum confidence to keep a detection
        "iou": 0.45,    # IoU threshold for non-max suppression
        "model_image_size": (416, 416),
        "gpu_num": 1,
    }

    @classmethod
    def get_defaults(cls, n):
        """Return the default for configuration key *n*.

        NOTE(review): for an unknown key this returns an error *string*
        instead of raising, so callers must inspect the result.
        """
        if n in cls._defaults:
            return cls._defaults[n]
        else:
            return "Unrecognized attribute name '" + n + "'"

    def __init__(self, **kwargs):
        self.__dict__.update(self._defaults)  # set up default values
        self.__dict__.update(kwargs)  # and update with user overrides
        self.class_names = self._get_class()
        self.anchors = self._get_anchors()
        self.sess = K.get_session()
        # Build the output tensors once; detect_image() evaluates them per image.
        self.boxes, self.scores, self.classes = self.generate()

    def _get_class(self):
        """Read class names from classes_path, one per line, stripped."""
        classes_path = os.path.expanduser(self.classes_path)
        with open(classes_path) as f:
            class_names = f.readlines()
        class_names = [c.strip() for c in class_names]
        return class_names

    def _get_anchors(self):
        """Parse the first line of anchors_path into an (N, 2) float array."""
        anchors_path = os.path.expanduser(self.anchors_path)
        with open(anchors_path) as f:
            anchors = f.readline()
        anchors = [float(x) for x in anchors.split(',')]
        return np.array(anchors).reshape(-1, 2)

    def generate(self):
        """Load the model and build the filtered-detection output tensors.

        Returns:
            (boxes, scores, classes) symbolic tensors to be evaluated with a
            feed of the preprocessed image and its original shape.
        """
        model_path = os.path.expanduser(self.model_path)
        assert model_path.endswith('.h5'), 'Keras model or weights must be a .h5 file.'

        # Load model, or construct model and load weights.
        num_anchors = len(self.anchors)
        num_classes = len(self.class_names)
        is_tiny_version = num_anchors == 6  # default setting
        try:
            self.yolo_model = load_model(model_path, compile=False)
        except:
            # Fall back to building the architecture and loading raw weights
            # when the .h5 is weights-only rather than a full saved model.
            self.yolo_model = tiny_yolo_body(Input(shape=(None, None, 3)), num_anchors // 2, num_classes) \
                if is_tiny_version else yolo_body(Input(shape=(None, None, 3)), num_anchors // 3, num_classes)
            self.yolo_model.load_weights(self.model_path)  # make sure model, anchors and classes match
        else:
            # Sanity-check the loaded model's head against anchors/classes.
            assert self.yolo_model.layers[-1].output_shape[-1] == \
                   num_anchors / len(self.yolo_model.output) * (num_classes + 5), \
                'Mismatch between model and given anchor and class sizes'

        print('{} model, anchors, and classes loaded.'.format(model_path))

        # Generate colors for drawing bounding boxes.
        hsv_tuples = [(x / len(self.class_names), 1., 1.)
                      for x in range(len(self.class_names))]
        self.colors = list(map(lambda x: colorsys.hsv_to_rgb(*x), hsv_tuples))
        self.colors = list(
            map(lambda x: (int(x[0] * 255), int(x[1] * 255), int(x[2] * 255)),
                self.colors))
        np.random.seed(10101)  # Fixed seed for consistent colors across runs.
        np.random.shuffle(self.colors)  # Shuffle colors to decorrelate adjacent classes.
        np.random.seed(None)  # Reset seed to default.

        # Generate output tensor targets for filtered bounding boxes.
        self.input_image_shape = K.placeholder(shape=(2,))
        if self.gpu_num >= 2:
            self.yolo_model = multi_gpu_model(self.yolo_model, gpus=self.gpu_num)
        boxes, scores, classes = yolo_eval(self.yolo_model.output, self.anchors,
                                           len(self.class_names), self.input_image_shape,
                                           score_threshold=self.score, iou_threshold=self.iou)
        return boxes, scores, classes

    def detect_image(self, image):
        """Detect objects in a PIL image and draw labeled boxes on it.

        Returns:
            (image, temp) where image has the boxes drawn in place and temp
            alternates "(left,top,width,height)" strings with class names.
        """
        start = timer()

        if self.model_image_size != (None, None):
            assert self.model_image_size[0] % 32 == 0, 'Multiples of 32 required'
            assert self.model_image_size[1] % 32 == 0, 'Multiples of 32 required'
            # Letterbox: resize with unchanged aspect ratio, pad the rest.
            boxed_image = letterbox_image(image, tuple(reversed(self.model_image_size)))
        else:
            new_image_size = (image.width - (image.width % 32),
                              image.height - (image.height % 32))
            boxed_image = letterbox_image(image, new_image_size)
        image_data = np.array(boxed_image, dtype='float32')

        # print(image_data.shape)
        image_data /= 255.
        image_data = np.expand_dims(image_data, 0)  # Add batch dimension.

        # Run the graph built in generate(); boxes come back in the original
        # image's coordinate system because we feed the original (h, w).
        out_boxes, out_scores, out_classes = self.sess.run(
            [self.boxes, self.scores, self.classes],
            feed_dict={
                self.yolo_model.input: image_data,
                self.input_image_shape: [image.size[1], image.size[0]],
                K.learning_phase(): 0
            })

        # print('Found {} boxes for {}'.format(len(out_boxes), 'img'))

        # Font/line sizes scale with the image dimensions.
        font = ImageFont.truetype(font='font/font.ttf',
                                  size=np.floor(3e-2 * image.size[1] + 0.5).astype('int32'))
        thickness = (image.size[0] + image.size[1]) // 300

        temp = []
        for i, c in reversed(list(enumerate(out_classes))):
            predicted_class = self.class_names[c]
            print(predicted_class)
            box = out_boxes[i]
            score = out_scores[i]

            label = '{} {:.2f}'.format(predicted_class, score)
            draw = ImageDraw.Draw(image)
            label_size = draw.textsize(label, font)

            # Boxes are (top, left, bottom, right); clamp to image bounds.
            top, left, bottom, right = box
            top = max(0, np.floor(top + 0.5).astype('int32'))
            left = max(0, np.floor(left + 0.5).astype('int32'))
            bottom = min(image.size[1], np.floor(bottom + 0.5).astype('int32'))
            right = min(image.size[0], np.floor(right + 0.5).astype('int32'))
            # print(label, (left, top), (right, bottom))
            # Record the box as top-left corner plus width/height.
            temp.append(
                '(' + str(left) + ',' + str(top) + ',' + str(abs(left - right)) + ',' + str(abs(top - bottom)) + ')')
            temp.append(predicted_class)
            if top - label_size[1] >= 0:
                text_origin = np.array([left, top - label_size[1]])
            else:
                text_origin = np.array([left, top + 1])

            # My kingdom for a good redistributable image drawing library.
            for i in range(thickness):
                draw.rectangle(
                    [left + i, top + i, right - i, bottom - i],
                    outline=self.colors[c])
            draw.rectangle(
                [tuple(text_origin), tuple(text_origin + label_size)],
                fill=self.colors[c])
            draw.text(text_origin, label, fill=(0, 0, 0), font=font)
            del draw

        end = timer()
        # print(end - start)
        return image, temp  # temp lists each box's top-left corner plus width and height

    def close_session(self):
        """Release the underlying TensorFlow session."""
        self.sess.close()
176 |
177 |
def detect_video(yolo, video_path=0, output_path=""):
    """Run YOLO detection over a video stream, displaying annotated frames.

    Args:
        yolo: a YOLO instance providing detect_image().
        video_path: path to a video file, or 0 for the default webcam.
        output_path: if non-empty, the annotated stream is also written there.
    """
    import cv2
    # Fix: honour the video_path argument; previously camera 0 was hard-coded,
    # making the parameter dead.
    vid = cv2.VideoCapture(video_path)
    if not vid.isOpened():
        raise IOError("Couldn't open webcam or video")
    video_FourCC = int(vid.get(cv2.CAP_PROP_FOURCC))
    video_fps = vid.get(cv2.CAP_PROP_FPS)
    video_size = (int(vid.get(cv2.CAP_PROP_FRAME_WIDTH)),
                  int(vid.get(cv2.CAP_PROP_FRAME_HEIGHT)))
    isOutput = True if output_path != "" else False
    if isOutput:
        print("!!! TYPE:", type(output_path), type(video_FourCC), type(video_fps), type(video_size))
        out = cv2.VideoWriter(output_path, video_FourCC, video_fps, video_size)
    accum_time = 0
    curr_fps = 0
    fps = "FPS: ??"
    prev_time = timer()
    while True:
        return_value, frame = vid.read()
        # Fix: stop cleanly at end-of-stream instead of crashing on a None frame.
        if not return_value:
            break
        image = Image.fromarray(frame)
        # Fix: detect_image returns (image, boxes); the original bound the whole
        # tuple to `image` and then passed a tuple to np.asarray.
        image, _ = yolo.detect_image(image)
        result = np.asarray(image)
        # Maintain a rolling frames-per-second counter over one-second windows.
        curr_time = timer()
        exec_time = curr_time - prev_time
        prev_time = curr_time
        accum_time = accum_time + exec_time
        curr_fps = curr_fps + 1
        if accum_time > 1:
            accum_time = accum_time - 1
            fps = "FPS: " + str(curr_fps)
            curr_fps = 0
        cv2.putText(result, text=fps, org=(3, 15), fontFace=cv2.FONT_HERSHEY_SIMPLEX,
                    fontScale=0.50, color=(255, 0, 0), thickness=2)
        cv2.namedWindow("result", cv2.WINDOW_NORMAL)
        cv2.imshow("result", result)
        if isOutput:
            out.write(result)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    yolo.close_session()
218 |
--------------------------------------------------------------------------------
/yolo3/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/yolo3/.DS_Store
--------------------------------------------------------------------------------
/yolo3/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/yolo3/__init__.py
--------------------------------------------------------------------------------
/yolo3/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/yolo3/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/yolo3/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/yolo3/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/yolo3/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/yolo3/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/yolo3/__pycache__/model.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/yolo3/__pycache__/model.cpython-37.pyc
--------------------------------------------------------------------------------
/yolo3/__pycache__/utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/yolo3/__pycache__/utils.cpython-36.pyc
--------------------------------------------------------------------------------
/yolo3/__pycache__/utils.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ZzzzzZXxxX/yolo3_keras_Logo_Detection/2e6a23b0fa2c06afdf94689733c84b46432fe5c7/yolo3/__pycache__/utils.cpython-37.pyc
--------------------------------------------------------------------------------
/yolo3/model.py:
--------------------------------------------------------------------------------
1 | """YOLO_v3 Model Defined in Keras."""
2 |
3 | from functools import wraps
4 |
5 | import numpy as np
6 | import tensorflow as tf
7 | from keras import backend as K
8 | from keras.layers import Conv2D, Add, ZeroPadding2D, UpSampling2D, Concatenate, MaxPooling2D
9 | from keras.layers.advanced_activations import LeakyReLU
10 | from keras.layers.normalization import BatchNormalization
11 | from keras.models import Model
12 | from keras.regularizers import l2
13 |
14 | from yolo3.utils import compose
15 |
16 |
@wraps(Conv2D)
def DarknetConv2D(*args, **kwargs):
    """Conv2D with Darknet defaults: L2(5e-4) kernel regularization and
    'valid' padding for stride-(2,2) downsampling convs, 'same' otherwise.
    Caller kwargs override the defaults."""
    defaults = {
        'kernel_regularizer': l2(5e-4),
        'padding': 'valid' if kwargs.get('strides') == (2, 2) else 'same',
    }
    defaults.update(kwargs)
    return Conv2D(*args, **defaults)
24 |
def DarknetConv2D_BN_Leaky(*args, **kwargs):
    """Standard Darknet block: bias-free DarknetConv2D followed by
    BatchNormalization and LeakyReLU(alpha=0.1).
    Caller kwargs may override use_bias."""
    conv_kwargs = {'use_bias': False, **kwargs}
    return compose(
        DarknetConv2D(*args, **conv_kwargs),
        BatchNormalization(),
        LeakyReLU(alpha=0.1))
33 |
def resblock_body(x, num_filters, num_blocks):
    """Downsample with a stride-2 3x3 conv, then stack num_blocks
    residual units (1x1 bottleneck -> 3x3 conv -> add shortcut)."""
    # Darknet pads top/left explicitly instead of relying on 'same' padding.
    x = ZeroPadding2D(((1, 0), (1, 0)))(x)
    x = DarknetConv2D_BN_Leaky(num_filters, (3, 3), strides=(2, 2))(x)
    for _ in range(num_blocks):
        shortcut = x
        x = DarknetConv2D_BN_Leaky(num_filters // 2, (1, 1))(x)
        x = DarknetConv2D_BN_Leaky(num_filters, (3, 3))(x)
        x = Add()([shortcut, x])
    return x
45 |
def darknet_body(x):
    """Darknet-53 backbone (52 convolution layers) feature extractor."""
    x = DarknetConv2D_BN_Leaky(32, (3, 3))(x)
    # (filters, residual blocks) per downsampling stage.
    for filters, blocks in ((64, 1), (128, 2), (256, 8), (512, 8), (1024, 4)):
        x = resblock_body(x, filters, blocks)
    return x
55 |
def make_last_layers(x, num_filters, out_filters):
    """Detection head: five alternating 1x1/3x3 Conv2D_BN_Leaky layers,
    then a 3x3 block plus a linear 1x1 conv with out_filters channels.

    Returns (feature map passed on to the next scale, raw prediction tensor).
    """
    for kernel in ((1, 1), (3, 3), (1, 1), (3, 3), (1, 1)):
        width = num_filters if kernel == (1, 1) else num_filters * 2
        x = DarknetConv2D_BN_Leaky(width, kernel)(x)
    y = DarknetConv2D_BN_Leaky(num_filters * 2, (3, 3))(x)
    y = DarknetConv2D(out_filters, (1, 1))(y)
    return x, y
68 |
69 |
def yolo_body(inputs, num_anchors, num_classes):
    """Create the full YOLOv3 model.

    Returns a Model mapping `inputs` to three raw prediction tensors
    [y1, y2, y3] at strides 32/16/8, each with
    num_anchors * (num_classes + 5) output channels.
    """
    out_channels = num_anchors * (num_classes + 5)
    darknet = Model(inputs, darknet_body(inputs))

    # Coarsest scale (stride 32) head.
    x, y1 = make_last_layers(darknet.output, 512, out_channels)

    # Upsample and fuse with the stride-16 backbone feature map (layer 152).
    x = DarknetConv2D_BN_Leaky(256, (1, 1))(x)
    x = UpSampling2D(2)(x)
    x = Concatenate()([x, darknet.layers[152].output])
    x, y2 = make_last_layers(x, 256, out_channels)

    # Upsample and fuse with the stride-8 backbone feature map (layer 92).
    x = DarknetConv2D_BN_Leaky(128, (1, 1))(x)
    x = UpSampling2D(2)(x)
    x = Concatenate()([x, darknet.layers[92].output])
    x, y3 = make_last_layers(x, 128, out_channels)

    return Model(inputs, [y1, y2, y3])
88 |
def tiny_yolo_body(inputs, num_anchors, num_classes):
    """Create the Tiny-YOLOv3 model with two output scales."""
    out_channels = num_anchors * (num_classes + 5)

    # Backbone trunk: alternating 3x3 conv / 2x2 max-pool stages.
    x1 = inputs
    for filters in (16, 32, 64, 128):
        x1 = DarknetConv2D_BN_Leaky(filters, (3, 3))(x1)
        x1 = MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='same')(x1)
    x1 = DarknetConv2D_BN_Leaky(256, (3, 3))(x1)

    x2 = MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='same')(x1)
    x2 = DarknetConv2D_BN_Leaky(512, (3, 3))(x2)
    # Stride-1 pooling keeps the spatial size here (matches Darknet's tiny cfg).
    x2 = MaxPooling2D(pool_size=(2, 2), strides=(1, 1), padding='same')(x2)
    x2 = DarknetConv2D_BN_Leaky(1024, (3, 3))(x2)
    x2 = DarknetConv2D_BN_Leaky(256, (1, 1))(x2)

    # Coarse-scale head.
    y1 = DarknetConv2D_BN_Leaky(512, (3, 3))(x2)
    y1 = DarknetConv2D(out_channels, (1, 1))(y1)

    # Upsample, concatenate with the trunk feature, then the fine-scale head.
    x2 = DarknetConv2D_BN_Leaky(128, (1, 1))(x2)
    x2 = UpSampling2D(2)(x2)
    y2 = Concatenate()([x2, x1])
    y2 = DarknetConv2D_BN_Leaky(256, (3, 3))(y2)
    y2 = DarknetConv2D(out_channels, (1, 1))(y2)

    return Model(inputs, [y1, y2])
120 |
121 |
def yolo_head(feats, anchors, num_classes, input_shape, calc_loss=False):
    """Decode raw conv features into bounding-box parameters.

    Returns (box_xy, box_wh, box_confidence, box_class_probs), with xy/wh
    normalized to the input image; with calc_loss=True returns
    (grid, reshaped feats, box_xy, box_wh) for the loss computation.
    """
    num_anchors = len(anchors)
    # Shape (1, 1, 1, num_anchors, 2) so anchors broadcast over the grid.
    anchors_tensor = K.reshape(K.constant(anchors), [1, 1, 1, num_anchors, 2])

    grid_shape = K.shape(feats)[1:3]  # (height, width)
    # Build an (h, w, 1, 2) grid of cell offsets in (x, y) order.
    grid_y = K.tile(K.reshape(K.arange(0, stop=grid_shape[0]), [-1, 1, 1, 1]),
                    [1, grid_shape[1], 1, 1])
    grid_x = K.tile(K.reshape(K.arange(0, stop=grid_shape[1]), [1, -1, 1, 1]),
                    [grid_shape[0], 1, 1, 1])
    grid = K.cast(K.concatenate([grid_x, grid_y]), K.dtype(feats))

    feats = K.reshape(
        feats, [-1, grid_shape[0], grid_shape[1], num_anchors, num_classes + 5])

    # Sigmoid offsets within each cell; exponential scaling of the anchors.
    box_xy = (K.sigmoid(feats[..., :2]) + grid) / K.cast(grid_shape[::-1], K.dtype(feats))
    box_wh = K.exp(feats[..., 2:4]) * anchors_tensor / K.cast(input_shape[::-1], K.dtype(feats))
    box_confidence = K.sigmoid(feats[..., 4:5])
    box_class_probs = K.sigmoid(feats[..., 5:])

    if calc_loss:
        return grid, feats, box_xy, box_wh
    return box_xy, box_wh, box_confidence, box_class_probs
148 |
149 |
def yolo_correct_boxes(box_xy, box_wh, input_shape, image_shape):
    """Map letterbox-resized network boxes back to absolute
    (y_min, x_min, y_max, x_max) coordinates on the original image."""
    # Work in (y, x) / (h, w) order so the math lines up with image_shape.
    box_yx = box_xy[..., ::-1]
    box_hw = box_wh[..., ::-1]
    input_shape = K.cast(input_shape, K.dtype(box_yx))
    image_shape = K.cast(image_shape, K.dtype(box_yx))
    # Undo the letterbox padding/scaling applied when resizing the input.
    new_shape = K.round(image_shape * K.min(input_shape / image_shape))
    offset = (input_shape - new_shape) / 2. / input_shape
    scale = input_shape / new_shape
    box_yx = (box_yx - offset) * scale
    box_hw = box_hw * scale

    half = box_hw / 2.
    box_mins = box_yx - half
    box_maxes = box_yx + half
    boxes = K.concatenate([
        box_mins[..., 0:1],   # y_min
        box_mins[..., 1:2],   # x_min
        box_maxes[..., 0:1],  # y_max
        box_maxes[..., 1:2],  # x_max
    ])

    # Scale boxes back to original image pixel coordinates.
    boxes = boxes * K.concatenate([image_shape, image_shape])
    return boxes
174 |
175 |
def yolo_boxes_and_scores(feats, anchors, num_classes, input_shape, image_shape):
    """Decode one output layer into flat boxes and per-class scores.

    Returns (boxes, box_scores) shaped (-1, 4) and (-1, num_classes).
    """
    box_xy, box_wh, confidence, class_probs = yolo_head(
        feats, anchors, num_classes, input_shape)
    boxes = yolo_correct_boxes(box_xy, box_wh, input_shape, image_shape)
    boxes = K.reshape(boxes, [-1, 4])
    box_scores = K.reshape(confidence * class_probs, [-1, num_classes])
    return boxes, box_scores
185 |
186 |
def yolo_eval(yolo_outputs,
              anchors,
              num_classes,
              image_shape,
              max_boxes=20,
              score_threshold=.6,
              iou_threshold=.5):
    """Post-process raw YOLO outputs into final detections.

    Applies score thresholding and per-class non-max suppression.
    Returns (boxes_, scores_, classes_) tensors.
    """
    num_layers = len(yolo_outputs)
    # Anchor indices per output layer (full 3-scale model vs. tiny 2-scale).
    anchor_mask = [[6, 7, 8], [3, 4, 5], [0, 1, 2]] if num_layers == 3 else [[3, 4, 5], [1, 2, 3]]
    input_shape = K.shape(yolo_outputs[0])[1:3] * 32

    boxes, box_scores = [], []
    for feats, mask_idx in zip(yolo_outputs, anchor_mask):
        layer_boxes, layer_scores = yolo_boxes_and_scores(
            feats, anchors[mask_idx], num_classes, input_shape, image_shape)
        boxes.append(layer_boxes)
        box_scores.append(layer_scores)
    boxes = K.concatenate(boxes, axis=0)
    box_scores = K.concatenate(box_scores, axis=0)

    mask = box_scores >= score_threshold
    max_boxes_tensor = K.constant(max_boxes, dtype='int32')
    boxes_, scores_, classes_ = [], [], []
    for c in range(num_classes):
        # TODO: use keras backend instead of tf.
        class_boxes = tf.boolean_mask(boxes, mask[:, c])
        class_box_scores = tf.boolean_mask(box_scores[:, c], mask[:, c])
        # Per-class NMS, keeping at most max_boxes detections.
        keep = tf.image.non_max_suppression(
            class_boxes, class_box_scores, max_boxes_tensor, iou_threshold=iou_threshold)
        class_boxes = K.gather(class_boxes, keep)
        class_box_scores = K.gather(class_box_scores, keep)
        boxes_.append(class_boxes)
        scores_.append(class_box_scores)
        classes_.append(K.ones_like(class_box_scores, 'int32') * c)

    return (K.concatenate(boxes_, axis=0),
            K.concatenate(scores_, axis=0),
            K.concatenate(classes_, axis=0))
230 |
231 |
def preprocess_true_boxes(true_boxes, input_shape, anchors, num_classes):
    '''Preprocess true boxes to training input format.

    NOTE(review): original source lines 249-268 were garbled in this dump
    (content after a `<` was stripped, leaving a fused `assert` line);
    this body restores them from the surviving loop body and the block's
    own structure — verify against the project history.

    Parameters
    ----------
    true_boxes: array, shape=(m, T, 5)
        Absolute x_min, y_min, x_max, y_max, class_id relative to input_shape.
    input_shape: array-like, hw, multiples of 32
    anchors: array, shape=(N, 2), wh
    num_classes: integer

    Returns
    -------
    y_true: list of array, shape like yolo_outputs, xywh are relative values

    '''
    assert (true_boxes[..., 4] < num_classes).all(), 'class id must be less than num_classes'
    num_layers = len(anchors) // 3  # default setting
    anchor_mask = [[6, 7, 8], [3, 4, 5], [0, 1, 2]] if num_layers == 3 else [[3, 4, 5], [1, 2, 3]]

    true_boxes = np.array(true_boxes, dtype='float32')
    input_shape = np.array(input_shape, dtype='int32')
    # Convert corner boxes to center-xy / wh, normalized by the input size.
    boxes_xy = (true_boxes[..., 0:2] + true_boxes[..., 2:4]) // 2
    boxes_wh = true_boxes[..., 2:4] - true_boxes[..., 0:2]
    true_boxes[..., 0:2] = boxes_xy / input_shape[::-1]
    true_boxes[..., 2:4] = boxes_wh / input_shape[::-1]

    m = true_boxes.shape[0]
    # One grid per output layer: strides 32/16/8.
    grid_shapes = [input_shape // {0: 32, 1: 16, 2: 8}[l] for l in range(num_layers)]
    y_true = [np.zeros((m, grid_shapes[l][0], grid_shapes[l][1], len(anchor_mask[l]), 5 + num_classes),
                       dtype='float32') for l in range(num_layers)]

    # Expand dim to apply broadcasting (anchors centered at origin).
    anchors = np.expand_dims(anchors, 0)
    anchor_maxes = anchors / 2.
    anchor_mins = -anchor_maxes
    valid_mask = boxes_wh[..., 0] > 0

    for b in range(m):
        # Discard zero (padding) rows.
        wh = boxes_wh[b, valid_mask[b]]
        if len(wh) == 0: continue
        # Expand dim to apply broadcasting.
        wh = np.expand_dims(wh, -2)
        box_maxes = wh / 2.
        box_mins = -box_maxes

        # IoU of each true box against every anchor (both origin-centered).
        intersect_mins = np.maximum(box_mins, anchor_mins)
        intersect_maxes = np.minimum(box_maxes, anchor_maxes)
        intersect_wh = np.maximum(intersect_maxes - intersect_mins, 0.)
        intersect_area = intersect_wh[..., 0] * intersect_wh[..., 1]
        box_area = wh[..., 0] * wh[..., 1]
        anchor_area = anchors[..., 0] * anchors[..., 1]
        iou = intersect_area / (box_area + anchor_area - intersect_area)

        # Find best anchor for each true box.
        best_anchor = np.argmax(iou, axis=-1)

        for t, n in enumerate(best_anchor):
            for l in range(num_layers):
                if n in anchor_mask[l]:
                    # Grid cell (j, i) containing the box center.
                    i = np.floor(true_boxes[b, t, 0] * grid_shapes[l][1]).astype('int32')
                    j = np.floor(true_boxes[b, t, 1] * grid_shapes[l][0]).astype('int32')
                    k = anchor_mask[l].index(n)
                    c = true_boxes[b, t, 4].astype('int32')
                    y_true[l][b, j, i, k, 0:4] = true_boxes[b, t, 0:4]
                    y_true[l][b, j, i, k, 4] = 1
                    y_true[l][b, j, i, k, 5 + c] = 1

    return y_true
302 |
303 |
def box_iou(b1, b2):
    '''Pairwise IoU between two sets of (x, y, w, h) boxes.

    Parameters
    ----------
    b1: tensor, shape=(i1,...,iN, 4), xywh
    b2: tensor, shape=(j, 4), xywh

    Returns
    -------
    iou: tensor, shape=(i1,...,iN, j)

    '''
    def _corners(b):
        # Split a box tensor into (wh, min corner, max corner).
        xy = b[..., :2]
        wh = b[..., 2:4]
        half = wh / 2.
        return wh, xy - half, xy + half

    # Insert axes so the two sets broadcast against each other.
    b1 = K.expand_dims(b1, -2)
    b2 = K.expand_dims(b2, 0)
    b1_wh, b1_mins, b1_maxes = _corners(b1)
    b2_wh, b2_mins, b2_maxes = _corners(b2)

    intersect_mins = K.maximum(b1_mins, b2_mins)
    intersect_maxes = K.minimum(b1_maxes, b2_maxes)
    intersect_wh = K.maximum(intersect_maxes - intersect_mins, 0.)
    intersect_area = intersect_wh[..., 0] * intersect_wh[..., 1]
    b1_area = b1_wh[..., 0] * b1_wh[..., 1]
    b2_area = b2_wh[..., 0] * b2_wh[..., 1]
    return intersect_area / (b1_area + b2_area - intersect_area)
343 |
344 |
345 | def yolo_loss(args, anchors, num_classes, ignore_thresh=.5, print_loss=False):
346 | '''Return yolo_loss tensor
347 |
348 | Parameters
349 | ----------
350 | yolo_outputs: list of tensor, the output of yolo_body or tiny_yolo_body
351 | y_true: list of array, the output of preprocess_true_boxes
352 | anchors: array, shape=(N, 2), wh
353 | num_classes: integer
354 | ignore_thresh: float, the iou threshold whether to ignore object confidence loss
355 |
356 | Returns
357 | -------
358 | loss: tensor, shape=(1,)
359 |
360 | '''
361 | num_layers = len(anchors)//3 # default setting
362 | yolo_outputs = args[:num_layers]
363 | y_true = args[num_layers:]
364 | anchor_mask = [[6,7,8], [3,4,5], [0,1,2]] if num_layers==3 else [[3,4,5], [1,2,3]]
365 | input_shape = K.cast(K.shape(yolo_outputs[0])[1:3] * 32, K.dtype(y_true[0]))
366 | grid_shapes = [K.cast(K.shape(yolo_outputs[l])[1:3], K.dtype(y_true[0])) for l in range(num_layers)]
367 | loss = 0
368 | m = K.shape(yolo_outputs[0])[0] # batch size, tensor
369 | mf = K.cast(m, K.dtype(yolo_outputs[0]))
370 |
371 | for l in range(num_layers):
372 | object_mask = y_true[l][..., 4:5]
373 | true_class_probs = y_true[l][..., 5:]
374 |
375 | grid, raw_pred, pred_xy, pred_wh = yolo_head(yolo_outputs[l],
376 | anchors[anchor_mask[l]], num_classes, input_shape, calc_loss=True)
377 | pred_box = K.concatenate([pred_xy, pred_wh])
378 |
379 | # Darknet raw box to calculate loss.
380 | raw_true_xy = y_true[l][..., :2]*grid_shapes[l][::-1] - grid
381 | raw_true_wh = K.log(y_true[l][..., 2:4] / anchors[anchor_mask[l]] * input_shape[::-1])
382 | raw_true_wh = K.switch(object_mask, raw_true_wh, K.zeros_like(raw_true_wh)) # avoid log(0)=-inf
383 | box_loss_scale = 2 - y_true[l][...,2:3]*y_true[l][...,3:4]
384 |
385 | # Find ignore mask, iterate over each of batch.
386 | ignore_mask = tf.TensorArray(K.dtype(y_true[0]), size=1, dynamic_size=True)
387 | object_mask_bool = K.cast(object_mask, 'bool')
388 | def loop_body(b, ignore_mask):
389 | true_box = tf.boolean_mask(y_true[l][b,...,0:4], object_mask_bool[b,...,0])
390 | iou = box_iou(pred_box[b], true_box)
391 | best_iou = K.max(iou, axis=-1)
392 | ignore_mask = ignore_mask.write(b, K.cast(best_iou0:
61 | np.random.shuffle(box)
62 | if len(box)>max_boxes: box = box[:max_boxes]
63 | box[:, [0,2]] = box[:, [0,2]]*scale + dx
64 | box[:, [1,3]] = box[:, [1,3]]*scale + dy
65 | box_data[:len(box)] = box
66 |
67 | return image_data, box_data
68 |
69 | # resize image
70 | new_ar = w/h * rand(1-jitter,1+jitter)/rand(1-jitter,1+jitter)
71 | scale = rand(.25, 2)
72 | if new_ar < 1:
73 | nh = int(scale*h)
74 | nw = int(nh*new_ar)
75 | else:
76 | nw = int(scale*w)
77 | nh = int(nw/new_ar)
78 | image = image.resize((nw,nh), Image.BICUBIC)
79 |
80 | # place image
81 | dx = int(rand(0, w-nw))
82 | dy = int(rand(0, h-nh))
83 | new_image = Image.new('RGB', (w,h), (128,128,128))
84 | new_image.paste(image, (dx, dy))
85 | image = new_image
86 |
87 | # flip image or not
88 | flip = rand()<.5
89 | if flip: image = image.transpose(Image.FLIP_LEFT_RIGHT)
90 |
91 | # distort image
92 | hue = rand(-hue, hue)
93 | sat = rand(1, sat) if rand()<.5 else 1/rand(1, sat)
94 | val = rand(1, val) if rand()<.5 else 1/rand(1, val)
95 | x = rgb_to_hsv(np.array(image)/255.)
96 | x[..., 0] += hue
97 | x[..., 0][x[..., 0]>1] -= 1
98 | x[..., 0][x[..., 0]<0] += 1
99 | x[..., 1] *= sat
100 | x[..., 2] *= val
101 | x[x>1] = 1
102 | x[x<0] = 0
103 | image_data = hsv_to_rgb(x) # numpy array, 0 to 1
104 |
105 | # correct boxes
106 | box_data = np.zeros((max_boxes,5))
107 | if len(box)>0:
108 | np.random.shuffle(box)
109 | box[:, [0,2]] = box[:, [0,2]]*nw/iw + dx
110 | box[:, [1,3]] = box[:, [1,3]]*nh/ih + dy
111 | if flip: box[:, [0,2]] = w - box[:, [2,0]]
112 | box[:, 0:2][box[:, 0:2]<0] = 0
113 | box[:, 2][box[:, 2]>w] = w
114 | box[:, 3][box[:, 3]>h] = h
115 | box_w = box[:, 2] - box[:, 0]
116 | box_h = box[:, 3] - box[:, 1]
117 | box = box[np.logical_and(box_w>1, box_h>1)] # discard invalid box
118 | if len(box)>max_boxes: box = box[:max_boxes]
119 | box_data[:len(box)] = box
120 |
121 | return image_data, box_data
122 |
--------------------------------------------------------------------------------
/yolo_images.py:
--------------------------------------------------------------------------------
1 | import os
2 | from yolo import YOLO
3 | from PIL import Image
4 | import matplotlib.pyplot as plt
5 | from tqdm import tqdm
6 |
# Directory of input images to run detection over (used via concatenation,
# so the trailing separator matters).
path_test = "./sample/"
# Unused alternative input path — presumably a leftover; TODO confirm removable.
path_test_1 = ""
# Output file for tab-separated per-image detection results.
path_result = "./result/result.csv"
10 |
11 |
def detect_img(yolo, path_result):
    """Run YOLO detection on every image under `path_test`.

    Shows each annotated image, collects per-image detection info
    (filename first, then whatever `yolo.detect_image` returned), and
    writes one tab-separated line per image to `path_result`.

    Parameters
    ----------
    yolo: a YOLO instance providing detect_image(PIL.Image) -> (image, list)
    path_result: output file path for the tab-separated results
    """
    filenames = os.listdir(path_test)
    info = []
    for filename in tqdm(filenames):
        img_path = os.path.join(path_test, filename)
        try:
            image = Image.open(img_path)
            r_image, temp = yolo.detect_image(image)
            plt.imshow(r_image)
            plt.show()
            temp.insert(0, filename)
            info.append(temp)
        except Exception as e:
            # Best-effort: skip unreadable/undetectable files, but report
            # the actual error instead of silently discarding it.
            print("错误文件:" + img_path)
            print(e)

    with open(path_result, 'w') as file:
        for cutWords in info:
            # str() each field: detect_image may return non-string values.
            file.write('\t'.join(str(w) for w in cutWords) + '\n')
32 |
33 |
if __name__ == '__main__':
    # Build the detector once, then sweep the sample directory.
    detector = YOLO()
    detect_img(detector, path_result)
36 |
--------------------------------------------------------------------------------