├── .idea
│   ├── HyperLPR_Python.iml
│   ├── misc.xml
│   ├── modules.xml
│   └── workspace.xml
├── Readme.md
├── benchmark.py
├── dataset
│   ├── 0.jpg
│   ├── 1.jpg
│   ├── 10.jpg
│   ├── 12.JPG
│   ├── 14.JPG
│   ├── 2.jpg
│   ├── 24.jpg
│   ├── 25.JPG
│   ├── 3144391.png
│   ├── 4.jpg
│   ├── 5.jpg
│   ├── 6.jpg
│   ├── 7.jpg
│   ├── 8.jpg
│   └── 9.jpg
├── hyperlpr
│   ├── __init__.py
│   ├── deskew.py
│   ├── detect.py
│   ├── finemapping.py
│   ├── pipline.py
│   ├── recognizer.py
│   └── segmentation.py
├── model
│   ├── cascade.xml
│   ├── char_judgement.h5
│   └── char_rec.h5
├── res
│   ├── 1.png
│   ├── 10.png
│   ├── 2.png
│   ├── 3.png
│   ├── 4.png
│   ├── 5.png
│   ├── 6.png
│   ├── 7.png
│   ├── 8.png
│   └── 9.png
└── simple_.py
/Readme.md:
--------------------------------------------------------------------------------
1 | ## HyperLPR Python
2 |
3 | The repository has moved to: https://github.com/zeusees/HyperLPR
4 |
--------------------------------------------------------------------------------
/benchmark.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/benchmark.py
--------------------------------------------------------------------------------
/dataset/0.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/dataset/0.jpg
--------------------------------------------------------------------------------
/dataset/1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/dataset/1.jpg
--------------------------------------------------------------------------------
/dataset/10.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/dataset/10.jpg
--------------------------------------------------------------------------------
/dataset/12.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/dataset/12.JPG
--------------------------------------------------------------------------------
/dataset/14.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/dataset/14.JPG
--------------------------------------------------------------------------------
/dataset/2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/dataset/2.jpg
--------------------------------------------------------------------------------
/dataset/24.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/dataset/24.jpg
--------------------------------------------------------------------------------
/dataset/25.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/dataset/25.JPG
--------------------------------------------------------------------------------
/dataset/3144391.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/dataset/3144391.png
--------------------------------------------------------------------------------
/dataset/4.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/dataset/4.jpg
--------------------------------------------------------------------------------
/dataset/5.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/dataset/5.jpg
--------------------------------------------------------------------------------
/dataset/6.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/dataset/6.jpg
--------------------------------------------------------------------------------
/dataset/7.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/dataset/7.jpg
--------------------------------------------------------------------------------
/dataset/8.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/dataset/8.jpg
--------------------------------------------------------------------------------
/dataset/9.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/dataset/9.jpg
--------------------------------------------------------------------------------
/hyperlpr/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/hyperlpr/__init__.py
--------------------------------------------------------------------------------
/hyperlpr/deskew.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/hyperlpr/deskew.py
--------------------------------------------------------------------------------
/hyperlpr/detect.py:
--------------------------------------------------------------------------------
1 |
2 | import cv2
3 | import numpy as np
4 |
5 |
6 |
7 | watch_cascade = cv2.CascadeClassifier('./model/cascade.xml')
8 |
9 |
10 | def computeSafeRegion(shape,bounding_rect):
11 | top = bounding_rect[1] # y
12 | bottom = bounding_rect[1] + bounding_rect[3] # y + h
13 | left = bounding_rect[0] # x
14 | right = bounding_rect[0] + bounding_rect[2] # x + w
15 |
16 | min_top = 0
17 | max_bottom = shape[0]
18 | min_left = 0
19 | max_right = shape[1]
20 |
21 |     # print "computeSafeRegion input shape",shape
22 | if top < min_top:
23 | top = min_top
24 | # print "tap top 0"
25 | if left < min_left:
26 | left = min_left
27 | # print "tap left 0"
28 |
29 | if bottom > max_bottom:
30 | bottom = max_bottom
31 | #print "tap max_bottom max"
32 | if right > max_right:
33 | right = max_right
34 | #print "tap max_right max"
35 |
36 | # print "corr",left,top,right,bottom
37 | return [left,top,right-left,bottom-top]
38 |
39 |
40 | def cropped_from_image(image,rect):
41 | x, y, w, h = computeSafeRegion(image.shape,rect)
42 | return image[y:y+h,x:x+w]
43 |
44 |
45 | def detectPlateRough(image_gray, resize_h=720, en_scale=1.06):  # expects a colour image; converted to gray below
46 |     scale = image_gray.shape[1]/float(image_gray.shape[0])
47 |     image = cv2.resize(image_gray, (int(scale*resize_h), resize_h))
48 |     image_gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
49 |     watches = watch_cascade.detectMultiScale(image_gray, en_scale, 1, minSize=(36, 9))
50 | 
51 |     cropped_images = []
52 |     for (x, y, w, h) in watches:
53 |         x -= w * 0.1   # widen the raw cascade hit a little on each side
54 |         w += w * 0.2
55 |         y -= h * 0.6   # and grow it vertically so the whole plate is kept
56 |         h += h * 1.1
57 | 
58 |         cropped = cropped_from_image(image, (int(x), int(y), int(w), int(h)))
59 |         cropped_images.append(cropped)
60 |     return cropped_images
61 |
--------------------------------------------------------------------------------
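
The rough detector above can be exercised on its own. Below is a minimal, hypothetical driver (Python 2, matching the repository), assuming it is run from the repository root so that the relative path './model/cascade.xml' loaded at import time resolves; dataset/0.jpg is only an example input.

```python
# Sketch: run the cascade-based rough plate detector on one bundled test image.
import sys
sys.path.append("./hyperlpr")    # so that "import detect" finds the module

import cv2
import detect

img = cv2.imread("dataset/0.jpg")           # any BGR image containing a plate
candidates = detect.detectPlateRough(img)   # roughly cropped plate regions (colour)
print len(candidates), "candidate region(s)"
for i, plate in enumerate(candidates):
    cv2.imwrite("plate_%d.png" % i, plate)
```
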
/hyperlpr/finemapping.py:
--------------------------------------------------------------------------------
1 |
2 | import cv2
3 | import numpy as np
4 |
5 | from skimage.filters import (threshold_otsu, threshold_niblack,
6 | threshold_sauvola)
7 |
8 |
9 | def fitLine_ransac(pts, zero_add=0):  # despite the name, a robust cv2.fitLine (Huber distance), not RANSAC
10 | if len(pts)>=2:
11 | [vx, vy, x, y] = cv2.fitLine(pts, cv2.DIST_HUBER, 0, 0.01, 0.01)
12 | lefty = int((-x * vy / vx) + y)
13 | righty = int(((136- x) * vy / vx) + y)
14 | return lefty+30+zero_add,righty+30+zero_add
15 | return 0,0
16 |
17 |
18 | def findContoursAndDrawBoundingBox(gray_image):
19 |
20 |
21 |     line_upper = []
22 |     line_lower = []
23 |
24 | line_experiment = []
25 |
26 | grouped_rects = []
27 | for k in np.linspace(-1.8, -0.2,5):
28 | thresh_niblack = threshold_niblack(gray_image, window_size=25, k=k)
29 | binary_niblack = gray_image > thresh_niblack
30 | binary_niblack = binary_niblack.astype(np.uint8) * 255
31 | imagex, contours, hierarchy = cv2.findContours(binary_niblack.copy(),cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
32 | for contour in contours:
33 | bdbox = cv2.boundingRect(contour)
34 | if (bdbox[3]/float(bdbox[2])>0.5 and bdbox[3]*bdbox[2]>100 and bdbox[3]*bdbox[2]<1300) or (bdbox[3]/float(bdbox[2])>3 and bdbox[3]*bdbox[2]<100):
35 | # cv2.rectangle(rgb,(bdbox[0],bdbox[1]),(bdbox[0]+bdbox[2],bdbox[1]+bdbox[3]),(255,0,0),1)
36 | line_upper.append([bdbox[0],bdbox[1]])
37 | line_lower.append([bdbox[0]+bdbox[2],bdbox[1]+bdbox[3]])
38 | line_experiment.append([bdbox[0],bdbox[1]])
39 | line_experiment.append([bdbox[0]+bdbox[2],bdbox[1]+bdbox[3]])
40 | # grouped_rects.append(bdbox)
41 |
42 | rgb = cv2.copyMakeBorder(gray_image,30,30,0,0,cv2.BORDER_REPLICATE)
43 | leftyA, rightyA = fitLine_ransac(np.array(line_lower),2)
44 | rows,cols = rgb.shape[:2]
45 |
46 | # rgb = cv2.line(rgb, (cols - 1, rightyA), (0, leftyA), (0, 0, 255), 1,cv2.LINE_AA)
47 |
48 | leftyB, rightyB = fitLine_ransac(np.array(line_upper),-2)
49 |
50 | rows,cols = rgb.shape[:2]
51 |
52 | # rgb = cv2.line(rgb, (cols - 1, rightyB), (0, leftyB), (0,255, 0), 1,cv2.LINE_AA)
53 | pts_map1 = np.float32([[cols - 1, rightyA], [0, leftyA],[cols - 1, rightyB], [0, leftyB]])
54 | pts_map2 = np.float32([[136,36],[0,36],[136,0],[0,0]])
55 |
56 | mat = cv2.getPerspectiveTransform(pts_map1,pts_map2)
57 | image = cv2.warpPerspective(rgb,mat,(136,36))
58 | return image
--------------------------------------------------------------------------------
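
The deskewing step can likewise be run in isolation. The sketch below is hypothetical (Python 2, same OpenCV 3.x / scikit-image environment the module itself imports): it mirrors what pipline.SimpleRecognizePlate does before segmentation, shrinking a rough crop to the 136x36 working size and letting findContoursAndDrawBoundingBox fit the plate's upper and lower borders and warp the band flat.

```python
# Sketch: deskew one rough plate crop onto the 136x36 canvas.
import sys
sys.path.append("./hyperlpr")

import cv2
import detect
import finemapping as fm

img = cv2.imread("dataset/0.jpg")                       # example input
for i, crop in enumerate(detect.detectPlateRough(img)):
    gray = cv2.cvtColor(crop, cv2.COLOR_RGB2GRAY)
    gray = cv2.resize(gray, (136, 36))                  # the size the module expects
    rectified = fm.findContoursAndDrawBoundingBox(gray)
    cv2.imwrite("rectified_%d.png" % i, rectified)
```
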
/hyperlpr/pipline.py:
--------------------------------------------------------------------------------
1 | #coding=utf-8
2 | import detect
3 | import finemapping as fm
4 |
5 | import segmentation
6 |
7 | import cv2
8 |
9 |
10 | import time
11 |
12 | def SimpleRecognizePlate(image):
13 |     t0 = time.time()
14 |     images = detect.detectPlateRough(image)   # rough plate candidates from the cascade
15 |     for image in images:
16 |         image = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
17 |         image = cv2.resize(image, (136, 36))
18 | 
19 |         image_gray = fm.findContoursAndDrawBoundingBox(image)   # deskew onto the 136x36 canvas
20 |         cv2.imshow("image", image_gray)
21 |         cv2.waitKey(0)
22 |         blocks, res, confidence = segmentation.slidingWindowsEval(image_gray)
23 |         if confidence > 4.5:
24 |             print "License plate:", res, "confidence:", confidence
25 |         else:
26 |             print "Uncertain plate:", res, "confidence:", confidence
27 | 
28 |     print time.time() - t0, "s"
29 | 
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
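
A complete run is driven through SimpleRecognizePlate. A minimal driver in the spirit of the repository's simple_.py (whose content is not included in this dump) might look like the sketch below; note that the function opens debug windows via cv2.imshow/cv2.waitKey, so it needs a display, and the model files must be reachable as ./model/... from the working directory.

```python
# Sketch: end-to-end run (Python 2): detection -> deskew -> segmentation -> recognition.
import sys
sys.path.append("./hyperlpr")    # pipline uses plain "import detect", "import segmentation", ...

import cv2
import pipline

image = cv2.imread("dataset/0.jpg")   # example image from the bundled dataset
pipline.SimpleRecognizePlate(image)   # prints the recognised plate and its confidence
```
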
/hyperlpr/recognizer.py:
--------------------------------------------------------------------------------
1 | #coding=utf-8
2 | from keras.models import Sequential
3 | from keras.layers import Dense, Dropout, Activation, Flatten
4 | from keras.layers import Convolution2D, MaxPooling2D
5 | from keras.optimizers import SGD
6 | from keras import backend as K
7 |
8 | K.set_image_dim_ordering('tf')
9 |
10 |
11 | import cv2
12 | import numpy as np
13 |
14 |
15 |
16 | index = {u"京": 0, u"沪": 1, u"津": 2, u"渝": 3, u"冀": 4, u"晋": 5, u"蒙": 6, u"辽": 7, u"吉": 8, u"黑": 9, u"苏": 10, u"浙": 11, u"皖": 12,
17 | u"闽": 13, u"赣": 14, u"鲁": 15, u"豫": 16, u"鄂": 17, u"湘": 18, u"粤": 19, u"桂": 20, u"琼": 21, u"川": 22, u"贵": 23, u"云": 24,
18 | u"藏": 25, u"陕": 26, u"甘": 27, u"青": 28, u"宁": 29, u"新": 30, u"0": 31, u"1": 32, u"2": 33, u"3": 34, u"4": 35, u"5": 36,
19 | u"6": 37, u"7": 38, u"8": 39, u"9": 40, u"A": 41, u"B": 42, u"C": 43, u"D": 44, u"E": 45, u"F": 46, u"G": 47, u"H": 48,
20 | u"J": 49, u"K": 50, u"L": 51, u"M": 52, u"N": 53, u"P": 54, u"Q": 55, u"R": 56, u"S": 57, u"T": 58, u"U": 59, u"V": 60,
21 | u"W": 61, u"X": 62, u"Y": 63, u"Z": 64,u"港":65,u"学":66 ,u"O":67 ,u"使":68,u"警":69,u"澳":70,u"挂":71};
22 |
23 | chars = ["京", "沪", "津", "渝", "冀", "晋", "蒙", "辽", "吉", "黑", "苏", "浙", "皖", "闽", "赣", "鲁", "豫", "鄂", "湘", "粤", "桂",
24 | "琼", "川", "贵", "云", "藏", "陕", "甘", "青", "宁", "新", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "A",
25 | "B", "C", "D", "E", "F", "G", "H", "J", "K", "L", "M", "N", "P",
26 | "Q", "R", "S", "T", "U", "V", "W", "X",
27 | "Y", "Z","港","学","O","使","警","澳","挂" ];
28 |
29 |
30 |
31 | def Getmodel_tensorflow(nb_classes):
32 | # nb_classes = len(charset)
33 |
34 | img_rows, img_cols = 23, 23
35 | # number of convolutional filters to use
36 | nb_filters = 32
37 | # size of pooling area for max pooling
38 | nb_pool = 2
39 | # convolution kernel size
40 | nb_conv = 3
41 |
42 | # x = np.load('x.npy')
43 | # y = np_utils.to_categorical(range(3062)*45*5*2, nb_classes)
44 | # weight = ((type_class - np.arange(type_class)) / type_class + 1) ** 3
45 |     # weight = dict(zip(range(3063), weight / weight.mean()))  # adjust class weights so high-frequency characters get priority
46 |
47 | model = Sequential()
48 | model.add(Convolution2D(32, 5, 5,
49 | border_mode='valid',
50 | input_shape=(img_rows, img_cols,1)))
51 | model.add(Activation('relu'))
52 | model.add(MaxPooling2D(pool_size=(nb_pool, nb_pool)))
53 | model.add(Dropout(0.25))
54 | model.add(Convolution2D(32, 3, 3))
55 | model.add(Activation('relu'))
56 | model.add(MaxPooling2D(pool_size=(nb_pool, nb_pool)))
57 | model.add(Dropout(0.25))
58 | model.add(Convolution2D(512, 3, 3))
59 | # model.add(Activation('relu'))
60 | # model.add(MaxPooling2D(pool_size=(nb_pool, nb_pool)))
61 | # model.add(Dropout(0.25))
62 | model.add(Flatten())
63 | model.add(Dense(512))
64 | model.add(Activation('relu'))
65 | model.add(Dropout(0.5))
66 | model.add(Dense(nb_classes))
67 | model.add(Activation('softmax'))
68 | model.compile(loss='categorical_crossentropy',
69 | optimizer='adam',
70 | metrics=['accuracy'])
71 | return model
72 |
73 |
74 |
75 | model = Getmodel_tensorflow(65)
76 |
77 | model.load_weights("./model/char_rec.h5")
78 |
79 |
80 | def SimplePredict(image,pos):
81 | image = cv2.resize(image, (23, 23))
82 | image = cv2.equalizeHist(image)
83 | image = image.astype(np.float) / 255
84 | image -= image.mean()
85 | image = np.expand_dims(image, 3)
86 | res = np.array(model.predict(np.array([image]))[0])
87 |
88 |     zero_add = 0
89 | 
90 |     if pos==0:
91 |         res = res[:31]          # first slot: province abbreviations only (indices 0-30)
92 |     elif pos==1:
93 |         res = res[31+10:65]     # second slot: letters only (indices 41-64)
94 |         zero_add = 31+10
95 |     else:
96 |         res = res[31:]          # remaining slots: digits and letters (indices 31-64)
97 |         zero_add = 31
98 |
99 | max_id = res.argmax()
100 |
101 |
102 | return res.max(),chars[max_id+zero_add],max_id+zero_add
103 |
104 |
--------------------------------------------------------------------------------
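
SimplePredict can also be called on a single character crop. The sketch below is hypothetical (char_0.png stands for any segmented character image, e.g. one of the blocks produced by segmentation.py); the pos argument encodes the character's slot on the plate and restricts the softmax accordingly: province abbreviations for pos 0, letters for pos 1, digits and letters otherwise.

```python
# Sketch: classify one segmented character crop (Python 2); run from the repository
# root so that ./model/char_rec.h5 resolves when recognizer is imported.
import sys
sys.path.append("./hyperlpr")

import cv2
import recognizer

crop = cv2.imread("char_0.png", cv2.IMREAD_GRAYSCALE)       # hypothetical character crop
prob, label, class_id = recognizer.SimplePredict(crop, 0)   # 0 = province slot
print label, prob
```
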
/hyperlpr/segmentation.py:
--------------------------------------------------------------------------------
1 | #coding=utf-8
2 | import cv2
3 | import numpy as np
4 |
5 | from matplotlib import pyplot as plt
6 | import scipy.ndimage.filters as f
7 | import scipy
8 |
9 | import time
10 | import scipy.signal as l
11 |
12 |
13 |
14 |
15 |
16 | from keras.models import Sequential
17 | from keras.layers import Dense, Dropout, Activation, Flatten
18 | from keras.layers import Convolution2D, MaxPooling2D
19 | from keras.optimizers import SGD
20 | from keras import backend as K
21 |
22 | K.set_image_dim_ordering('tf')
23 |
24 |
25 | def Getmodel_tensorflow(nb_classes):
26 | # nb_classes = len(charset)
27 | img_rows, img_cols = 23, 23
28 | # number of convolutional filters to use
29 | nb_filters = 16
30 | # size of pooling area for max pooling
31 | nb_pool = 2
32 | # convolution kernel size
33 | nb_conv = 3
34 | # x = np.load('x.npy')
35 | # y = np_utils.to_categorical(range(3062)*45*5*2, nb_classes)
36 | # weight = ((type_class - np.arange(type_class)) / type_class + 1) ** 3
37 |     # weight = dict(zip(range(3063), weight / weight.mean()))  # adjust class weights so high-frequency characters get priority
38 |
39 | model = Sequential()
40 | model.add(Convolution2D(nb_filters, nb_conv, nb_conv,
41 | border_mode='valid',
42 | input_shape=(img_rows, img_cols,1)))
43 | model.add(Activation('relu'))
44 | model.add(MaxPooling2D(pool_size=(nb_pool, nb_pool)))
45 | model.add(Convolution2D(nb_filters, nb_conv, nb_conv))
46 | model.add(Activation('relu'))
47 | model.add(MaxPooling2D(pool_size=(nb_pool, nb_pool)))
48 | model.add(Flatten())
49 | model.add(Dense(256))
50 | model.add(Dropout(0.5))
51 |
52 | model.add(Activation('relu'))
53 | model.add(Dense(nb_classes))
54 | model.add(Activation('softmax'))
55 | model.compile(loss='categorical_crossentropy',
56 | optimizer='sgd',
57 | metrics=['accuracy'])
58 | return model
59 |
60 |
61 |
62 | model = Getmodel_tensorflow(3)
63 | import os
64 | model.load_weights("./model/char_judgement.h5")
65 |
66 | def get_median(data):
67 | data = sorted(data)
68 | size = len(data)
69 | # print size
70 |
71 |     if size % 2 == 0:  # even number of elements: average the two middle values
72 |         median = (data[size//2]+data[size//2-1])/2
73 |         data[0] = median
74 |     if size % 2 == 1:  # odd number of elements: take the middle value
75 |         median = data[(size-1)//2]
76 |         data[0] = median
77 | return data[0]
78 |
79 | def searchOptimalCuttingPoint(rgb,res_map,start,width_boundingbox,interval_range):
80 | #
81 | # for x in xrange(10):
82 | # res_map = np.vstack((res_map,res_map[-1]))
83 | length = res_map.shape[0]
84 | refine_s = -2;
85 |
86 | if width_boundingbox>20:
87 | refine_s = -9
88 |
89 |
90 |
91 | score_list = []
92 | interval_big = int(width_boundingbox * 0.3) #
93 |
94 | for zero_add in xrange(start,start+50):
95 | # for interval_small in xrange(-0,width_boundingbox/2):
96 | for i in xrange(-8,int(width_boundingbox/1)-8):
97 | for refine in xrange(refine_s,width_boundingbox/2+10):
98 |
99 |
100 |                 p1 = zero_add  # this point is the start of the province character
101 | p2 = p1 + width_boundingbox +refine #
102 | p3 = p2 + width_boundingbox + interval_big+i+1
103 | p4 = p3 + width_boundingbox +refine
104 | p5 = p4 + width_boundingbox +refine
105 | p6 = p5 + width_boundingbox +refine
106 | p7 = p6 + width_boundingbox +refine
107 |
108 | list = [p1,p2,p3,p4,p5,p6,p7]
109 | # bak = rgb.copy()
110 | #
111 | # #
112 | # for one in list:
113 | # cv2.line(bak,(one,1),(one,36),255)
114 | # cv2.imshow("imag", bak)
115 | # cv2.waitKey(0)
116 | if p7>=length:
117 |
118 | # print"break"
119 | # print p7
120 | continue
121 | # score_list = sorted(score_list, key=lambda x: x[0])
122 | # return score_list[0]
123 |
124 | score = res_map[p1][2]*3 -(res_map[p3][1]+res_map[p4][1]+res_map[p5][1]+res_map[p6][1]+res_map[p7][1])+7
125 | # print score
126 | score_list.append([score,[p1,p2,p3,p4,p5,p6,p7]])
127 |
128 | score_list = sorted(score_list , key=lambda x:x[0])
129 | # debug = cv2.cvtColor(rgb,cv2.COLOR_GRAY2RGB)
130 | # for one in score_list[-1][1]:
131 | # cv2.line(debug,(one,0),(one,36),(255,0,0),1)
132 | # #
133 | # cv2.imshow("one",debug)
134 | # cv2.waitKey(0)
135 |
136 | return score_list[-1]
137 |
138 |
139 | import sys
140 |
141 | sys.path.append('../')
142 | import recognizer as cRP
143 | from skimage.filters import (threshold_otsu, threshold_niblack,
144 | threshold_sauvola)
145 |
146 | def refineCrop(sections,width=16):
147 | new_sections = []
148 | for section in sections:
149 |         # cv2.imshow("section",section)
150 |
151 | cv2.blur(section,(3,3),3)
152 |
153 | sec_center = np.array([section.shape[1]/2,section.shape[0]/2])
154 | thresh_niblack = threshold_niblack(section, window_size=17, k=-0.2)
155 | binary_niblack = section > thresh_niblack
156 | binary_niblack = binary_niblack.astype(np.uint8) * 255
157 | imagex, contours, hierarchy = cv2.findContours(binary_niblack,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
158 | boxs = []
159 | for contour in contours:
160 | x,y,w,h = cv2.boundingRect(contour)
161 |
162 | ratio = w/float(h)
163 | if ratio<1 and h>36*0.4 and y<16\
164 | :
165 | box = [x,y,w,h]
166 |
167 | boxs.append([box,np.array([x+w/2,y+h/2])])
168 | # cv2.rectangle(section,(x,y),(x+w,y+h),255,1)
169 |
170 |
171 |
172 |
173 | # print boxs
174 |
175 | dis_ = np.array([ ((one[1]-sec_center)**2).sum() for one in boxs])
176 | if len(dis_)==0:
177 | kernal = [0, 0, section.shape[1], section.shape[0]]
178 | else:
179 | kernal = boxs[dis_.argmin()][0]
180 |
181 | center_c = (kernal[0]+kernal[2]/2,kernal[1]+kernal[3]/2)
182 | w_2 = int(width/2)
183 | h_2 = kernal[3]/2
184 |
185 | if center_c[0] - w_2< 0:
186 | w_2 = center_c[0]
187 | new_box = [center_c[0] - w_2,kernal[1],width,kernal[3]]
188 | # print new_box[2]/float(new_box[3])
189 | if new_box[2]/float(new_box[3])>0.6:
190 |             # print "abnormal aspect ratio"
191 | h = int((new_box[2]/0.35 )/2)
192 | if h>35:
193 | h = 35
194 | new_box[1] = center_c[1]- h
195 | if new_box[1]<0:
196 | new_box[1] = 1
197 |
198 | new_box[3] = h*2
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 | section = section[new_box[1]:new_box[1]+new_box[3],new_box[0]:new_box[0]+new_box[2]]
208 | # cv2.imshow("section",section)
209 | # cv2.waitKey(0)
210 | new_sections.append(section)
211 | # print new_box
212 |
213 |
214 | return new_sections
215 |
216 |
217 |
218 |
219 |
220 | def slidingWindowsEval(image):
221 | windows_size = 16;
222 | stride = 1
223 | height= image.shape[0]
224 | # print image.shape[1]
225 | p = []
226 | ch_p = []
227 |
228 | gain = []
229 | pin=[]
230 | for i in range(0,image.shape[1]-windows_size+1,stride):
231 | data = image[0:height,i:i+windows_size]
232 | data = cv2.resize(data,(23,23))
233 | # cv2.imshow("image",data)
234 | data = cv2.equalizeHist(data)
235 | data = data.astype(np.float)/255
236 | data= np.expand_dims(data,3)
237 | res = model.predict(np.array([data]))
238 | pin.append(res[0])
239 |
240 | p.append(res[0][0]+res[0][2])
241 | ch_p.append(res[0][2])
242 |
243 | gain.append(res.argmax())
244 |
245 | p = np.insert(p,0,0);
246 | p = np.insert(p,len(p),0);
247 | p = f.gaussian_filter1d(np.array(p,dtype=np.float),3)
248 | # print p
249 | sum = image.sum(axis=0)
250 |
251 |
252 | lmin = l.argrelmax(np.array(p),order = 3)[0]
253 | interval = []
254 | for i in xrange(len(lmin)-1):
255 | interval.append(lmin[i+1]-lmin[i])
256 |
257 | if(len(interval)>3):
258 | mid = get_median(interval)
259 | else:
260 | return []
261 |
262 | ch_p = np.array(ch_p)
263 | pin = np.array(pin)
264 | res = searchOptimalCuttingPoint(image,pin,0,mid,3)
265 |
266 | cutting_pts = res[1]
267 | last = cutting_pts[-1] + mid
268 | if last < image.shape[1]:
269 | cutting_pts.append(last)
270 | else:
271 | cutting_pts.append(image.shape[1]-1)
272 |
273 |
274 | name = ""
275 | confidence =0.00;
276 | seg_block = []
277 | for x in xrange(1,len(cutting_pts)):
278 | if x != len(cutting_pts)-1 and x!=1:
279 | section = image[0:36,cutting_pts[x-1]-2:cutting_pts[x]+2]
280 | elif x==1:
281 |
282 | c_head = cutting_pts[x - 1]- 2
283 | if c_head<0:
284 | c_head=0
285 | c_tail = cutting_pts[x] + 2
286 |
287 | section = image[0:36, c_head:c_tail]
288 | elif x==len(cutting_pts)-1:
289 | end = cutting_pts[x]
290 | diff = image.shape[1]-end
291 |
292 | c_head = cutting_pts[x - 1]
293 | c_tail = cutting_pts[x]
294 |
295 | if diff<7 :
296 | section = image[0:36, c_head-5:c_tail+5]
297 |
298 | else:
299 | diff-=1
300 |
301 | section = image[0:36, c_head - diff:c_tail + diff]
302 |
303 |
304 |
305 | elif x==2:
306 | section = image[0:36, cutting_pts[x - 1] - 3:cutting_pts[x-1]+ mid]
307 | else:
308 | section = image[0:36,cutting_pts[x-1]:cutting_pts[x]]
309 | seg_block.append(section)
310 | refined = refineCrop(seg_block,mid-1)
311 | for i,one in enumerate(refined):
312 |
313 |
314 | res_pre = cRP.SimplePredict(one, i )
315 | confidence+=res_pre[0]
316 |
317 | name+= res_pre[1]
318 |
319 | return seg_block,name,confidence
320 |
--------------------------------------------------------------------------------
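
slidingWindowsEval ties the pieces together: a 16-pixel-wide window slides across the 136x36 plate and is scored by the 3-class character-judgement CNN, the per-position scores are smoothed, the median spacing between peaks gives an estimated character width, searchOptimalCuttingPoint then picks the best seven cut points, and each refined block is read by recognizer.SimplePredict. A hypothetical direct call (Python 2, reusing a rectified crop such as the one saved by the finemapping sketch) looks like this; be aware that the function returns an empty list when it finds fewer than four peak intervals, which the unpacking below does not handle.

```python
# Sketch: segment and read one deskewed 136x36 plate image.
import sys
sys.path.append("./hyperlpr")

import cv2
import segmentation

plate = cv2.imread("rectified_0.png", cv2.IMREAD_GRAYSCALE)   # e.g. output of the finemapping sketch
blocks, text, confidence = segmentation.slidingWindowsEval(plate)
print text, confidence   # summed per-character confidence; pipline.py accepts it above 4.5
```
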
/model/cascade.xml:
--------------------------------------------------------------------------------
[model/cascade.xml: OpenCV Haar cascade used by detect.py for rough plate detection; BOOST stages with HAAR features, GAB boosting, 13 stages (up to 100 weak classifiers per stage). The trained stage thresholds, weak-classifier weights and feature rectangles are numeric model data and are omitted here.]
--------------------------------------------------------------------------------
1837 | 0
1838 | <_>
1839 |
1840 | <_>
1841 | 34 1 4 11 -1.
1842 | <_>
1843 | 36 1 2 11 2.
1844 | 0
1845 | <_>
1846 |
1847 | <_>
1848 | 34 3 2 8 -1.
1849 | <_>
1850 | 34 3 1 4 2.
1851 | <_>
1852 | 35 7 1 4 2.
1853 | 0
1854 | <_>
1855 |
1856 | <_>
1857 | 34 4 3 4 -1.
1858 | <_>
1859 | 35 4 1 4 3.
1860 | 0
1861 | <_>
1862 |
1863 | <_>
1864 | 35 1 3 11 -1.
1865 | <_>
1866 | 36 1 1 11 3.
1867 | 0
1868 | <_>
1869 |
1870 | <_>
1871 | 35 2 6 7 -1.
1872 | <_>
1873 | 37 2 2 7 3.
1874 | 0
1875 | <_>
1876 |
1877 | <_>
1878 | 36 4 3 6 -1.
1879 | <_>
1880 | 37 4 1 6 3.
1881 | 0
1882 | <_>
1883 |
1884 | <_>
1885 | 36 4 3 7 -1.
1886 | <_>
1887 | 37 4 1 7 3.
1888 | 0
1889 | <_>
1890 |
1891 | <_>
1892 | 36 9 6 2 -1.
1893 | <_>
1894 | 38 9 2 2 3.
1895 | 0
1896 | <_>
1897 |
1898 | <_>
1899 | 38 1 12 2 -1.
1900 | <_>
1901 | 38 2 12 1 2.
1902 | 0
1903 | <_>
1904 |
1905 | <_>
1906 | 38 8 6 2 -1.
1907 | <_>
1908 | 40 8 2 2 3.
1909 | 0
1910 | <_>
1911 |
1912 | <_>
1913 | 38 10 11 3 -1.
1914 | <_>
1915 | 38 11 11 1 3.
1916 | 0
1917 | <_>
1918 |
1919 | <_>
1920 | 40 1 1 3 -1.
1921 | <_>
1922 | 40 2 1 1 3.
1923 | 0
1924 | <_>
1925 |
1926 | <_>
1927 | 41 0 6 12 -1.
1928 | <_>
1929 | 43 0 2 12 3.
1930 | 0
1931 | <_>
1932 |
1933 | <_>
1934 | 41 2 2 8 -1.
1935 | <_>
1936 | 41 2 1 4 2.
1937 | <_>
1938 | 42 6 1 4 2.
1939 | 0
1940 | <_>
1941 |
1942 | <_>
1943 | 41 4 6 2 -1.
1944 | <_>
1945 | 43 4 2 2 3.
1946 | 0
1947 | <_>
1948 |
1949 | <_>
1950 | 41 4 6 4 -1.
1951 | <_>
1952 | 43 4 2 4 3.
1953 | 0
1954 | <_>
1955 |
1956 | <_>
1957 | 43 2 3 10 -1.
1958 | <_>
1959 | 44 2 1 10 3.
1960 | 0
1961 | <_>
1962 |
1963 | <_>
1964 | 44 9 4 4 -1.
1965 | <_>
1966 | 44 9 2 2 2.
1967 | <_>
1968 | 46 11 2 2 2.
1969 | 0
1970 | <_>
1971 |
1972 | <_>
1973 | 47 0 1 6 -1.
1974 | <_>
1975 | 47 2 1 2 3.
1976 | 0
1977 |
1978 |
--------------------------------------------------------------------------------
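Note: the file above is an OpenCV cascade detector dumped as XML; only its numeric feature rectangles survive in this excerpt. As a rough orientation (this snippet is not part of the repository, and the scaleFactor/minNeighbors/minSize values are illustrative guesses rather than the settings actually used in hyperlpr/detect.py), a cascade like model/cascade.xml would typically be consumed as follows:

    import cv2

    # Load the trained plate detector shipped with the repository.
    plate_cascade = cv2.CascadeClassifier("model/cascade.xml")

    image = cv2.imread("dataset/0.jpg")
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

    # Slide the detector over an image pyramid; each hit is a candidate
    # plate rectangle (x, y, w, h). Parameter values here are illustrative.
    candidates = plate_cascade.detectMultiScale(
        gray, scaleFactor=1.1, minNeighbors=3, minSize=(36, 9))

    for (x, y, w, h) in candidates:
        cv2.rectangle(image, (x, y), (x + w, y + h), (0, 255, 0), 2)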
/model/char_judgement.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/model/char_judgement.h5
--------------------------------------------------------------------------------
/model/char_rec.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/model/char_rec.h5
--------------------------------------------------------------------------------
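Note: char_judgement.h5 and char_rec.h5 are Keras model binaries, referenced here by download URL rather than dumped inline. A minimal sketch for inspecting them after download, assuming the Keras API used by hyperlpr/recognizer.py and hyperlpr/segmentation.py; the roles stated in the comments are inferred from the file names, not from this excerpt:

    from keras.models import load_model

    # Assumed role: binary classifier judging whether a segment is a character.
    judge_model = load_model("model/char_judgement.h5")
    # Assumed role: classifier mapping a character crop to a plate character.
    rec_model = load_model("model/char_rec.h5")

    # Print the expected input/output tensor shapes instead of guessing them.
    print(judge_model.input_shape, judge_model.output_shape)
    print(rec_model.input_shape, rec_model.output_shape)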
/res/1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/res/1.png
--------------------------------------------------------------------------------
/res/10.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/res/10.png
--------------------------------------------------------------------------------
/res/2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/res/2.png
--------------------------------------------------------------------------------
/res/3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/res/3.png
--------------------------------------------------------------------------------
/res/4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/res/4.png
--------------------------------------------------------------------------------
/res/5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/res/5.png
--------------------------------------------------------------------------------
/res/6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/res/6.png
--------------------------------------------------------------------------------
/res/7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/res/7.png
--------------------------------------------------------------------------------
/res/8.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/res/8.png
--------------------------------------------------------------------------------
/res/9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/szad670401/HyperLPR_python/0b17acd2165aa6815c78525dbcdba2d7de80a3eb/res/9.png
--------------------------------------------------------------------------------
/simple_.py:
--------------------------------------------------------------------------------
1 | from hyperlpr import pipline
2 |
3 | import cv2
4 |
5 | # Other sample images from ./dataset; uncomment to try additional plates.
6 | # image1 = cv2.imread("./dataset/0.jpg")
7 | # image2 = cv2.imread("./dataset/1.jpg")
8 | # image3 = cv2.imread("./dataset/5.jpg")
9 | # image4 = cv2.imread("./dataset/6.jpg")
10 |
11 | # Load a single test image and run the end-to-end recognition pipeline.
12 | image5 = cv2.imread("./dataset/3144391.png")
13 |
14 | # pipline.SimpleRecognizePlate(image4)
15 | # pipline.SimpleRecognizePlate(image3)
16 | pipline.SimpleRecognizePlate(image5)
--------------------------------------------------------------------------------
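As a small usage variation (a sketch that is not part of the repository, assuming only the same pipline.SimpleRecognizePlate entry point used by simple_.py), the demo can be pointed at every image in ./dataset rather than one hard-coded file:

    import glob

    import cv2
    from hyperlpr import pipline

    # Collect every sample image regardless of extension case.
    paths = sorted(glob.glob("./dataset/*.jpg") + glob.glob("./dataset/*.JPG") +
                   glob.glob("./dataset/*.png"))

    for path in paths:
        image = cv2.imread(path)
        if image is None:  # skip anything OpenCV cannot decode
            continue
        pipline.SimpleRecognizePlate(image)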