├── requirements.txt
├── README.md
├── utils
│   ├── augment.py
│   ├── show_single.py
│   ├── yizhi.py
│   ├── show_pe.py
│   ├── f1.py
│   ├── search.py
│   ├── dataset.py
│   └── describe.ipynb
├── our_wbf.py
└── main.py
/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy
2 | opencv-python
3 | pandas
4 | seaborn
5 | scipy
6 | torch
7 | ultralytics
8 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Data Science Team - Algorithm Challenge - Road Damage Object Detection Track
2 |
3 | We are the Data Science Team from the School of Future Science and Engineering, Soochow University, and we competed in the algorithm challenge track of the 5th Global Campus Artificial Intelligence Algorithm Elite Competition. In the road damage object detection track we achieved the following results:
4 |
5 | - Leaderboard A: 0.601 (5th nationwide)
6 | - Leaderboard B: 0.415 (6th nationwide)
7 | - Regional round: 1st place, undergraduate group, Region I
8 | - National finals: 2nd place, undergraduate group
9 |
10 | ## Main methods
11 |
12 | We used the following main methods to improve the algorithm's performance:
13 |
14 | - Data augmentation: descriptive statistics of the dataset showed that some road damage categories have very few instances, so we applied data augmentation to balance the instance distribution.
15 | - Image enhancement: in comparative experiments, a defogging method based on histogram equalization gave the largest performance gain, so we chose it as our enhancement step.
16 | - Experimental analysis: we ran detailed experiments to tune each stage of the pipeline.
17 | - WBF-based ensemble prediction: we ensemble several heterogeneous base models with WBF (Weighted Boxes Fusion), which further improves performance; a minimal calling sketch follows this list.
18 | - The ensemble combines 43 strong checkpoints trained from yolov8s and rtdetr-l.
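A minimal sketch of how the per-class weight matrix is passed to `weighted_boxes_fusion1` from `our_wbf.py`. The boxes, scores, and weight values below are made-up illustrative numbers; in `main.py` these lists are built from the saved txt predictions of the 43 models.

```python
from our_wbf import weighted_boxes_fusion1

# Hypothetical predictions from two models for one image,
# boxes in normalized (x1, y1, x2, y2) format, as in main.py.
boxes_list  = [[[0.10, 0.60, 0.30, 0.90]], [[0.11, 0.61, 0.29, 0.88]]]
scores_list = [[0.72], [0.55]]
labels_list = [[1], [1]]

# One row per model, one column per class (8 road-damage classes):
# weights[m][c] scales the confidence of class c predicted by model m.
weights = [
    [1, 1, 1, 1, 1, 1, 1, 1],  # model 0
    [1, 1, 1, 1, 1, 1, 1, 2],  # model 1, trusted more on class 7
]

boxes, scores, labels = weighted_boxes_fusion1(
    boxes_list, scores_list, labels_list,
    weights=weights, iou_thr=0.3, skip_box_thr=0.35)
```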
19 |
20 | ## Additional notes
21 |
22 | We did not modify any model backbone, we used only the 2000 officially provided images for training, and we did not tune any parameters during training.
23 | The 43 .pt files are not included because they are too large; almost every checkpoint scores around 0.4 F1 on its own.
24 | To reproduce the results, adapt the code paths as needed and place the folder containing the .pt files in the same directory as main.py.
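For reference, `main.py` loads its checkpoints from paths of the form `model/<run>/weights/best.pt`, so the expected layout next to `main.py` looks roughly like the tree below (the run names are examples taken from the path lists in `main.py`):

```
.
├── main.py
├── our_wbf.py
└── model/
    ├── B1_/weights/best.pt
    ├── nice1/weights/best.pt
    └── yolo1/weights/best.pt
```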
25 |
26 | ## Code overview
27 |
28 | Our source files and what they do:
29 |
30 | - `utils/describe.ipynb`: statistical analysis of the dataset.
31 | - `utils/augment.py`: data augmentation.
32 | - `utils/dataset.py`: dataset preparation utilities.
33 | - `utils/f1.py`: computes the F1 score from two folders of txt files (see the usage sketch after this list).
34 | - `utils/search.py`: searches for the best conf and iou thresholds.
35 | - `utils/show_pe.py`: visualizes the ensemble predictions.
36 | - `utils/show_single.py`: visualizes the predictions of a single model.
37 | - `utils/yizhi.py`: sets priorities between classes based on confidence and IoU and suppresses the losing boxes.
38 | - `main.py`: the histogram-equalization-based defogging, the inference, and the ensemble prediction.
39 | - `our_wbf.py`: extends WBF so that each class of each model can be weighted separately; the weights form a matrix.
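For reference, the evaluation entry point at the bottom of `utils/f1.py` boils down to the following (the folder paths are the authors' own and should be replaced with your prediction folder and ground-truth label folder):

```python
# predict_folder: txt files produced by a model; label_folder: ground-truth txt files (YOLO format)
cm = calculate_cm(predict_folder='/root/runs/detect/t1/',
                  label_folder='/root/hf_val/labels/',
                  nc=8, iou_thres=0.5)
metrics = np.array(calculate_metrics(cm))  # per-class (precision, recall, f1)
print(metrics[:, 2].sum() / 8)             # macro-averaged F1 over the 8 classes
```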
40 |
41 | These are the source files we used in the competition. Use the file names and descriptions above to locate and run the code you need.
42 |
43 |
44 |
--------------------------------------------------------------------------------
/utils/augment.py:
--------------------------------------------------------------------------------
1 | import os
2 | from PIL import Image
3 |
4 | source_folder_image = ''
5 | source_folder_labels = ''
6 | files = os.listdir(source_folder_labels)
7 | names = list(set(map(lambda x: x.split('.')[0], files)))
8 |
9 | a = ''
10 | b = ''
11 | os.makedirs(a, exist_ok=True)
12 | os.makedirs(b, exist_ok=True)
13 | count = 0
14 |
15 | # Augment only images containing selected (under-represented) categories
16 | for name in names:
17 | with open(source_folder_labels + '/' + name + '.txt', 'r') as label_file:
18 | labels = label_file.readlines()
19 | for label in labels:
20 | category = int(label.split()[0])
21 | image_path = source_folder_image + '/' + name + '.jpg'
22 | image = Image.open(image_path)
23 | if category in [0, 2, 3, 4, 5, 7]:
24 | count += 1
25 |                 # flip the image horizontally
26 |                 flipped_image = image.transpose(Image.FLIP_LEFT_RIGHT)
27 |                 flipped_image.save(a + '/' + name + '_fz.jpg')
28 |                 # write a new label file with mirrored x_center values
29 | flipped_labels = []
30 | for label in labels:
31 | parts = label.split()
32 | category = int(parts[0])
33 | x_center = float(parts[1])
34 | if category in [0, 1, 2, 3, 4, 5, 6, 7]:
35 | x_center_flipped = 1 - x_center
36 | parts[1] = str(x_center_flipped)
37 | flipped_labels.append(' '.join(parts) + '\n')
38 | else:
39 | flipped_labels.append(label)
40 | flipped_labels_path = b + '/' + name + '_fz.txt'
41 | with open(flipped_labels_path, 'w') as flipped_labels_file:
42 | flipped_labels_file.writelines(flipped_labels)
43 | break
44 | print(count)
--------------------------------------------------------------------------------
/utils/show_single.py:
--------------------------------------------------------------------------------
1 | import os
2 | import cv2
3 | # Visualize inference results: draw the YOLO-format boxes from a label folder onto the images
4 | images_folder = '../dataseta/jpgs/'
5 | labels_folder = '../dataseta/43/t4pe0/'
6 | # images_folder = 'dataset1/images/'
7 |
8 | # labels_folder = 'dataset1/labels/'
9 | image_files = sorted(os.listdir(images_folder))
10 | label_files = sorted(os.listdir(labels_folder))
11 |
12 |
13 | for image_file, label_file in zip(image_files, label_files):
14 | image_path = os.path.join(images_folder, image_file)
15 | ww, hh = 1000, 640
16 | image = cv2.imread(image_path)
17 | image = cv2.resize(image, (ww,hh))
18 | try :
19 | with open(labels_folder+image_file.split('.')[0]+'.txt', 'r') as f:
20 | lines = f.readlines()
21 | for line in lines:
22 | parts = line.strip().split()
23 | class_id = int(parts[0])
24 | x_center = float(parts[1])
25 | y_center = float(parts[2])
26 | width = float(parts[3])
27 | height = float(parts[4])
28 | x = int((x_center - width / 2) * 1000)
29 | y = int((y_center - height / 2) * 640)
30 | w = int(width * 1000)
31 | h = int(height * 640)
32 |                 # cls = ['transverse crack', 'longitudinal crack', 'block crack', 'alligator crack', 'pothole', 'repaired mesh crack', 'repaired crack', 'repaired pothole']
33 | colorlst=[(255,0,0),(0,255,0),(0,0,255),(0,255,255),(255,0,255),(255,255,0),(255,255,255),(0,0,0)]
34 | # colorlst=[(255,0,0),(0,255,0),(0,0,255),(0,255,255),(255,0,255),(255,255,0),(0,255,255),(0,0,0)]
35 | cv2.rectangle(image, (x, y), (x + w, y + h), colorlst[class_id], 4)
36 | cv2.putText(image, str(class_id), (x, y - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0, 255, 0), 2)
37 |                 # optionally draw the confidence score (parts[5])
38 | #cv2.putText(image, "{:.2f}".format(float(parts[5])), (x+50, y - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0, 255, 255), 2)
39 | except FileNotFoundError:
40 | print('FileNotFoundError:',labels_folder+image_file.split('.')[0]+'.txt')
41 | pass
42 | cv2.namedWindow(f'{image_file}', cv2.WINDOW_NORMAL)
43 | cv2.resizeWindow(f'{image_file}', 1530, 830)
44 | cv2.moveWindow(f'{image_file}', 0, 0)
45 | cv2.imshow(f'{image_file}', image)
46 | cv2.waitKey(0)
47 | cv2.destroyAllWindows()
48 |
--------------------------------------------------------------------------------
/utils/yizhi.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | # Rule-based suppression: drop crack boxes (classes 0/1) that are mostly covered by a higher-priority block-crack box (class 2)
4 | def area(lst1,lst2):
5 | x1, y1, w1, h1 = lst1
6 | x2, y2, w2, h2 = lst2
7 | x1_min = x1 - w1 / 2
8 | y1_min = y1 - h1 / 2
9 | x1_max = x1 + w1 / 2
10 | y1_max = y1 + h1 / 2
11 | x2_min = x2 - w2 / 2
12 | y2_min = y2 - h2 / 2
13 | x2_max = x2 + w2 / 2
14 | y2_max = y2 + h2 / 2
15 | x_min = max(x1_min, x2_min)
16 | y_min = max(y1_min, y2_min)
17 | x_max = min(x1_max, x2_max)
18 | y_max = min(y1_max, y2_max)
19 | overlap_w = max(0, x_max - x_min)
20 | overlap_h = max(0, y_max - y_min)
21 | overlap_area = overlap_w * overlap_h
22 | return overlap_area
23 | src='dataseta/6/t6pe0/'
24 | save='dataseta/6/result/'
25 | os.makedirs(save, exist_ok=True)
26 | label_files = os.listdir(src)
27 | area_thres=0.8
28 |
29 | for filename in label_files:
30 | with open(src+filename, 'r') as f:
31 | lines = f.readlines()
32 |     lst=[] # all label rows of this file
33 | for line in lines:
34 | alst=list(map(float,line.strip().split(' ')))
35 | alst[0]=int(alst[0])
36 | lst.append(alst)
37 |     index=[] # indices of block-crack (class 2) boxes
38 | for i in range(len(lst)):
39 | if lst[i][0]==2:
40 | index.append(i)
41 |     yizhi_lst=[] # indices of the boxes that will be suppressed
42 |     try:
43 |         if index: # if there is at least one block-crack box
44 |             max_i,max_area=0,0
45 |             for i in index: # find the largest block-crack box
46 | if lst[i][3] * lst[i][4] > max_area:
47 | max_i=i
48 | max_area=lst[i][3] * lst[i][4]
49 | b=True
50 | for i in range(len(lst)):
51 | if lst[i][0] in [0, 1]:
52 | if lst[i][5]>lst[max_i][5] and area(lst[i][1:5], lst[max_i][1:5]) / (lst[i][3] * lst[i][4]) > area_thres:
53 | b=False
54 | if b:
55 | for i in range(len(lst)):
56 | if lst[i][0] in [0, 1]:
57 | if area(lst[i][1:5], lst[max_i][1:5]) / (lst[i][3] * lst[i][4]) > area_thres:
58 | yizhi_lst.append(i)
59 |     except:  # label lines without a confidence column make lst[i][5] fail; skip the rule in that case
60 | pass
61 | with open(save + filename, 'w') as file:
62 | for i in range(len(lst)):
63 | if i not in yizhi_lst:
64 | file.write(" ".join([str(num) for num in lst[i][:5]]) + "\n")
65 |
66 |
67 |
68 |
--------------------------------------------------------------------------------
/utils/show_pe.py:
--------------------------------------------------------------------------------
1 | import os
2 | import cv2
3 | import numpy as np
4 | # Display the ensemble result together with the outputs of several single models
5 | # With many models the grid of tiles becomes hard to read
6 | # Press any key to move on to the next image
7 |
8 | images_folder = 'dataseta/jpgs'
9 | labels_folder = 'dataseta/6/t6pe0/labels'
10 | n=35 # number of sub-models to display
11 | image_files = sorted(os.listdir(images_folder))
12 | label_files = sorted(os.listdir(labels_folder))
13 | labels_folders=[f'dataseta/6/t6pe{i}/labels' for i in range(n)]
14 | for image_file, label_file in zip(image_files, label_files):
15 | image_path = os.path.join(images_folder, image_file)
16 | images = []
17 | ww, hh = 1000, 640
18 | for i in range(n):
19 | image = cv2.imread(image_path)
20 | image = cv2.resize(image, (ww,hh))
21 | label_path = os.path.join(labels_folders[i], label_file)
22 | try :
23 | with open(label_path, 'r') as f:
24 | lines = f.readlines()
25 | for line in lines:
26 | parts = line.strip().split()
27 | class_id = int(parts[0])
28 | x_center = float(parts[1])
29 | y_center = float(parts[2])
30 | width = float(parts[3])
31 | height = float(parts[4])
32 | x = int((x_center - width / 2) * 1000)
33 | y = int((y_center - height / 2) * 640)
34 | w = int(width * 1000)
35 | h = int(height * 640)
36 | colorlst=[(255,0,0),(0,255,0),(0,0,255),(0,255,255),(255,0,255),(255,255,0),(255,255,255),(0,0,0)]
37 | cv2.rectangle(image, (x, y), (x + w, y + h), colorlst[class_id], 2)
38 | cv2.putText(image, str(class_id), (x, y - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0, 255, 0), 2)
39 | except FileNotFoundError:
40 | pass
41 | images.append(image)
42 | canvas = np.zeros((6 * hh, 6 * ww, 3), dtype=np.uint8)
43 | for i in range(n):
44 | row = i // 6
45 | col = i % 6
46 | start_x = col * ww
47 | start_y = row * hh
48 | canvas[start_y:start_y + hh, start_x:start_x + ww, :] = images[i]
49 | cv2.namedWindow(f'{image_file}', cv2.WINDOW_NORMAL)
50 | cv2.resizeWindow(f'{image_file}', 1530, 830)
51 | cv2.moveWindow(f'{image_file}', 0, 0)
52 | cv2.imshow(f'{image_file}', canvas)
53 | cv2.waitKey(0)
54 | cv2.destroyAllWindows()
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
--------------------------------------------------------------------------------
/utils/f1.py:
--------------------------------------------------------------------------------
1 | # Compute the F1 score from two folders of YOLO-format txt files (predictions vs. ground truth)
2 | import numpy as np
3 | import os
4 | def calculate_iou(box1, box2):
5 | area_box1 = (box1[2] - box1[0]) * (box1[3] - box1[1])
6 | area_box2 = (box2[2] - box2[0]) * (box2[3] - box2[1])
7 | x_intersection = max(0, min(box1[2], box2[2]) - max(box1[0], box2[0]))
8 | y_intersection = max(0, min(box1[3], box2[3]) - max(box1[1], box2[1]))
9 | intersection = x_intersection * y_intersection
10 | union = area_box1 + area_box2 - intersection
11 | iou = intersection / union
12 | return iou
13 | def parse_boxes(txt_file):
14 | if os.path.exists(txt_file):
15 | with open(txt_file, 'r') as file:
16 | lines = file.readlines()
17 | else:
18 | lines=[]
19 | boxes = []
20 | for line in lines:
21 | data = line.strip().split(' ')
22 | label = int(data[0])
23 | x_center, y_center, width, height = map(float, data[1:])
24 | x1 = x_center - width / 2
25 | y1 = y_center - height / 2
26 | x2 = x_center + width / 2
27 | y2 = y_center + height / 2
28 | boxes.append((label, x1, y1, x2, y2))
29 | return boxes
30 | def calculate_cm(predict_folder, label_folder, nc, iou_thres=0.5):
31 | cm = np.zeros((nc + 1, nc + 1), dtype=int)
32 | for filename in os.listdir(label_folder):
33 | predict_file = os.path.join(predict_folder, filename)
34 | label_file = os.path.join(label_folder, filename)
35 | predictions = parse_boxes(predict_file)
36 | labels = parse_boxes(label_file)
37 | matched_preds = set()
38 | matched_labels = set()
39 | for i, pred in enumerate(predictions):
40 | for j, label in enumerate(labels):
41 | if calculate_iou(pred[1:], label[1:]) >= iou_thres:
42 | if i not in matched_preds and j not in matched_labels:
43 | cm[pred[0]][label[0]] += 1
44 | matched_preds.add(i)
45 | matched_labels.add(j)
46 | break
47 | for i, pred in enumerate(predictions):
48 | if i not in matched_preds:
49 | cm[pred[0]][-1] += 1
50 | for j, label in enumerate(labels):
51 | if j not in matched_labels:
52 | cm[-1][label[0]] += 1
53 | return cm
54 | def calculate_metrics(confusion_matrix):
55 | num_classes = confusion_matrix.shape[0] - 1
56 | metrics = []
57 | for class_id in range(num_classes):
58 | true_positives = confusion_matrix[class_id, class_id]
59 |         false_negatives = np.sum(confusion_matrix[:, class_id]) - true_positives  # column = ground-truth class
60 |         false_positives = np.sum(confusion_matrix[class_id, :]) - true_positives  # row = predicted class
61 |         recall = true_positives / (true_positives + false_negatives + 1e-9)
62 |         precision = true_positives / (true_positives + false_positives + 1e-9)
63 | f1 = 2 * (precision * recall) / (precision + recall + 1e-9)
64 | metrics.append((precision, recall, f1))
65 | return metrics
66 | #-------------------------------------------------------------------------------------------
67 | cm=calculate_cm('/root/runs/detect/t1/','/root/hf_val/labels/',nc=8,
68 | iou_thres=0.5)
69 | print(cm)
70 | print(np.array(calculate_metrics(cm)))
71 | print(np.array(calculate_metrics(cm))[:,2].sum()/8)
--------------------------------------------------------------------------------
/our_wbf.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | def prefilter_boxes(boxes, scores, labels, weights, thr):
4 | new_boxes = dict()
5 | for t in range(len(boxes)):
6 | for j in range(len(boxes[t])):
7 | score = scores[t][j]
8 | if score < thr:
9 | continue
10 | label = int(labels[t][j])
11 | box_part = boxes[t][j]
12 | x1 = float(box_part[0])
13 | y1 = float(box_part[1])
14 | x2 = float(box_part[2])
15 | y2 = float(box_part[3])
16 |                 b = [int(label), float(score) * weights[t][label], weights[t][label], t, x1, y1, x2, y2]  # weights is a matrix indexed [model][class]
17 | if label not in new_boxes:
18 | new_boxes[label] = []
19 | new_boxes[label].append(b)
20 | for k in new_boxes:
21 | current_boxes = np.array(new_boxes[k])
22 | new_boxes[k] = current_boxes[current_boxes[:, 1].argsort()[::-1]]
23 | return new_boxes
24 |
25 |
26 | def get_weighted_box(boxes):
27 |     box,conf,w,conf_list = np.zeros(8, dtype=np.float32),0,0,[]  # box layout: [label, conf, weight_sum, model_idx, x1, y1, x2, y2]
28 | for b in boxes:
29 | box[4:] += (b[1] * b[4:])
30 | conf += b[1]
31 | conf_list.append(b[1])
32 | w += b[2]
33 | box[0],box[1],box[2],box[3],box[4:] = boxes[0][0], conf / len(boxes),w,-1,box[4:]/conf
34 | return box
35 |
36 |
37 | def find_matching_box_fast(boxes_list, new_box, match_iou):
38 | def bb_iou_array(boxes, new_box):
39 | xA = np.maximum(boxes[:, 0], new_box[0])
40 | yA = np.maximum(boxes[:, 1], new_box[1])
41 | xB = np.minimum(boxes[:, 2], new_box[2])
42 | yB = np.minimum(boxes[:, 3], new_box[3])
43 | interArea = np.maximum(xB - xA, 0) * np.maximum(yB - yA, 0)
44 | boxAArea = (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1])
45 | boxBArea = (new_box[2] - new_box[0]) * (new_box[3] - new_box[1])
46 | iou = interArea / (boxAArea + boxBArea - interArea)
47 | return iou
48 | if boxes_list.shape[0] == 0:
49 | return -1, match_iou
50 | boxes = boxes_list
51 | ious = bb_iou_array(boxes[:, 4:], new_box[4:])
52 | ious[boxes[:, 0] != new_box[0]] = -1
53 | best_idx = np.argmax(ious)
54 | best_iou = ious[best_idx]
55 | if best_iou <= match_iou:
56 | best_iou = match_iou
57 | best_idx = -1
58 | return best_idx, best_iou
59 |
60 |
61 | def weighted_boxes_fusion1(boxes_list,scores_list,labels_list,
62 | weights=None,iou_thr=0.55,skip_box_thr=0.0):
63 | filtered_boxes = prefilter_boxes(boxes_list, scores_list, labels_list, weights, skip_box_thr)
64 | if len(filtered_boxes) == 0:
65 | return np.zeros((0, 4)), np.zeros((0,)), np.zeros((0,))
66 | overall_boxes = []
67 | for label in filtered_boxes:
68 | boxes = filtered_boxes[label]
69 | new_boxes = []
70 | weighted_boxes = np.empty((0, 8))
71 |
72 | # Clusterize boxes
73 | for j in range(0, len(boxes)):
74 | index, best_iou = find_matching_box_fast(weighted_boxes, boxes[j], iou_thr)
75 | if index != -1:
76 | new_boxes[index].append(boxes[j])
77 | weighted_boxes[index] = get_weighted_box(new_boxes[index])
78 | else:
79 | new_boxes.append([boxes[j].copy()])
80 | weighted_boxes = np.vstack((weighted_boxes, boxes[j].copy()))
81 |
82 | # Rescale confidence based on number of models and boxes
83 | for i in range(len(new_boxes)):
84 | clustered_boxes = new_boxes[i]
85 | weighted_boxes[i, 1] = weighted_boxes[i, 1] * min(len(weights), len(clustered_boxes)) / sum(sublist[label] for sublist in weights)
86 | overall_boxes.append(weighted_boxes)
87 | overall_boxes = np.concatenate(overall_boxes, axis=0)
88 | overall_boxes = overall_boxes[overall_boxes[:, 1].argsort()[::-1]]
89 | boxes = overall_boxes[:, 4:]
90 | scores = overall_boxes[:, 1]
91 | labels = overall_boxes[:, 0]
92 | return boxes, scores, labels
93 |
--------------------------------------------------------------------------------
/utils/search.py:
--------------------------------------------------------------------------------
1 | from ultralytics import RTDETR
2 | import warnings
3 | warnings.filterwarnings("ignore")
4 | import numpy as np
5 | import os
6 | def calculate_iou(box1, box2):
7 |     # areas of the two boxes
8 |     area_box1 = (box1[2] - box1[0]) * (box1[3] - box1[1])
9 |     area_box2 = (box2[2] - box2[0]) * (box2[3] - box2[1])
10 |     # intersection area
11 |     x_intersection = max(0, min(box1[2], box2[2]) - max(box1[0], box2[0]))
12 |     y_intersection = max(0, min(box1[3], box2[3]) - max(box1[1], box2[1]))
13 |     intersection = x_intersection * y_intersection
14 |     # union area
15 | union = area_box1 + area_box2 - intersection
16 | iou = intersection / union
17 | return iou
18 | def parse_boxes(txt_file):
19 | if os.path.exists(txt_file):
20 | with open(txt_file, 'r') as file:
21 | lines = file.readlines()
22 | else:
23 | lines=[]
24 | boxes = []
25 | for line in lines:
26 | data = line.strip().split(' ')
27 | label = int(data[0])
28 | x_center, y_center, width, height = map(float, data[1:])
29 | x1 = x_center - width / 2
30 | y1 = y_center - height / 2
31 | x2 = x_center + width / 2
32 | y2 = y_center + height / 2
33 | boxes.append((label, x1, y1, x2, y2))
34 | return boxes
35 | def calculate_cm(predict_folder, label_folder, nc, iou_thres=0.5):
36 |     # initialize the confusion matrix (last row/column = background)
37 |     cm = np.zeros((nc + 1, nc + 1), dtype=int)
38 |     # iterate over the ground-truth label folder
39 | for filename in os.listdir(label_folder):
40 | predict_file = os.path.join(predict_folder, filename)
41 | label_file = os.path.join(label_folder, filename)
42 | predictions = parse_boxes(predict_file)
43 | labels = parse_boxes(label_file)
44 |         # track which predictions and labels have already been matched
45 | matched_preds = set()
46 | matched_labels = set()
47 |         # compute the IoU of every prediction/label pair and update the confusion matrix
48 | for i, pred in enumerate(predictions):
49 | for j, label in enumerate(labels):
50 | if calculate_iou(pred[1:], label[1:]) >= iou_thres:
51 | if i not in matched_preds and j not in matched_labels:
52 | cm[pred[0]][label[0]] += 1
53 | matched_preds.add(i)
54 | matched_labels.add(j)
55 | break
56 |         # unmatched predictions and labels are counted against the background class
57 | for i, pred in enumerate(predictions):
58 | if i not in matched_preds:
59 |                 cm[pred[0]][-1] += 1  # last column = background
60 | for j, label in enumerate(labels):
61 | if j not in matched_labels:
62 |                 cm[-1][label[0]] += 1  # last row = background
63 | return cm
64 | def calculate_metrics(confusion_matrix):
65 | num_classes = confusion_matrix.shape[0] - 1
66 | metrics = []
67 | for class_id in range(num_classes):
68 | true_positives = confusion_matrix[class_id, class_id]
69 |         false_negatives = np.sum(confusion_matrix[:, class_id]) - true_positives  # column = ground-truth class
70 |         false_positives = np.sum(confusion_matrix[class_id, :]) - true_positives  # row = predicted class
71 |         recall = true_positives / (true_positives + false_negatives + 1e-9)
72 |         precision = true_positives / (true_positives + false_positives + 1e-9)
73 | f1 = 2 * (precision * recall) / (precision + recall + 1e-7)
74 | metrics.append((precision, recall, f1))
75 | return metrics
76 | best_cm=None
77 | best_f1=0
78 | best_i=None
79 | lst_f1=[]
80 | for i in range(100):
81 | model=RTDETR('/root/runs/nice1/weights/best.pt')
82 | model(source='/root/dataset/val/images/',save_txt=True,conf=0.2+0.005*i,name=f'best_conf{i}')
83 | cm=calculate_cm(f'/root/runs/detect/best_conf{i}/labels/', '/root/dataset/val/labels/',
84 | nc=8, iou_thres=0.5)
85 | metrics=np.array(calculate_metrics(cm))
86 | f1=metrics[:,2].sum()/8
87 | if f1>best_f1:
88 | best_f1=f1
89 | best_cm=cm
90 | best_i=i
91 | print(best_f1)
92 | print(best_i)
93 | print(best_cm)
--------------------------------------------------------------------------------
/utils/dataset.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | import random
4 | import cv2
5 | import numpy as np
6 | # Dataset processing utilities
7 |
8 |
9 | def f10():
10 |     """Keep only the images that have a corresponding label file"""
11 | source_folder = 'dataseta/jpgs/'
12 | images_folder = 'dataseta/afeter_images/'
13 | labels_folder = 'dataseta/labels/'
14 | os.makedirs(images_folder, exist_ok=True)
15 | for name in os.listdir(labels_folder):
16 | shutil.copy(source_folder+name.split('.')[0]+'.jpg', images_folder)
17 | print('done')
18 | f10()
19 |
20 |
21 | def f1():
22 |     """Copy the txt and jpg files of the raw dataset into dataset1"""
23 | source_folder = 'jsai_data'
24 | images_folder = 'dataset1/images'
25 | labels_folder = 'dataset1/labels'
26 | os.makedirs(images_folder, exist_ok=True)
27 | os.makedirs(labels_folder, exist_ok=True)
28 | for name in os.listdir(source_folder):
29 | if name.endswith('.txt'):
30 | source_file = os.path.join(source_folder, name)
31 | destination_file = os.path.join(labels_folder, name)
32 | shutil.copy(source_file, destination_file)
33 | if name.endswith('.jpg'):
34 | source_file = os.path.join(source_folder, name)
35 | destination_file = os.path.join(images_folder, name)
36 | shutil.copy(source_file, destination_file)
37 | print('done')
38 |
39 | def f2():
40 |     """Split the data into YOLO-format train/val sets under dataset2"""
41 | source_folder = 'jsai_data'
42 | a = 'dataset2/train/images'
43 | b = 'dataset2/train/labels'
44 | c = 'dataset2/val/images'
45 | d = 'dataset2/val/labels'
46 | for i in [a,b,c,d]:
47 | os.makedirs(i, exist_ok=True)
48 | files = os.listdir(source_folder)
49 | names=list(set(map(lambda x:x.split('.')[0],files)))
50 | train_names = random.sample(names, k=int(len(names) * 0.8))
51 | val_names = [x for x in names if x not in train_names]
52 | for name in train_names:
53 | shutil.copy(source_folder+'/'+name+'.jpg', a+'/'+name+'.jpg')
54 | shutil.copy(source_folder+'/'+name+'.txt', b+'/'+name+'.txt')
55 | for name in val_names:
56 | shutil.copy(source_folder + '/' + name + '.jpg', c + '/' + name + '.jpg')
57 | shutil.copy(source_folder + '/' + name + '.txt', d + '/' + name + '.txt')
58 | print('done')
59 |
60 | def f3():
61 |     """Sanity check: print the number of files in each folder"""
62 | a = 'dataset2/train/images'
63 | b = 'dataset2/train/labels'
64 | c = 'dataset2/val/images'
65 | d = 'dataset2/val/labels'
66 | e = 'dataset1/images'
67 | f = 'dataset1/labels'
68 | x = 'jsai_data'
69 | for i in [a,b,c,d,e,f,x]:
70 | files = os.listdir(i)
71 | print(len(files))
72 |
73 |
74 |
75 | def f4():
76 |     """Merge every class into class 0 and split into dataset3"""
77 | source_folder = 'jsai_data'
78 | a = 'dataset3/train/images'
79 | b = 'dataset3/train/labels'
80 | c = 'dataset3/val/images'
81 | d = 'dataset3/val/labels'
82 | for i in [a,b,c,d]:
83 | os.makedirs(i, exist_ok=True)
84 | files = os.listdir(source_folder)
85 | names=list(set(map(lambda x:x.split('.')[0],files)))
86 | train_names = random.sample(names, k=int(len(names) * 0.8))
87 | val_names = [x for x in names if x not in train_names]
88 | for name in names:
89 | with open(source_folder+'/'+name+".txt", 'r') as file:
90 | lines = file.readlines()
91 | modified_lines = []
92 | for line in lines:
93 |             if line.strip(): # skip empty lines
94 | modified_line = '0' + line[1:]
95 | modified_lines.append(modified_line)
96 | if name in train_names:
97 | with open(b + '/' + name+".txt", 'w') as file:
98 | file.writelines(modified_lines)
99 | elif name in val_names:
100 | with open(d + '/' + name+".txt", 'w') as file:
101 | file.writelines(modified_lines)
102 | for name in train_names:
103 | shutil.copy(source_folder+'/'+name+'.jpg', a+'/'+name+'.jpg')
104 | for name in val_names:
105 | shutil.copy(source_folder + '/' + name + '.jpg', c + '/' + name + '.jpg')
106 | print('done')
107 |
108 |
109 |
110 | def f5():
111 |     """Preprocess the images, then split into YOLO-format train/val sets under dataset4"""
112 |     def process(image):
113 |         """Image enhancement. image: path to the image file; returns the processed image."""
114 | image = cv2.imread(image)
115 | image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
116 |         image = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8)).apply(image) # CLAHE: boost local contrast
117 |         image = cv2.equalizeHist(image) # global histogram equalization
118 |         kernel = np.array([[-2,-1,-2],
119 |                            [-1, 15,-2],
120 |                            [-2,-1,-2]]) # hand-tuned sharpening kernel, adjust as needed
121 |         image = cv2.filter2D(image, -1, kernel) # sharpen
122 |         image = cv2.medianBlur(image, 3) # median filter
123 |         # Gaussian blur was tried as well but performed worse than the median filter
124 |         # image = cv2.GaussianBlur(image, (3, 3), 1)
125 | return image
126 | source_folder = 'jsai_data'
127 | a = 'dataset4/train/images'
128 | b = 'dataset4/train/labels'
129 | c = 'dataset4/val/images'
130 | d = 'dataset4/val/labels'
131 | for i in [a,b,c,d]:
132 | os.makedirs(i, exist_ok=True)
133 | files = os.listdir(source_folder)
134 | names=list(set(map(lambda x:x.split('.')[0],files)))
135 | train_names = random.sample(names, k=int(len(names) * 0.8))
136 | val_names = [x for x in names if x not in train_names]
137 | for name in train_names:
138 | image=process(source_folder + '/' + name + '.jpg')
139 | cv2.imwrite(a + '/' + name + '.jpg',image)
140 | shutil.copy(source_folder+'/'+name+'.txt', b+'/'+name+'.txt')
141 | print(name)
142 | for name in val_names:
143 | image = process(source_folder + '/' + name + '.jpg')
144 | cv2.imwrite(c + '/' + name + '.jpg',image)
145 | shutil.copy(source_folder + '/' + name + '.txt', d + '/' + name + '.txt')
146 | print(name)
147 | print('done')
148 |
149 |
150 | def f6():
151 |     """Build the datasets for split training: detection and classification"""
152 | source_folder = 'jsai_data'
153 | a1 = 'dataset5/detect/train/images'
154 | b1 = 'dataset5/detect/train/labels'
155 | c1 = 'dataset5/detect/val/images'
156 | d1 = 'dataset5/detect/val/labels'
157 | a2 = 'dataset5/classify/train/images'
158 | b2 = 'dataset5/classify/train/labels'
159 | c2 = 'dataset5/classify/val/images'
160 | d2 = 'dataset5/classify/val/labels'
161 | for i in [a1, b1, c1, d1, a2, b2, c2, d2]:
162 | os.makedirs(i, exist_ok=True)
163 | files = os.listdir(source_folder)
164 | # detect todo
165 | names = list(set(map(lambda x: x.split('.')[0], files)))
166 | train_names = random.sample(names, k=int(len(names) * 0.8))
167 | val_names = [x for x in names if x not in train_names]
168 | for name in names:
169 | with open(source_folder + '/' + name + ".txt", 'r') as file:
170 | lines = file.readlines()
171 | modified_lines = []
172 | for line in lines:
173 |             if line.strip(): # skip empty lines
174 | if line[0] in {'0','1','3','6'}:
175 | modified_line = '0' + line[1:]
176 | modified_lines.append(modified_line)
177 | else:
178 | modified_line = line
179 | modified_lines.append(modified_line)
180 | if name in train_names:
181 | with open(b1 + '/' + name + ".txt", 'w') as file:
182 | file.writelines(modified_lines)
183 | elif name in val_names:
184 | with open(d1 + '/' + name + ".txt", 'w') as file:
185 | file.writelines(modified_lines)
186 | for name in train_names:
187 | shutil.copy(source_folder + '/' + name + '.jpg', a1 + '/' + name + '.jpg')
188 | for name in val_names:
189 | shutil.copy(source_folder + '/' + name + '.jpg', c1 + '/' + name + '.jpg')
190 | print('done')
191 |
192 |
193 | def f7(aa):  # keep only the classes listed in aa, remapped to 0..len(aa)-1, and split into dataset6
194 | source_folder = 'jsai_data'
195 | a = 'dataset6/train/images'
196 | b = 'dataset6/train/labels'
197 | c = 'dataset6/val/images'
198 | d = 'dataset6/val/labels'
199 | for i in [a, b, c, d]:
200 | os.makedirs(i, exist_ok=True)
201 | files = os.listdir(source_folder)
202 | names = list(set(map(lambda x: x.split('.')[0], files)))
203 | train_names = random.sample(names, k=int(len(names) * 0.8))
204 | val_names = [x for x in names if x not in train_names]
205 | for name in names:
206 | with open(source_folder + '/' + name + ".txt", 'r') as file:
207 | lines = file.readlines()
208 | modified_lines = []
209 | for line in lines:
210 |             if line.strip(): # skip empty lines
211 | bb = int(line[0])
212 | if bb in aa:
213 | modified_line = str(aa.index(bb)) + line[1:]
214 | modified_lines.append(modified_line)
215 | if name in train_names:
216 | with open(b + '/' + name + ".txt", 'w') as file:
217 | file.writelines(modified_lines)
218 | elif name in val_names:
219 | with open(d + '/' + name + ".txt", 'w') as file:
220 | file.writelines(modified_lines)
221 | for name in train_names:
222 | shutil.copy(source_folder + '/' + name + '.jpg', a + '/' + name + '.jpg')
223 | for name in val_names:
224 | shutil.copy(source_folder + '/' + name + '.jpg', c + '/' + name + '.jpg')
225 | print('done')
226 |
227 |
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | from ultralytics import YOLO
2 | from ultralytics import RTDETR
3 | from our_wbf import weighted_boxes_fusion1
4 | import numpy as np
5 | import os
6 | import cv2
7 |
8 | def ComputeHist(img):  # 256-bin histogram of a single image channel
9 | h, w = img.shape
10 | hist, bin_edge = np.histogram(img.reshape(1, w * h), bins=list(range(257)))
11 | return hist
12 | def ComputeMinLevel(hist, rate, pnum):  # lowest gray level whose cumulative count reaches rate% of the pixels
13 | sum = 0
14 | for i in range(256):
15 | sum += hist[i]
16 | if (sum >= (pnum * rate * 0.01)):
17 | return i
18 | def ComputeMaxLevel(hist, rate, pnum):  # highest gray level whose cumulative count (from the top) reaches rate% of the pixels
19 | sum = 0
20 | for i in range(256):
21 | sum += hist[255 - i]
22 | if (sum >= (pnum * rate * 0.01)):
23 | return 255 - i
24 | def LinearMap(minlevel, maxlevel):  # 256-entry lookup table that linearly stretches [minlevel, maxlevel] to [0, 255]
25 | if (minlevel >= maxlevel):
26 | return []
27 | else:
28 | newmap = np.zeros(256)
29 | for i in range(256):
30 | if (i < minlevel):
31 | newmap[i] = 0
32 | elif (i > maxlevel):
33 | newmap[i] = 255
34 | else:
35 | newmap[i] = (i - minlevel) / (maxlevel - minlevel) * 255
36 | return newmap
37 | def CreateNewImg(img):  # apply the contrast stretch per channel (the histogram-equalization "defogging" step)
38 | h, w, d = img.shape
39 | newimg = np.zeros([h, w, d])
40 | for i in range(d):
41 | imghist = ComputeHist(img[:, :, i])
42 | minlevel = ComputeMinLevel(imghist, 8.3, h * w)
43 | maxlevel = ComputeMaxLevel(imghist, 2.2, h * w)
44 | newmap = LinearMap(minlevel, maxlevel)
45 | if (newmap.size == 0):
46 | continue
47 | for j in range(h):
48 | newimg[j, :, i] = newmap[img[j, :, i]]
49 | return newimg
50 | def ProcessImages(input_folder, output_folder):  # defog every image in input_folder and write the results to output_folder
51 | image_files = [f for f in os.listdir(input_folder) if f.endswith(('.jpg', '.jpeg', '.png'))]
52 | print('defogging')
53 | for file_name in image_files:
54 | img = cv2.imread(os.path.join(input_folder, file_name))
55 | if img is not None:
56 | new_img = CreateNewImg(img)
57 | output_path = os.path.join(output_folder, file_name)
58 | cv2.imwrite(output_path, new_img)
59 | print('defogged')
60 |
61 | def run():
62 |     # 1. Parameter setup ------------------------------------------------------------------
63 |     source='' # folder of images to run inference on
64 |     source_cw='' # folder for the enhanced (defogged) images; only the path is needed, it is created automatically
65 |     os.makedirs(source_cw, exist_ok=True)
66 |     src='runs/detect/' # root directory where the single-model predictions are saved
67 |     save='output/' # directory for the final txt files, created automatically
68 |     ProcessImages(source,source_cw)
69 |     iou_thr=0.3 # IoU threshold for the ensemble (WBF)
70 |     single_conf=0.1 # confidence threshold for single-model inference
71 |     pe_conf=0.06 # confidence threshold applied when writing the final ensemble txt files
72 |     skip_box_thr=0.35 # per-model confidence threshold used inside the ensemble
73 |     # 2. RT-DETR inference ----------------------------------------------------------------
74 | lstrt=['model/B1_/weights/best.pt',
75 | 'model/B2_/weights/best.pt',
76 | 'model/B3_/weights/best.pt',
77 | 'model/B3x_/weights/best.pt',
78 | 'model/B3x_2/weights/best.pt', #
79 | 'model/B4_/weights/best.pt',
80 | 'model/B4x_/weights/best.pt',
81 | 'model/B5_/weights/best.pt',
82 | 'model/B58_/weights/best.pt',
83 | 'model/B68_/weights/best.pt', #
84 | 'model/B11_7_/weights/best.pt',
85 | 'model/hhhhh1_/weights/best.pt',
86 | 'model/hhhhh3_/weights/best.pt',
87 | 'model/hhhhh4_/weights/best.pt',
88 | 'model/hhhhh5_/weights/best.pt', #
89 | 'model/hf12_/weights/best.pt',
90 | 'model/hf14_/weights/best.pt',
91 | 'model/hf32_/weights/best.pt',
92 | 'model/hf34_/weights/best.pt',
93 | 'model/hf42_/weights/best.pt', #
94 | 'model/nice1/weights/best.pt',
95 | 'model/nice2/weights/best.pt',
96 | 'model/nice3/weights/best.pt',
97 | 'model/nice8/weights/best.pt',
98 | 'model/train64/weights/best.pt',#
99 | 'model/train69/weights/best.pt',
100 | 'model/train88/weights/best.pt'] # 27
101 | lstrt_cw=['model/B2_/weights/best.pt',
102 | 'model/B4_/weights/best.pt',
103 | 'model/B4x_/weights/best.pt',
104 | 'model/B58_/weights/best.pt',
105 | 'model/B68_/weights/best.pt',
106 | 'model/hf12_/weights/best.pt',
107 | 'model/hf14_/weights/best.pt',
108 | 'model/hf32_/weights/best.pt',
109 | 'model/hf34_/weights/best.pt',
110 | 'model/hf42_/weights/best.pt',]
111 | lstrt_zq=['model/train64/weights/best.pt',
112 | 'model/train69/weights/best.pt',]
113 | print('inferring')
114 | for i in range(len(lstrt)):
115 | model=RTDETR(lstrt[i])
116 | if lstrt[i] in lstrt_cw:
117 | _=model(source=source_cw,save_conf=True,save_txt=True,
118 | name=f'pe{i+1}',conf=single_conf)
119 | elif lstrt[i] in lstrt_zq:
120 | _=model(source=source_cw,save_conf=True,save_txt=True,
121 | name=f'pe{i+1}',conf=single_conf,augment=True)
122 | else:
123 | _=model(source=source,save_conf=True,save_txt=True,
124 | name=f'pe{i+1}',conf=single_conf)
125 |     # 3. YOLO inference -----------------------------------------------------------------
126 | lstyolo=['model/yolo1/weights/best.pt',
127 | 'model/yolo3/weights/best.pt',
128 | 'model/yolo4/weights/best.pt', # 30
129 | 'model/yolo5/weights/best.pt',
130 | 'model/A3_/weights/best.pt',
131 | 'model/A3x_/weights/best.pt',
132 | 'model/A4x_/weights/best.pt',
133 | 'model/nice7/weights/best.pt', #
134 | 'model/nice9/weights/best.pt',
135 | 'model/train78/weights/best.pt',
136 | 'model/train81/weights/best.pt',
137 | 'model/train86/weights/best.pt',
138 | 'model/train89/weights/best.pt',#
139 | 'model/train96/weights/best.pt',
140 | 'model/train699/weights/best.pt',
141 | 'model/train888/weights/best.pt'] # 43
142 | lstyolo_cw=['model/A4x_/weights/best.pt']
143 | lstyolo_zq=['model/nice9/weights/best.pt']
144 | for i in range(len(lstrt),len(lstrt)+len(lstyolo)):
145 | model=YOLO(lstyolo[i-len(lstrt)])
146 | if lstyolo[i-len(lstrt)] in lstyolo_cw:
147 | _=model(source=source_cw,save_conf=True,save_txt=True,
148 | name=f'pe{i+1}',conf=single_conf)
149 | elif lstyolo[i-len(lstrt)] in lstyolo_zq:
150 | _=model(source=source_cw,save_conf=True,save_txt=True,
151 | name=f'pe{i+1}',conf=single_conf,augment=True)
152 | else:
153 | _=model(source=source,save_conf=True,save_txt=True,
154 | name=f'pe{i+1}',conf=single_conf)
155 | print('inferred')
156 |     # 4. Weights --------------------------------------------------------------------------
157 |     # Weight matrix for the sub-models. To weight models only (no per-class weights), use a vector of weights and drop the trailing 1 from the WBF function call below;
158 |     # to give each class of each model its own weight, use a matrix and keep weighted_boxes_fusion1.
159 |     #weights=[1]*43
160 | weights=[[1,1,1,1,1,1,1,1],#B1_
161 | [1,1,1,1,1,1,1,1],#B2_
162 | [1,1,1,1,1,1,1,2],#B3_
163 | [1,1,1,1,1,1,1,2],#B3x_
164 | [1,1,1,1,1,1,1,2],#B3x_2
165 | [1,1,1,1,1,1,1,1],#B4_
166 | [1,1,1,1,1,1,1,2],#B4x_
167 | [1,1,1,1,1,1,1,2],#B5_
168 | [1,1,1,1,1,1,1,1],#B58_
169 | [1,1,1,1,1,1,1,2],#B68_
170 | [1,1,1,1,1,1,1,2],#B11_7_
171 | [1,1,1,1,1,1,1,2],#hhhhh1_
172 | [1,1,1,1,1,1,1,2],#hhhhh3_
173 | [1,1,1,1,1,1,1,1],#hhhhh4_
174 | [1,1,1,1,1,1,1,1],#hhhhh5_
175 | [1,1,1,1,1,1,1,1],#hf12_
176 | [1,1,1,1,1,1,1,1],#hf14_
177 | [1,1,1,1,1,1,1,1],#hf32_
178 | [1,1,1,1,1,1,1,2],#hf34_
179 | [1,1,1,1,1,1,1,2],#hf42_
180 | [1,1,1,1,1,1,1,1],#nice1
181 | [1,1,1,1,1,1,1,1],#nice2
182 | [1,1,1,1,1,1,1,1],#nice3
183 | [1,1,1,1,1,1,1,1],#nice8
184 | [1,1,1,1,1,1,1,1],#train64
185 | [1,1,1,1,1,1,1,1],#train69
186 | [1,1,1,1,1,1,1,1],#train88####
187 | [1,1,1,1,1,1,1,1],#yolo1
188 | [1,1,1,1,1,1,1,2],#yolo3
189 | [1,1,1,1,1,1,1,1],#yolo4
190 | [1,1,1,1,1,1,1,2],#yolo5
191 | [1,1,1,1,1,1,1,1],#A3_
192 | [1,1,1,1,1,1,1,1],#A3x_
193 | [1,1,1,1,1,1,1,2],#A4x_
194 | [1,1,1,1,1,1,1,1],#nice7
195 | [1,1,1,1,1,1,1,1],#nice9
196 | [1,1,1,1,1,1,1,1],#train78
197 | [1,1,1,1,1,1,1,1],#train81
198 | [1,1,1,1,1,1,1,1],#train86
199 | [1,1,1,1,1,1,1,1],#train89
200 | [1,1,1,1,1,1,1,2],#train96
201 | [1,1,1,1,1,1,1,1],#train699
202 | [1,1,1,1,1,1,1,1]]#train888
203 | n=len(weights)
204 |     print('ensemble start')
205 |     # 5. Ensemble prediction ---------------------------------------------------------------
206 | names=list(map(lambda x:x.split('.')[0],os.listdir(source)))
207 | pe_lst=[f'pe{i}' for i in range(1,n+1)]
208 | for name in names:
209 | labels_list,boxes_list,scores_list=[],[],[]
210 | for i in range(n):
211 | model_labels,model_boxes,model_scores=[],[],[]
212 | if os.path.exists(src+pe_lst[i]+'/labels/'+name+".txt"):
213 | with open(src+pe_lst[i]+'/labels/'+name+".txt", 'r') as file:
214 | lines = file.readlines()
215 | for line in lines:
216 | line=line.split()
217 | model_labels.append(int(line[0]))
218 | x_center, y_center, width, height = map(float,line[1:5])
219 | x1 = x_center - width / 2
220 | y1 = y_center - height / 2
221 | x2 = x_center + width / 2
222 | y2 = y_center + height / 2
223 | model_boxes.append([x1, y1, x2, y2])
224 | model_scores.append(float(line[5]))
225 | labels_list.append(model_labels)
226 | boxes_list.append(model_boxes)
227 | scores_list.append(model_scores)
228 | boxes, scores, labels = weighted_boxes_fusion1(boxes_list,scores_list,labels_list,
229 | weights=weights,iou_thr=iou_thr,skip_box_thr=skip_box_thr)
230 | os.makedirs(save, exist_ok=True)
231 | with open(save + name + ".txt", 'w') as file:
232 | for j in range(len(scores)):
233 | if scores[j]>pe_conf:
234 | box=boxes[j]
235 | box_xywh=[]
236 | box_xywh.append((box[0]+box[2])/2)
237 | box_xywh.append((box[1]+box[3])/2)
238 | box_xywh.append(box[2]-box[0])
239 | box_xywh.append(box[3]-box[1])
240 | file.write(" ".join([str(num) for num in [int(labels[j])]+box_xywh]) + "\n")
241 | print('ensemble done')
242 |
243 | if __name__=='__main__':
244 | run()
245 |
--------------------------------------------------------------------------------
/utils/describe.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {
7 | "collapsed": true
8 | },
9 | "outputs": [],
10 | "source": [
11 | "import os\n",
12 | "import pandas as pd\n",
13 | "import matplotlib.pyplot as plt"
14 | ]
15 | },
16 | {
17 | "cell_type": "code",
18 | "execution_count": 3,
19 | "outputs": [
20 | {
21 | "name": "stderr",
22 | "output_type": "stream",
23 | "text": [
24 | "C:\\Users\\86159\\AppData\\Local\\Temp\\ipykernel_27864\\3653870432.py:16: FutureWarning: The behavior of DataFrame concatenation with empty or all-NA entries is deprecated. In a future version, this will no longer exclude empty or all-NA columns when determining the result dtypes. To retain the old behavior, exclude the relevant entries before the concat operation.\n",
25 | " df = pd.concat([df, pd.DataFrame({'filename':[filename], 'cls':[cls],'x_center': [x_center], 'y_center': [y_center], 'w': [w], 'h': [h]})], ignore_index=True)\n"
26 | ]
27 | }
28 | ],
29 | "source": [
30 | "df = pd.DataFrame(columns=['filename', 'x_center', 'y_center', 'w', 'h'])\n",
31 | "folder_path = '../dataset1/labels'\n",
32 | "for filename in os.listdir(folder_path):\n",
33 | " if filename.endswith('.txt'):\n",
34 | " file_path = os.path.join(folder_path, filename)\n",
35 | " with open(file_path, 'r') as file:\n",
36 | " lines = file.readlines()\n",
37 | " for line in lines:\n",
38 | " values = line.split()\n",
39 | " if len(values) == 5:\n",
40 | " cls=int(values[0])\n",
41 | " x_center = float(values[1])\n",
42 | " y_center = float(values[2])\n",
43 | " w = float(values[3])\n",
44 | " h = float(values[4])\n",
45 | " df = pd.concat([df, pd.DataFrame({'filename':[filename], 'cls':[cls],'x_center': [x_center], 'y_center': [y_center], 'w': [w], 'h': [h]})], ignore_index=True)"
46 | ],
47 | "metadata": {
48 | "collapsed": false
49 | }
50 | },
51 | {
52 | "cell_type": "code",
53 | "execution_count": 4,
54 | "outputs": [
55 | {
56 | "data": {
57 | "text/plain": " x_center y_center w h cls\ncount 4497.000000 4497.000000 4497.000000 4497.000000 4497.000000\nmean 0.430436 0.826607 0.221473 0.144262 2.785190\nstd 0.222615 0.115717 0.232751 0.111643 2.383798\nmin 0.012500 0.222917 0.010937 0.014815 0.000000\n25% 0.258984 0.767361 0.064844 0.062500 1.000000\n50% 0.410547 0.856250 0.122656 0.109722 1.000000\n75% 0.582031 0.912500 0.280469 0.198611 6.000000\nmax 0.982422 0.988889 0.999219 0.854167 7.000000",
58 |       "text/html": "<HTML rendering of df.describe() omitted>"
59 | },
60 | "execution_count": 4,
61 | "metadata": {},
62 | "output_type": "execute_result"
63 | }
64 | ],
65 | "source": [
66 | "df.describe()"
67 | ],
68 | "metadata": {
69 | "collapsed": false
70 | }
71 | },
72 | {
73 | "cell_type": "code",
74 | "execution_count": 5,
75 | "outputs": [
76 | {
77 | "data": {
78 | "text/plain": " filename x_center y_center w h cls\n0 101-out_ori.txt 0.635938 0.503241 0.719792 0.313889 5.0\n1 103-out_ori.txt 0.554948 0.698148 0.890104 0.575926 5.0\n2 103-out_ori.txt 0.413281 0.922222 0.352604 0.074074 0.0\n3 103-out_ori.txt 0.428646 0.796759 0.042708 0.173148 1.0\n4 103-out_ori.txt 0.844010 0.777778 0.205729 0.327778 1.0\n... ... ... ... ... ... ...\n4492 78-out_ori.txt 0.864323 0.625926 0.265104 0.048148 6.0\n4493 79-out_ori.txt 0.865104 0.815278 0.255208 0.065741 6.0\n4494 79-out_ori.txt 0.786979 0.681019 0.185417 0.045370 6.0\n4495 79-out_ori.txt 0.094271 0.543519 0.110417 0.088889 6.0\n4496 79-out_ori.txt 0.173698 0.471759 0.257812 0.037963 6.0\n\n[4497 rows x 6 columns]",
79 |       "text/html": "<HTML rendering of the dataframe omitted>"
80 | },
81 | "execution_count": 5,
82 | "metadata": {},
83 | "output_type": "execute_result"
84 | }
85 | ],
86 | "source": [
87 | "df"
88 | ],
89 | "metadata": {
90 | "collapsed": false
91 | }
92 | },
93 | {
94 | "cell_type": "code",
95 | "execution_count": 7,
96 | "outputs": [
97 | {
98 | "data": {
99 | "text/plain": " filename x_center y_center w h cls\n3 103-out_ori.txt 0.428646 0.796759 0.042708 0.173148 1.0\n4 103-out_ori.txt 0.844010 0.777778 0.205729 0.327778 1.0\n8 105-out_ori.txt 0.877604 0.519444 0.159375 0.122222 1.0\n12 108-out_ori.txt 0.879948 0.568056 0.213021 0.206481 1.0\n15 110-out_ori.txt 0.866146 0.591667 0.175000 0.233333 1.0\n... ... ... ... ... ... ...\n4433 24632-out_ori.txt 0.078125 0.877778 0.150000 0.119444 1.0\n4434 24632-out_ori.txt 0.061719 0.747222 0.120313 0.083333 1.0\n4438 24635-out_ori.txt 0.108594 0.884722 0.198437 0.222222 1.0\n4467 63-out_ori.txt 0.675260 0.682870 0.110937 0.275000 1.0\n4480 65-out_ori.txt 0.103125 0.736111 0.072917 0.075926 1.0\n\n[2483 rows x 6 columns]",
100 |       "text/html": "<HTML rendering of the dataframe omitted>"
101 | },
102 | "execution_count": 7,
103 | "metadata": {},
104 | "output_type": "execute_result"
105 | }
106 | ],
107 | "source": [
108 | "df[df['cls']==1]"
109 | ],
110 | "metadata": {
111 | "collapsed": false
112 | }
113 | },
114 | {
115 | "cell_type": "code",
116 | "execution_count": 10,
117 | "outputs": [
118 | {
119 | "name": "stdout",
120 | "output_type": "stream",
121 | "text": [
122 | "[213, 2483, 66, 82, 92, 95, 1441, 25]\n"
123 | ]
124 | },
125 | {
126 | "data": {
127 |       "text/plain": "<BarContainer object of 8 artists>"
128 | },
129 | "execution_count": 10,
130 | "metadata": {},
131 | "output_type": "execute_result"
132 | },
133 | {
134 | "data": {
135 |       "text/plain": "<Figure size 640x480 with 1 Axes>",
136 |       "image/png": "<base64-encoded PNG omitted: bar chart of the per-class instance counts printed above>"
137 | },
138 | "metadata": {},
139 | "output_type": "display_data"
140 | }
141 | ],
142 | "source": [
143 | "count=[len(df[df['cls']==i]) for i in range(8)]\n",
144 | "print(count)\n",
145 | "plt.bar(range(8),count)"
146 | ],
147 | "metadata": {
148 | "collapsed": false
149 | }
150 | },
151 | {
152 | "cell_type": "code",
153 | "execution_count": 8,
154 | "outputs": [
155 | {
156 | "data": {
157 | "text/plain": " filename x_center y_center w h\n37 122-out_ori.txt 0.436458 0.444444 0.026042 0.055556\n38 122-out_ori.txt 0.461719 0.380093 0.027604 0.047222\n39 122-out_ori.txt 0.545833 0.375000 0.011458 0.027778\n40 122-out_ori.txt 0.521354 0.418519 0.310417 0.027778\n48 128-out_ori.txt 0.499219 0.486111 0.061979 0.133333\n... ... ... ... ... ...\n4476 64-out_ori.txt 0.279427 0.433333 0.259896 0.025926\n4479 65-out_ori.txt 0.232552 0.444444 0.259896 0.031481\n4482 65-out_ori.txt 0.655729 0.466204 0.078125 0.141667\n4487 66-out_ori.txt 0.196615 0.473611 0.290104 0.025000\n4496 79-out_ori.txt 0.173698 0.471759 0.257812 0.037963\n\n[79 rows x 5 columns]",
158 | "text/html": "\n\n
\n \n \n | \n filename | \n x_center | \n y_center | \n w | \n h | \n
\n \n \n \n | 37 | \n 122-out_ori.txt | \n 0.436458 | \n 0.444444 | \n 0.026042 | \n 0.055556 | \n
\n \n | 38 | \n 122-out_ori.txt | \n 0.461719 | \n 0.380093 | \n 0.027604 | \n 0.047222 | \n
\n \n | 39 | \n 122-out_ori.txt | \n 0.545833 | \n 0.375000 | \n 0.011458 | \n 0.027778 | \n
\n \n | 40 | \n 122-out_ori.txt | \n 0.521354 | \n 0.418519 | \n 0.310417 | \n 0.027778 | \n
\n \n | 48 | \n 128-out_ori.txt | \n 0.499219 | \n 0.486111 | \n 0.061979 | \n 0.133333 | \n
\n \n | ... | \n ... | \n ... | \n ... | \n ... | \n ... | \n
\n \n | 4476 | \n 64-out_ori.txt | \n 0.279427 | \n 0.433333 | \n 0.259896 | \n 0.025926 | \n
\n \n | 4479 | \n 65-out_ori.txt | \n 0.232552 | \n 0.444444 | \n 0.259896 | \n 0.031481 | \n
\n \n | 4482 | \n 65-out_ori.txt | \n 0.655729 | \n 0.466204 | \n 0.078125 | \n 0.141667 | \n
\n \n | 4487 | \n 66-out_ori.txt | \n 0.196615 | \n 0.473611 | \n 0.290104 | \n 0.025000 | \n
\n \n | 4496 | \n 79-out_ori.txt | \n 0.173698 | \n 0.471759 | \n 0.257812 | \n 0.037963 | \n
\n \n
\n
79 rows × 5 columns
\n
"
159 | },
160 | "execution_count": 8,
161 | "metadata": {},
162 | "output_type": "execute_result"
163 | }
164 | ],
165 | "source": [
166 | "df[df['y_center']<0.5]"
167 | ],
168 | "metadata": {
169 | "collapsed": false
170 | }
171 | },
172 | {
173 | "cell_type": "code",
174 | "execution_count": 33,
175 | "outputs": [
176 | {
177 | "data": {
178 | "text/plain": " 1 2 3 4 5 6 7 \\\n1 1.0 1.0 1.0 1.0 1.0 1.0 1.0 \n2 0.65708 1.0 1.0 1.0 1.0 1.0 1.0 \n3 0.480953 0.876106 1.0 0.914454 0.928568 1.0 1.0 \n4 0.374905 0.72143 0.985619 1.0 0.857301 0.835711 0.875668 \n5 0.306065 0.599847 0.865716 1.0 1.0 0.823009 0.779997 \n6 0.258227 0.510109 0.749809 0.961906 1.0 1.0 0.800148 \n7 0.22318 0.442674 0.655854 0.856925 1.0 1.0 1.0 \n8 0.196445 0.390565 0.58101 0.765164 0.937261 1.0 1.0 \n9 0.175397 0.349235 0.520753 0.688605 0.850182 0.999746 1.0 \n10 0.158406 0.315715 0.471468 0.624904 0.77468 0.918196 1.0 \n11 0.144405 0.28801 0.430521 0.571476 0.710118 0.845106 0.973844 \n12 0.132671 0.264742 0.396014 0.526192 0.654817 0.78113 0.903794 \n13 0.122697 0.244931 0.366566 0.487402 0.607145 0.725335 0.841216 \n14 0.114115 0.227866 0.341155 0.453843 0.565734 0.676533 0.78578 \n\n 8 9 10 11 12 13 14 \n1 1.0 1.0 1.0 1.0 1.0 1.0 1.0 \n2 1.0 1.0 1.0 1.0 1.0 1.0 1.0 \n3 1.0 1.0 1.0 1.0 1.0 1.0 1.0 \n4 0.939346 1.0 1.0 1.0 1.0 1.0 1.0 \n5 0.800611 0.845411 0.901279 0.96303 1.0 1.0 1.0 \n6 0.742854 0.750573 0.782788 0.826173 0.875482 0.928287 0.983321 \n7 0.783818 0.716324 0.714831 0.738057 0.772525 0.812947 0.856881 \n8 1.0 0.771571 0.696426 0.688025 0.704509 0.73229 0.766047 \n9 1.0 1.0 0.762045 0.68095 0.667176 0.678416 0.700995 \n10 1.0 1.0 1.0 0.754425 0.668569 0.650496 0.657542 \n11 1.0 1.0 1.0 1.0 0.74819 0.658439 0.636849 \n12 1.0 1.0 1.0 1.0 1.0 0.742994 0.649997 \n13 0.953452 1.0 1.0 1.0 1.0 1.0 0.738598 \n14 0.892719 0.996017 1.0 1.0 1.0 1.0 1.0 ",
179 | "text/html": "\n\n
\n \n \n | \n 1 | \n 2 | \n 3 | \n 4 | \n 5 | \n 6 | \n 7 | \n 8 | \n 9 | \n 10 | \n 11 | \n 12 | \n 13 | \n 14 | \n
\n \n \n \n | 1 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 2 | \n 0.65708 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 3 | \n 0.480953 | \n 0.876106 | \n 1.0 | \n 0.914454 | \n 0.928568 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 4 | \n 0.374905 | \n 0.72143 | \n 0.985619 | \n 1.0 | \n 0.857301 | \n 0.835711 | \n 0.875668 | \n 0.939346 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 5 | \n 0.306065 | \n 0.599847 | \n 0.865716 | \n 1.0 | \n 1.0 | \n 0.823009 | \n 0.779997 | \n 0.800611 | \n 0.845411 | \n 0.901279 | \n 0.96303 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 6 | \n 0.258227 | \n 0.510109 | \n 0.749809 | \n 0.961906 | \n 1.0 | \n 1.0 | \n 0.800148 | \n 0.742854 | \n 0.750573 | \n 0.782788 | \n 0.826173 | \n 0.875482 | \n 0.928287 | \n 0.983321 | \n
\n \n | 7 | \n 0.22318 | \n 0.442674 | \n 0.655854 | \n 0.856925 | \n 1.0 | \n 1.0 | \n 1.0 | \n 0.783818 | \n 0.716324 | \n 0.714831 | \n 0.738057 | \n 0.772525 | \n 0.812947 | \n 0.856881 | \n
\n \n | 8 | \n 0.196445 | \n 0.390565 | \n 0.58101 | \n 0.765164 | \n 0.937261 | \n 1.0 | \n 1.0 | \n 1.0 | \n 0.771571 | \n 0.696426 | \n 0.688025 | \n 0.704509 | \n 0.73229 | \n 0.766047 | \n
\n \n | 9 | \n 0.175397 | \n 0.349235 | \n 0.520753 | \n 0.688605 | \n 0.850182 | \n 0.999746 | \n 1.0 | \n 1.0 | \n 1.0 | \n 0.762045 | \n 0.68095 | \n 0.667176 | \n 0.678416 | \n 0.700995 | \n
\n \n | 10 | \n 0.158406 | \n 0.315715 | \n 0.471468 | \n 0.624904 | \n 0.77468 | \n 0.918196 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 0.754425 | \n 0.668569 | \n 0.650496 | \n 0.657542 | \n
\n \n | 11 | \n 0.144405 | \n 0.28801 | \n 0.430521 | \n 0.571476 | \n 0.710118 | \n 0.845106 | \n 0.973844 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 0.74819 | \n 0.658439 | \n 0.636849 | \n
\n \n | 12 | \n 0.132671 | \n 0.264742 | \n 0.396014 | \n 0.526192 | \n 0.654817 | \n 0.78113 | \n 0.903794 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 0.742994 | \n 0.649997 | \n
\n \n | 13 | \n 0.122697 | \n 0.244931 | \n 0.366566 | \n 0.487402 | \n 0.607145 | \n 0.725335 | \n 0.841216 | \n 0.953452 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 0.738598 | \n
\n \n | 14 | \n 0.114115 | \n 0.227866 | \n 0.341155 | \n 0.453843 | \n 0.565734 | \n 0.676533 | \n 0.78578 | \n 0.892719 | \n 0.996017 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n
\n
"
180 | },
181 | "execution_count": 33,
182 | "metadata": {},
183 | "output_type": "execute_result"
184 | }
185 | ],
186 | "source": [
187 | "import numpy as np\n",
188 | "import pandas as pd\n",
189 | "df1=pd.DataFrame(index=[i for i in range(1,15)], columns=[i for i in range(1,15)])\n",
190 | "for i in df1.index:\n",
191 | " for j in df1.columns:\n",
192 | " ccc=j * (2/5*np.arctan(i-j)+1)/i\n",
193 | " df1.loc[i, j] = ccc if ccc<1 else 1.0\n",
194 | " # df1.loc[i, j] = min(i, j)/i\n",
195 | "df1"
196 | ],
197 | "metadata": {
198 | "collapsed": false
199 | }
200 | },
201 | {
202 | "cell_type": "code",
203 | "execution_count": 34,
204 | "outputs": [
205 | {
206 | "data": {
207 | "text/plain": " 1 2 3 4 5 6 7 \\\n1 1.0 1.0 1.0 1.0 1.0 1.0 1.0 \n2 0.65708 1.0 1.0 1.0 1.0 1.0 1.0 \n3 0.480953 0.876106 1.0 1.0 1.0 1.0 1.0 \n4 0.374905 0.72143 0.985619 1.0 1.0 1.0 1.0 \n5 0.306065 0.599847 0.865716 1.051327 1.0 1.0 1.0 \n6 0.258227 0.510109 0.749809 0.961906 1.095133 1.0 1.0 \n7 0.22318 0.442674 0.655854 0.856925 1.030614 1.126422 1.0 \n8 0.196445 0.390565 0.58101 0.765164 0.937261 1.082145 1.149889 \n9 0.175397 0.349235 0.520753 0.688605 0.850182 0.999746 1.122224 \n10 0.158406 0.315715 0.471468 0.624904 0.77468 0.918196 1.049733 \n11 0.144405 0.28801 0.430521 0.571476 0.710118 0.845106 0.973844 \n12 0.132671 0.264742 0.396014 0.526192 0.654817 0.78113 0.903794 \n13 0.122697 0.244931 0.366566 0.487402 0.607145 0.725335 0.841216 \n14 0.114115 0.227866 0.341155 0.453843 0.565734 0.676533 0.78578 \n\n 8 9 10 11 12 13 14 \n1 1.0 1.0 1.0 1.0 1.0 1.0 1.0 \n2 1.0 1.0 1.0 1.0 1.0 1.0 1.0 \n3 1.0 1.0 1.0 1.0 1.0 1.0 1.0 \n4 1.0 1.0 1.0 1.0 1.0 1.0 1.0 \n5 1.0 1.0 1.0 1.0 1.0 1.0 1.0 \n6 1.0 1.0 1.0 1.0 1.0 1.0 1.0 \n7 1.0 1.0 1.0 1.0 1.0 1.0 1.0 \n8 1.0 1.0 1.0 1.0 1.0 1.0 1.0 \n9 1.168142 1.0 1.0 1.0 1.0 1.0 1.0 \n10 1.154288 1.182743 1.0 1.0 1.0 1.0 1.0 \n11 1.090631 1.180521 1.19469 1.0 1.0 1.0 1.0 \n12 1.020218 1.124714 1.202383 1.204646 1.0 1.0 1.0 \n13 0.953452 1.059457 1.153553 1.220881 1.21307 1.0 1.0 \n14 0.892719 0.996017 1.093091 1.178272 1.236737 1.220291 1.0 ",
208 | "text/html": "\n\n
\n \n \n | \n 1 | \n 2 | \n 3 | \n 4 | \n 5 | \n 6 | \n 7 | \n 8 | \n 9 | \n 10 | \n 11 | \n 12 | \n 13 | \n 14 | \n
\n \n \n \n | 1 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 2 | \n 0.65708 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 3 | \n 0.480953 | \n 0.876106 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 4 | \n 0.374905 | \n 0.72143 | \n 0.985619 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 5 | \n 0.306065 | \n 0.599847 | \n 0.865716 | \n 1.051327 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 6 | \n 0.258227 | \n 0.510109 | \n 0.749809 | \n 0.961906 | \n 1.095133 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 7 | \n 0.22318 | \n 0.442674 | \n 0.655854 | \n 0.856925 | \n 1.030614 | \n 1.126422 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 8 | \n 0.196445 | \n 0.390565 | \n 0.58101 | \n 0.765164 | \n 0.937261 | \n 1.082145 | \n 1.149889 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 9 | \n 0.175397 | \n 0.349235 | \n 0.520753 | \n 0.688605 | \n 0.850182 | \n 0.999746 | \n 1.122224 | \n 1.168142 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 10 | \n 0.158406 | \n 0.315715 | \n 0.471468 | \n 0.624904 | \n 0.77468 | \n 0.918196 | \n 1.049733 | \n 1.154288 | \n 1.182743 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 11 | \n 0.144405 | \n 0.28801 | \n 0.430521 | \n 0.571476 | \n 0.710118 | \n 0.845106 | \n 0.973844 | \n 1.090631 | \n 1.180521 | \n 1.19469 | \n 1.0 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 12 | \n 0.132671 | \n 0.264742 | \n 0.396014 | \n 0.526192 | \n 0.654817 | \n 0.78113 | \n 0.903794 | \n 1.020218 | \n 1.124714 | \n 1.202383 | \n 1.204646 | \n 1.0 | \n 1.0 | \n 1.0 | \n
\n \n | 13 | \n 0.122697 | \n 0.244931 | \n 0.366566 | \n 0.487402 | \n 0.607145 | \n 0.725335 | \n 0.841216 | \n 0.953452 | \n 1.059457 | \n 1.153553 | \n 1.220881 | \n 1.21307 | \n 1.0 | \n 1.0 | \n
\n \n | 14 | \n 0.114115 | \n 0.227866 | \n 0.341155 | \n 0.453843 | \n 0.565734 | \n 0.676533 | \n 0.78578 | \n 0.892719 | \n 0.996017 | \n 1.093091 | \n 1.178272 | \n 1.236737 | \n 1.220291 | \n 1.0 | \n
\n \n
\n
"
209 | },
210 | "execution_count": 34,
211 | "metadata": {},
212 | "output_type": "execute_result"
213 | }
214 | ],
215 | "source": [
216 | "import numpy as np\n",
217 | "import pandas as pd\n",
218 | "df1=pd.DataFrame(index=[i for i in range(1,15)], columns=[i for i in range(1,15)])\n",
219 | "for i in df1.index:\n",
220 | " for j in df1.columns:\n",
221 | " df1.loc[i, j] = min(i, j) * (2/5*np.arctan(i-min(i, j))+1)/i\n",
222 | " # df1.loc[i, j] = min(i, j)/i*2/(1+np.exp(min(i,j)-i))\n",
223 | "df1"
224 | ],
225 | "metadata": {
226 | "collapsed": false
227 | }
228 | },
229 | {
230 | "cell_type": "code",
231 | "execution_count": 16,
232 | "outputs": [
233 | {
234 | "data": {
235 | "text/plain": "",
236 | "application/javascript": "/* Put everything inside the global mpl namespace */\n/* global mpl */\nwindow.mpl = {};\n\nmpl.get_websocket_type = function () {\n if (typeof WebSocket !== 'undefined') {\n return WebSocket;\n } else if (typeof MozWebSocket !== 'undefined') {\n return MozWebSocket;\n } else {\n alert(\n 'Your browser does not have WebSocket support. ' +\n 'Please try Chrome, Safari or Firefox ≥ 6. ' +\n 'Firefox 4 and 5 are also supported but you ' +\n 'have to enable WebSockets in about:config.'\n );\n }\n};\n\nmpl.figure = function (figure_id, websocket, ondownload, parent_element) {\n this.id = figure_id;\n\n this.ws = websocket;\n\n this.supports_binary = this.ws.binaryType !== undefined;\n\n if (!this.supports_binary) {\n var warnings = document.getElementById('mpl-warnings');\n if (warnings) {\n warnings.style.display = 'block';\n warnings.textContent =\n 'This browser does not support binary websocket messages. ' +\n 'Performance may be slow.';\n }\n }\n\n this.imageObj = new Image();\n\n this.context = undefined;\n this.message = undefined;\n this.canvas = undefined;\n this.rubberband_canvas = undefined;\n this.rubberband_context = undefined;\n this.format_dropdown = undefined;\n\n this.image_mode = 'full';\n\n this.root = document.createElement('div');\n this.root.setAttribute('style', 'display: inline-block');\n this._root_extra_style(this.root);\n\n parent_element.appendChild(this.root);\n\n this._init_header(this);\n this._init_canvas(this);\n this._init_toolbar(this);\n\n var fig = this;\n\n this.waiting = false;\n\n this.ws.onopen = function () {\n fig.send_message('supports_binary', { value: fig.supports_binary });\n fig.send_message('send_image_mode', {});\n if (fig.ratio !== 1) {\n fig.send_message('set_device_pixel_ratio', {\n device_pixel_ratio: fig.ratio,\n });\n }\n fig.send_message('refresh', {});\n };\n\n this.imageObj.onload = function () {\n if (fig.image_mode === 'full') {\n // Full images could contain transparency (where diff images\n // almost always do), so we need to clear the canvas so that\n // there is no ghosting.\n fig.context.clearRect(0, 0, fig.canvas.width, fig.canvas.height);\n }\n fig.context.drawImage(fig.imageObj, 0, 0);\n };\n\n this.imageObj.onunload = function () {\n fig.ws.close();\n };\n\n this.ws.onmessage = this._make_on_message_function(this);\n\n this.ondownload = ondownload;\n};\n\nmpl.figure.prototype._init_header = function () {\n var titlebar = document.createElement('div');\n titlebar.classList =\n 'ui-dialog-titlebar ui-widget-header ui-corner-all ui-helper-clearfix';\n var titletext = document.createElement('div');\n titletext.classList = 'ui-dialog-title';\n titletext.setAttribute(\n 'style',\n 'width: 100%; text-align: center; padding: 3px;'\n );\n titlebar.appendChild(titletext);\n this.root.appendChild(titlebar);\n this.header = titletext;\n};\n\nmpl.figure.prototype._canvas_extra_style = function (_canvas_div) {};\n\nmpl.figure.prototype._root_extra_style = function (_canvas_div) {};\n\nmpl.figure.prototype._init_canvas = function () {\n var fig = this;\n\n var canvas_div = (this.canvas_div = document.createElement('div'));\n canvas_div.setAttribute('tabindex', '0');\n canvas_div.setAttribute(\n 'style',\n 'border: 1px solid #ddd;' +\n 'box-sizing: content-box;' +\n 'clear: both;' +\n 'min-height: 1px;' +\n 'min-width: 1px;' +\n 'outline: 0;' +\n 'overflow: hidden;' +\n 'position: relative;' +\n 'resize: both;' +\n 'z-index: 2;'\n );\n\n function on_keyboard_event_closure(name) {\n return function (event) {\n 
return fig.key_event(event, name);\n };\n }\n\n canvas_div.addEventListener(\n 'keydown',\n on_keyboard_event_closure('key_press')\n );\n canvas_div.addEventListener(\n 'keyup',\n on_keyboard_event_closure('key_release')\n );\n\n this._canvas_extra_style(canvas_div);\n this.root.appendChild(canvas_div);\n\n var canvas = (this.canvas = document.createElement('canvas'));\n canvas.classList.add('mpl-canvas');\n canvas.setAttribute(\n 'style',\n 'box-sizing: content-box;' +\n 'pointer-events: none;' +\n 'position: relative;' +\n 'z-index: 0;'\n );\n\n this.context = canvas.getContext('2d');\n\n var backingStore =\n this.context.backingStorePixelRatio ||\n this.context.webkitBackingStorePixelRatio ||\n this.context.mozBackingStorePixelRatio ||\n this.context.msBackingStorePixelRatio ||\n this.context.oBackingStorePixelRatio ||\n this.context.backingStorePixelRatio ||\n 1;\n\n this.ratio = (window.devicePixelRatio || 1) / backingStore;\n\n var rubberband_canvas = (this.rubberband_canvas = document.createElement(\n 'canvas'\n ));\n rubberband_canvas.setAttribute(\n 'style',\n 'box-sizing: content-box;' +\n 'left: 0;' +\n 'pointer-events: none;' +\n 'position: absolute;' +\n 'top: 0;' +\n 'z-index: 1;'\n );\n\n // Apply a ponyfill if ResizeObserver is not implemented by browser.\n if (this.ResizeObserver === undefined) {\n if (window.ResizeObserver !== undefined) {\n this.ResizeObserver = window.ResizeObserver;\n } else {\n var obs = _JSXTOOLS_RESIZE_OBSERVER({});\n this.ResizeObserver = obs.ResizeObserver;\n }\n }\n\n this.resizeObserverInstance = new this.ResizeObserver(function (entries) {\n var nentries = entries.length;\n for (var i = 0; i < nentries; i++) {\n var entry = entries[i];\n var width, height;\n if (entry.contentBoxSize) {\n if (entry.contentBoxSize instanceof Array) {\n // Chrome 84 implements new version of spec.\n width = entry.contentBoxSize[0].inlineSize;\n height = entry.contentBoxSize[0].blockSize;\n } else {\n // Firefox implements old version of spec.\n width = entry.contentBoxSize.inlineSize;\n height = entry.contentBoxSize.blockSize;\n }\n } else {\n // Chrome <84 implements even older version of spec.\n width = entry.contentRect.width;\n height = entry.contentRect.height;\n }\n\n // Keep the size of the canvas and rubber band canvas in sync with\n // the canvas container.\n if (entry.devicePixelContentBoxSize) {\n // Chrome 84 implements new version of spec.\n canvas.setAttribute(\n 'width',\n entry.devicePixelContentBoxSize[0].inlineSize\n );\n canvas.setAttribute(\n 'height',\n entry.devicePixelContentBoxSize[0].blockSize\n );\n } else {\n canvas.setAttribute('width', width * fig.ratio);\n canvas.setAttribute('height', height * fig.ratio);\n }\n /* This rescales the canvas back to display pixels, so that it\n * appears correct on HiDPI screens. */\n canvas.style.width = width + 'px';\n canvas.style.height = height + 'px';\n\n rubberband_canvas.setAttribute('width', width);\n rubberband_canvas.setAttribute('height', height);\n\n // And update the size in Python. 
We ignore the initial 0/0 size\n // that occurs as the element is placed into the DOM, which should\n // otherwise not happen due to the minimum size styling.\n if (fig.ws.readyState == 1 && width != 0 && height != 0) {\n fig.request_resize(width, height);\n }\n }\n });\n this.resizeObserverInstance.observe(canvas_div);\n\n function on_mouse_event_closure(name) {\n /* User Agent sniffing is bad, but WebKit is busted:\n * https://bugs.webkit.org/show_bug.cgi?id=144526\n * https://bugs.webkit.org/show_bug.cgi?id=181818\n * The worst that happens here is that they get an extra browser\n * selection when dragging, if this check fails to catch them.\n */\n var UA = navigator.userAgent;\n var isWebKit = /AppleWebKit/.test(UA) && !/Chrome/.test(UA);\n if(isWebKit) {\n return function (event) {\n /* This prevents the web browser from automatically changing to\n * the text insertion cursor when the button is pressed. We\n * want to control all of the cursor setting manually through\n * the 'cursor' event from matplotlib */\n event.preventDefault()\n return fig.mouse_event(event, name);\n };\n } else {\n return function (event) {\n return fig.mouse_event(event, name);\n };\n }\n }\n\n canvas_div.addEventListener(\n 'mousedown',\n on_mouse_event_closure('button_press')\n );\n canvas_div.addEventListener(\n 'mouseup',\n on_mouse_event_closure('button_release')\n );\n canvas_div.addEventListener(\n 'dblclick',\n on_mouse_event_closure('dblclick')\n );\n // Throttle sequential mouse events to 1 every 20ms.\n canvas_div.addEventListener(\n 'mousemove',\n on_mouse_event_closure('motion_notify')\n );\n\n canvas_div.addEventListener(\n 'mouseenter',\n on_mouse_event_closure('figure_enter')\n );\n canvas_div.addEventListener(\n 'mouseleave',\n on_mouse_event_closure('figure_leave')\n );\n\n canvas_div.addEventListener('wheel', function (event) {\n if (event.deltaY < 0) {\n event.step = 1;\n } else {\n event.step = -1;\n }\n on_mouse_event_closure('scroll')(event);\n });\n\n canvas_div.appendChild(canvas);\n canvas_div.appendChild(rubberband_canvas);\n\n this.rubberband_context = rubberband_canvas.getContext('2d');\n this.rubberband_context.strokeStyle = '#000000';\n\n this._resize_canvas = function (width, height, forward) {\n if (forward) {\n canvas_div.style.width = width + 'px';\n canvas_div.style.height = height + 'px';\n }\n };\n\n // Disable right mouse context menu.\n canvas_div.addEventListener('contextmenu', function (_e) {\n event.preventDefault();\n return false;\n });\n\n function set_focus() {\n canvas.focus();\n canvas_div.focus();\n }\n\n window.setTimeout(set_focus, 100);\n};\n\nmpl.figure.prototype._init_toolbar = function () {\n var fig = this;\n\n var toolbar = document.createElement('div');\n toolbar.classList = 'mpl-toolbar';\n this.root.appendChild(toolbar);\n\n function on_click_closure(name) {\n return function (_event) {\n return fig.toolbar_button_onclick(name);\n };\n }\n\n function on_mouseover_closure(tooltip) {\n return function (event) {\n if (!event.currentTarget.disabled) {\n return fig.toolbar_button_onmouseover(tooltip);\n }\n };\n }\n\n fig.buttons = {};\n var buttonGroup = document.createElement('div');\n buttonGroup.classList = 'mpl-button-group';\n for (var toolbar_ind in mpl.toolbar_items) {\n var name = mpl.toolbar_items[toolbar_ind][0];\n var tooltip = mpl.toolbar_items[toolbar_ind][1];\n var image = mpl.toolbar_items[toolbar_ind][2];\n var method_name = mpl.toolbar_items[toolbar_ind][3];\n\n if (!name) {\n /* Instead of a spacer, we start a new button group. 
*/\n if (buttonGroup.hasChildNodes()) {\n toolbar.appendChild(buttonGroup);\n }\n buttonGroup = document.createElement('div');\n buttonGroup.classList = 'mpl-button-group';\n continue;\n }\n\n var button = (fig.buttons[name] = document.createElement('button'));\n button.classList = 'mpl-widget';\n button.setAttribute('role', 'button');\n button.setAttribute('aria-disabled', 'false');\n button.addEventListener('click', on_click_closure(method_name));\n button.addEventListener('mouseover', on_mouseover_closure(tooltip));\n\n var icon_img = document.createElement('img');\n icon_img.src = '_images/' + image + '.png';\n icon_img.srcset = '_images/' + image + '_large.png 2x';\n icon_img.alt = tooltip;\n button.appendChild(icon_img);\n\n buttonGroup.appendChild(button);\n }\n\n if (buttonGroup.hasChildNodes()) {\n toolbar.appendChild(buttonGroup);\n }\n\n var fmt_picker = document.createElement('select');\n fmt_picker.classList = 'mpl-widget';\n toolbar.appendChild(fmt_picker);\n this.format_dropdown = fmt_picker;\n\n for (var ind in mpl.extensions) {\n var fmt = mpl.extensions[ind];\n var option = document.createElement('option');\n option.selected = fmt === mpl.default_extension;\n option.innerHTML = fmt;\n fmt_picker.appendChild(option);\n }\n\n var status_bar = document.createElement('span');\n status_bar.classList = 'mpl-message';\n toolbar.appendChild(status_bar);\n this.message = status_bar;\n};\n\nmpl.figure.prototype.request_resize = function (x_pixels, y_pixels) {\n // Request matplotlib to resize the figure. Matplotlib will then trigger a resize in the client,\n // which will in turn request a refresh of the image.\n this.send_message('resize', { width: x_pixels, height: y_pixels });\n};\n\nmpl.figure.prototype.send_message = function (type, properties) {\n properties['type'] = type;\n properties['figure_id'] = this.id;\n this.ws.send(JSON.stringify(properties));\n};\n\nmpl.figure.prototype.send_draw_message = function () {\n if (!this.waiting) {\n this.waiting = true;\n this.ws.send(JSON.stringify({ type: 'draw', figure_id: this.id }));\n }\n};\n\nmpl.figure.prototype.handle_save = function (fig, _msg) {\n var format_dropdown = fig.format_dropdown;\n var format = format_dropdown.options[format_dropdown.selectedIndex].value;\n fig.ondownload(fig, format);\n};\n\nmpl.figure.prototype.handle_resize = function (fig, msg) {\n var size = msg['size'];\n if (size[0] !== fig.canvas.width || size[1] !== fig.canvas.height) {\n fig._resize_canvas(size[0], size[1], msg['forward']);\n fig.send_message('refresh', {});\n }\n};\n\nmpl.figure.prototype.handle_rubberband = function (fig, msg) {\n var x0 = msg['x0'] / fig.ratio;\n var y0 = (fig.canvas.height - msg['y0']) / fig.ratio;\n var x1 = msg['x1'] / fig.ratio;\n var y1 = (fig.canvas.height - msg['y1']) / fig.ratio;\n x0 = Math.floor(x0) + 0.5;\n y0 = Math.floor(y0) + 0.5;\n x1 = Math.floor(x1) + 0.5;\n y1 = Math.floor(y1) + 0.5;\n var min_x = Math.min(x0, x1);\n var min_y = Math.min(y0, y1);\n var width = Math.abs(x1 - x0);\n var height = Math.abs(y1 - y0);\n\n fig.rubberband_context.clearRect(\n 0,\n 0,\n fig.canvas.width / fig.ratio,\n fig.canvas.height / fig.ratio\n );\n\n fig.rubberband_context.strokeRect(min_x, min_y, width, height);\n};\n\nmpl.figure.prototype.handle_figure_label = function (fig, msg) {\n // Updates the figure title.\n fig.header.textContent = msg['label'];\n};\n\nmpl.figure.prototype.handle_cursor = function (fig, msg) {\n fig.canvas_div.style.cursor = msg['cursor'];\n};\n\nmpl.figure.prototype.handle_message = function 
(fig, msg) {\n fig.message.textContent = msg['message'];\n};\n\nmpl.figure.prototype.handle_draw = function (fig, _msg) {\n // Request the server to send over a new figure.\n fig.send_draw_message();\n};\n\nmpl.figure.prototype.handle_image_mode = function (fig, msg) {\n fig.image_mode = msg['mode'];\n};\n\nmpl.figure.prototype.handle_history_buttons = function (fig, msg) {\n for (var key in msg) {\n if (!(key in fig.buttons)) {\n continue;\n }\n fig.buttons[key].disabled = !msg[key];\n fig.buttons[key].setAttribute('aria-disabled', !msg[key]);\n }\n};\n\nmpl.figure.prototype.handle_navigate_mode = function (fig, msg) {\n if (msg['mode'] === 'PAN') {\n fig.buttons['Pan'].classList.add('active');\n fig.buttons['Zoom'].classList.remove('active');\n } else if (msg['mode'] === 'ZOOM') {\n fig.buttons['Pan'].classList.remove('active');\n fig.buttons['Zoom'].classList.add('active');\n } else {\n fig.buttons['Pan'].classList.remove('active');\n fig.buttons['Zoom'].classList.remove('active');\n }\n};\n\nmpl.figure.prototype.updated_canvas_event = function () {\n // Called whenever the canvas gets updated.\n this.send_message('ack', {});\n};\n\n// A function to construct a web socket function for onmessage handling.\n// Called in the figure constructor.\nmpl.figure.prototype._make_on_message_function = function (fig) {\n return function socket_on_message(evt) {\n if (evt.data instanceof Blob) {\n var img = evt.data;\n if (img.type !== 'image/png') {\n /* FIXME: We get \"Resource interpreted as Image but\n * transferred with MIME type text/plain:\" errors on\n * Chrome. But how to set the MIME type? It doesn't seem\n * to be part of the websocket stream */\n img.type = 'image/png';\n }\n\n /* Free the memory for the previous frames */\n if (fig.imageObj.src) {\n (window.URL || window.webkitURL).revokeObjectURL(\n fig.imageObj.src\n );\n }\n\n fig.imageObj.src = (window.URL || window.webkitURL).createObjectURL(\n img\n );\n fig.updated_canvas_event();\n fig.waiting = false;\n return;\n } else if (\n typeof evt.data === 'string' &&\n evt.data.slice(0, 21) === 'data:image/png;base64'\n ) {\n fig.imageObj.src = evt.data;\n fig.updated_canvas_event();\n fig.waiting = false;\n return;\n }\n\n var msg = JSON.parse(evt.data);\n var msg_type = msg['type'];\n\n // Call the \"handle_{type}\" callback, which takes\n // the figure and JSON message as its only arguments.\n try {\n var callback = fig['handle_' + msg_type];\n } catch (e) {\n console.log(\n \"No handler for the '\" + msg_type + \"' message type: \",\n msg\n );\n return;\n }\n\n if (callback) {\n try {\n // console.log(\"Handling '\" + msg_type + \"' message: \", msg);\n callback(fig, msg);\n } catch (e) {\n console.log(\n \"Exception inside the 'handler_\" + msg_type + \"' callback:\",\n e,\n e.stack,\n msg\n );\n }\n }\n };\n};\n\nfunction getModifiers(event) {\n var mods = [];\n if (event.ctrlKey) {\n mods.push('ctrl');\n }\n if (event.altKey) {\n mods.push('alt');\n }\n if (event.shiftKey) {\n mods.push('shift');\n }\n if (event.metaKey) {\n mods.push('meta');\n }\n return mods;\n}\n\n/*\n * return a copy of an object with only non-object keys\n * we need this to avoid circular references\n * https://stackoverflow.com/a/24161582/3208463\n */\nfunction simpleKeys(original) {\n return Object.keys(original).reduce(function (obj, key) {\n if (typeof original[key] !== 'object') {\n obj[key] = original[key];\n }\n return obj;\n }, {});\n}\n\nmpl.figure.prototype.mouse_event = function (event, name) {\n if (name === 'button_press') {\n 
this.canvas.focus();\n this.canvas_div.focus();\n }\n\n // from https://stackoverflow.com/q/1114465\n var boundingRect = this.canvas.getBoundingClientRect();\n var x = (event.clientX - boundingRect.left) * this.ratio;\n var y = (event.clientY - boundingRect.top) * this.ratio;\n\n this.send_message(name, {\n x: x,\n y: y,\n button: event.button,\n step: event.step,\n modifiers: getModifiers(event),\n guiEvent: simpleKeys(event),\n });\n\n return false;\n};\n\nmpl.figure.prototype._key_event_extra = function (_event, _name) {\n // Handle any extra behaviour associated with a key event\n};\n\nmpl.figure.prototype.key_event = function (event, name) {\n // Prevent repeat events\n if (name === 'key_press') {\n if (event.key === this._key) {\n return;\n } else {\n this._key = event.key;\n }\n }\n if (name === 'key_release') {\n this._key = null;\n }\n\n var value = '';\n if (event.ctrlKey && event.key !== 'Control') {\n value += 'ctrl+';\n }\n else if (event.altKey && event.key !== 'Alt') {\n value += 'alt+';\n }\n else if (event.shiftKey && event.key !== 'Shift') {\n value += 'shift+';\n }\n\n value += 'k' + event.key;\n\n this._key_event_extra(event, name);\n\n this.send_message(name, { key: value, guiEvent: simpleKeys(event) });\n return false;\n};\n\nmpl.figure.prototype.toolbar_button_onclick = function (name) {\n if (name === 'download') {\n this.handle_save(this, null);\n } else {\n this.send_message('toolbar_button', { name: name });\n }\n};\n\nmpl.figure.prototype.toolbar_button_onmouseover = function (tooltip) {\n this.message.textContent = tooltip;\n};\n\n///////////////// REMAINING CONTENT GENERATED BY embed_js.py /////////////////\n// prettier-ignore\nvar _JSXTOOLS_RESIZE_OBSERVER=function(A){var t,i=new WeakMap,n=new WeakMap,a=new WeakMap,r=new WeakMap,o=new Set;function s(e){if(!(this instanceof s))throw new TypeError(\"Constructor requires 'new' operator\");i.set(this,e)}function h(){throw new TypeError(\"Function is not a constructor\")}function c(e,t,i,n){e=0 in arguments?Number(arguments[0]):0,t=1 in arguments?Number(arguments[1]):0,i=2 in arguments?Number(arguments[2]):0,n=3 in arguments?Number(arguments[3]):0,this.right=(this.x=this.left=e)+(this.width=i),this.bottom=(this.y=this.top=t)+(this.height=n),Object.freeze(this)}function d(){t=requestAnimationFrame(d);var s=new WeakMap,p=new Set;o.forEach((function(t){r.get(t).forEach((function(i){var r=t instanceof window.SVGElement,o=a.get(t),d=r?0:parseFloat(o.paddingTop),f=r?0:parseFloat(o.paddingRight),l=r?0:parseFloat(o.paddingBottom),u=r?0:parseFloat(o.paddingLeft),g=r?0:parseFloat(o.borderTopWidth),m=r?0:parseFloat(o.borderRightWidth),w=r?0:parseFloat(o.borderBottomWidth),b=u+f,F=d+l,v=(r?0:parseFloat(o.borderLeftWidth))+m,W=g+w,y=r?0:t.offsetHeight-W-t.clientHeight,E=r?0:t.offsetWidth-v-t.clientWidth,R=b+v,z=F+W,M=r?t.width:parseFloat(o.width)-R-E,O=r?t.height:parseFloat(o.height)-z-y;if(n.has(t)){var k=n.get(t);if(k[0]===M&&k[1]===O)return}n.set(t,[M,O]);var S=Object.create(h.prototype);S.target=t,S.contentRect=new c(u,d,M,O),s.has(i)||(s.set(i,[]),p.add(i)),s.get(i).push(S)}))})),p.forEach((function(e){i.get(e).call(e,s.get(e),e)}))}return s.prototype.observe=function(i){if(i instanceof window.Element){r.has(i)||(r.set(i,new Set),o.add(i),a.set(i,window.getComputedStyle(i)));var n=r.get(i);n.has(this)||n.add(this),cancelAnimationFrame(t),t=requestAnimationFrame(d)}},s.prototype.unobserve=function(i){if(i instanceof window.Element&&r.has(i)){var 
n=r.get(i);n.has(this)&&(n.delete(this),n.size||(r.delete(i),o.delete(i))),n.size||r.delete(i),o.size||cancelAnimationFrame(t)}},A.DOMRectReadOnly=c,A.ResizeObserver=s,A.ResizeObserverEntry=h,A}; // eslint-disable-line\nmpl.toolbar_items = [[\"Home\", \"Reset original view\", \"fa fa-home\", \"home\"], [\"Back\", \"Back to previous view\", \"fa fa-arrow-left\", \"back\"], [\"Forward\", \"Forward to next view\", \"fa fa-arrow-right\", \"forward\"], [\"\", \"\", \"\", \"\"], [\"Pan\", \"Left button pans, Right button zooms\\nx/y fixes axis, CTRL fixes aspect\", \"fa fa-arrows\", \"pan\"], [\"Zoom\", \"Zoom to rectangle\\nx/y fixes axis\", \"fa fa-square-o\", \"zoom\"], [\"\", \"\", \"\", \"\"], [\"Download\", \"Download plot\", \"fa fa-floppy-o\", \"download\"]];\n\nmpl.extensions = [\"eps\", \"jpeg\", \"pgf\", \"pdf\", \"png\", \"ps\", \"raw\", \"svg\", \"tif\", \"webp\"];\n\nmpl.default_extension = \"png\";/* global mpl */\n\nvar comm_websocket_adapter = function (comm) {\n // Create a \"websocket\"-like object which calls the given IPython comm\n // object with the appropriate methods. Currently this is a non binary\n // socket, so there is still some room for performance tuning.\n var ws = {};\n\n ws.binaryType = comm.kernel.ws.binaryType;\n ws.readyState = comm.kernel.ws.readyState;\n function updateReadyState(_event) {\n if (comm.kernel.ws) {\n ws.readyState = comm.kernel.ws.readyState;\n } else {\n ws.readyState = 3; // Closed state.\n }\n }\n comm.kernel.ws.addEventListener('open', updateReadyState);\n comm.kernel.ws.addEventListener('close', updateReadyState);\n comm.kernel.ws.addEventListener('error', updateReadyState);\n\n ws.close = function () {\n comm.close();\n };\n ws.send = function (m) {\n //console.log('sending', m);\n comm.send(m);\n };\n // Register the callback with on_msg.\n comm.on_msg(function (msg) {\n //console.log('receiving', msg['content']['data'], msg);\n var data = msg['content']['data'];\n if (data['blob'] !== undefined) {\n data = {\n data: new Blob(msg['buffers'], { type: data['blob'] }),\n };\n }\n // Pass the mpl event to the overridden (by mpl) onmessage function.\n ws.onmessage(data);\n });\n return ws;\n};\n\nmpl.mpl_figure_comm = function (comm, msg) {\n // This is the function which gets called when the mpl process\n // starts-up an IPython Comm through the \"matplotlib\" channel.\n\n var id = msg.content.data.id;\n // Get hold of the div created by the display call when the Comm\n // socket was opened in Python.\n var element = document.getElementById(id);\n var ws_proxy = comm_websocket_adapter(comm);\n\n function ondownload(figure, _format) {\n window.open(figure.canvas.toDataURL());\n }\n\n var fig = new mpl.figure(id, ws_proxy, ondownload, element);\n\n // Call onopen now - mpl needs it, as it is assuming we've passed it a real\n // web socket which is closed, not our websocket->open comm proxy.\n ws_proxy.onopen();\n\n fig.parent_element = element;\n fig.cell_info = mpl.find_output_cell(\"\");\n if (!fig.cell_info) {\n console.error('Failed to find cell for figure', id, fig);\n return;\n }\n fig.cell_info[0].output_area.element.on(\n 'cleared',\n { fig: fig },\n fig._remove_fig_handler\n );\n};\n\nmpl.figure.prototype.handle_close = function (fig, msg) {\n var width = fig.canvas.width / fig.ratio;\n fig.cell_info[0].output_area.element.off(\n 'cleared',\n fig._remove_fig_handler\n );\n fig.resizeObserverInstance.unobserve(fig.canvas_div);\n\n // Update the output cell to use the data from the current canvas.\n fig.push_to_output();\n var 
dataURL = fig.canvas.toDataURL();\n // Re-enable the keyboard manager in IPython - without this line, in FF,\n // the notebook keyboard shortcuts fail.\n IPython.keyboard_manager.enable();\n fig.parent_element.innerHTML =\n '
';\n fig.close_ws(fig, msg);\n};\n\nmpl.figure.prototype.close_ws = function (fig, msg) {\n fig.send_message('closing', msg);\n // fig.ws.close()\n};\n\nmpl.figure.prototype.push_to_output = function (_remove_interactive) {\n // Turn the data on the canvas into data in the output cell.\n var width = this.canvas.width / this.ratio;\n var dataURL = this.canvas.toDataURL();\n this.cell_info[1]['text/html'] =\n '
';\n};\n\nmpl.figure.prototype.updated_canvas_event = function () {\n // Tell IPython that the notebook contents must change.\n IPython.notebook.set_dirty(true);\n this.send_message('ack', {});\n var fig = this;\n // Wait a second, then push the new image to the DOM so\n // that it is saved nicely (might be nice to debounce this).\n setTimeout(function () {\n fig.push_to_output();\n }, 1000);\n};\n\nmpl.figure.prototype._init_toolbar = function () {\n var fig = this;\n\n var toolbar = document.createElement('div');\n toolbar.classList = 'btn-toolbar';\n this.root.appendChild(toolbar);\n\n function on_click_closure(name) {\n return function (_event) {\n return fig.toolbar_button_onclick(name);\n };\n }\n\n function on_mouseover_closure(tooltip) {\n return function (event) {\n if (!event.currentTarget.disabled) {\n return fig.toolbar_button_onmouseover(tooltip);\n }\n };\n }\n\n fig.buttons = {};\n var buttonGroup = document.createElement('div');\n buttonGroup.classList = 'btn-group';\n var button;\n for (var toolbar_ind in mpl.toolbar_items) {\n var name = mpl.toolbar_items[toolbar_ind][0];\n var tooltip = mpl.toolbar_items[toolbar_ind][1];\n var image = mpl.toolbar_items[toolbar_ind][2];\n var method_name = mpl.toolbar_items[toolbar_ind][3];\n\n if (!name) {\n /* Instead of a spacer, we start a new button group. */\n if (buttonGroup.hasChildNodes()) {\n toolbar.appendChild(buttonGroup);\n }\n buttonGroup = document.createElement('div');\n buttonGroup.classList = 'btn-group';\n continue;\n }\n\n button = fig.buttons[name] = document.createElement('button');\n button.classList = 'btn btn-default';\n button.href = '#';\n button.title = name;\n button.innerHTML = '';\n button.addEventListener('click', on_click_closure(method_name));\n button.addEventListener('mouseover', on_mouseover_closure(tooltip));\n buttonGroup.appendChild(button);\n }\n\n if (buttonGroup.hasChildNodes()) {\n toolbar.appendChild(buttonGroup);\n }\n\n // Add the status bar.\n var status_bar = document.createElement('span');\n status_bar.classList = 'mpl-message pull-right';\n toolbar.appendChild(status_bar);\n this.message = status_bar;\n\n // Add the close button to the window.\n var buttongrp = document.createElement('div');\n buttongrp.classList = 'btn-group inline pull-right';\n button = document.createElement('button');\n button.classList = 'btn btn-mini btn-primary';\n button.href = '#';\n button.title = 'Stop Interaction';\n button.innerHTML = '';\n button.addEventListener('click', function (_evt) {\n fig.handle_close(fig, {});\n });\n button.addEventListener(\n 'mouseover',\n on_mouseover_closure('Stop Interaction')\n );\n buttongrp.appendChild(button);\n var titlebar = this.root.querySelector('.ui-dialog-titlebar');\n titlebar.insertBefore(buttongrp, titlebar.firstChild);\n};\n\nmpl.figure.prototype._remove_fig_handler = function (event) {\n var fig = event.data.fig;\n if (event.target !== this) {\n // Ignore bubbled events from children.\n return;\n }\n fig.close_ws(fig, {});\n};\n\nmpl.figure.prototype._root_extra_style = function (el) {\n el.style.boxSizing = 'content-box'; // override notebook setting of border-box.\n};\n\nmpl.figure.prototype._canvas_extra_style = function (el) {\n // this is important to make the div 'focusable\n el.setAttribute('tabindex', 0);\n // reach out to IPython and tell the keyboard manager to turn it's self\n // off when our div gets focus\n\n // location in version 3\n if (IPython.notebook.keyboard_manager) {\n IPython.notebook.keyboard_manager.register_events(el);\n } else {\n // 
location in version 2\n IPython.keyboard_manager.register_events(el);\n }\n};\n\nmpl.figure.prototype._key_event_extra = function (event, _name) {\n // Check for shift+enter\n if (event.shiftKey && event.which === 13) {\n this.canvas_div.blur();\n // select the cell after this one\n var index = IPython.notebook.find_cell_index(this.cell_info[0]);\n IPython.notebook.select(index + 1);\n }\n};\n\nmpl.figure.prototype.handle_save = function (fig, _msg) {\n fig.ondownload(fig, null);\n};\n\nmpl.find_output_cell = function (html_output) {\n // Return the cell and output element which can be found *uniquely* in the notebook.\n // Note - this is a bit hacky, but it is done because the \"notebook_saving.Notebook\"\n // IPython event is triggered only after the cells have been serialised, which for\n // our purposes (turning an active figure into a static one), is too late.\n var cells = IPython.notebook.get_cells();\n var ncells = cells.length;\n for (var i = 0; i < ncells; i++) {\n var cell = cells[i];\n if (cell.cell_type === 'code') {\n for (var j = 0; j < cell.output_area.outputs.length; j++) {\n var data = cell.output_area.outputs[j];\n if (data.data) {\n // IPython >= 3 moved mimebundle to data attribute of output\n data = data.data;\n }\n if (data['text/html'] === html_output) {\n return [cell, data, j];\n }\n }\n }\n }\n};\n\n// Register the function which deals with the matplotlib target/channel.\n// The kernel may be null if the page has been refreshed.\nif (IPython.notebook.kernel !== null) {\n IPython.notebook.kernel.comm_manager.register_target(\n 'matplotlib',\n mpl.mpl_figure_comm\n );\n}\n"
237 | },
238 | "metadata": {},
239 | "output_type": "display_data"
240 | },
241 | {
242 | "data": {
243 | "text/plain": "",
244 | "text/html": ""
245 | },
246 | "metadata": {},
247 | "output_type": "display_data"
248 | }
249 | ],
250 | "source": [
251 | "import pandas as pd\n",
252 | "import matplotlib.pyplot as plt\n",
253 | "%matplotlib notebook\n",
254 | "data = pd.read_csv('sc1.txt', sep=' ', header=None)\n",
255 | "data.iloc[:,1]=data.iloc[:,1]*100\n",
256 | "data[1]=data[1].astype(int)\n",
257 | "data.iloc[:,2]=data.iloc[:,2]*100\n",
258 | "data[2]=data[2].astype(int)\n",
259 | "fig = plt.figure()\n",
260 | "ax = fig.add_subplot(111, projection='3d')\n",
261 | "ax.scatter(data[1], data[2], data[0], c='r', marker='.')\n",
262 | "ax.set_xlabel('iou')\n",
263 | "ax.set_ylabel('conf')\n",
264 | "ax.set_zlabel('f1')\n",
265 | "plt.show()"
266 | ],
267 | "metadata": {
268 | "collapsed": false
269 | }
270 | },
271 | {
272 | "cell_type": "code",
273 | "execution_count": 15,
274 | "outputs": [
275 | {
276 | "data": {
277 | "text/plain": " 0 1 2\n316 0.495508 9 21\n391 0.495394 10 21\n466 0.496765 11 21\n541 0.496007 12 21\n616 0.496484 13 21\n... ... .. ..\n3165 0.495221 47 20\n3166 0.499160 47 21\n3241 0.498244 48 21\n3315 0.496889 49 20\n3316 0.501203 49 21\n\n[71 rows x 3 columns]",
278 | "text/html": "\n\n
\n \n \n | \n 0 | \n 1 | \n 2 | \n
\n \n \n \n | 316 | \n 0.495508 | \n 9 | \n 21 | \n
\n \n | 391 | \n 0.495394 | \n 10 | \n 21 | \n
\n \n | 466 | \n 0.496765 | \n 11 | \n 21 | \n
\n \n | 541 | \n 0.496007 | \n 12 | \n 21 | \n
\n \n | 616 | \n 0.496484 | \n 13 | \n 21 | \n
\n \n | ... | \n ... | \n ... | \n ... | \n
\n \n | 3165 | \n 0.495221 | \n 47 | \n 20 | \n
\n \n | 3166 | \n 0.499160 | \n 47 | \n 21 | \n
\n \n | 3241 | \n 0.498244 | \n 48 | \n 21 | \n
\n \n | 3315 | \n 0.496889 | \n 49 | \n 20 | \n
\n \n | 3316 | \n 0.501203 | \n 49 | \n 21 | \n
\n \n
\n
71 rows × 3 columns
\n
"
279 | },
280 | "execution_count": 15,
281 | "metadata": {},
282 | "output_type": "execute_result"
283 | }
284 | ],
285 | "source": [
286 | "import pandas as pd\n",
287 | "data = pd.read_csv('sc1.txt', sep=' ', header=None)\n",
288 | "data.iloc[:,1]=data.iloc[:,1]*100\n",
289 | "data[1]=data[1].astype(int)\n",
290 | "data.iloc[:,2]=data.iloc[:,2]*100\n",
291 | "data[2]=data[2].astype(int)\n",
292 | "data[data[0]>0.495]"
293 | ],
294 | "metadata": {
295 | "collapsed": false
296 | }
297 | }
370 | ],
371 | "metadata": {
372 | "kernelspec": {
373 | "display_name": "Python 3",
374 | "language": "python",
375 | "name": "python3"
376 | },
377 | "language_info": {
378 | "codemirror_mode": {
379 | "name": "ipython",
380 | "version": 2
381 | },
382 | "file_extension": ".py",
383 | "mimetype": "text/x-python",
384 | "name": "python",
385 | "nbconvert_exporter": "python",
386 | "pygments_lexer": "ipython2",
387 | "version": "2.7.6"
388 | }
389 | },
390 | "nbformat": 4,
391 | "nbformat_minor": 0
392 | }
393 |
--------------------------------------------------------------------------------