├── src
│   ├── pallete
│   ├── zsd_dataset.py
│   ├── voc_dataset.py
│   ├── data_augmentation.py
│   ├── yolo_net.py
│   ├── zsd_net.py
│   ├── ZSD_loss.py
│   └── utils.py
├── attributes
│   ├── seen.pkl.npy
│   ├── attrs.pkl.npy
│   └── voc.txt
├── README.md
├── try_zsd_dataset.py
└── try_zsdloss.py

--------------------------------------------------------------------------------
/src/pallete:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/howBiGaStorm/ZeroShot-YOLO/HEAD/src/pallete
--------------------------------------------------------------------------------
/attributes/seen.pkl.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/howBiGaStorm/ZeroShot-YOLO/HEAD/attributes/seen.pkl.npy
--------------------------------------------------------------------------------
/attributes/attrs.pkl.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/howBiGaStorm/ZeroShot-YOLO/HEAD/attributes/attrs.pkl.npy
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# ZeroShot-YOLO
PyTorch code for reproducing the paper Zero-Shot Detection (https://arxiv.org/abs/1803.07113).
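
The `attributes` folder ships one 64-dimensional attribute vector per VOC class. A minimal loading sketch (an editor's example, assuming the rows of `attrs.pkl.npy` follow the VOC class order used in `src/zsd_dataset.py`, and that `seen.pkl.npy` holds the 10 seen-class rows built by `try_zsd_dataset.py`):

```python
import numpy as np

# VOC class order used throughout the repo (see src/zsd_dataset.py).
VOC_CLASSES = ['aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car',
               'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike',
               'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor']

attrs = np.load('attributes/attrs.pkl.npy')  # [20, 64]: one attribute vector per class
seen = np.load('attributes/seen.pkl.npy')    # [10, 64]: rows of attrs for the seen split

SEEN_IDX = [1, 2, 4, 5, 8, 10, 11, 14, 15, 19]  # seen-class indices, from try_zsd_dataset.py
print([VOC_CLASSES[i] for i in SEEN_IDX])
```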
--------------------------------------------------------------------------------
/try_zsd_dataset.py:
--------------------------------------------------------------------------------
from src.zsd_dataset import ZSD_Dataset
import os
import argparse
import torch.nn as nn
import numpy as np
from torch.utils.data import DataLoader
from src.utils import *

from src.ZSD_loss import ZSDLoss
from src.zsd_net import ZSD
from train_voc import get_args

opt = get_args()
training_params = {"batch_size": opt.batch_size,
                   "shuffle": False,
                   "drop_last": True,
                   "collate_fn": custom_collate_fn}

training_set = ZSD_Dataset(opt.data_path, mode='seen', image_size=opt.image_size)
training_generator = DataLoader(training_set, **training_params)
# for iter, batch in enumerate(training_generator):
#     img, label = batch  # img: [batch_size, 3, 448, 448]
#     print('img.shape', img.shape)
#     print(label[0][0][5])

# import cv2
# img = cv2.imread('/home/neec10601/Data/hmb/ZSD/data/VOCdevkit/VOC2007/JPEGImages/000005.jpg')
# print(img.shape)

# Build attributes/seen.pkl.npy by picking the rows of attrs.pkl.npy that belong
# to the 10 seen classes (np.save appends the .npy suffix to 'seen.pkl').
attr = np.load('/home/neec10601/Data/hmb/ZSD/attributes/attrs.pkl.npy')
seen = np.empty((10, 64))  # filled row by row below
k = [1, 2, 4, 5, 8, 10, 11, 14, 15, 19]  # indices of the seen classes
for i in range(10):
    seen[i] = attr[k[i]]
print(seen)
np.save('/home/neec10601/Data/hmb/ZSD/attributes/seen.pkl', seen)
--------------------------------------------------------------------------------
/src/zsd_dataset.py:
--------------------------------------------------------------------------------
"""
@author: Viet Nguyen
"""
import os
from torch.utils.data import Dataset
import xml.etree.ElementTree as ET
from src.data_augmentation import *  # also re-exports cv2, used below
import numpy as np


class ZSD_Dataset(Dataset):
    def __init__(self, root_path="data/1010split", mode="seen", image_size=448, is_training=True):
        if mode in ["mix", "seen", "test_seen", "unseen", "try"]:
            self.data_path = os.path.join(root_path, mode + '.txt')
            self.pic_paths = [path for path in open(self.data_path)]
            self.num_images = len(self.pic_paths)

        self.classes = ['aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', 'chair', 'cow',
                        'diningtable', 'dog', 'horse', 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train',
                        'tvmonitor']
        self.attrs = np.load('/home/neec10601/Data/hmb/ZSD/attributes/attrs.pkl.npy')
        self.num_classes = len(self.classes)

        self.image_size = image_size
        self.is_training = is_training

    def __len__(self):
        return self.num_images

    def __getitem__(self, item):
        pic_path = self.pic_paths[item].strip()
        img = cv2.imread(pic_path)
        image = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        # Derive the annotation path from the image path:
        # .../JPEGImages/xxx.jpg -> .../Annotations/xxx.xml
        temp = pic_path.split('JPEGImages')
        image_xml_path = temp[0] + 'Annotations' + temp[1].split('jpg')[0] + 'xml'

        annot = ET.parse(image_xml_path)

        objects = []
        for obj in annot.findall('object'):
            xmin, xmax, ymin, ymax = [int(obj.find('bndbox').find(tag).text) - 1 for tag in
                                      ["xmin", "xmax", "ymin", "ymax"]]
            label = self.classes.index(obj.find('name').text.lower().strip())
            attr = self.attrs[label]  # 64-dim attribute vector of the class
            objects.append([xmin, ymin, xmax, ymax, label, attr])

        if self.is_training:
            transformations = Compose([HSVAdjust(), VerticalFlip(), Crop(), Resize(self.image_size)])
        else:
            transformations = Compose([Resize(self.image_size)])
        image, objects = transformations((image, objects))

        # HWC -> CHW float32; objects stays a Python list (variable number of boxes per image)
        return np.transpose(np.array(image, dtype=np.float32), (2, 0, 1)), objects
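
# Editor's sketch of the JPEGImages -> Annotations path mapping used in
# __getitem__ above (the example path is hypothetical):
if __name__ == "__main__":
    pic_path = "data/VOCdevkit/VOC2007/JPEGImages/000005.jpg"
    temp = pic_path.split('JPEGImages')
    print(temp[0] + 'Annotations' + temp[1].split('jpg')[0] + 'xml')
    # data/VOCdevkit/VOC2007/Annotations/000005.xml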
--------------------------------------------------------------------------------
/src/voc_dataset.py:
--------------------------------------------------------------------------------
"""
@author: Viet Nguyen
"""
import os
from torch.utils.data import Dataset
import xml.etree.ElementTree as ET
from src.data_augmentation import *  # also re-exports cv2, used below
import numpy as np


class VOCDataset(Dataset):
    def __init__(self, root_path="data/VOCdevkit", year="2007", mode="train", image_size=448, is_training=False):
        if (mode in ["train", "val", "trainval", "test", "try"] and year == "2007") or (
                mode in ["train", "val", "trainval"] and year == "2012"):
            self.data_path = os.path.join(root_path, "VOC{}".format(year))
            id_list_path = os.path.join(self.data_path, "ImageSets/Main/{}.txt".format(mode))
            self.ids = [id.strip() for id in open(id_list_path)]

        self.classes = ['aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', 'chair', 'cow',
                        'diningtable', 'dog', 'horse', 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train',
                        'tvmonitor']
        self.attrs = np.load('/home/neec10601/Data/hmb/ZSD/attributes/attrs.pkl.npy')
        self.image_size = image_size
        self.num_classes = len(self.classes)
        self.num_images = len(self.ids)
        self.is_training = is_training

    def __len__(self):
        return self.num_images

    def __getitem__(self, item):
        id = self.ids[item]
        image_path = os.path.join(self.data_path, "JPEGImages", "{}.jpg".format(id))
        image = cv2.imread(image_path)
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        image_xml_path = os.path.join(self.data_path, "Annotations", "{}.xml".format(id))
        annot = ET.parse(image_xml_path)

        objects = []
        for obj in annot.findall('object'):
            xmin, xmax, ymin, ymax = [int(obj.find('bndbox').find(tag).text) - 1 for tag in
                                      ["xmin", "xmax", "ymin", "ymax"]]
            label = self.classes.index(obj.find('name').text.lower().strip())
            attr = self.attrs[label]  # 64-dim attribute vector of the class
            objects.append([xmin, ymin, xmax, ymax, label, attr])
        if self.is_training:
            transformations = Compose([HSVAdjust(), VerticalFlip(), Crop(), Resize(self.image_size)])
        else:
            transformations = Compose([Resize(self.image_size)])
        image, objects = transformations((image, objects))

        return np.transpose(np.array(image, dtype=np.float32), (2, 0, 1)), objects
--------------------------------------------------------------------------------
/try_zsdloss.py:
--------------------------------------------------------------------------------
# Scratch benchmark: three ways to compute, for every predicted semantic vector,
# its maximum cosine similarity against the seen-class attribute vectors.
import torch
import time

b = 5   # batch size
n = 10  # number of seen classes
# torch.manual_seed(1234)


ts = torch.abs(torch.randn(b, 5, 169, 64))  # predicted semantic vectors, [b, anchors, cells, 64]
tt = torch.abs(torch.randn(b, 5, 169, 64))
seen_vec = torch.abs(torch.randn(n, 64))    # seen-class attribute vectors
# ts = torch.Tensor([[1, 1, 1], [1, 0, 0]])
# seen_vec = torch.Tensor([[1, 1, 0], [1, 1, 1]])


def cos_sim(vector1, vector2):
    dot_product = 0.0
    normA = 0.0
    normB = 0.0
    for x, y in zip(vector1, vector2):
        dot_product += x * y
        normA += x ** 2
        normB += y ** 2
    if normA == 0.0 or normB == 0.0:
        return 0
    else:
        return dot_product / ((normA * normB) ** 0.5)


# ############ method 1: pure-Python double loop ############
# t1 = time.perf_counter()
# ts = ts.view(-1, 64)
# noobj_sim = torch.zeros(b * 5 * 169, 1)
# # noobj_sim = torch.zeros(2)
# for i in range(seen_vec.shape[0]):
#     for j in range(ts.shape[0]):
#         sim = cos_sim(ts[j], seen_vec[i])
#         # print(sim)
#         if sim > noobj_sim[j]:
#             noobj_sim[j] = sim
# print(noobj_sim)
# t2 = time.perf_counter()
# tf = t2 - t1
# print(tf)

# ############ method 2: expand both sides, loop over rows ############
# t1 = time.perf_counter()
# tsb = ts.view(b, -1, 64, 1).repeat(1, 1, 1, n).view(-1, 64)  # [b*845*n, 64]
# tsb = ts.view(-1, 3, 1).repeat(1, 1, 2).view(-1, 3)
# # seen_attrs = torch.zeros(b, 5 * 169, 64, n, requires_grad=False)
# seen_attrs = torch.zeros(2, 3, 2)
# for i, seen_attr in enumerate(torch.FloatTensor(seen_vec)):
#     # seen_attrs[:, :, :, i] = seen_attr.view(1, 1, 64).repeat(b, 5 * 169, 1)
#     seen_attrs[:, :, i] = seen_attr.view(1, 3).repeat(2, 1)
# seen_attrs = seen_attrs.view(-1, 3)
# print(seen_attrs)
#
# noobj_sim = torch.zeros(tsb.shape[0], requires_grad=False)
# for i in range(tsb.shape[0]):
#     noobj_sim[i] = cos_sim(tsb[i], seen_attrs[i])
# noobj_sim = noobj_sim.view(-1, n)
# print(noobj_sim)
# noobj_sim, _ = noobj_sim.max(1)
# t2 = time.perf_counter()
# noobj_sim = noobj_sim.view(b, 5, 169)
# print(noobj_sim)
# tsen = t2 - t1
# print(tsen)
#
# print(tsen - tf)

# ts = torch.Tensor([[1, 1, 1], [1, 0, 0]])
# seen_vec = torch.Tensor([[1, 1, 0], [1, 1, 1], [1, 0, 1]])
# ts = ts.view(2, 3, 1).repeat(1, 1, 3)
# seen = torch.zeros(2, 3, 3)
# print(seen.shape)

# ############ method 3: broadcast and use torch.nn.CosineSimilarity ############
t1 = time.perf_counter()  # time.clock() was deprecated and later removed
tsb = ts.view(b, -1, 64, 1).repeat(1, 1, 1, n)  # [b, 845, 64, n]
seen_attrs = torch.zeros(b, 5 * 169, 64, n, requires_grad=False)
for i, seen_attr in enumerate(torch.FloatTensor(seen_vec)):
    seen_attrs[:, :, :, i] = seen_attr.view(1, 1, 64).repeat(b, 5 * 169, 1)

print(tsb.shape, seen_attrs.shape)

cos_sim = torch.nn.CosineSimilarity(dim=2, eps=1e-8)  # note: rebinds the cos_sim name above
loss = cos_sim(tsb, seen_attrs)                       # [b, 845, n]
noobj_sim, _ = loss.max(2)                            # max over the n seen classes -> [b, 845]
print(noobj_sim.shape)
t2 = time.perf_counter()
tbb = t2 - t1
print(tbb)
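
# ############ method 4 (editor's sketch, not one of the original benchmarks) ############
# The same result without materializing the repeated [b, 845, 64, n] tensors:
# normalize both sides and take a matmul, since the cosine similarity of unit
# vectors is just their dot product.
t1 = time.perf_counter()
ts_n = torch.nn.functional.normalize(ts.view(-1, 64), dim=1, eps=1e-8)  # [b*845, 64]
seen_n = torch.nn.functional.normalize(seen_vec, dim=1, eps=1e-8)       # [n, 64]
noobj_sim_mm, _ = (ts_n @ seen_n.t()).max(1)                            # [b*845]
noobj_sim_mm = noobj_sim_mm.view(b, 5, 169)
t2 = time.perf_counter()
print(noobj_sim_mm.shape, t2 - t1)
print(torch.allclose(noobj_sim.view(b, 5, 169), noobj_sim_mm, atol=1e-5))  # True for these nonzero vectors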
--------------------------------------------------------------------------------
/src/data_augmentation.py:
--------------------------------------------------------------------------------
"""
@author: Viet Nguyen
"""
import numpy as np
from random import uniform
import cv2


class Compose(object):

    def __init__(self, transforms):
        self.transforms = transforms

    def __call__(self, data):
        for function_ in self.transforms:
            data = function_(data)
        return data


class Crop(object):
    # Randomly crops the borders while keeping every ground-truth box inside.

    def __init__(self, max_crop=0.1):
        super().__init__()
        self.max_crop = max_crop

    def __call__(self, data):
        image, label = data
        height, width = image.shape[:2]
        # bounding rectangle of all ground-truth boxes
        xmin = width
        ymin = height
        xmax = 0
        ymax = 0
        for lb in label:
            xmin = min(xmin, lb[0])
            ymin = min(ymin, lb[1])
            xmax = max(xmax, lb[2])
            ymax = max(ymax, lb[3])  # lb[3] is the box's ymax
        cropped_left = uniform(0, self.max_crop)
        cropped_right = uniform(0, self.max_crop)
        cropped_top = uniform(0, self.max_crop)
        cropped_bottom = uniform(0, self.max_crop)

        new_xmin = int(min(cropped_left * width, xmin))
        new_ymin = int(min(cropped_top * height, ymin))
        new_xmax = int(max(width - 1 - cropped_right * width, xmax))
        new_ymax = int(max(height - 1 - cropped_bottom * height, ymax))

        image = image[new_ymin:new_ymax, new_xmin:new_xmax, :]
        label = [[lb[0] - new_xmin, lb[1] - new_ymin, lb[2] - new_xmin, lb[3] - new_ymin, lb[4], lb[5]] for lb in label]

        return image, label


class VerticalFlip(object):
    # Note: cv2.flip(image, 1) mirrors left-right, so despite the class name this
    # is a horizontal flip; the box x-coordinates are mirrored accordingly.

    def __init__(self, prob=0.5):
        super().__init__()
        self.prob = prob

    def __call__(self, data):
        image, label = data
        if uniform(0, 1) >= self.prob:
            image = cv2.flip(image, 1)
            width = image.shape[1]
            label = [[width - lb[2], lb[1], width - lb[0], lb[3], lb[4], lb[5]] for lb in label]
        return image, label


class HSVAdjust(object):

    def __init__(self, hue=30, saturation=1.5, value=1.5, prob=0.5):
        super().__init__()
        self.hue = hue
        self.saturation = saturation
        self.value = value
        self.prob = prob

    def __call__(self, data):

        def clip_hue(hue_channel):
            # float32 HSV in OpenCV uses hue in [0, 360)
            hue_channel[hue_channel >= 360] -= 360
            hue_channel[hue_channel < 0] += 360
            return hue_channel

        image, label = data
        adjust_hue = uniform(-self.hue, self.hue)
        adjust_saturation = uniform(1, self.saturation)
        if uniform(0, 1) >= self.prob:
            adjust_saturation = 1 / adjust_saturation
        adjust_value = uniform(1, self.value)
        if uniform(0, 1) >= self.prob:
            adjust_value = 1 / adjust_value
        image = image.astype(np.float32) / 255
        image = cv2.cvtColor(image, cv2.COLOR_RGB2HSV)
        image[:, :, 0] += adjust_hue
        image[:, :, 0] = clip_hue(image[:, :, 0])
        image[:, :, 1] = np.clip(adjust_saturation * image[:, :, 1], 0.0, 1.0)
        image[:, :, 2] = np.clip(adjust_value * image[:, :, 2], 0.0, 1.0)

        image = cv2.cvtColor(image, cv2.COLOR_HSV2RGB)
        image = (image * 255).astype(np.float32)

        return image, label


class Resize(object):

    def __init__(self, image_size):
        super().__init__()
        self.image_size = image_size

    def __call__(self, data):
        image, label = data
        height, width = image.shape[:2]
        image = cv2.resize(image, (self.image_size, self.image_size))
        width_ratio = float(self.image_size) / width
        height_ratio = float(self.image_size) / height
        new_label = []
        for lb in label:
            resized_xmin = lb[0] * width_ratio
            resized_ymin = lb[1] * height_ratio
            resized_xmax = lb[2] * width_ratio
            resized_ymax = lb[3] * height_ratio
            resize_width = resized_xmax - resized_xmin
            resize_height = resized_ymax - resized_ymin
            # boxes leave this transform as [xmin, ymin, width, height, label, attr],
            # the format ZSDLoss.build_targets expects
            new_label.append([resized_xmin, resized_ymin, resize_width, resize_height, lb[4], lb[5]])

        return image, new_label
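
# Editor's sketch: running the training-time pipeline on a dummy image with one
# box in [xmin, ymin, xmax, ymax, label, attr] form (the fake attr is a zero
# 64-dim vector; Resize converts boxes to [xmin, ymin, w, h, label, attr]).
if __name__ == "__main__":
    dummy_image = np.random.randint(0, 255, (375, 500, 3), dtype=np.uint8)
    dummy_label = [[48, 240, 195, 371, 11, np.zeros(64)]]
    pipeline = Compose([HSVAdjust(), VerticalFlip(), Crop(), Resize(448)])
    out_image, out_label = pipeline((dummy_image, dummy_label))
    print(out_image.shape, out_label[0][:4])  # (448, 448, 3) and the resized box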
--------------------------------------------------------------------------------
/src/yolo_net.py:
--------------------------------------------------------------------------------
"""
@author: Viet Nguyen
"""
import torch.nn as nn
import torch


class Yolo(nn.Module):
    def __init__(self, num_classes,
                 anchors=[(1.3221, 1.73145), (3.19275, 4.00944), (5.05587, 8.09892), (9.47112, 4.84053),
                          (11.2364, 10.0071)]):
        super(Yolo, self).__init__()
        self.num_classes = num_classes
        self.anchors = anchors

        self.stage1_conv1 = nn.Sequential(nn.Conv2d(3, 32, 3, 1, 1, bias=False), nn.BatchNorm2d(32),
                                          nn.LeakyReLU(0.1, inplace=True), nn.MaxPool2d(2, 2))
        self.stage1_conv2 = nn.Sequential(nn.Conv2d(32, 64, 3, 1, 1, bias=False), nn.BatchNorm2d(64),
                                          nn.LeakyReLU(0.1, inplace=True), nn.MaxPool2d(2, 2))
        self.stage1_conv3 = nn.Sequential(nn.Conv2d(64, 128, 3, 1, 1, bias=False), nn.BatchNorm2d(128),
                                          nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv4 = nn.Sequential(nn.Conv2d(128, 64, 1, 1, 0, bias=False), nn.BatchNorm2d(64),
                                          nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv5 = nn.Sequential(nn.Conv2d(64, 128, 3, 1, 1, bias=False), nn.BatchNorm2d(128),
                                          nn.LeakyReLU(0.1, inplace=True), nn.MaxPool2d(2, 2))
        self.stage1_conv6 = nn.Sequential(nn.Conv2d(128, 256, 3, 1, 1, bias=False), nn.BatchNorm2d(256),
                                          nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv7 = nn.Sequential(nn.Conv2d(256, 128, 1, 1, 0, bias=False), nn.BatchNorm2d(128),
                                          nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv8 = nn.Sequential(nn.Conv2d(128, 256, 3, 1, 1, bias=False), nn.BatchNorm2d(256),
                                          nn.LeakyReLU(0.1, inplace=True), nn.MaxPool2d(2, 2))
        self.stage1_conv9 = nn.Sequential(nn.Conv2d(256, 512, 3, 1, 1, bias=False), nn.BatchNorm2d(512),
                                          nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv10 = nn.Sequential(nn.Conv2d(512, 256, 1, 1, 0, bias=False), nn.BatchNorm2d(256),
                                           nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv11 = nn.Sequential(nn.Conv2d(256, 512, 3, 1, 1, bias=False), nn.BatchNorm2d(512),
                                           nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv12 = nn.Sequential(nn.Conv2d(512, 256, 1, 1, 0, bias=False), nn.BatchNorm2d(256),
                                           nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv13 = nn.Sequential(nn.Conv2d(256, 512, 3, 1, 1, bias=False), nn.BatchNorm2d(512),
                                           nn.LeakyReLU(0.1, inplace=True))

        self.stage2_a_maxpl = nn.MaxPool2d(2, 2)
        self.stage2_a_conv1 = nn.Sequential(nn.Conv2d(512, 1024, 3, 1, 1, bias=False),
                                            nn.BatchNorm2d(1024), nn.LeakyReLU(0.1, inplace=True))
        self.stage2_a_conv2 = nn.Sequential(nn.Conv2d(1024, 512, 1, 1, 0, bias=False), nn.BatchNorm2d(512),
                                            nn.LeakyReLU(0.1, inplace=True))
        self.stage2_a_conv3 = nn.Sequential(nn.Conv2d(512, 1024, 3, 1, 1, bias=False), nn.BatchNorm2d(1024),
                                            nn.LeakyReLU(0.1, inplace=True))
        self.stage2_a_conv4 = nn.Sequential(nn.Conv2d(1024, 512, 1, 1, 0, bias=False), nn.BatchNorm2d(512),
                                            nn.LeakyReLU(0.1, inplace=True))
        self.stage2_a_conv5 = nn.Sequential(nn.Conv2d(512, 1024, 3, 1, 1, bias=False), nn.BatchNorm2d(1024),
                                            nn.LeakyReLU(0.1, inplace=True))
        self.stage2_a_conv6 = nn.Sequential(nn.Conv2d(1024, 1024, 3, 1, 1, bias=False), nn.BatchNorm2d(1024),
                                            nn.LeakyReLU(0.1, inplace=True))
        self.stage2_a_conv7 = nn.Sequential(nn.Conv2d(1024, 1024, 3, 1, 1, bias=False), nn.BatchNorm2d(1024),
                                            nn.LeakyReLU(0.1, inplace=True))

        self.stage2_b_conv = nn.Sequential(nn.Conv2d(512, 64, 1, 1, 0, bias=False), nn.BatchNorm2d(64),
                                           nn.LeakyReLU(0.1, inplace=True))

        self.stage3_conv1 = nn.Sequential(nn.Conv2d(256 + 1024, 1024, 3, 1, 1, bias=False), nn.BatchNorm2d(1024),
                                          nn.LeakyReLU(0.1, inplace=True))
        self.stage3_conv2 = nn.Conv2d(1024, len(self.anchors) * (5 + num_classes), 1, 1, 0, bias=False)

    def forward(self, input):
        output = self.stage1_conv1(input)
        output = self.stage1_conv2(output)
        output = self.stage1_conv3(output)
        output = self.stage1_conv4(output)
        output = self.stage1_conv5(output)
        output = self.stage1_conv6(output)
        output = self.stage1_conv7(output)
        output = self.stage1_conv8(output)
        output = self.stage1_conv9(output)
        output = self.stage1_conv10(output)
        output = self.stage1_conv11(output)
        output = self.stage1_conv12(output)
        output = self.stage1_conv13(output)

        residual = output  # saved for the passthrough (reorg) route

        output_1 = self.stage2_a_maxpl(output)
        output_1 = self.stage2_a_conv1(output_1)
        output_1 = self.stage2_a_conv2(output_1)
        output_1 = self.stage2_a_conv3(output_1)
        output_1 = self.stage2_a_conv4(output_1)
        output_1 = self.stage2_a_conv5(output_1)
        output_1 = self.stage2_a_conv6(output_1)
        output_1 = self.stage2_a_conv7(output_1)

        output_2 = self.stage2_b_conv(residual)
        # space-to-depth reorg: (B, 64, 2h, 2w) -> (B, 256, h, w)
        batch_size, num_channel, height, width = output_2.data.size()
        output_2 = output_2.view(batch_size, int(num_channel / 4), height, 2, width, 2).contiguous()
        output_2 = output_2.permute(0, 3, 5, 1, 2, 4).contiguous()
        output_2 = output_2.view(batch_size, -1, int(height / 2), int(width / 2))

        output = torch.cat((output_1, output_2), 1)  # concatenate the two routes along channels
        output = self.stage3_conv1(output)
        output = self.stage3_conv2(output)

        return output


if __name__ == "__main__":
    net = Yolo(20)
    # print(net.stage1_conv1[0])
    print(net)
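
# Editor's sketch: a forward-pass shape check. With a 416x416 input the grid is
# 13x13 (416/32), matching the 169 = 13*13 cells assumed in try_zsdloss.py; the
# output has 5 * (5 + 20) = 125 channels.
if __name__ == "__main__":
    x = torch.randn(1, 3, 416, 416)
    y = Yolo(20)(x)
    print(y.shape)  # torch.Size([1, 125, 13, 13])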
--------------------------------------------------------------------------------
/src/zsd_net.py:
--------------------------------------------------------------------------------
"""
@author: Viet Nguyen
"""
import torch.nn as nn
import torch


class ZSD(nn.Module):
    def __init__(self, num_classes,
                 anchors=[(1.3221, 1.73145), (3.19275, 4.00944), (5.05587, 8.09892), (9.47112, 4.84053),
                          (11.2364, 10.0071)]):
        super(ZSD, self).__init__()
        self.num_classes = num_classes
        self.anchors = anchors

        self.stage1_conv1 = nn.Sequential(nn.Conv2d(3, 32, 3, 1, 1, bias=False), nn.BatchNorm2d(32),
                                          nn.LeakyReLU(0.1, inplace=True), nn.MaxPool2d(2, 2))
        self.stage1_conv2 = nn.Sequential(nn.Conv2d(32, 64, 3, 1, 1, bias=False), nn.BatchNorm2d(64),
                                          nn.LeakyReLU(0.1, inplace=True), nn.MaxPool2d(2, 2))
        self.stage1_conv3 = nn.Sequential(nn.Conv2d(64, 128, 3, 1, 1, bias=False), nn.BatchNorm2d(128),
                                          nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv4 = nn.Sequential(nn.Conv2d(128, 64, 1, 1, 0, bias=False), nn.BatchNorm2d(64),
                                          nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv5 = nn.Sequential(nn.Conv2d(64, 128, 3, 1, 1, bias=False), nn.BatchNorm2d(128),
                                          nn.LeakyReLU(0.1, inplace=True), nn.MaxPool2d(2, 2))
        self.stage1_conv6 = nn.Sequential(nn.Conv2d(128, 256, 3, 1, 1, bias=False), nn.BatchNorm2d(256),
                                          nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv7 = nn.Sequential(nn.Conv2d(256, 128, 1, 1, 0, bias=False), nn.BatchNorm2d(128),
                                          nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv8 = nn.Sequential(nn.Conv2d(128, 256, 3, 1, 1, bias=False), nn.BatchNorm2d(256),
                                          nn.LeakyReLU(0.1, inplace=True), nn.MaxPool2d(2, 2))
        self.stage1_conv9 = nn.Sequential(nn.Conv2d(256, 512, 3, 1, 1, bias=False), nn.BatchNorm2d(512),
                                          nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv10 = nn.Sequential(nn.Conv2d(512, 256, 1, 1, 0, bias=False), nn.BatchNorm2d(256),
                                           nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv11 = nn.Sequential(nn.Conv2d(256, 512, 3, 1, 1, bias=False), nn.BatchNorm2d(512),
                                           nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv12 = nn.Sequential(nn.Conv2d(512, 256, 1, 1, 0, bias=False), nn.BatchNorm2d(256),
                                           nn.LeakyReLU(0.1, inplace=True))
        self.stage1_conv13 = nn.Sequential(nn.Conv2d(256, 512, 3, 1, 1, bias=False), nn.BatchNorm2d(512),
                                           nn.LeakyReLU(0.1, inplace=True))

        self.stage2_a_maxpl = nn.MaxPool2d(2, 2)
        self.stage2_a_conv1 = nn.Sequential(nn.Conv2d(512, 1024, 3, 1, 1, bias=False),
                                            nn.BatchNorm2d(1024), nn.LeakyReLU(0.1, inplace=True))
        self.stage2_a_conv2 = nn.Sequential(nn.Conv2d(1024, 512, 1, 1, 0, bias=False), nn.BatchNorm2d(512),
                                            nn.LeakyReLU(0.1, inplace=True))
        self.stage2_a_conv3 = nn.Sequential(nn.Conv2d(512, 1024, 3, 1, 1, bias=False), nn.BatchNorm2d(1024),
                                            nn.LeakyReLU(0.1, inplace=True))
        self.stage2_a_conv4 = nn.Sequential(nn.Conv2d(1024, 512, 1, 1, 0, bias=False), nn.BatchNorm2d(512),
                                            nn.LeakyReLU(0.1, inplace=True))
        self.stage2_a_conv5 = nn.Sequential(nn.Conv2d(512, 1024, 3, 1, 1, bias=False), nn.BatchNorm2d(1024),
                                            nn.LeakyReLU(0.1, inplace=True))
        self.stage2_a_conv6 = nn.Sequential(nn.Conv2d(1024, 1024, 3, 1, 1, bias=False), nn.BatchNorm2d(1024),
                                            nn.LeakyReLU(0.1, inplace=True))
        self.stage2_a_conv7 = nn.Sequential(nn.Conv2d(1024, 1024, 3, 1, 1, bias=False), nn.BatchNorm2d(1024),
                                            nn.LeakyReLU(0.1, inplace=True))

        self.stage2_b_conv = nn.Sequential(nn.Conv2d(512, 64, 1, 1, 0, bias=False), nn.BatchNorm2d(64),
                                           nn.LeakyReLU(0.1, inplace=True))

        self.stage3_conv1 = nn.Sequential(nn.Conv2d(256 + 1024, 1024, 3, 1, 1, bias=False), nn.BatchNorm2d(1024),
                                          nn.LeakyReLU(0.1, inplace=True))
        # self.stage3_conv2 = nn.Conv2d(1024, len(self.anchors) * (5 + num_classes), 1, 1, 0, bias=False)

        # localization head: 20 = 5 anchors * 4 box coordinates
        self.loc_stage = nn.Sequential(nn.Conv2d(1024, 20, 1, 1, 0, bias=False), nn.BatchNorm2d(20),
                                       )  # nn.LeakyReLU(0.1, inplace=True)
        # semantic head: 320 = 5 anchors * 64-dim attribute vector
        self.semantic_stage = nn.Sequential(nn.Conv2d(1024, 320, 1, 1, 0, bias=False), nn.BatchNorm2d(320),
                                            )  # nn.LeakyReLU(0.1, inplace=True)

        # confidence head: one score per anchor, predicted from [Tf, Tl, Ts]
        self.conf_stage = nn.Conv2d(1024 + 320 + 20, 5, 1, 1, 0, bias=False)

    def forward(self, input):
        output = self.stage1_conv1(input)
        output = self.stage1_conv2(output)
        output = self.stage1_conv3(output)
        output = self.stage1_conv4(output)
        output = self.stage1_conv5(output)
        output = self.stage1_conv6(output)
        output = self.stage1_conv7(output)
        output = self.stage1_conv8(output)
        output = self.stage1_conv9(output)
        output = self.stage1_conv10(output)
        output = self.stage1_conv11(output)
        output = self.stage1_conv12(output)
        output = self.stage1_conv13(output)

        residual = output  # saved for the passthrough (reorg) route

        output_1 = self.stage2_a_maxpl(output)
        output_1 = self.stage2_a_conv1(output_1)
        output_1 = self.stage2_a_conv2(output_1)
        output_1 = self.stage2_a_conv3(output_1)
        output_1 = self.stage2_a_conv4(output_1)
        output_1 = self.stage2_a_conv5(output_1)
        output_1 = self.stage2_a_conv6(output_1)
        output_1 = self.stage2_a_conv7(output_1)

        output_2 = self.stage2_b_conv(residual)
        # space-to-depth reorg: (B, 64, 2h, 2w) -> (B, 256, h, w)
        batch_size, num_channel, height, width = output_2.data.size()
        output_2 = output_2.view(batch_size, int(num_channel / 4), height, 2, width, 2).contiguous()
        output_2 = output_2.permute(0, 3, 5, 1, 2, 4).contiguous()
        output_2 = output_2.view(batch_size, -1, int(height / 2), int(width / 2))

        output = torch.cat((output_1, output_2), 1)  # concatenate the two routes along channels
        output_Tf = self.stage3_conv1(output)  # [1, 1024, 13, 13] feature map

        output_Tl = self.loc_stage(output_Tf)       # localization vectors
        output_Ts = self.semantic_stage(output_Tf)  # semantic (attribute) vectors

        output_cat = torch.cat((output_Tf, output_Tl, output_Ts), 1)
        output_Tc = self.conf_stage(output_cat)  # confidence vectors

        # output = self.stage3_conv2(output1024)  # [1, 125, 13, 13]

        return output_Tf, output_Tl, output_Ts, output_Tc


if __name__ == "__main__":
    net = ZSD(20)
    # print(net.stage1_conv1[0])
    print(net)
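
# Editor's sketch: shapes of the four heads on a 416x416 input (13x13 grid).
if __name__ == "__main__":
    tf, tl, ts, tc = ZSD(20)(torch.randn(1, 3, 416, 416))
    print(tf.shape, tl.shape, ts.shape, tc.shape)
    # torch.Size([1, 1024, 13, 13]) torch.Size([1, 20, 13, 13])
    # torch.Size([1, 320, 13, 13]) torch.Size([1, 5, 13, 13])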
--------------------------------------------------------------------------------
/src/ZSD_loss.py:
--------------------------------------------------------------------------------
"""
@author: Viet Nguyen
"""
import math
import torch
import torch.nn as nn


class ZSDLoss(nn.modules.loss._Loss):  # subclass of the base loss class
    # The loss I borrow from LightNet repo.
    def __init__(self, num_classes, anchors, seen_attrs, reduction=32, coord_scale=1.0, noobject_scale=1.0,
                 object_scale=5.0, class_scale=1.0, thresh=0.6, GPU=False):
        super(ZSDLoss, self).__init__()
        self.num_classes = num_classes  # 20
        self.num_anchors = len(anchors)  # 5
        self.anchor_step = len(anchors[0])  # 2
        self.anchors = torch.Tensor(anchors)
        self.reduction = reduction  # 32, the network stride

        self.coord_scale = coord_scale  # 1
        self.noobject_scale = noobject_scale  # 1
        self.object_scale = object_scale  # 5
        self.class_scale = class_scale  # 1
        self.thresh = thresh
        self.seen_vec = seen_attrs
        self.GPU = GPU

    def forward(self, tl, ts, tc, target):  # loss between the loc/semantic/conf heads and the targets

        batch_size = tl.data.size(0)
        height = tl.data.size(2)
        width = tl.data.size(3)

        tl = tl.view(batch_size, self.num_anchors, 4, height * width)
        coord = torch.zeros_like(tl)
        coord[:, :, :2, :] = tl[:, :, :2, :].sigmoid()
        coord[:, :, 2:4, :] = tl[:, :, 2:4, :]

        # Create prediction boxes
        pred_boxes = torch.FloatTensor(batch_size * self.num_anchors * height * width, 4)
        lin_x = torch.arange(0, width, 1).repeat(height, 1).view(height * width).float()
        lin_y = torch.arange(0, height, 1).repeat(width, 1).t().contiguous().view(height * width).float()
        anchor_w = self.anchors[:, 0].contiguous().view(self.num_anchors, 1)
        anchor_h = self.anchors[:, 1].contiguous().view(self.num_anchors, 1)

        if self.GPU:  # honor the GPU flag instead of moving to CUDA unconditionally
            pred_boxes = pred_boxes.cuda()
            lin_x = lin_x.cuda()
            lin_y = lin_y.cuda()
            anchor_w = anchor_w.cuda()
            anchor_h = anchor_h.cuda()

        pred_boxes[:, 0] = (coord[:, :, 0].detach() + lin_x).view(-1)
        pred_boxes[:, 1] = (coord[:, :, 1].detach() + lin_y).view(-1)
        pred_boxes[:, 2] = (coord[:, :, 2].detach().exp() * anchor_w).view(-1)
        pred_boxes[:, 3] = (coord[:, :, 3].detach().exp() * anchor_h).view(-1)
        pred_boxes = pred_boxes.cpu()

        # Get target values
        coord_mask, conf_mask, obj_mask, noobj_mask, semantic_mask, tcoord, tvec = self.build_targets(pred_boxes, target, height, width)
        coord_mask = coord_mask.expand_as(tcoord)

        if self.GPU:
            coord_mask = coord_mask.cuda()
            conf_mask = conf_mask.cuda()
            obj_mask = obj_mask.cuda()
            noobj_mask = noobj_mask.cuda()
            tcoord = tcoord.cuda()
            tvec = tvec.cuda()  # [b, 5, 169, 64]

        # ############################# loss coord #####################################
        # Compute losses
        mse = nn.MSELoss(reduction='sum')  # summed, not averaged (formerly size_average=False)
        self.loss_coord = self.coord_scale * mse(coord * coord_mask, tcoord * coord_mask) / batch_size  # localization loss

        # ############################# loss conf #####################################
        tc = tc.view(batch_size, self.num_anchors, -1)
        self.loss_conf = 5 * mse(tc * conf_mask, obj_mask) / batch_size  # the 5 mirrors object_scale

        # ############################# loss semantic ####################################
        tsf = ts.view(batch_size, self.num_anchors, height * width, -1)  # [b, 5, 169, 64]

        seman_mask = obj_mask.view(batch_size, self.num_anchors, height * width, 1).repeat(1, 1, 1, 64)
        # first half of the loss: cosine similarity to the ground-truth attribute
        # vector wherever an object is present
        cos_sim = torch.nn.CosineSimilarity(dim=3, eps=1e-8)
        obj_vec = tsf * seman_mask  # [b, 5, 169, 64]
        obj_sim = cos_sim(obj_vec, tvec)

        # second half: for no-object cells, the maximum cosine similarity to any
        # seen-class attribute vector
        num_seen = self.seen_vec.shape[0]
        tsb = ts.view(batch_size, -1, 64, 1).repeat(1, 1, 1, num_seen)  # [b, 845, 64, num_seen]
        seen_attrs = torch.zeros(batch_size, self.num_anchors * height * width, 64, num_seen, requires_grad=False)  # [b, 845, 64, num_seen]

        for i, seen_attr in enumerate(torch.Tensor(self.seen_vec)):
            seen_attrs[:, :, :, i] = seen_attr.view(1, 1, 64).repeat(batch_size, self.num_anchors * height * width, 1)

        if self.GPU:
            seen_attrs = seen_attrs.cuda()
        cos_sim = torch.nn.CosineSimilarity(dim=2, eps=1e-8)
        noobj_sim_all = cos_sim(tsb, seen_attrs)
        noobj_sim, _ = noobj_sim_all.max(2)  # [b, 845]

        noobj_sim = noobj_sim.view(batch_size, self.num_anchors, height * width)

        # ################### combine the two halves into one similarity map #########################
        sim = noobj_sim * noobj_mask + obj_sim
        self.loss_semantic = 5 * mse(sim * conf_mask, obj_mask) / batch_size

        self.loss_tot = self.loss_coord + self.loss_conf + self.loss_semantic  # total loss

        return self.loss_tot, self.loss_coord, self.loss_conf, self.loss_semantic

    def build_targets(self, pred_boxes, ground_truth, height, width):
        batch_size = len(ground_truth)
        # confidence mask: sqrt(1/object_scale) everywhere, later set to 1 at
        # object cells, balancing the 5x obj/noobj weighting
        conf_mask = torch.ones(batch_size, self.num_anchors, height * width, requires_grad=False) * (math.sqrt(1.0 / self.object_scale))
        # 1 at object cells, 0 elsewhere
        coord_mask = torch.zeros(batch_size, self.num_anchors, 1, height * width, requires_grad=False)
        # 1 at object cells, 0 elsewhere (byte tensor)
        semantic_mask = torch.zeros(batch_size, self.num_anchors, height * width, requires_grad=False).byte()
        obj_mask = torch.zeros(batch_size, self.num_anchors, height * width, requires_grad=False)
        # ground-truth box targets
        tcoord = torch.zeros(batch_size, self.num_anchors, 4, height * width, requires_grad=False)
        # ground-truth class attribute (semantic) vectors
        tvec = torch.zeros(batch_size, self.num_anchors, height * width, 64, requires_grad=False)
        # inverse mask: 0 at object cells, 1 elsewhere
        noobj_mask = torch.ones(batch_size, self.num_anchors, height * width, requires_grad=False)

        for b in range(batch_size):
            if len(ground_truth[b]) == 0:
                continue

            # Build up tensors
            cur_pred_boxes = pred_boxes[
                b * (self.num_anchors * height * width):(b + 1) * (self.num_anchors * height * width)]
            if self.anchor_step == 4:
                anchors = self.anchors.clone()
                anchors[:, :2] = 0
            else:
                anchors = torch.cat([torch.zeros_like(self.anchors), self.anchors], 1)
            gt = torch.zeros(len(ground_truth[b]), 4)
            for i, anno in enumerate(ground_truth[b]):
                # annotations arrive as [xmin, ymin, w, h, label, attr] (see Resize);
                # convert to (cx, cy, w, h) in grid units
                gt[i, 0] = (anno[0] + anno[2] / 2) / self.reduction
                gt[i, 1] = (anno[1] + anno[3] / 2) / self.reduction
                gt[i, 2] = anno[2] / self.reduction
                gt[i, 3] = anno[3] / self.reduction

            # Set confidence mask of matching detections to 0
            iou_gt_pred = bbox_ious(gt, cur_pred_boxes)
            mask = (iou_gt_pred > self.thresh).sum(0) >= 1
            conf_mask[b][mask.view_as(conf_mask[b])] = 0

            # Find best anchor for each ground truth
            gt_wh = gt.clone()
            gt_wh[:, :2] = 0
            iou_gt_anchors = bbox_ious(gt_wh, anchors)
            _, best_anchors = iou_gt_anchors.max(1)

            # Set masks and target values for each ground truth
            for i, anno in enumerate(ground_truth[b]):
                gi = min(width - 1, max(0, int(gt[i, 0])))
                gj = min(height - 1, max(0, int(gt[i, 1])))
                best_n = best_anchors[i]
                iou = iou_gt_pred[i][best_n * height * width + gj * width + gi]
                coord_mask[b][best_n][0][gj * width + gi] = 1
                # cls_mask[b][best_n][gj * width + gi] = 1
                obj_mask[b][best_n][gj * width + gi] = 1
                noobj_mask[b][best_n][gj * width + gi] = 0
                semantic_mask[b][best_n][gj * width + gi] = 1
                conf_mask[b][best_n][gj * width + gi] = 1.0
                tcoord[b][best_n][0][gj * width + gi] = gt[i, 0] - gi
                tcoord[b][best_n][1][gj * width + gi] = gt[i, 1] - gj
                tcoord[b][best_n][2][gj * width + gi] = math.log(max(gt[i, 2], 1.0) / self.anchors[best_n, 0])
                tcoord[b][best_n][3][gj * width + gi] = math.log(max(gt[i, 3], 1.0) / self.anchors[best_n, 1])
                tvec[b][best_n][gj * width + gi] = torch.FloatTensor(anno[5])

        return coord_mask, conf_mask, obj_mask, noobj_mask, semantic_mask, tcoord, tvec


def bbox_ious(boxes1, boxes2):  # pairwise IoU for boxes in (cx, cy, w, h) format
    b1x1, b1y1 = (boxes1[:, :2] - (boxes1[:, 2:4] / 2)).split(1, 1)
    b1x2, b1y2 = (boxes1[:, :2] + (boxes1[:, 2:4] / 2)).split(1, 1)
    b2x1, b2y1 = (boxes2[:, :2] - (boxes2[:, 2:4] / 2)).split(1, 1)
    b2x2, b2y2 = (boxes2[:, :2] + (boxes2[:, 2:4] / 2)).split(1, 1)

    dx = (b1x2.min(b2x2.t()) - b1x1.max(b2x1.t())).clamp(min=0)
    dy = (b1y2.min(b2y2.t()) - b1y1.max(b2y1.t())).clamp(min=0)
    intersections = dx * dy

    areas1 = (b1x2 - b1x1) * (b1y2 - b1y1)
    areas2 = (b2x2 - b2x1) * (b2y2 - b2y1)
    unions = (areas1 + areas2.t()) - intersections

    return intersections / unions


# def cos_sim(vector1, vector2):
#     dot_product = 0.0
#     normA = 0.0
#     normB = 0.0
#     for a, b in zip(vector1, vector2):
#         dot_product += a * b
#         normA += a ** 2
#         normB += b ** 2
#     if normA == 0.0 or normB == 0.0:
#         return 0
#     else:
#         return dot_product / ((normA * normB) ** 0.5)
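
# Editor's sketch: bbox_ious on toy boxes in (cx, cy, w, h) grid units. The
# second pair overlaps on a quarter of each box, giving IoU 1/7.
if __name__ == "__main__":
    boxes1 = torch.Tensor([[2.0, 2.0, 2.0, 2.0]])  # corners (1,1)-(3,3)
    boxes2 = torch.Tensor([[2.0, 2.0, 2.0, 2.0],
                           [3.0, 3.0, 2.0, 2.0]])  # corners (2,2)-(4,4)
    print(bbox_ious(boxes1, boxes2))  # tensor([[1.0000, 0.1429]])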
--------------------------------------------------------------------------------
/src/utils.py:
--------------------------------------------------------------------------------
"""
@author: Viet Nguyen
"""
import torch
from torch.autograd import Variable
from torch.utils.data.dataloader import default_collate


def custom_collate_fn(batch):
    # Stack images normally; keep labels as a plain list, since each image has a
    # variable number of [x, y, w, h, label, attr] records.
    items = list(zip(*batch))
    items[0] = default_collate(items[0])
    items[1] = list(items[1])
    return items


def post_processing(logits, image_size, gt_classes, anchors, conf_threshold, nms_threshold):
    num_anchors = len(anchors)
    anchors = torch.Tensor(anchors)

    if isinstance(logits, Variable):
        logits = logits.data

    if logits.dim() == 3:
        logits.unsqueeze_(0)

    batch = logits.size(0)
    h = logits.size(2)
    w = logits.size(3)

    # Compute xc, yc, w, h, box_score on Tensor
    lin_x = torch.linspace(0, w - 1, w).repeat(h, 1).view(h * w)
    lin_y = torch.linspace(0, h - 1, h).repeat(w, 1).t().contiguous().view(h * w)
    anchor_w = anchors[:, 0].contiguous().view(1, num_anchors, 1)
    anchor_h = anchors[:, 1].contiguous().view(1, num_anchors, 1)
    if torch.cuda.is_available():
        lin_x = lin_x.cuda()
        lin_y = lin_y.cuda()
        anchor_w = anchor_w.cuda()
        anchor_h = anchor_h.cuda()

    logits = logits.view(batch, num_anchors, -1, h * w)  # [1, 5, 25, 169]
    logits[:, :, 0, :].sigmoid_().add_(lin_x).div_(w)    # center x
    logits[:, :, 1, :].sigmoid_().add_(lin_y).div_(h)    # center y
    logits[:, :, 2, :].exp_().mul_(anchor_w).div_(w)     # width
    logits[:, :, 3, :].exp_().mul_(anchor_h).div_(h)     # height
    logits[:, :, 4, :].sigmoid_()                        # objectness (box confidence)

    with torch.no_grad():
        cls_scores = torch.nn.functional.softmax(logits[:, :, 5:, :], 2)
    cls_max, cls_max_idx = torch.max(cls_scores, 2)
    cls_max_idx = cls_max_idx.float()
    cls_max.mul_(logits[:, :, 4, :])

    score_thresh = cls_max > conf_threshold
    score_thresh_flat = score_thresh.view(-1)

    if score_thresh.sum() == 0:
        predicted_boxes = []
        for i in range(batch):
            predicted_boxes.append(torch.Tensor([]))
    else:
        coords = logits.transpose(2, 3)[..., 0:4]
        coords = coords[score_thresh[..., None].expand_as(coords)].view(-1, 4)
        scores = cls_max[score_thresh]
        idx = cls_max_idx[score_thresh]
        detections = torch.cat([coords, scores[:, None], idx[:, None]], dim=1)

        max_det_per_batch = num_anchors * h * w
        slices = [slice(max_det_per_batch * i, max_det_per_batch * (i + 1)) for i in range(batch)]
        det_per_batch = torch.IntTensor([score_thresh_flat[s].int().sum() for s in slices])
        split_idx = torch.cumsum(det_per_batch, dim=0)

        # Group detections per image of batch
        predicted_boxes = []
        start = 0
        for end in split_idx:
            predicted_boxes.append(detections[start: end])
            start = end

    selected_boxes = []
    for boxes in predicted_boxes:
        if boxes.numel() == 0:
            selected_boxes.append(boxes)  # keep an empty placeholder for this image
            continue

        a = boxes[:, :2]
        b = boxes[:, 2:4]
        bboxes = torch.cat([a - b / 2, a + b / 2], 1)
        scores = boxes[:, 4]

        # Sort coordinates by descending score
        scores, order = scores.sort(0, descending=True)
        x1, y1, x2, y2 = bboxes[order].split(1, 1)

        # Compute dx and dy between each pair of boxes (these mat contain every pair twice...)
        dx = (x2.min(x2.t()) - x1.max(x1.t())).clamp(min=0)
        dy = (y2.min(y2.t()) - y1.max(y1.t())).clamp(min=0)

        # Compute iou
        intersections = dx * dy
        areas = (x2 - x1) * (y2 - y1)
        unions = (areas + areas.t()) - intersections
        ious = intersections / unions

        # Filter based on iou (and class)
        conflicting = (ious > nms_threshold).triu(1)

        keep = conflicting.sum(0).byte()
        keep = keep.cpu()
        conflicting = conflicting.cpu()

        keep_len = len(keep) - 1
        for i in range(1, keep_len):
            if keep[i] > 0:
                keep -= conflicting[i]
        if torch.cuda.is_available():
            keep = keep.cuda()

        keep = (keep == 0)
        selected_boxes.append(boxes[order][keep[:, None].expand_as(boxes)].view(-1, 6).contiguous())

    final_boxes = []
    for boxes in selected_boxes:
        if boxes.numel() == 0:
            final_boxes.append([])
        else:
            boxes[:, 0:3:2] *= image_size
            boxes[:, 0] -= boxes[:, 2] / 2
            boxes[:, 1:4:2] *= image_size
            boxes[:, 1] -= boxes[:, 3] / 2

            final_boxes.append([[box[0].item(), box[1].item(), box[2].item(), box[3].item(), box[4].item(),
                                 gt_classes[int(box[5].item())]] for box in boxes])
    return final_boxes


def out_processing(tl, ts, tc, image_size, anchors, conf_threshold, nms_threshold, seen_vec):  # tl: [b, 20, 13, 13], tc: [b, 5, 13, 13]
    num_anchors = len(anchors)
    num_seen = seen_vec.shape[0]
    anchors = torch.Tensor(anchors)
    if isinstance(tl, Variable):
        tl = tl.data
    if isinstance(tc, Variable):
        tc = tc.data

    if tl.dim() == 3:
        tl.unsqueeze_(0)
    if tc.dim() == 3:
        tc.unsqueeze_(0)

    batch = tl.size(0)
    h = tl.size(2)
    w = tl.size(3)

    # Compute xc, yc, w, h, box_score on Tensor
    lin_x = torch.linspace(0, w - 1, w).repeat(h, 1).view(h * w)
    lin_y = torch.linspace(0, h - 1, h).repeat(w, 1).t().contiguous().view(h * w)
    anchor_w = anchors[:, 0].contiguous().view(1, num_anchors, 1)
    anchor_h = anchors[:, 1].contiguous().view(1, num_anchors, 1)
    if torch.cuda.is_available():
        lin_x = lin_x.cuda()
        lin_y = lin_y.cuda()
        anchor_w = anchor_w.cuda()
        anchor_h = anchor_h.cuda()

    tl = tl.view(batch, num_anchors, -1, h * w)  # [b, 5, 4, 169]
    tl[:, :, 0, :].sigmoid_().add_(lin_x).div_(w)
    tl[:, :, 1, :].sigmoid_().add_(lin_y).div_(h)
    tl[:, :, 2, :].exp_().mul_(anchor_w).div_(w)
    tl[:, :, 3, :].exp_().mul_(anchor_h).div_(h)
    tc = tc.view(batch, num_anchors, h * w).sigmoid_()  # [1, 5, 169]

    ts = ts.view(batch, -1, 64, 1).repeat(1, 1, 1, num_seen)
    seen_attrs = torch.zeros(batch, num_anchors * h * w, 64, num_seen, requires_grad=False)
    for i, seen_attr in enumerate(torch.Tensor(seen_vec)):
        seen_attrs[:, :, :, i] = seen_attr.view(1, 1, 64).repeat(batch, num_anchors * h * w, 1)
    if torch.cuda.is_available():
        seen_attrs = seen_attrs.cuda()
    cos_sim = torch.nn.CosineSimilarity(dim=2, eps=1e-8)

    sim_score = cos_sim(ts, seen_attrs)
    score_max, score_max_idx = sim_score.max(2)
    score_max = score_max.view(batch, num_anchors, h * w)
    score_max_idx = score_max_idx.view(batch, num_anchors, h * w).float()
    score_max.mul_(tc)  # class similarity weighted by objectness

    score_thresh = score_max > conf_threshold
    score_thresh_flat = score_thresh.view(-1)  # flattened threshold mask [5*13*13]
    # print(score_thresh_flat.shape)

    if score_thresh.sum() == 0:
        # every score is below the threshold, so there are no detections
        predicted_boxes = []
        for i in range(batch):
            predicted_boxes.append(torch.Tensor([]))
    else:
        # coords = logits.transpose(2, 3)[..., 0:4]
        coords = tl.transpose(2, 3)
        coords = coords[score_thresh[..., None].expand_as(coords)].view(-1, 4)  # [n, 4] boxes above threshold

        # scores = cls_max[score_thresh]     # confidences above threshold, [n]
        # idx = cls_max_idx[score_thresh]    # class indices above threshold, [n]
        scores = score_max[score_thresh]

        # detections = torch.cat([coords, scores[:, None], idx[:, None]], dim=1)  # [n, 6]
        detections = torch.cat([coords, scores[:, None]], dim=1)

        max_det_per_batch = num_anchors * h * w  # at most 5*13*13 boxes per image
        slices = [slice(max_det_per_batch * i, max_det_per_batch * (i + 1)) for i in range(batch)]
        det_per_batch = torch.IntTensor([score_thresh_flat[s].int().sum() for s in slices])
        split_idx = torch.cumsum(det_per_batch, dim=0)

        # Group detections per image of batch; each row that passed the score
        # threshold looks like [x, y, w, h, score], e.g. [0.6307, 0.4467, 0.0336, 0.0426, 0.8983]
        predicted_boxes = []
        start = 0
        for end in split_idx:
            predicted_boxes.append(detections[start: end])
            start = end
        # print(predicted_boxes)

    selected_boxes = []
    for boxes in predicted_boxes:
        if boxes.numel() == 0:
            selected_boxes.append(boxes)  # keep an empty placeholder for this image
            continue

        a = boxes[:, :2]
        b = boxes[:, 2:4]
        bboxes = torch.cat([a - b / 2, a + b / 2], 1)
        scores = boxes[:, 4]

        # Sort coordinates by descending score
        scores, order = scores.sort(0, descending=True)
        x1, y1, x2, y2 = bboxes[order].split(1, 1)

        # Compute dx and dy between each pair of boxes (these mat contain every pair twice...)
        dx = (x2.min(x2.t()) - x1.max(x1.t())).clamp(min=0)
        dy = (y2.min(y2.t()) - y1.max(y1.t())).clamp(min=0)

        # Compute iou
        intersections = dx * dy
        areas = (x2 - x1) * (y2 - y1)
        unions = (areas + areas.t()) - intersections
        ious = intersections / unions

        # Filter based on iou (and class)
        conflicting = (ious > nms_threshold).triu(1)

        keep = conflicting.sum(0).byte()
        keep = keep.cpu()
        conflicting = conflicting.cpu()

        keep_len = len(keep) - 1
        for i in range(1, keep_len):
            if keep[i] > 0:
                keep -= conflicting[i]
        if torch.cuda.is_available():
            keep = keep.cuda()

        keep = (keep == 0)
        # selected_boxes.append(boxes[order][keep[:, None].expand_as(boxes)].view(-1, 6).contiguous())
        selected_boxes.append(boxes[order][keep[:, None].expand_as(boxes)].view(-1, 5).contiguous())

    final_boxes = []
    for boxes in selected_boxes:
        if boxes.numel() == 0:
            final_boxes.append([])
        else:
            boxes[:, 0:3:2] *= image_size
            boxes[:, 0] -= boxes[:, 2] / 2
            boxes[:, 1:4:2] *= image_size
            boxes[:, 1] -= boxes[:, 3] / 2

            final_boxes.append([[box[0].item(), box[1].item(), box[2].item(), box[3].item(), box[4].item()] for box in boxes])
    return final_boxes
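
# Editor's sketch: the greedy triu-based NMS keep logic used above, on three toy
# score-sorted boxes where box 1 conflicts with box 0 and box 2 with box 1.
# Suppressing box 1 withdraws its vote against box 2, so boxes 0 and 2 survive.
if __name__ == "__main__":
    conflicting = torch.tensor([[0, 1, 0],
                                [0, 0, 1],
                                [0, 0, 0]], dtype=torch.uint8)  # [i][j]=1: box j overlaps higher-scored box i
    keep = conflicting.sum(0).byte()  # votes against each box
    keep_len = len(keep) - 1
    for i in range(1, keep_len):
        if keep[i] > 0:
            keep -= conflicting[i]
    print(keep == 0)  # tensor([ True, False,  True])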
--------------------------------------------------------------------------------
/attributes/voc.txt:
--------------------------------------------------------------------------------
1 | 1.972972972972973082e-01 4.486486486486486736e-01 6.216216216216216561e-02 4.594594594594594850e-02 6.918918918918919303e-01 3.027027027027027195e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 8.864864864864865357e-01 2.810810810810810967e-01 4.864864864864865135e-01 5.783783783783783550e-01 3.648648648648648574e-01 5.216216216216216228e-01 3.297297297297297480e-01 3.783783783783783994e-02 2.972972972972973138e-02 0.000000000000000000e+00 1.459459459459459540e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 4.459459459459459429e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 9.432432432432432678e-01 1.945945945945946054e-01 1.891891891891891997e-02 0.000000000000000000e+00 0.000000000000000000e+00 4.729729729729729715e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 4.540540540540540793e-01 0.000000000000000000e+00 0.000000000000000000e+00 2 | 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 5.667752442996742968e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 9.153094462540716236e-01 0.000000000000000000e+00 4.560260586319218185e-02 4.885993485342019632e-02 9.771986970684038223e-03 0.000000000000000000e+00 7.068403908794788082e-01 7.752442996742671122e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 2.899022801302931773e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 9.478827361563517684e-01 4.625407166123778335e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 4.853420195439739349e-01 0.000000000000000000e+00 0.000000000000000000e+00 3 | 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 3.205828779599271261e-01 6.721311475409835756e-01 7.650273224043715459e-01 8.469945355191257352e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 6.448087431693989569e-01 7.959927140255008693e-01 0.000000000000000000e+00 0.000000000000000000e+00 5.646630236794171198e-01 4.462659380692167721e-01 5.974499089253187956e-01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
[raw attribute-matrix dump trimmed for readability: this file stores one 64-dimensional attribute vector per line, one line per Pascal VOC class (20 rows in total; the span shown here covered rows 3 through 20, and the interleaved markers `4 |` ... `21 |` were the dump's own line numbers, not data). The values appear to be written in np.savetxt's default `%.18e` text format; all visible entries lie in [0, 1] and most are zero, consistent with per-class attribute scores.]
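Since the raw dump above is unreadable, here is a minimal sketch of how a text matrix in this format could be loaded and inspected. This is an illustration under stated assumptions, not code from the repository: the path attributes/voc_attrs.txt is hypothetical (substitute wherever this file actually lives in your checkout), and the (20, 64) shape is inferred from the 20 VOC classes and the 64-column rows visible above.

import numpy as np

# Hypothetical path -- replace with the actual location of this attribute file.
ATTR_TXT = "attributes/voc_attrs.txt"

# Each line holds one whitespace-separated 64-dim attribute vector in %.18e
# text format, so np.loadtxt parses the file straight into a 2-D float array.
attrs = np.loadtxt(ATTR_TXT)

# Expected shape under the assumptions above: one row per VOC class.
assert attrs.shape == (20, 64)

# Row k is the attribute vector for the k-th class in the usual VOC ordering
# (aeroplane, bicycle, bird, ...); print one row to sanity-check the parse.
print(attrs[0])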
--------------------------------------------------------------------------------