├── AppletDemo.png ├── CreateBash.py ├── DATASET.py ├── Demo.py ├── FILTER.py ├── InputLayers.png ├── LogParser.py ├── Logger.py ├── MODEL.py ├── Model.png ├── PmapViewer.py ├── Prob2Line.py ├── README.md ├── RotateLearning.py ├── Utility.py ├── applet.json ├── applet_images └── graybar.png ├── globalVariables.py └── temp.py /AppletDemo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aminrd/LineamentLearning/3d5ecaef46bdc3a86532a6d5e41f3a316553c566/AppletDemo.png -------------------------------------------------------------------------------- /CreateBash.py: -------------------------------------------------------------------------------- 1 | output_name = 'Work.sh' 2 | work_list = list(range(9,57,4)) 3 | 4 | 5 | 6 | 7 | 8 | with open(output_name, "w") as f: 9 | for w in work_list: 10 | #f.write('python RotateLearning.py prepare-datasets-flt -W {}\n'.format(w)) 11 | #f.write('python RotateLearning.py train-prepared -prefix "A_" -W {} -nprep 100\n'.format(w)) 12 | #f.write('python RotateLearning.py train-prepared -prefix "Q_" -W {} -nprep 100\n'.format(w)) 13 | #f.write('python RotateLearning.py train-prepared -prefix "Mixed" -W {} -nprep 100\n'.format(w)) 14 | f.write('python RotateLearning.py prepare-pmap -CB {}_Fault_Australia.hdf5 -W {}\n'.format(w,w)) 15 | f.write('python RotateLearning.py prepare-pmap -CB {}_Fault_Quest.hdf5 -W {}\n'.format(w, w)) 16 | f.write('python RotateLearning.py prepare-pmap -CB {}_Fault_Mixed.hdf5 -W {}\n'.format(w, w)) -------------------------------------------------------------------------------- /DATASET.py: -------------------------------------------------------------------------------- 1 | # Module : Dataset 2 | # Loading dataset from MATLAB files , Expanding fault lines 3 | __author__ = "Amin Aghaee" 4 | __copyright__ = "Copyright 2018, Amin Aghaee" 5 | 6 | import sys 7 | import numpy as np 8 | import random 9 | import scipy.io as sio 10 | import scipy.ndimage 11 | 12 | from globalVariables import * 13 | from Utility import * 14 | #from FILTER import * 15 | 16 | 17 | if DEBUG_MODE: 18 | print("### Importing DATASET Class ###") 19 | 20 | 21 | def gaussian2DMatrix(width = 5, epsilon = 0.9, type = 'manhattan'): 22 | matrix = np.zeros((width,width)) 23 | [x,y] = [np.int((width-1)/2),np.int((width-1)/2)] 24 | s = np.int((width-1)/2) 25 | 26 | if type.__eq__("manhattan"): 27 | for i in range(0-s,s+1): 28 | for j in range(0 - s, s + 2): 29 | D = np.abs(i) + np.abs(j) 30 | if D < s+1: 31 | matrix[x+i][y+j] = pow(epsilon, D) 32 | elif type.__eq__("gaussian"): 33 | for i in range(0-s,s+1): 34 | for j in range(0 - s, s + 2): 35 | D = np.abs(i) + np.abs(j) 36 | if D < s+1: 37 | matrix[x+i][y+j] = np.exp( (0 - (i*i + j*j))/(2 * epsilon * epsilon) ) 38 | 39 | else: # Simply expand by _width pixels 40 | for i in range(0-s,s+1): 41 | for j in range(0 - s, s + 2): 42 | D = np.abs(i) + np.abs(j) 43 | if D < s+1: 44 | matrix[x+i][y+j] = 1.0 45 | return matrix 46 | 47 | 48 | def labelAngel(radian, base = np.pi / 2.0): 49 | 50 | if base == np.pi / 2.0: 51 | return np.abs(np.abs(radian) - base) <= radianTH 52 | 53 | return np.abs(radian - base) <= radianTH 54 | 55 | class DATASET: 56 | """ Dataset Class, Loads dataset from MATLAB file. 
Have some function to expand fault lines, ....""" 57 | 58 | def __init__(self , directory, mode = 'normal'): 59 | 60 | DS = sio.loadmat(directory) 61 | self.x = DS['I1'].shape[0] 62 | self.y = DS['I1'].shape[1] 63 | 64 | self.INPUTS = np.zeros((self.x, self.y, Layers)) 65 | 66 | for i in range(Layers): 67 | self.INPUTS[:,:,i] = np.array(DS['I{}'.format(i+1)]) 68 | 69 | self.MASK = np.array(DS['mask']) 70 | self.trainMask = np.array(DS['train_mask']) 71 | 72 | if mode.__eq__('normal'): 73 | self.testMask = np.array(DS['test_mask']) 74 | self.OUTPUT = np.array(DS['output']) 75 | self.R2M = np.array(DS['R2M']) 76 | self.M2R = np.array(DS['M2R']) 77 | 78 | self.DEGREES = np.array(DS['DEGREES']) 79 | 80 | for i in range(Layers): 81 | self.INPUTS[:, :, i] = myNormalizer(self.INPUTS[:, :, i]) 82 | 83 | 84 | def expandBy(self, width=3, epsilon = 1.0, type = 'manhattan', set = True): 85 | 86 | if width==0 and set==False: 87 | return self.OUTPUT 88 | 89 | matrix = np.array(self.OUTPUT).astype(float) 90 | [a,b] = np.where(matrix == 1) 91 | 92 | GMAT = gaussian2DMatrix(width, epsilon, type) 93 | s = np.int((width-1)/2) 94 | 95 | for k in range(len(a)): 96 | [i,j] = [a[k], b[k]] 97 | 98 | if iself.x-s-1 or jself.y-s-s: 99 | continue 100 | 101 | submat = matrix[i - s:i + s+1, j - s:j + s+1] 102 | matrix[i - s:i + s+1, j - s:j + s+1] = np.maximum(GMAT, submat) 103 | 104 | if set: 105 | self.OUTPUT = matrix 106 | else: 107 | return matrix 108 | 109 | def generateDS(self, output, mask, w = WindowSize, choosy = False, ratio = 1.0, output_type = np.pi / 2.0): 110 | # When choosy = TRUE : it only picks the fault locations 111 | # ratio coresponds to randomly selecting all possible locations 112 | # output_type: 1 --> Degree , Otherwise ---> Binary 113 | input = np.array(self.INPUTS) 114 | 115 | s = np.uint32((w-1)/2) 116 | O = np.array(output) 117 | O[np.where(mask == 0)] = 0 118 | 119 | if output_type == 0: 120 | O[O.nonzero()] = 1 121 | 122 | if choosy == True: 123 | IDX = np.where(O > 0) # Find where there is a fault 124 | else: 125 | IDX = np.where(mask == 1) # Use whole of mask 126 | 127 | # Choosing samples randomly : Shuffling data and randomly choosing them usnig "ratio" 128 | subset = random.sample(range(len(IDX[0])), np.uint32(np.floor(ratio * len(IDX[0])))) 129 | subset = np.uint32(subset) 130 | IDX = np.array(IDX) 131 | IDX = IDX[:,subset] 132 | IDX = tuple(IDX) 133 | 134 | w = np.uint32(w) 135 | X = np.zeros([len(IDX[0]), w, w, Layers]) 136 | Y = np.zeros([len(IDX[0]), 1]) 137 | 138 | for k in range(len(IDX[0])): 139 | 140 | if DEBUG_MODE and np.random.rand() < 0.01: 141 | pct = k * 100 / len(IDX[0]) 142 | print(slideBar(pct) + '-- Preparing dataset, about ' + '{}'.format(pct) + 'done!') 143 | 144 | [i,j] = [IDX[0][k],IDX[1][k]] 145 | X[k,:,:,:] = np.reshape(input[i-s:i+s+1, j-s:j+s+1, :] , (1, w, w, Layers)) 146 | 147 | 148 | if output_type == 0: # All areas, not only faults 149 | Y[k] = O[i, j] 150 | else: 151 | Y[k] = labelAngel(O[i, j], output_type) 152 | 153 | 154 | return [X,Y, IDX] 155 | 156 | 157 | 158 | def generateDSwithFilter(self, dstype, output, mask, w = WindowSize, choosy = False, ratio = 1.0): 159 | # When choosy = TRUE : it only picks the fault locations and labels are based on fault angels 160 | # ratio coresponds to randomly selecting all possible locations 161 | 162 | input = np.array(self.INPUTS) 163 | 164 | s = np.uint32((w-1)/2) 165 | O = np.array(output) 166 | O[np.where(mask == 0)] = 0 167 | 168 | if choosy == True: 169 | IDX = np.where(O > 0) # Find where there is a 
fault 170 | else: 171 | IDX = np.where(mask == 1) # Use whole of mask 172 | 173 | # Choosing samples randomly : Shuffling data and randomly choosing them usnig "ratio" 174 | subset = random.sample(range(len(IDX[0])), np.uint32(np.floor(ratio * len(IDX[0])))) 175 | subset = np.uint32(subset) 176 | IDX = np.array(IDX) 177 | IDX = IDX[:,subset] 178 | IDX = tuple(IDX) 179 | 180 | w = np.uint32(w) 181 | X = np.zeros([len(IDX[0]), w, w, Layers]) 182 | Y = np.zeros([len(IDX[0]), 1]) 183 | 184 | inverted_mask = ~circular_mask(w) 185 | 186 | for k in range(len(IDX[0])): 187 | 188 | if DEBUG_MODE and np.random.rand() < 0.01: 189 | pct = k * 100 / len(IDX[0]) 190 | print(slideBar(pct) + '-- Preparing dataset, about ' + '{}'.format(pct) + 'done!') 191 | 192 | [i,j] = [IDX[0][k],IDX[1][k]] 193 | xr = np.array(input[i-s:i+s+1, j-s:j+s+1, :]) 194 | 195 | if dstype == 'train': 196 | X[k,:,:,:] = scipy.ndimage.rotate(xr, random.randrange(0, 360, 6), reshape=False, order=0) 197 | else: 198 | X[k, :, :, :] = scipy.ndimage.rotate(xr, 0, reshape=False, order=0) 199 | 200 | X[k,:,:,:][inverted_mask] = 0 201 | 202 | 203 | if choosy == False: # All areas, not only faults 204 | Y[k] = O[i, j] 205 | else:#TODO: Non choosy not supported yet 206 | Y[k] = O[i, j] 207 | 208 | return [X,Y, IDX] 209 | 210 | 211 | 212 | 213 | def shrinkMask(self, maskName = 'train', number = 9): 214 | # Shrink mask into 1/9 and return 9 masks: 215 | 216 | if maskName.__eq__('train'): 217 | M = np.array(self.trainMask) 218 | elif maskName.__eq__('all'): 219 | M = np.array(self.MASK) 220 | elif maskName.__eq__('whole'): 221 | M = np.ones(self.MASK.shape) 222 | offset = 100 223 | M[:, 0:offset] = 0 224 | M[0:offset, :] = 0 225 | M[:, 0 - offset:] = 0 226 | M[0 - offset:, :] = 0 227 | else: 228 | M = np.array(self.testMask) 229 | 230 | m = np.zeros((number, self.x, self.y)) 231 | idx = np.where(M == 1) 232 | idx = np.array(idx) 233 | 234 | cnt = idx.shape[1] // number 235 | 236 | for i in range(number): 237 | mprim = m[i] 238 | subidx = idx[:, cnt*i : cnt*(i+1)] 239 | subidx = tuple(subidx) 240 | mprim[subidx] = 1 241 | m[i] = mprim 242 | 243 | return m 244 | 245 | 246 | 247 | 248 | def evaluate(self, _pmap, expand=0, mask = 'all', etype = 'our'): 249 | pmap = np.array(_pmap) 250 | labels = self.expandBy(width=expand, epsilon=0.9 ,type='normal', set=False) 251 | 252 | 253 | if mask.__eq__('train'): 254 | maskFilter = self.trainMask 255 | labels[np.where(self.trainMask == 0)] = 0 256 | pmap[np.where(self.trainMask == 0)] = 0 257 | elif mask.__eq__('test'): 258 | maskFilter = self.testMask 259 | labels[np.where(self.testMask == 0)] = 0 260 | pmap[np.where(self.testMask == 0)] = 0 261 | else: 262 | maskFilter = self.MASK 263 | labels[np.where(self.MASK == 0)] = 0 264 | pmap[np.where(self.MASK == 0)] = 0 265 | 266 | 267 | 268 | if etype == 'our': 269 | IDX_pos = labels > 0 270 | differror = np.square(labels - pmap) 271 | differror[~IDX_pos] = 0 272 | pos_score = differror.sum() / IDX_pos.sum() 273 | 274 | 275 | 276 | IDX_neg = labels <= 0 277 | differror = np.square(labels - pmap) 278 | differror[~IDX_neg] = 0 279 | neg_score = differror.sum() / max(1, (pmap[IDX_neg] >0 ).sum()) 280 | 281 | IDXa = np.where(pmap > 0) 282 | 283 | 284 | return [pos_score, neg_score] 285 | 286 | 287 | else: 288 | EPS = np.finfo(float).eps 289 | 290 | yh = np.copy(pmap) 291 | yh[ yh == 1.0 ] = 1 - EPS 292 | yh[ yh == 0.0 ] = EPS 293 | 294 | y = np.copy(labels) 295 | y[ y == 1.0 ] = 1 - EPS 296 | y[ y == 0.0 ] = EPS 297 | 298 | 299 | loss = np.multiply(yh, np.log(yh)) 
+ np.multiply((1.0 - y), np.log( 1-yh )) 300 | 301 | err = -np.sum( loss[maskFilter == 1] ) / np.sum(maskFilter) 302 | return [err,err] 303 | -------------------------------------------------------------------------------- /Demo.py: -------------------------------------------------------------------------------- 1 | 2 | __author__ = "Amin Aghaee" 3 | __copyright__ = "Copyright 2018, Amin Aghaee" 4 | 5 | from DATASET import * 6 | from PmapViewer import * 7 | 8 | LOAD_BACKGROUND = True 9 | AUSTRALIA = True 10 | 11 | dir1 = './Results/TrainOnRandomSeleciton_W45_fault_QUEST/Pmap_exist_aust.npz' 12 | dir2 = './Results/TrainOnRandomSeleciton_W45_fault_QUEST/Pmap_exist_quest.npz' 13 | dir3 = './Results/TrainOnRandomSelection_w45_fault/Pmap_exist_quest.npz' 14 | dir4 = './Results/TrainOnRandomSelection_w45_fault/PMAP_exist.npz' 15 | 16 | 17 | 18 | dir5 = './Results/NewTrainingRandom_strip_mixed/Pmamp_Fault_Australia.hdf5Australia_strip.mat.npz' 19 | dir6 = './Results/NewTrainingRandom_strip_mixed/Pmamp_Fault_Australia.hdf5QUEST_strip.mat.npz' 20 | dir7 = './Results/NewTrainingRandom_strip_mixed/Pmamp_Fault_Mixed.hdf5Australia_strip.mat.npz' 21 | dir8 = './Results/NewTrainingRandom_strip_mixed/Pmamp_Fault_Mixed.hdf5QUEST_strip.mat.npz' 22 | dir9 = './Results/NewTrainingRandom_strip_mixed/Pmamp_Fault_Quest.hdf5Australia_strip.mat.npz' 23 | dir10 = './Results/NewTrainingRandom_strip_mixed/Pmamp_Fault_Quest.hdf5QUEST_strip.mat.npz' 24 | 25 | 26 | dir11 = './Results/First3Layers/Pmamp_Fault_Australia.hdf5Australia_strip.mat.npz' 27 | 28 | 29 | 30 | jdir = './applet.json' 31 | # Load and run application: 32 | p = PmapViewer(dir=jdir) 33 | p.run() 34 | 35 | 36 | 37 | #TODO: Prepare a demo video on training phase 38 | -------------------------------------------------------------------------------- /FILTER.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import numpy as np 4 | import random 5 | from globalVariables import * 6 | from Utility import * 7 | from DATASET import * 8 | 9 | class FILTER: 10 | def __init__(self, directory = FILTERDIR + 'Default.mat'): 11 | FDS = sio.loadmat(directory) 12 | 13 | self.F = FDS['filters'] 14 | self.N = self.F.shape[0] 15 | self.rlist = FDS['rotations'] 16 | 17 | def getFilter(self, n = 1): 18 | fnum = random.sample(range(self.N), n) 19 | return [fnum, self.F[fnum, :, :]] 20 | 21 | def getRadian(selfs, index): 22 | return (selfs.rlist[index]*np.pi) / 180.0 23 | 24 | def getFilterbyNumber(self, fnum = 0): 25 | return [fnum, self.F[fnum, :, :]] 26 | 27 | -------------------------------------------------------------------------------- /InputLayers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aminrd/LineamentLearning/3d5ecaef46bdc3a86532a6d5e41f3a316553c566/InputLayers.png -------------------------------------------------------------------------------- /LogParser.py: -------------------------------------------------------------------------------- 1 | import re 2 | import numpy as np 3 | import scipy.io as sio 4 | 5 | model_australia = {} 6 | model_quest = {} 7 | model_mixed = {} 8 | 9 | 10 | models = [model_australia, model_quest, model_mixed] 11 | for m in models: 12 | m['Quest'] = {} 13 | m['Australia'] = {} 14 | 15 | for mp in ['Quest', 'Australia']: 16 | m[mp]['Train_p'] = [] 17 | m[mp]['Train_n'] = [] 18 | m[mp]['Test_p'] = [] 19 | m[mp]['Test_n'] = [] 20 | m[mp]['All_p'] = [] 21 | m[mp]['All_n'] = [] 22 | 23 | 24 | 25 | 26 | 
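# Assumed log layout: each line written by Logger.addlog starts with a 26-character
# timestamp plus " : ", which is why every line below is sliced with [29:].
# Each evaluation record is a block of 7 consecutive lines: l2 carries the window size
# (9, 13, ..., 53, mapped to an index via (idx - 9) // 4), l3 names the trained model
# (Australia / Quest / Mixed), l4 names the evaluated map, and l5-l7 each hold a
# positive/negative score pair for the train, test and whole-map masks.
# The 72 records appear to correspond to 12 window sizes x 3 models x 2 maps, matching CreateBash.py.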
27 | with open("log.txt", "r") as f: 28 | for it in range(72): 29 | 30 | l1 = f.readline()[29:] 31 | l2 = f.readline()[29:] 32 | l3 = f.readline()[29:] 33 | l4 = f.readline()[29:] 34 | l5 = f.readline()[29:] 35 | l6 = f.readline()[29:] 36 | l7 = f.readline()[29:] 37 | 38 | idx = int(re.findall("\d+", l2)[0]) 39 | idx = (idx - 9) // 4 40 | 41 | if 'Quest' in l3: 42 | model = model_quest 43 | elif 'Mixed' in l3: 44 | model = model_mixed 45 | else: 46 | model = model_australia 47 | 48 | if 'QUEST' in l4: 49 | map_name = 'Quest' 50 | else: 51 | map_name = 'Australia' 52 | 53 | [p,n] = re.findall("\d+\.\d+", l5) 54 | model[map_name]['Train_p'] += [float(p)] 55 | model[map_name]['Train_n'] += [float(n)] 56 | 57 | [p, n] = re.findall("\d+\.\d+", l6) 58 | model[map_name]['Test_p'] += [float(p)] 59 | model[map_name]['Test_n'] += [float(n)] 60 | 61 | [p, n] = re.findall("\d+\.\d+", l7) 62 | model[map_name]['All_p'] += [float(p)] 63 | model[map_name]['All_n'] += [float(n)] 64 | 65 | sio.savemat('Australia.mat', model_australia) 66 | sio.savemat('Quest.mat', model_quest) 67 | sio.savemat('Mixed.mat', model_mixed) -------------------------------------------------------------------------------- /Logger.py: -------------------------------------------------------------------------------- 1 | # Logger class: 2 | # To log special events and append to a file after running 3 | __author__ = "Amin Aghaee" 4 | __copyright__ = "Copyright 2018, Amin Aghaee" 5 | 6 | import datetime 7 | 8 | class Logger: 9 | """To log special events and append to a file after running""" 10 | 11 | def __init__(self, fname = './log.txt'): 12 | self.fname = fname 13 | 14 | 15 | def addlog(self, message = None): 16 | 17 | if message is None: 18 | return 1 19 | 20 | try: 21 | with open(self.fname, "a") as f: 22 | fmessage = str(datetime.datetime.now()) + " : " + message + "\n" 23 | f.write(fmessage) 24 | except: 25 | print("Failed to log!") 26 | 27 | -------------------------------------------------------------------------------- /MODEL.py: -------------------------------------------------------------------------------- 1 | 2 | # Class : Model 3 | __author__ = "Amin Aghaee" 4 | __copyright__ = "Copyright 2018, Amin Aghaee" 5 | 6 | import numpy as np 7 | import random 8 | 9 | # Importing Keras Libraries: 10 | from keras.models import Sequential 11 | from keras.layers import Conv2D, Dense, Flatten 12 | from keras.optimizers import Adam 13 | from keras.callbacks import ModelCheckpoint 14 | 15 | 16 | from globalVariables import * 17 | from Utility import * 18 | 19 | 20 | if DEBUG_MODE: 21 | print("### Importing MODEL Class ###") 22 | 23 | 24 | def get_RotateNet(w): 25 | model = Sequential() 26 | model.add(Conv2D(8, 3, padding='valid', activation='relu', input_shape=(w, w, Layers))) 27 | model.add(Flatten()) 28 | model.add(Dense(300, activation='relu')) 29 | model.add(Dense(300, activation='relu')) 30 | model.add(Dense(1, activation='sigmoid')) 31 | model.compile(optimizer=Adam(), loss='binary_crossentropy', metrics=['accuracy']) 32 | return model 33 | 34 | 35 | class MODEL: 36 | """Class Model: define model here. 
Can train, predict and load previously trained parameters""" 37 | 38 | def __init__(self, w = WindowSize, param_dir = 0, checkpoint = CB + 'Default.hdf5'): 39 | self.model = get_RotateNet(w) 40 | 41 | if param_dir != 0: 42 | self.model.load_weights(param_dir) 43 | else: 44 | self.model_checkpoint = ModelCheckpoint(checkpoint, monitor='loss', verbose=DEBUG_MODE, save_best_only=False) 45 | 46 | def train(self, X, Y, epochs = 2): 47 | self.model.fit(X, Y, shuffle=True, batch_size=32, epochs=epochs, verbose=DEBUG_MODE, callbacks=[self.model_checkpoint]) 48 | 49 | def predict(self, X): 50 | return self.model.predict(X, verbose = DEBUG_MODE) 51 | 52 | -------------------------------------------------------------------------------- /Model.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aminrd/LineamentLearning/3d5ecaef46bdc3a86532a6d5e41f3a316553c566/Model.png -------------------------------------------------------------------------------- /PmapViewer.py: -------------------------------------------------------------------------------- 1 | #Pmap Viewer 2 | import numpy as np 3 | __author__ = "Amin Aghaee" 4 | __copyright__ = "Copyright 2018, Amin Aghaee" 5 | 6 | from PIL import Image, ImageDraw, ImageTk 7 | import tkinter as tk 8 | import json 9 | 10 | 11 | # Loading Related Modules: 12 | # -------------------------------------- 13 | from globalVariables import * 14 | from Utility import * 15 | from DATASET import * 16 | from FILTER import * 17 | import matplotlib.pyplot as plt 18 | 19 | if LOAD_MODELS: 20 | from MODEL import * 21 | # -------------------------------------- 22 | from Prob2Line import * 23 | 24 | 25 | class PmapViewer: 26 | def __init__(self, matrix=None, bg = None, dir = None): 27 | 28 | self.RUN = False 29 | 30 | sz = MAX_WINDOW_SIZE 31 | 32 | self.FILL = 'red' 33 | self.ErrorIsReady = False 34 | 35 | 36 | if matrix is None and dir is None: 37 | self.matrix = np.zeros((sz,sz)) 38 | self.width = sz 39 | self.height = sz 40 | self.width2 = sz 41 | self.height2 = sz 42 | 43 | elif not matrix is None: 44 | 45 | if len(matrix.shape) < 3: 46 | self.matrix = np.zeros((matrix.shape[0], matrix.shape[1])) 47 | self.matrix = matrix 48 | else: 49 | self.matrix = matrix 50 | 51 | self.width = matrix.shape[0] 52 | self.height = matrix.shape[1] 53 | 54 | if self.width > sz or self.height > sz: 55 | if self.width > sz: 56 | self.height2 = (sz * self.height) // self.width 57 | self.width2 = sz 58 | else: 59 | self.width2 = (sz * self.width) // self.height 60 | self.height2 = sz 61 | else: 62 | self.height2 = self.height 63 | self.width2 = self.width 64 | 65 | elif not dir is None: 66 | self.load(dir) 67 | 68 | 69 | 70 | if not bg is None: 71 | if len(bg.shape) >= 3: 72 | self.bg = bg 73 | else: 74 | self.bg = np.zeros((bg.shape[0], bg.shape[1], 3)) 75 | self.bg[:, :, 0] = bg 76 | self.bg[:, :, 1] = bg 77 | self.bg[:, :, 2] = bg 78 | self.bg = np.uint8(self.bg) 79 | else: 80 | self.bg = np.zeros((self.ds.OUTPUT.shape[0], self.ds.OUTPUT.shape[1], 3)) 81 | 82 | oex = self.ds.expandBy(width=45, epsilon=0.9, set=False) 83 | #oex = self.ds.expandBy(width=3, epsilon=0.9, set=False) 84 | 85 | #self.bg[:, :, 0] = self.ds.OUTPUT 86 | #self.bg[:, :, 1] = self.ds.OUTPUT 87 | #self.bg[:, :, 2] = self.ds.OUTPUT 88 | 89 | self.bg[:, :, 0] = oex 90 | self.bg[:, :, 1] = oex 91 | self.bg[:, :, 2] = oex 92 | 93 | self.bg = np.uint8(self.bg * 255) 94 | 95 | 96 | self.master = tk.Tk() 97 | 98 | 99 | 100 | 101 | 102 | def load(self, dir = 
'./applet.json'): 103 | 104 | with open(dir) as f: 105 | self.jfile = json.load(f) 106 | 107 | self.ds = DATASET(self.jfile["dataset"]["link"]) 108 | 109 | 110 | if LOAD_MODELS: 111 | self.wf = int(self.jfile["model1"]["w"]) 112 | self.model_flt = MODEL(w=self.wf, param_dir=self.jfile["model1"]["link"]) 113 | 114 | self.wa = int(self.jfile["model2"]["w"]) 115 | self.model_ang = MODEL(w=self.wa, param_dir=self.jfile["model2"]["link"]) 116 | 117 | 118 | ds = np.load(self.jfile["pmap"]["plink"]) 119 | self.lnumber = int(self.jfile["pmap"]["lnumber"]) 120 | self.matrix = ds['matrix'] 121 | self.width = self.matrix.shape[0] 122 | self.height = self.matrix.shape[1] 123 | 124 | if int(self.jfile["pmap"]["trained"]) == 1: 125 | h = np.load(self.jfile["pmap"]["alink"]) 126 | #self.angels = h['matrix'] 127 | self.angels = np.zeros((self.width, self.height, 36)) 128 | 129 | 130 | 131 | 132 | sz = MAX_WINDOW_SIZE 133 | if self.width > sz or self.height > sz: 134 | if self.width > sz: 135 | self.height2 = (sz * self.height) // self.width 136 | self.width2 = sz 137 | else: 138 | self.width2 = (sz * self.width) // self.height 139 | self.height2 = sz 140 | else: 141 | self.height2 = self.height 142 | self.width2 = self.width 143 | 144 | 145 | 146 | 147 | # -------------------------------------------------------------------------- 148 | 149 | def getBackground(self, showLines=False, c1 = 1, c2 = 254, layer=-1): 150 | 151 | BG = np.ones((self.width, self.height, 3)) 152 | 153 | if layer == -1: 154 | BG = BG * c1 155 | else: 156 | ll = self.ds.INPUTS[:,:,layer] * 255 157 | BG[:, :, 0] = ll 158 | BG[:, :, 1] = ll 159 | BG[:, :, 2] = ll 160 | 161 | if showLines: 162 | ll = np.array(self.bg) 163 | BG[ll >= 200] = np.floor(c2 * 0.81) 164 | BG[ll >= 220] = np.floor(c2 * 0.9) 165 | BG[ll >= 250] = c2 166 | #BG = BG + c2 * self.bg 167 | #BG[BG > 255] = 255 168 | 169 | return np.uint8(BG) 170 | 171 | 172 | 173 | def getImage(self, showLines = False, angels=False, pct = 0.9, onlyMax = True, threshold = 0.5, cb=1, cl=254, sheet=-1, prob=False): 174 | 175 | p = pmapCutoff(self.matrix, threshold = threshold) 176 | 177 | BG = self.getBackground(showLines, cb, cl, sheet) 178 | 179 | if angels == False: 180 | 181 | if prob == False: 182 | cvec = colour2vec(self.FILL) 183 | 184 | ptrans = np.uint8(p * 255) 185 | map = np.zeros(BG.shape) 186 | 187 | for d in range(3): 188 | cmap = self.ds.trainMask * cvec[d] + self.ds.testMask*(1-cvec[d]) 189 | cmap = cmap*255 190 | map[:,:,d] = np.multiply(BG[:,:,d], (1-p)) + np.multiply(p,cmap) 191 | 192 | else: 193 | 194 | cvec = colour2vec(self.FILL) 195 | 196 | ptrans = np.uint8(self.matrix * 255) 197 | map = np.zeros(BG.shape) 198 | 199 | for d in range(3): 200 | cmap = self.ds.trainMask * cvec[d] + self.ds.testMask*(1-cvec[d]) 201 | cmap = cmap*255 202 | map[:,:,d] = np.multiply(BG[:,:,d], (1-self.matrix)) + np.multiply(self.matrix,cmap) 203 | 204 | 205 | # ------ Apply mask? 
--------------- 206 | APPLY_MASK = True 207 | 208 | if APPLY_MASK: 209 | for i in range(3): 210 | mp = np.array(map[:,:,i]) 211 | mp[np.where(self.ds.MASK == 0)] = 0 212 | map[:,:,i] = mp 213 | 214 | #----------------------------------- 215 | 216 | im = Image.fromarray(np.uint8(map)) 217 | return im 218 | 219 | 220 | 221 | else: 222 | 223 | if int(self.jfile["pmap"]["trained"]) == 1: 224 | n = self.angels.shape[2] 225 | 226 | IDX = np.where(p >= 1) 227 | subset = np.random.choice(range(len(IDX[0])), np.uint32(np.floor(pct * len(IDX[0]))), replace=False) 228 | subset = np.uint32(subset) 229 | IDX = np.array(IDX) 230 | IDX = IDX[:, subset] 231 | IDX = tuple(IDX) 232 | 233 | ang_predictions = np.zeros((len(IDX[0]),n)) 234 | 235 | k = len(IDX[0]) 236 | 237 | MaxProb = -np.ones(k) 238 | MaxSlope = np.full(k, np.pi / 2.0) 239 | 240 | for i in range(n): 241 | slope = 2 * np.pi * i / n 242 | slope = np.arctan(np.tan(slope)) 243 | 244 | pm = self.angels[:,:,i] 245 | ang_predictions[:,i] = np.ndarray.flatten(pm[IDX]) 246 | 247 | YhNormal = np.ndarray.flatten(pm[IDX]) 248 | 249 | mIdx = np.where(MaxProb < YhNormal) 250 | MaxProb[mIdx] = YhNormal[mIdx] 251 | MaxSlope[mIdx] = slope 252 | 253 | 254 | if onlyMax==False: 255 | 256 | for r in range(n): 257 | slope = 2 * np.pi * r / n 258 | slope = np.arctan(np.tan(slope)) 259 | slope_m = np.repeat(slope, k) 260 | 261 | BG = drawLinesSlope(BG, IDX, slope_m, ws=30, prelative=True, parray=ang_predictions[:,r], WIDTH=1, FILL=self.FILL) 262 | 263 | return Image.fromarray(BG) 264 | 265 | 266 | else: 267 | tmp = drawLinesSlope(BG, IDX, MaxSlope, ws=15, FILL=self.FILL) 268 | return Image.fromarray(tmp) 269 | 270 | 271 | 272 | 273 | 274 | else: 275 | 276 | flt_name = self.jfile["filter"]["link"] 277 | FLT = FILTER(flt_name) 278 | 279 | 280 | [X, Y, IDX] = self.ds.generateDSwithFilter(FILTERDIR + 'Filters_0_w45.mat', self.ds.DEGREES, p, ratio=pct, 281 | w=self.wa, 282 | choosy=True) 283 | 284 | ang_predictions = np.zeros((len(Y), FLT.N)) 285 | 286 | MaxProb = -np.ones(len(Y)) 287 | MaxSlope = np.full(len(Y), np.pi / 2.0) 288 | 289 | for r in range(FLT.N): 290 | 291 | if DEBUG_MODE: 292 | print("++Preparing prediction for angel number {}".format(r)) 293 | 294 | [fnum, filter] = FLT.getFilterbyNumber(r) 295 | slope = 2 * np.pi * r / FLT.N 296 | slope = np.arctan(np.tan(slope)) 297 | 298 | Xr = np.array(X) 299 | xr = np.zeros((self.wa, self.wa, Layers)) 300 | 301 | # Rotate test test: 302 | for id in range(Xr.shape[0]): 303 | xr = Xr[id, :, :, :] 304 | Xr[id, :, :, :] = rotateWithMap(xr, filter, map_type='m2r', dim=2) 305 | 306 | Yh = self.model_ang.predict(Xr) 307 | YhNormal = np.ndarray.flatten(Yh) 308 | 309 | ang_predictions[:,r] = YhNormal 310 | 311 | mIdx = np.where(MaxProb < YhNormal) 312 | MaxProb[mIdx] = YhNormal[mIdx] 313 | MaxSlope[mIdx] = slope 314 | 315 | 316 | if showLines == False: 317 | BG = np.uint8(np.zeros((self.ds.x, self.ds.y, 3))) 318 | else: 319 | BG = self.bg 320 | 321 | 322 | if onlyMax == True: 323 | tmp = drawLinesSlope(BG, IDX, MaxSlope, ws=15, FILL=self.FILL) 324 | return Image.fromarray(tmp) 325 | 326 | else: 327 | 328 | for r in range(FLT.N): 329 | slope = 2 * np.pi * r / FLT.N 330 | slope = np.arctan(np.tan(slope)) 331 | slope_m = np.repeat(slope, len(Y)) 332 | 333 | BG = drawLinesSlope(BG, IDX, slope_m, ws=15, prelative=True, parray=ang_predictions[:,r], FILL=self.FILL) 334 | 335 | return Image.fromarray(BG) 336 | 337 | 338 | 339 | def plotEvaluation(self): 340 | 341 | fname = './applet_images/plot.png' 342 | 343 | if self.ErrorIsReady: 
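# The evaluation plot is cached to disk the first time it is computed; if it is already
# available, just open and display the saved image instead of re-running the evaluation.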
344 | im = Image.open(fname) 345 | im.show() 346 | return 347 | 348 | max_expand = 45 349 | xaxis = list(range(1,max_expand,2)) 350 | 351 | train_err = np.zeros((len(xaxis), 2)) 352 | test_err = np.zeros((len(xaxis), 2)) 353 | all_err = np.zeros((len(xaxis), 2)) 354 | 355 | for i in range(len(xaxis)): 356 | 357 | if DEBUG_MODE: 358 | print("<-> Expanding lines by {} pixels".format(xaxis[i])) 359 | 360 | train_err[i] = self.ds.evaluate(self.matrix, xaxis[i], 'train') 361 | test_err[i] = self.ds.evaluate(self.matrix, xaxis[i], 'test') 362 | all_err[i] = self.ds.evaluate(self.matrix, xaxis[i], 'all') 363 | 364 | 365 | 366 | f, axarr = plt.subplots(3, sharey=True) 367 | 368 | axarr[0].plot(xaxis, train_err[:,0], '+', xaxis, train_err[:,1], 'r--') 369 | axarr[0].set_title('Training errors') 370 | str = 'pos: {:10.3f}\n neg:{:10.3f}'.format(np.mean(train_err[:,0]), np.mean(train_err[:,1])) 371 | axarr[0].text(4, 0.5,str, horizontalalignment='right', verticalalignment='center') 372 | 373 | 374 | 375 | axarr[1].plot(xaxis, test_err[:, 0], '+', xaxis, test_err[:, 1], 'r--') 376 | axarr[1].set_title('Test errors') 377 | str = 'pos: {:10.3f}\n neg:{:10.3f}'.format(np.mean(test_err[:,0]), np.mean(test_err[:,1])) 378 | axarr[1].text(4, 0.5,str, horizontalalignment='right', verticalalignment='center') 379 | 380 | 381 | axarr[2].plot(xaxis, all_err[:, 0], '+', xaxis, all_err[:, 1], 'r--') 382 | axarr[2].set_title('Total errors') 383 | str = 'pos: {:10.3f}\n neg:{:10.3f}'.format(np.mean(all_err[:,0]), np.mean(all_err[:,1])) 384 | axarr[2].text(4, 0.5,str, horizontalalignment='right', verticalalignment='center') 385 | 386 | 387 | plt.savefig(fname) 388 | self.ErrorIsReady = True 389 | im = Image.open(fname) 390 | im.show() 391 | 392 | 393 | def requestImage(self): 394 | # Getting values from Applet: 395 | _showLines = self.CheckVar1.get() == 1 396 | _onlyMax = self.CheckVarMode.get() == 1 397 | _threshold = self.th.get() / 100 398 | 399 | #_pct = self.pcth.get()/100 400 | _pct = 0.1 401 | 402 | _angels = False 403 | _prob = self.CheckVarpmap.get() == 1 404 | 405 | _c1 = self.bgcol.get() 406 | _c2 = self.linecol.get() 407 | _sheet = self.lselect.index(tk.ACTIVE) - 1 408 | 409 | 410 | self.FILL = self.pcol.get(tk.ACTIVE) 411 | 412 | 413 | im = self.getImage(_showLines, _angels, _pct, _onlyMax, _threshold, _c1, _c2, _sheet, _prob) 414 | 415 | return im 416 | 417 | def showValues(self): 418 | 419 | im = self.requestImage() 420 | im2 = im.resize((self.width2, self.height2)) 421 | photo = ImageTk.PhotoImage(im2) 422 | self.panel.configure(image = photo) 423 | self.panel.image = photo 424 | 425 | im.save('./Temp.png') 426 | 427 | 428 | def updateImage(self, im): 429 | im2 = im.resize((self.width2, self.height2)) 430 | photo = ImageTk.PhotoImage(im2) 431 | self.panel.configure(image = photo) 432 | self.panel.image = photo 433 | 434 | 435 | 436 | def openImage(self): 437 | im = self.requestImage() 438 | im.save('./Temp.png') 439 | im.show() 440 | 441 | 442 | 443 | 444 | def close_window(self): 445 | self.master.destroy() 446 | 447 | 448 | 449 | def showclusters(self): 450 | p2l = prob2map(self.matrix) 451 | 452 | _coff = self.th.get() / 100 453 | _eps = self.pcth.get() 454 | cmap = p2l.getClusters(cutoff=_coff, eps=_eps) 455 | img = p2l.showClusters(cmap) 456 | im = Image.fromarray(img) 457 | self.updateImage(im) 458 | 459 | im.save('./Temp.png') 460 | 461 | return im 462 | 463 | 464 | 465 | def convert2lines(self): 466 | p2l = prob2map(self.matrix) 467 | 468 | _coff = self.th.get() / 100 469 | _eps = 
self.pcth.get() 470 | lines = p2l.makeConversion(_coff, _eps) 471 | 472 | _showLines = self.CheckVar1.get() == 1 473 | _cb = self.bgcol.get() 474 | _cl = self.linecol.get() 475 | _sheet = self.lselect.index(tk.ACTIVE) - 1 476 | bg = self.getBackground(_showLines, _cb, _cl, _sheet) 477 | 478 | 479 | im = drawLinesWithEndingPoints(bg, lines) 480 | self.updateImage(im) 481 | 482 | im.save('./Temp.png') 483 | 484 | return im 485 | 486 | 487 | 488 | 489 | 490 | 491 | 492 | def run(self): 493 | 494 | self.master.title("Probability Map Viewer") 495 | 496 | 497 | 498 | # FAULT EXISTENSE: 499 | # Scale bar to set threshold 500 | mainframe = tk.Frame(self.master) 501 | mainframe.pack() 502 | 503 | frame1 = tk.Frame(mainframe, borderwidth=2) 504 | frame1.pack(side=tk.LEFT) 505 | 506 | frame3 = tk.Frame(mainframe) 507 | frame3.pack(side=tk.LEFT) 508 | 509 | 510 | frame2 = tk.Frame(mainframe) 511 | frame2.pack(side=tk.LEFT) 512 | 513 | frame4 = tk.Frame(mainframe) 514 | frame4.pack(side = tk.LEFT) 515 | 516 | frame5 = tk.Frame(mainframe) 517 | frame5.pack(side = tk.RIGHT) 518 | 519 | 520 | 521 | checkFrame = tk.Frame(self.master) 522 | checkFrame.pack() 523 | 524 | buttonFrame = tk.Frame(self.master) 525 | buttonFrame.pack() 526 | 527 | 528 | 529 | # ========================================= # 530 | # =============== FRAME 1 ============== # 531 | # ========================================= # 532 | 533 | self.th = tk.Scale(frame1, from_=1, to=100, orient=tk.HORIZONTAL, label='Labeling threshold', length=200) 534 | self.th.set( 50 ) 535 | self.th.pack() 536 | 537 | self.pcth = tk.Scale(frame1, from_=3, to=10, resolution=0.2 , orient=tk.HORIZONTAL, label='Epsilon ', length=200) 538 | self.pcth.set( 1 ) 539 | self.pcth.pack() 540 | 541 | 542 | 543 | # ========================================= # 544 | # =============== FRAME 2 ============== # 545 | # ========================================= # 546 | 547 | 548 | self.bgcol = tk.Scale(frame2, from_=0, to=254, orient=tk.HORIZONTAL, length=100) 549 | self.bgcol.set(0) 550 | self.bgcol.pack() 551 | 552 | self.linecol = tk.Scale(frame2, from_=1, to=254, orient=tk.HORIZONTAL,length=100) 553 | self.linecol.set(254) 554 | self.linecol.pack() 555 | 556 | barimage = Image.open('./applet_images/graybar.png') 557 | img_bar = ImageTk.PhotoImage(barimage) 558 | pnl_bar = tk.Label(frame2, image=img_bar) 559 | pnl_bar.pack() 560 | 561 | 562 | 563 | # ========================================= # 564 | # =============== FRAME 3 ============== # 565 | # ========================================= # 566 | 567 | 568 | tbg = tk.Text(frame3, height=2, width=15) 569 | tbg.pack() 570 | tbg.insert(tk.END, "Background's colour") 571 | 572 | tl = tk.Text(frame3, height=2, width=15) 573 | tl.pack() 574 | tl.insert(tk.END, "Line's colour") 575 | 576 | 577 | 578 | 579 | # ========================================= # 580 | # =============== FRAME 4 ============== # 581 | # ========================================= # 582 | 583 | 584 | tpc = tk.Text(frame4, height=2, width=30) 585 | tpc.pack() 586 | tpc.insert(tk.END, "Prediction colour:") 587 | 588 | 589 | self.pcol = tk.Listbox(frame4, height=6) 590 | self.pcol.insert(1, 'red') 591 | self.pcol.insert(2, 'green') 592 | self.pcol.insert(3, 'blue') 593 | self.pcol.insert(4, 'yellow') 594 | self.pcol.insert(5, 'white') 595 | self.pcol.insert(6, 'black') 596 | self.pcol.pack() 597 | 598 | self.pcol.itemconfig(0, {'bg':'red'}) 599 | self.pcol.itemconfig(1, {'bg': 'green'}) 600 | self.pcol.itemconfig(2, {'bg': 'blue'}) 601 | self.pcol.itemconfig(3, 
{'bg': 'yellow'}) 602 | self.pcol.itemconfig(4, {'bg': 'white'}) 603 | self.pcol.itemconfig(5, {'bg': 'black', 'fg':'white'}) 604 | 605 | 606 | 607 | 608 | # ========================================= # 609 | # =============== FRAME 5 ============== # 610 | # ========================================= # 611 | 612 | 613 | tls = tk.Text(frame5, height=2, width=30) 614 | tls.pack() 615 | tls.insert(tk.END, "Underlying map/sheet:") 616 | 617 | 618 | self.lselect = tk.Listbox(frame5, height=5) 619 | self.lselect.insert(1, 'Empty') 620 | self.lselect.insert(2, '1vd_TMI_RTP') 621 | self.lselect.insert(3, 'TMI_RTP') 622 | self.lselect.insert(4, 'Digital Elevation') 623 | self.lselect.insert(5, 'Isostatic Gravity') 624 | self.lselect.insert(6, 'RTP_HGM') 625 | self.lselect.insert(7, 'RTP_RS_HGM') 626 | self.lselect.insert(8, 'RTP_RD_HGM') 627 | self.lselect.insert(9, 'RTP_RI_HGM') 628 | self.lselect.pack(side=tk.RIGHT) 629 | 630 | 631 | 632 | 633 | # ========================================= # 634 | # ============== CHECK FRAME ============== # 635 | # ========================================= # 636 | 637 | 638 | self.CheckVar1 = tk.IntVar() 639 | check = tk.Checkbutton(checkFrame , text="Show interpreted lines", variable = self.CheckVar1) 640 | check.pack(side=tk.LEFT) 641 | 642 | 643 | self.CheckVarMode = tk.IntVar() 644 | showMax = tk.Checkbutton(checkFrame, text="Maximum/Mode?", variable = self.CheckVarMode) 645 | showMax.pack(side=tk.RIGHT) 646 | 647 | 648 | 649 | self.CheckVarpmap = tk.IntVar() 650 | angTik = tk.Checkbutton(checkFrame, text="Show prob map?", variable = self.CheckVarpmap) 651 | angTik.pack(side=tk.LEFT) 652 | 653 | 654 | # ========================================= # 655 | # ============== BUTTON FRAME ============= # 656 | # ========================================= # 657 | 658 | tk.Button(buttonFrame, text='Show', command=self.showValues, bg="blue", bd=4, fg="blue").pack(side=tk.LEFT) 659 | tk.Button(buttonFrame, text='Open', command=self.openImage, bg="red", bd=4, fg="red").pack(side=tk.LEFT) 660 | tk.Button(buttonFrame, text='Compute Error', command=self.plotEvaluation, bg="black", bd=4, fg="black").pack(side=tk.LEFT) 661 | tk.Button(buttonFrame, text='Run DBSCAN', command=self.showclusters, bg="green", bd=4, fg="green").pack( 662 | side=tk.LEFT) 663 | tk.Button(buttonFrame, text='Convert to Lines', command=self.convert2lines, bg="yellow", bd=4, fg="yellow").pack( 664 | side=tk.RIGHT) 665 | 666 | 667 | 668 | # ========================================= # 669 | # ============== IMAGE FRAME ============== # 670 | # ========================================= # 671 | 672 | 673 | im = Image.fromarray(self.bg) 674 | im = im.resize((self.width2, self.height2)) 675 | img = ImageTk.PhotoImage(im) 676 | self.panel = tk.Label(self.master, image=img) 677 | self.panel.pack() 678 | 679 | 680 | self.RUN = True 681 | self.master.mainloop() 682 | 683 | 684 | 685 | 686 | 687 | -------------------------------------------------------------------------------- /Prob2Line.py: -------------------------------------------------------------------------------- 1 | # Prob2Map 2 | # Is a class that gets a probability map as an input and convert it to lines 3 | 4 | import matplotlib.pyplot as plt 5 | import numpy as np 6 | __author__ = "Amin Aghaee" 7 | __copyright__ = "Copyright 2018, Amin Aghaee" 8 | 9 | from sklearn import linear_model 10 | from sklearn.cluster import DBSCAN 11 | from sklearn.metrics import mean_squared_error 12 | from sklearn.linear_model import Ridge 13 | from sklearn.preprocessing 
import PolynomialFeatures 14 | from sklearn.pipeline import make_pipeline 15 | 16 | from Utility import * 17 | import numpy.matlib 18 | 19 | 20 | 21 | METHOD_OPTIONS = ['Linear', 'Curve', 'BestCurve'] 22 | METHOD = METHOD_OPTIONS[2] 23 | DEGREELIST = [1,3] 24 | DEGREE = 3 25 | 26 | 27 | class prob2map: 28 | def __init__(self, pmap=None): 29 | 30 | if pmap is None: 31 | self.pmap = np.random.random((500,500)) 32 | else: 33 | self.pmap = pmap 34 | 35 | self.SpecialColor = -1 36 | 37 | 38 | def window2line(self, patch, s): 39 | [x,y] = np.where(patch > 0) 40 | 41 | x -= s 42 | y -= s 43 | 44 | x = np.reshape(x,[x.shape[0],1]) 45 | weight = patch[np.where(patch > 0)] 46 | 47 | lr = linear_model.RANSACRegressor() 48 | lr.fit(x,y,weight) 49 | 50 | return lr.estimator_.coef_[0] 51 | 52 | 53 | def getLines(self, pachsize = 17, cutoff = 0.3, mincut = 0.2): 54 | PMAP = np.array(self.pmap) 55 | PMAP[PMAP < mincut] = 0 56 | matrix = np.array(PMAP) 57 | 58 | s = (pachsize - 1) // 2 59 | 60 | lines = [] 61 | while matrix.max() > cutoff: 62 | tmp = np.where( matrix >= matrix.max()) 63 | ox = tmp[0][0] 64 | oy = tmp[1][0] 65 | 66 | matrix[ox-s:ox+s+1,oy-s:oy+s+1] = 0 67 | coeff = self.window2line(PMAP[ox-s:ox+s+1,oy-s:oy+s+1], s) 68 | 69 | lines.append([ox,oy, coeff]) 70 | 71 | return lines 72 | 73 | 74 | def getClusters(self, cutoff = 0.3, eps = 0.3): 75 | PMAP = pmapCutoff(self.pmap, cutoff) 76 | X = np.transpose(np.where(PMAP > 0)) 77 | db = DBSCAN(eps=eps, min_samples=20).fit(X) 78 | labels = db.labels_ 79 | 80 | cmap = np.zeros(PMAP.shape) 81 | for i in range(len(labels)): 82 | cmap[ X[i][0] , X[i][1]] = labels[i] 83 | return np.int32(cmap) 84 | 85 | 86 | def showClusters(self, cmap, specnumber = -1): 87 | imageMap = np.zeros((cmap.shape[0], cmap.shape[1], 3)) 88 | nclass = np.max(cmap) 89 | 90 | for c in range(1,nclass): 91 | 92 | if specnumber > 0 and c == specnumber: 93 | col = [255,0,0] 94 | else: 95 | col = getRandomColour(3) 96 | 97 | ind = np.where(cmap == c) 98 | for i in range(len(ind[0])): 99 | for j in range(3): 100 | imageMap[ ind[0][i], ind[1][i], j ] = col[j] 101 | 102 | return np.uint8(imageMap) 103 | 104 | def mergeClusters(self, cmap, c1, c2): 105 | cmap2 = np.array(cmap) 106 | cmin = np.min([c1,c2]) 107 | 108 | I1 = np.where(cmap2 == c1) 109 | cmap2[I1] = cmin 110 | 111 | I2 = np.where(cmap2 == c2) 112 | cmap2[I2] = cmin 113 | 114 | return cmap2 115 | 116 | def getClusterCentroid(self, cmap, c): 117 | [i,j] = np.where(cmap == c) 118 | 119 | I = np.mean(i) 120 | J = np.mean(j) 121 | 122 | return [I,J] 123 | 124 | def getClusterDistance(self, cmap, c1, c2, center=False): 125 | 126 | if center: 127 | # Compute distance of center points of each cluster 128 | [i1,j1] = self.getClusterCentroid(cmap, c1) 129 | [i2, j2] = self.getClusterCentroid(cmap, c2) 130 | 131 | return np.sqrt( (i1-i2)*(i1-i2) + (j1-j2)*(j1-j2) ) 132 | else: 133 | # Compute distance of nearest points in two clusters: 134 | D = np.inf 135 | [I1, J1] = np.where(cmap == c1) 136 | [I2, J2] = np.where(cmap == c2) 137 | 138 | k = len(I1) 139 | l = len(I2) 140 | 141 | I1 = np.matlib.repmat(np.array(I1), l, 1) 142 | J1 = np.matlib.repmat(np.array(J1), l, 1) 143 | I2 = np.matlib.repmat(np.array(I2), k, 1).transpose() 144 | J2 = np.matlib.repmat(np.array(J2), k, 1).transpose() 145 | 146 | Dmatrix = np.square((I1-I2)) + np.square((J1-J2)) 147 | D = np.min(Dmatrix) 148 | return np.sqrt(D) 149 | 150 | 151 | 152 | def sortClustesrsByDistance(self, cmap, cbase): 153 | clist = np.unique(cmap)[1:] 154 | d = np.zeros_like(clist) 155 | 
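# Centroid-to-centroid distance from the base cluster to every other cluster;
# the cluster labels are returned sorted from nearest to farthest. Note that d
# shares clist's integer dtype, so the stored distances are truncated to whole numbers.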
156 | for c in range(len(clist)): 157 | d[c] = self.getClusterDistance(cmap, cbase, clist[c], center=True) 158 | 159 | args = np.argsort(d) 160 | return clist[args] 161 | 162 | 163 | 164 | 165 | def getClusterLinearError(self, cmap, c): 166 | centroid = self.getClusterCentroid(cmap, c) 167 | centroid = np.uint64(centroid) 168 | 169 | ind = np.where(cmap == c) 170 | return self.convertCluster2Line(centroid, ind, getError=True) 171 | 172 | def getClusterCurveError(self, cmap, c, degree=3): 173 | centroid = self.getClusterCentroid(cmap, c) 174 | centroid = np.uint64(centroid) 175 | 176 | ind = np.where(cmap == c) 177 | return self.convertCluster2Curve(centroid, ind, degree, getError=True) 178 | 179 | def getClusterBestCurveError(self, cmap, c, degree=None): 180 | 181 | if degree is None: 182 | degree = [1,3,5] 183 | 184 | centroid = self.getClusterCentroid(cmap, c) 185 | centroid = np.uint64(centroid) 186 | 187 | ind = np.where(cmap == c) 188 | return self.convertCluster2BestCurve(centroid, ind, degree, getError=True) 189 | 190 | 191 | 192 | def convertCluster2Curve(self, center, cluster, degree=3, getError=False): 193 | # Cluster : 2xN array [[x1,x2,...],[y1,y2,...]] 194 | # Center : [X0,Y0] 195 | X = np.array(cluster[0]) 196 | Y = np.array(cluster[1]) 197 | 198 | X -= center[0] 199 | Y -= center[1] 200 | X = np.reshape(X, [X.shape[0], 1]) 201 | 202 | 203 | model = make_pipeline(PolynomialFeatures(degree), Ridge()) 204 | model.fit(X,Y) 205 | 206 | 207 | if getError: 208 | yhat = model.predict(X) 209 | return mean_squared_error(Y, yhat) 210 | 211 | else: 212 | xset = np.unique(X) 213 | X1 = np.reshape(xset, [xset.shape[0], 1]) 214 | yset = model.predict(X1) 215 | 216 | return [ xset + center[0], yset + center[1] ] 217 | 218 | def convertCluster2BestCurve(self, center, cluster, degree=None, getError=False): 219 | # Cluster : 2xN array [[x1,x2,...],[y1,y2,...]] 220 | # Center : [X0,Y0] 221 | 222 | if degree is None: 223 | degree = [1,3,5] 224 | 225 | X = np.array(cluster[0]) 226 | Y = np.array(cluster[1]) 227 | 228 | X -= center[0] 229 | Y -= center[1] 230 | X = np.reshape(X, [X.shape[0], 1]) 231 | 232 | 233 | Emin = np.inf 234 | BestModel = None 235 | 236 | for d in degree: 237 | model = make_pipeline(PolynomialFeatures(d), Ridge()) 238 | model.fit(X,Y) 239 | 240 | yhat = model.predict(X) 241 | err = mean_squared_error(Y, yhat) 242 | 243 | if err < Emin: 244 | BestModel = model 245 | Emin = err 246 | 247 | 248 | if getError: 249 | return Emin 250 | 251 | else: 252 | xset = np.unique(X) 253 | X1 = np.reshape(xset, [xset.shape[0], 1]) 254 | yset = model.predict(X1) 255 | 256 | return [ xset + center[0], yset + center[1] ] 257 | 258 | 259 | 260 | def convertCluster2Line(self, center, cluster, getError=False): 261 | # Cluster : 2xN array [[x1,x2,...],[y1,y2,...]] 262 | # Center : [X0,Y0] 263 | 264 | X = np.array(cluster[0]) 265 | Y = np.array(cluster[1]) 266 | 267 | X -= center[0] 268 | Y -= center[1] 269 | X = np.reshape(X, [X.shape[0], 1]) 270 | 271 | lr = linear_model.RANSACRegressor() 272 | lr.fit(X,Y) 273 | 274 | if getError: 275 | yhat = lr.predict(X) 276 | return mean_squared_error(Y, yhat) 277 | 278 | else: 279 | 280 | Xmin = [X.min() + center[0], lr.predict(X.min()) + center[1]] 281 | Xmax = [X.max() + center[0], lr.predict(X.max()) + center[1]] 282 | 283 | return [center, Xmin, Xmax] 284 | 285 | 286 | 287 | def doIteration(self, cmap, crange = 5, threshold = 0.8): 288 | clusterList = np.unique(cmap)[1:] 289 | 290 | cnt = self.getClusterSizes(cmap, clusterList) 291 | cnt_max = np.max(cnt) 
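# Sampling weights favour smaller clusters: weight = (largest cluster size - cluster size),
# normalised into the probability vector _p used by np.random.choice below.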
292 | cnt = cnt_max - cnt 293 | _p = cnt / np.sum(cnt) 294 | 295 | 296 | cbase = np.random.choice(clusterList, p=_p) 297 | 298 | 299 | if DEBUG_MODE: 300 | print("===================") 301 | print("Chose cluster {} with size {}".format(cbase, len(np.where(cmap == cbase)[0]))) 302 | 303 | if cbase <= 0: 304 | return cmap 305 | 306 | cnearby = self.sortClustesrsByDistance(cmap, cbase) 307 | 308 | 309 | if METHOD.__eq__("Linear"): 310 | E1 = self.getClusterLinearError(cmap, cbase) 311 | elif METHOD.__eq__("BestCurve"): 312 | E1 = self.getClusterBestCurveError(cmap, cbase, degree=DEGREELIST) 313 | else: 314 | E1 = self.getClusterCurveError(cmap, cbase, degree=DEGREE) 315 | 316 | 317 | 318 | EMIN = np.inf 319 | BestMerge = cmap 320 | self.SpecialColor = -1 321 | Best_Desc = "No Merge!" 322 | 323 | 324 | 325 | 326 | for i in range(crange): 327 | cprim = cnearby[i+1] 328 | 329 | if cprim <= 0: 330 | continue 331 | 332 | 333 | 334 | # Computing Error for other cluster 335 | if METHOD.__eq__("Linear"): 336 | E2 = self.getClusterLinearError(cmap, cprim) 337 | elif METHOD.__eq__("BestCurve"): 338 | E2 = self.getClusterBestCurveError(cmap, cprim, degree=DEGREELIST) 339 | else: 340 | E2 = self.getClusterCurveError(cmap, cprim, degree=DEGREE) 341 | 342 | 343 | 344 | cmerge = self.mergeClusters(cmap, cbase, cprim) 345 | 346 | 347 | 348 | # Computing Error if merge these two clusters 349 | if METHOD.__eq__("Linear"): 350 | Emerge = self.getClusterLinearError(cmerge, np.min([cprim, cbase])) 351 | elif METHOD.__eq__("BestCurve"): 352 | Emerge = self.getClusterBestCurveError(cmerge, np.min([cprim, cbase]), degree=DEGREELIST) 353 | else: 354 | Emerge = self.getClusterCurveError(cmerge, np.min([cprim, cbase]), degree=DEGREE) 355 | 356 | 357 | 358 | 359 | if Emerge < EMIN and E1+E2 >= Emerge * threshold: 360 | EMIN = Emerge 361 | BestMerge = cmerge 362 | self.SpecialColor = np.min([cbase, cprim]) 363 | 364 | if DEBUG_MODE: 365 | Best_Desc = "--- Merged {} and {}".format(cbase, cprim) 366 | 367 | 368 | 369 | if DEBUG_MODE: 370 | print(Best_Desc) 371 | print("--- Total number of clusters = {}".format(len(np.unique(BestMerge)))) 372 | 373 | return BestMerge 374 | 375 | 376 | 377 | 378 | 379 | 380 | def makeConversion(self, cutoff = 0.3, eps = 0.3): 381 | cmap = self.getClusters(cutoff, eps) 382 | return self.convertClustersToLines(cmap) 383 | 384 | 385 | 386 | def convertClustersToLines(self, cmap): 387 | nclass = np.unique(cmap) 388 | lines = [] 389 | 390 | # Each line consist of two pairs [[x1,y1],[x2,y2]] 391 | for c in nclass: 392 | 393 | if c <= 0: 394 | continue 395 | 396 | ind = np.where(cmap == c) 397 | center = [np.uint64(np.mean(ind[0])) , np.uint64(np.mean(ind[1]))] 398 | L = self.convertCluster2Line(center, ind) 399 | L = np.uint64(L) 400 | 401 | lines += [ [ L[1] , L[2]] ] 402 | 403 | return lines 404 | 405 | 406 | def convertClustersToCurves(self, cmap, degree = 3): 407 | nclass = np.unique(cmap) 408 | curves = [] 409 | 410 | for c in nclass: 411 | if c<=0: 412 | continue 413 | 414 | ind = np.where(cmap == c) 415 | center = [np.uint64(np.mean(ind[0])) , np.uint64(np.mean(ind[1]))] 416 | C = self.convertCluster2Curve(center, ind, degree) 417 | C = np.uint64(C) 418 | 419 | curves += [C] 420 | 421 | return curves 422 | 423 | 424 | def convertClustersToBestCurves(self, cmap, degree = None): 425 | 426 | if degree is None: 427 | degree = [1,3,5] 428 | 429 | nclass = np.unique(cmap) 430 | curves = [] 431 | 432 | for c in nclass: 433 | 434 | if c <= 0: 435 | continue 436 | 437 | ind = np.where(cmap == c) 438 
| center = [np.uint64(np.mean(ind[0])) , np.uint64(np.mean(ind[1]))] 439 | C = self.convertCluster2BestCurve(center, ind, degree) 440 | C = np.uint64(C) 441 | 442 | curves += [C] 443 | 444 | return curves 445 | 446 | 447 | 448 | 449 | 450 | def drawLines(self, pachsize = 17, cutoff = 0.3, mincut = 0.2): 451 | 452 | lines = self.getLines(pachsize, cutoff, mincut) 453 | lines = np.array(lines) 454 | 455 | self.LINES = lines 456 | 457 | bg = np.zeros([self.pmap.shape[0], self.pmap.shape[1], 3]) 458 | bg[:,:,1] = np.floor(self.pmap * 255) 459 | bg = np.uint8(bg) 460 | 461 | IDX = np.transpose(lines[:,0:2]) 462 | slopes = np.arctan( -lines[:,2] ) 463 | limage = drawLinesSlope(bg , IDX,slopes, ws = pachsize) 464 | 465 | Image.fromarray(limage).show() 466 | 467 | 468 | 469 | def getClusterSizes(self, cmap, nclass): 470 | 471 | cnt = np.zeros_like(nclass) 472 | 473 | for i in range(len(nclass)): 474 | c = nclass[i] 475 | ind = np.where( cmap == c ) 476 | cnt[i] = len(ind[0]) 477 | 478 | return cnt 479 | 480 | 481 | def runMethod(self, coeff = 0.5, eps = 1, iteration = 100): 482 | 483 | BG1 = Image.open("TMP1.png") 484 | BG2 = Image.open("TMP2.png") 485 | 486 | CurveList = [] 487 | 488 | cmap = self.getClusters(coeff, eps) 489 | 490 | for t in range(iteration): 491 | 492 | if len(np.unique(cmap)) < 20: 493 | break 494 | 495 | if DEBUG_MODE: 496 | print("Iteration = {}".format(t)) 497 | 498 | 499 | cmap = self.doIteration(cmap, crange=8, threshold=1) 500 | 501 | img = self.showClusters(cmap, self.SpecialColor) 502 | im = Image.fromarray(img) 503 | im.save('./applet_images/cluster/{}.png'.format(t)) 504 | 505 | 506 | #Convert this state to lines: 507 | if METHOD.__eq__("Linear"): 508 | lines = self.convertClustersToLines(cmap) 509 | CurveList += [lines] 510 | 511 | elif METHOD.__eq__("BestCurve"): 512 | curves = self.convertClustersToBestCurves(cmap, DEGREELIST) 513 | CurveList += [curves] 514 | else: 515 | curves = self.convertClustersToCurves(cmap, DEGREE) 516 | CurveList += [curves] 517 | 518 | 519 | bg = np.zeros((self.pmap.shape[0], self.pmap.shape[1], 3)) 520 | bg = np.uint8(bg) 521 | 522 | 523 | bgList = [bg, BG1, BG2] 524 | for i in range(len(bgList)): 525 | B = bgList[i] 526 | if METHOD.__eq__("Linear"): 527 | im = drawLinesWithEndingPoints(B, lines) 528 | else: 529 | im = drawCurves(B, curves) 530 | 531 | im.save('./applet_images/cluster/{}_{}_l.png'.format( i,t)) 532 | 533 | np.save("Curves.npy" , CurveList) 534 | 535 | 536 | 537 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # LineamentLearning 2 | 3 | Minerals exploration is becoming more difficult, particularly because most mineral deposits at the surface of the earth have been found. While there may be a lot of sensing data, there is a shortage of expertise to interpret that data. This thesis aims to bring some of the recent advances in AI to the interpretation of sensing data. Our AI model learns one-dimensional features (lineaments) from two-dimensional data (in particular, magnetics surveys, maps of gravity and digital elevation maps), which surprisingly has not had a great deal of attention (whereas getting two-dimensional or zero-dimensional features is very common). We define a convolutional neural network to predict the probability that a lineament passes through each location on the map. 
Then, using these probabilities, cluster analysis, and regression models, we develop a post-processing method to predict lineaments. We train and evaluate our model on large real-world datasets in BC and Australia. 4 | 5 | This repository contains all the code used in my [Master Thesis](http://hdl.handle.net/2429/68438). The program was developed in Python 3, using the NumPy, Keras, TensorFlow, Pillow, Tkinter, Matplotlib and SciPy libraries. 6 | 7 | ## Input Layers 8 | We use 8 aerial images to train this model: 9 | 10 | ![InputLayers](./InputLayers.png) 11 | 12 | ## Model 13 | We designed and trained the following model using the Keras and TensorFlow libraries. It starts from the input layer on the left, which consists of patches of size W × W × 8. Then we have a convolution layer that applies 3 × 3 convolution kernels to the layer input to produce the output. A rectified linear unit (ReLU) is applied to the outputs of the convolutions. In order to reduce the dimensionality and to allow generalization across patches, we use 6 × 6 max pooling operations, which combine the outputs of neuron clusters at one layer into a single neuron in the next layer. We use a flatten layer that reshapes and merges the previous hidden layers of the network into a single one-dimensional array. Finally, we use a fully connected neural network with two hidden layers with ReLU activations and one output layer of size one with sigmoid activation. A minimal Keras sketch of this architecture is included at the end of this README. 14 | 15 | ![NNModel](./Model.png) 16 | 17 | ## GUI Applet 18 | We developed our own small GUI applet to open datasets, train our model with different settings, and explore the resulting probability maps. 19 | ![AppletDemo](./AppletDemo.png) 20 | 21 | 22 | ## Author 23 | * [**Amin Aghaee**](https://github.com/aminrd/) 24 | 25 | You can find more details in my thesis [here](http://hdl.handle.net/2429/68438). 
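For reference, below is a minimal Keras sketch of the architecture described in the Model section. The filter count and hidden-layer widths follow `get_RotateNet()` in `MODEL.py`; the 6 × 6 pooling layer follows the description above (the code in `MODEL.py` itself omits it), and the patch width `W = 45` is just the default window size used elsewhere in the repository.

```python
# Illustrative sketch only -- see get_RotateNet() in MODEL.py for the trained configuration.
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense
from keras.optimizers import Adam

W, LAYERS = 45, 8   # each sample is a W x W patch with 8 input sheets

model = Sequential([
    Conv2D(8, 3, padding='valid', activation='relu', input_shape=(W, W, LAYERS)),
    MaxPooling2D(pool_size=(6, 6)),   # 6 x 6 max pooling described in the Model section
    Flatten(),                        # merge the feature maps into one 1-D vector
    Dense(300, activation='relu'),    # two fully connected hidden layers
    Dense(300, activation='relu'),
    Dense(1, activation='sigmoid'),   # probability that a lineament passes through the patch centre
])
model.compile(optimizer=Adam(), loss='binary_crossentropy', metrics=['accuracy'])
```

Training then amounts to calling `model.fit` on the patch/label pairs produced by `DATASET.generateDS`, which is what `MODEL.train` and `RotateLearning.py` do.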
26 | -------------------------------------------------------------------------------- /RotateLearning.py: -------------------------------------------------------------------------------- 1 | 2 | __author__ = "Amin Aghaee" 3 | __copyright__ = "Copyright 2018, Amin Aghaee" 4 | 5 | import os 6 | import sys 7 | import numpy as np 8 | import random 9 | import argparse 10 | 11 | # Loading Related Modules: 12 | # -------------------------------------- 13 | from globalVariables import * 14 | from Utility import * 15 | from DATASET import * 16 | #from MODEL import * 17 | from FILTER import * 18 | #from PmapViewer import * 19 | from Logger import * 20 | # -------------------------------------- 21 | 22 | 23 | 24 | def GET_PARSER(): 25 | parser = argparse.ArgumentParser() 26 | parser.add_argument('work', default='teset-choosy') 27 | parser.add_argument('-W', '--WSIZE', type=int, default=45) 28 | parser.add_argument('-it', '--iterations', type=int, default=ITERATIONS) 29 | parser.add_argument('-prefix', '--prepprefix', default='ANG_') 30 | parser.add_argument('-nprep', '--prefnumber', type=int, default=15) 31 | parser.add_argument('-CB', '--callback', default='FaultDetection.hdf5') 32 | return parser 33 | 34 | 35 | def SET_DEFAULT_ARGUMENTS(args): 36 | print('#'*30) 37 | print('Setting up global variables:') 38 | print(args) 39 | 40 | global ITERATIONS 41 | ITERATIONS = args.iterations 42 | global WindowSize 43 | WindowSize = args.WSIZE 44 | print('#' * 30) 45 | 46 | 47 | 48 | if __name__== "__main__": 49 | parser = GET_PARSER() 50 | args = parser.parse_args() 51 | work = args.work 52 | #SET_DEFAULT_ARGUMENTS(args) 53 | 54 | 55 | 56 | 57 | 58 | 59 | # ------------------ Training model only on faulty areas ------------------------------------------------------------ 60 | if work.__eq__("train-choosy"): 61 | 62 | step = np.pi / NUMBER_OF_DEGREE_MODELS 63 | 64 | for d in range(NUMBER_OF_DEGREE_MODELS): 65 | 66 | # Working on degree: 67 | baseDegree = np.pi / 2.0 - step * d - 0.00001 68 | 69 | fname = CB + 'Rotate_choosy_{}.hdf5'.format(d) 70 | model = MODEL(checkpoint=fname) 71 | 72 | for it in range(ITERATIONS): 73 | numberOfFiles = 3 + (it*33)//ITERATIONS 74 | 75 | if DEBUG_MODE: 76 | print("=" * 30) 77 | print("Iteration number is : {}".format(it)) 78 | print("---- working of {} number of files".format(numberOfFiles)) 79 | print("---- working on degree: {} degrees".format(baseDegree * 180 / np.pi)) 80 | print("=" * 30) 81 | 82 | X = np.zeros((1, WindowSize, WindowSize, Layers)) 83 | Y = np.zeros((1, 1)) 84 | 85 | myRatio = 1 / (6*numberOfFiles) 86 | 87 | for i in random.sample(list(range(fileNumber)), numberOfFiles): 88 | 89 | idx = np.int((i+1) * 10) 90 | ds_fname = DSDIR + "Australia_{}.mat".format(idx) 91 | ds = DATASET(ds_fname) 92 | 93 | [Xb, Yb, IDXb] = ds.generateDS(ds.DEGREES, ds.trainMask, ratio=myRatio, choosy=True, output_type = baseDegree) 94 | 95 | X = np.concatenate((X, Xb), axis=0) 96 | Y = np.concatenate((Y, Yb), axis=0) 97 | 98 | model.train(X, Y, epochs=1) 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | # ------------------ Testing model only on faulty areas ------------------------------------------------------------ 110 | elif work.__eq__("test-choosy"): 111 | 112 | 113 | 114 | #testList = list(range(36)) # See Results on all different rotations 115 | testList = list([23]) # See Results only on main file (because 36 = 360 degrees rotation = main file) 116 | 117 | step = np.pi / NUMBER_OF_DEGREE_MODELS 118 | 119 | 120 | 121 | for i in testList: 122 | 123 | if 
DEBUG_MODE: 124 | print("Working on rotation number : {}".format(i+1)) 125 | 126 | idx = np.int((i+1)*10) 127 | ds_fname = DSDIR + "Australia_{}.mat".format(idx) 128 | ds = DATASET(ds_fname) 129 | [X, Y, IDX] = ds.generateDS(ds.DEGREES, ds.MASK, ratio=0.99, choosy=True) 130 | 131 | MaxProb = -np.ones(len(Y)) 132 | MaxSlope = np.full(len(Y), np.pi / 2.0) 133 | 134 | slopes = np.zeros(NUMBER_OF_DEGREE_MODELS) 135 | 136 | for d in range(NUMBER_OF_DEGREE_MODELS): 137 | 138 | slopes[d] = (np.pi / 2.0 - step * d) 139 | 140 | fname = CB + 'Rotate_choosy_{}.hdf5'.format(d) 141 | model = MODEL(param_dir=fname) 142 | baseDegree = (np.pi / 2.0 - step * d) * 180 // np.pi 143 | Yh = model.predict(X) 144 | 145 | empty_matrix = np.zeros((ds.x, ds.y, 3)) 146 | O = np.array(empty_matrix) 147 | O[:, :, 0] = ds.OUTPUT * 255 148 | O[:, :, 1] = ds.OUTPUT * 255 149 | O[:, :, 2] = ds.OUTPUT * 255 150 | O = np.uint8(O) 151 | 152 | tmp = drawLines(O, IDX, Yh, WIDTH=3, FILL=128, ws=1, fname=FG + "Degree_{}_Line_{}_overlay.png".format(baseDegree, idx)) 153 | empty_matrix = np.uint8(empty_matrix) 154 | tmp = drawLines(empty_matrix, IDX, Yh, WIDTH=3, FILL=128, ws=3, fname=FG + "Degree_{}_Line_{}_alone.png".format(baseDegree,idx)) 155 | 156 | YhNormal = np.ndarray.flatten(Yh) 157 | mIdx = np.where(MaxProb < YhNormal) 158 | MaxProb[mIdx] = YhNormal[mIdx] 159 | MaxSlope[mIdx] = slopes[d] 160 | 161 | 162 | empty = np.uint8(np.zeros((ds.x, ds.y, 3))) 163 | tmp = drawLinesSlope(empty, IDX, MaxSlope, ws=6 ,fname=FG + 'Predictions_Alone_{}.png'.format(i + 1)) 164 | 165 | for r in range(3): 166 | empty[:,:,r] = np.uint8(ds.OUTPUT) 167 | 168 | tmp = drawLinesSlope(empty, IDX, MaxSlope, ws=6, fname=FG + 'Predictions_Overlay_{}.png'.format(i + 1)) 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | # ------------------ Train Fault detection method on all area, Not break mask, instead Bootstrapping on all input images ----------------------------- 182 | elif work.__eq__("train-fault-all"): 183 | 184 | windowList = [35,45] 185 | 186 | for W in windowList: 187 | fname = CB + 'FaultDetection_{}.hdf5'.format(W) 188 | model = MODEL(w = W, checkpoint=fname) 189 | 190 | 191 | for it in range(ITERATIONS): 192 | 193 | numberOfFiles = 3 + (it * (fileNumber - 3)) // ITERATIONS 194 | 195 | if DEBUG_MODE: 196 | print("=" * 30) 197 | print("Iteration number is : {}".format(it)) 198 | print("---- working of {} number of files".format(numberOfFiles)) 199 | print("=" * 30) 200 | 201 | X = np.zeros((1, W, W, Layers)) 202 | Y = np.zeros((1, 1)) 203 | 204 | myRatio = 1 / (9*numberOfFiles) 205 | 206 | for i in random.sample(list(range(fileNumber)), numberOfFiles): 207 | 208 | idx = np.int((i+1) * 10) 209 | ds_fname = DSDIR + "Australia_{}.mat".format(idx) 210 | ds = DATASET(ds_fname) 211 | 212 | t_mask_small = ds.shrinkMask('train') 213 | ds.expandBy(width=35, epsilon=0.9) 214 | 215 | [Xb, Yb, IDXb] = ds.generateDS(ds.OUTPUT, ds.trainMask, w = W, ratio=myRatio, output_type=0) 216 | 217 | X = np.concatenate((X,Xb), axis=0) 218 | Y = np.concatenate((Y, Yb), axis=0) 219 | 220 | model.train(X,Y,epochs=1) 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | # ------------------ Test Fault detection method on all area, break mask ----------------------------- 233 | elif work.__eq__("test-fault-all"): 234 | 235 | windowList = [35,45] 236 | 237 | #testList = list(range(36)) 238 | testList = list([35]) 239 | 240 | for i in testList: 241 | 242 | idx = np.int((i+1) * 10) 243 | ds_fname = DSDIR + 
"Australia_{}.mat".format(idx) 244 | #ds_fname = DSDIR + "QUEST_0.mat" 245 | 246 | ds = DATASET(ds_fname) 247 | 248 | O = np.zeros((ds.x, ds.y, 3)) 249 | O[:, :, 0] = ds.OUTPUT * 255 250 | O[:, :, 1] = ds.OUTPUT * 255 251 | O[:, :, 2] = ds.OUTPUT * 255 252 | O = np.uint8(O) 253 | 254 | mergeAll = np.zeros((ds.x, ds.y, 3)) 255 | 256 | for W in windowList: 257 | 258 | fname = CB + 'FaultDetection_{}.hdf5'.format(W) 259 | model = MODEL(w = W, param_dir=fname) 260 | 261 | if DEBUG_MODE: 262 | print("-"*30) 263 | print("Loading Model W = {}".format(W)) 264 | print("Drawing output for rotation number : {}".format(i+1)) 265 | 266 | 267 | [X, Y, IDX] = ds.generateDS(ds.OUTPUT, ds.MASK, ratio=0.25, output_type=0, w =W) 268 | Yh = model.predict(X) 269 | 270 | #d = drawLines(O, IDX , Yh, WIDTH = 1 , FILL = 128, ws = 1, fname = FG+'Map_allarea_w{}_{}.png'.format(W,i+1)) 271 | d = drawLines(O, IDX, Yh, WIDTH=1, FILL=128, ws=1, fname=FG + 'Map_r_allarea_w{}_{}.png'.format(W, i + 1)) 272 | 273 | pmap = probMap(ds.OUTPUT.shape, IDX, Yh) 274 | #P = PmapViewer(matrix=pmap, bg=ds.OUTPUT) 275 | #P.save(FG+'Map_allarea_w{}_{}.npz'.format(W,i+1)) 276 | #P.save(FG + 'Map_allarea_QUEST_w{}_{}.npz'.format(W, i + 1)) 277 | 278 | del X 279 | del Y 280 | del IDX 281 | 282 | mergeAll = mergeAll / len(windowList) 283 | showMatrix(mergeAll, dim=3, fname=FG + 'MergeAll_{}.png'.format(i+1), show=False) 284 | #showMatrix(mergeAll, dim=3, fname=FG + 'MergeAll_QUEST_{}.png'.format(i + 1), show=False) 285 | 286 | 287 | 288 | 289 | 290 | 291 | 292 | 293 | 294 | 295 | 296 | 297 | # ------------------ Test Fault detection method on all area, break mask ----------------------------- 298 | elif work.__eq__("test-fault-all-derotate"): 299 | 300 | fname = CB + 'FaultDetection.hdf5' 301 | model = MODEL(param_dir=fname) 302 | windowList = [15, 21, 27, 35, 45] 303 | 304 | #testList = list(range(36)) 305 | testList = list([35]) 306 | 307 | for i in testList: 308 | 309 | if DEBUG_MODE: 310 | print("Drawing output for rotation number : {}".format(i+1)) 311 | 312 | idx = np.int((i+1) * 10) 313 | ds_fname = DSDIR + "Australia_{}.mat".format(idx) 314 | 315 | ds = DATASET(ds_fname) 316 | 317 | O = np.zeros((ds.x, ds.y, 3)) 318 | O[:, :, 0] = ds.OUTPUT * 255 319 | O[:, :, 1] = ds.OUTPUT * 255 320 | O[:, :, 2] = ds.OUTPUT * 255 321 | O = np.uint8(O) 322 | 323 | 324 | [X, Y, IDX] = ds.generateDS(ds.OUTPUT, ds.testMask, raio = 0.99, output_type=0) 325 | Yh = model.predict(X) 326 | d = drawLines(O, IDX , Yh, WIDTH = 1 , FILL = 128, ws = 1, fname = FG+'Map_testarea_{}.png'.format(i+1)) 327 | del X 328 | del Y 329 | del IDX 330 | d = rotateWithMap(d, ds.M2R, map_type = 'm2r', dim = 3) 331 | 332 | if i == 0: 333 | mergeTest = np.zeros(d.shape) 334 | mergeAll = np.zeros(d.shape) 335 | 336 | mergeTest = mergeTest + d 337 | 338 | [X, Y, IDX] = ds.generateDS(ds.OUTPUT, ds.MASK, raio=0.25, output_type=0) 339 | Yh = model.predict(X) 340 | d = drawLines(O, IDX , Yh, WIDTH = 1 , FILL = 128, ws = 1, fname = FG+'Map_allarea_{}.png'.format(i+1)) 341 | del X 342 | del Y 343 | del IDX 344 | d = rotateWithMap(d, ds.M2R, map_type='m2r', dim=3) 345 | 346 | mergeAll = mergeAll + d 347 | 348 | mergeAll = np.uint8(mergeAll / 36) 349 | im = Image.fromarray(mergeAll) 350 | im.save(FG+'mergeAll.png') 351 | mergeTest = np.uint8(mergeTest / 36) 352 | im = Image.fromarray(mergeTest) 353 | im.save(FG+'mergeTest.png') 354 | 355 | 356 | 357 | 358 | 359 | 360 | 361 | 362 | 363 | 364 | 365 | 366 | 367 | 368 | elif work.__eq__("prepare-datasets-ang"): 369 | 370 | ds = 
DATASET(DSDIR + 'Australia_360.mat') 371 | 372 | NFILE = 30 373 | NREPEAT = 10 374 | RATIO = 0.5 375 | FNAME = DSREADY + 'ANG_' 376 | flt_name = FILTERDIR + 'Filters_w45_100.mat' 377 | 378 | 379 | for t1 in range(NFILE): 380 | 381 | X = np.zeros((1, WindowSize, WindowSize, Layers)) 382 | Y = np.zeros((1, 1)) 383 | for t2 in range(NREPEAT): 384 | 385 | [Xb, Yb, IDXb] = ds.generateDSwithFilter('train',ds.DEGREES, ds.trainMask, ratio=RATIO, choosy=True) 386 | X = np.concatenate((X, Xb), axis=0) 387 | Y = np.concatenate((Y, Yb), axis=0) 388 | 389 | np.savez(FNAME+'{}'.format(t1), X=X, Y=Y) 390 | 391 | 392 | 393 | 394 | 395 | 396 | 397 | 398 | 399 | 400 | 401 | 402 | elif work.__eq__("prepare-datasets-flt"): 403 | 404 | W = args.WSIZE 405 | 406 | 407 | ds1 = DATASET(DSDIR + 'Australia_strip.mat') 408 | ds2 = DATASET(DSDIR + 'QUEST_strip.mat') 409 | 410 | RATIO = [0.04, 0.005] 411 | oname = ['A_','Q_'] 412 | 413 | 414 | ds1.expandBy(width=W, epsilon=0.9) 415 | ds2.expandBy(width=W, epsilon=0.9) 416 | 417 | ds = [ds1, ds2] 418 | 419 | 420 | 421 | NFILE = [100,100] 422 | 423 | for t2 in range(len(ds)): 424 | 425 | for t1 in range(NFILE[t2]): 426 | 427 | [X, Y, IDXb] = ds[t2].generateDSwithFilter('train',ds[t2].OUTPUT, ds[t2].trainMask, w=W , ratio=RATIO[t2], choosy=False) 428 | 429 | FNAME = DSREADY + oname[t2] + '{}'.format(t1) 430 | np.savez(FNAME, X=X, Y=Y) 431 | 432 | 433 | 434 | 435 | 436 | 437 | 438 | 439 | 440 | 441 | 442 | 443 | 444 | elif work == "train-prepared": 445 | 446 | W = args.WSIZE 447 | 448 | prefix = args.prepprefix 449 | nfile = args.prefnumber 450 | 451 | if prefix == "A_": 452 | fname = CB + '{}_Fault_Australia.hdf5'.format(W) 453 | elif prefix == "Q_": 454 | fname = CB + '{}_Fault_Quest.hdf5'.format(W) 455 | else: 456 | fname = CB + '{}_Fault_Mixed.hdf5'.format(W) 457 | 458 | model = MODEL(w=W, checkpoint=fname) 459 | 460 | 461 | for i in range(nfile): 462 | 463 | if DEBUG_MODE: 464 | print("******* Working of prepared file: {}".format(i+1) + '-'+ slideBar(i*100/args.prefnumber)) 465 | 466 | if prefix == "A_" or prefix == "Q_": 467 | ds_fname = DSREADY+ prefix + "{}.npz".format(i) 468 | data = np.load(ds_fname) 469 | model.train(data['X'], data['Y'], epochs=1) 470 | 471 | else: 472 | mixed_list = ['A_','Q_'] 473 | X = np.zeros((1, W, W, Layers)) 474 | Y = np.zeros((1, 1)) 475 | 476 | for p in mixed_list: 477 | ds_fname = DSREADY + p + "{}.npz".format(i) 478 | data = np.load(ds_fname) 479 | X = np.concatenate((X, data['X']), axis=0) 480 | Y = np.concatenate((Y, data['Y']), axis=0) 481 | 482 | model.train(X, Y, epochs=1) 483 | 484 | 485 | 486 | 487 | 488 | 489 | 490 | 491 | 492 | 493 | 494 | elif work.__eq__("test-fault-all-prep"): 495 | 496 | testList = ['Australia_strip.mat', 'QUEST_strip.mat'] 497 | 498 | W = 45 499 | flt_name = FILTERDIR + 'Filters_0_w45.mat' 500 | 501 | fname = CB + args.callback 502 | model = MODEL(w=W, param_dir=fname) 503 | 504 | 505 | for T in testList: 506 | ds_fname = DSDIR + T 507 | ds = DATASET(ds_fname) 508 | 509 | O = np.zeros((ds.x, ds.y, 3)) 510 | O[:, :, 0] = ds.OUTPUT * 255 511 | O[:, :, 1] = ds.OUTPUT * 255 512 | O[:, :, 2] = ds.OUTPUT * 255 513 | O = np.uint8(O) 514 | 515 | 516 | if DEBUG_MODE: 517 | print("-"*30) 518 | print("Loading Model W = {}".format(W)) 519 | print("Drawing output for rotation number : {}".format(i+1)) 520 | 521 | [X, Y, IDX] = ds.generateDSwithFilter('test', ds.OUTPUT, ds.MASK, ratio=0.1, w=W, choosy=False) 522 | Yh = model.predict(X) 523 | 524 | d = drawLines(O, IDX , Yh, WIDTH = 1 , FILL = 128, ws = 1, fname = 
FG+'Map_allarea_w{}_{}.png'.format(W,i+1), threshold=0.4) 525 | pmap = probMap(ds.OUTPUT.shape, IDX, Yh) 526 | del X 527 | del Y 528 | del IDX 529 | 530 | ofname = FG + 'Probmap_{}.png'.format(idx) 531 | showMatrix(pmap, dim=2, fname=ofname, show=False) 532 | 533 | P = PmapViewer(matrix=pmap, bg = ds.OUTPUT) 534 | P.save(dir = FG + 'Probmap_{}.npz'.format(idx)) 535 | 536 | 537 | 538 | 539 | 540 | 541 | 542 | 543 | 544 | 545 | 546 | 547 | elif work.__eq__("test-choosy-prepared"): 548 | 549 | #testList = list(range(36)) # See Results on all different rotations 550 | testList = list([35]) # See Results only on main file (because 36 = 360 degrees rotation = main file) 551 | W = 45 552 | 553 | flt_name = FILTERDIR + 'Filters_w45_36.mat' 554 | FLT = FILTER(flt_name) 555 | 556 | model = MODEL(w=W, param_dir=CB + 'Rotate_choosy.hdf5') 557 | 558 | 559 | for i in testList: 560 | 561 | if DEBUG_MODE: 562 | print("-"*30) 563 | print("Drawing output for rotation number : {}".format(i+1)) 564 | 565 | idx = np.int((i+1)*10) 566 | ds_fname = DSDIR + "Australia_{}.mat".format(idx) 567 | ds = DATASET(ds_fname) 568 | 569 | PMAPS = np.zeros((ds.OUTPUT.shape[0], ds.OUTPUT.shape[1], FLT.N)) 570 | [X, Y, IDX] = ds.generateDSwithFilter('test', ds.DEGREES, ds.MASK, ratio=0.99, w=W, choosy=True) 571 | 572 | for r in range(FLT.N): 573 | 574 | [fnum,filter] = FLT.getFilterbyNumber(r) 575 | 576 | Xr = np.array(X) 577 | xr = np.zeros((W,W,Layers)) 578 | 579 | # Rotate test test: 580 | for id in range(Xr.shape[0]): 581 | xr = Xr[id, :,:,:] 582 | Xr[id, :,:,:] = rotateWithMap(xr, filter, map_type = 'm2r', dim = 2) 583 | 584 | Yh = model.predict(Xr) 585 | PMAPS[:,:,r] = probMap(ds.OUTPUT.shape, IDX, Yh) 586 | 587 | P = PmapViewer(matrix=PMAPS, bg = ds.OUTPUT) 588 | P.save(dir=FG + 'Probmap_choosy_{}.npz'.format(idx)) 589 | 590 | 591 | 592 | 593 | 594 | 595 | 596 | 597 | elif work.__eq__("apply-on-prediction"): 598 | Wf = 45 # Fault deteciton window size 599 | Wa = 45 # Angel detection window size 600 | threshold = 0.4 601 | 602 | flt_name = FILTERDIR + 'Filters_0_w45.mat' 603 | 604 | ds_fname = DSDIR + "Australia_360.mat" 605 | ds = DATASET(ds_fname) 606 | 607 | model_flt = MODEL(w=Wf, param_dir=CB + 'FaultDetection.hdf5') 608 | model_ang = MODEL(w=Wa, param_dir=CB + 'Rotate_choosy.hdf5') 609 | 610 | [X, Y, IDX] = ds.generateDSwithFilter('test', ds.OUTPUT, ds.MASK, ratio=0.2, w=Wf, choosy=False) 611 | Yh1 = model_flt.predict(X) 612 | pmap = probMap(ds.OUTPUT.shape, IDX, Yh1) 613 | newMask = pmapCutoff(pmap, threshold) 614 | 615 | 616 | flt_name = FILTERDIR + 'Filters_w45_36.mat' 617 | FLT = FILTER(flt_name) 618 | 619 | PMAPS = np.zeros((ds.OUTPUT.shape[0], ds.OUTPUT.shape[1], FLT.N)) 620 | [X, Y, IDX] = ds.generateDSwithFilter('test', ds.DEGREES, newMask, ratio=0.99, w=Wa, 621 | choosy=True) 622 | 623 | MaxProb = -np.ones(len(Y)) 624 | MaxSlope = np.full(len(Y), np.pi / 2.0) 625 | 626 | for r in range(FLT.N//2): 627 | 628 | [fnum, filter] = FLT.getFilterbyNumber(r) 629 | slope = 2*np.pi*r / FLT.N 630 | slope = np.arctan(np.tan(slope)) 631 | 632 | 633 | Xr = np.array(X) 634 | xr = np.zeros((Wa, Wa, Layers)) 635 | 636 | # Rotate test test: 637 | for id in range(Xr.shape[0]): 638 | xr = Xr[id, :, :, :] 639 | Xr[id, :, :, :] = rotateWithMap(xr, filter, map_type='m2r', dim=2) 640 | 641 | Yh = model_ang.predict(Xr) 642 | 643 | YhNormal = np.ndarray.flatten(Yh) 644 | mIdx = np.where(MaxProb < YhNormal) 645 | MaxProb[mIdx] = YhNormal[mIdx] 646 | MaxSlope[mIdx] = slope 647 | 648 | empty = np.uint8(np.zeros((ds.x, ds.y, 3))) 649 | 
tmp = drawLinesSlope(empty, IDX, MaxSlope, ws=10, fname=FG + 'Combined_Alone.png') 650 | 651 | for r in range(3): 652 | empty[:, :, r] = np.uint8(ds.OUTPUT*255) 653 | 654 | tmp = drawLinesSlope(empty, IDX, MaxSlope, ws=15, fname=FG + 'Combined_Ovelay.png') 655 | 656 | 657 | 658 | 659 | 660 | 661 | elif work.__eq__("prepare-pmap"): 662 | Wf = args.WSIZE 663 | ratio = 0.999 664 | 665 | testList = ['Australia_strip.mat', 'QUEST_strip.mat'] 666 | 667 | 668 | for T in testList: 669 | ds_fname = DSDIR + T 670 | ds = DATASET(ds_fname) 671 | 672 | model_flt = MODEL(w=Wf, param_dir=CB + args.callback) 673 | 674 | 675 | masknumber = 80 676 | masks = ds.shrinkMask(maskName="all", number=masknumber) 677 | pmap = np.zeros(ds.OUTPUT.shape) 678 | 679 | for i in range(masknumber): 680 | [X, Y, IDX] = ds.generateDSwithFilter('test', ds.OUTPUT, masks[i], ratio=ratio, w=Wf, choosy=False) 681 | Yh1 = model_flt.predict(X) 682 | pmap_tmp = probMap(ds.OUTPUT.shape, IDX, Yh1) 683 | pmap = np.maximum(pmap, pmap_tmp) 684 | 685 | 686 | # Logging activity: 687 | L = Logger() 688 | L.addlog("-"*30) 689 | L.addlog(" W = {} ".format(Wf)) 690 | L.addlog(" Callback = {}".format(args.callback)) 691 | L.addlog(" Map = {}".format(T)) 692 | 693 | ev_train = ds.evaluate(pmap, Wf, 'train') 694 | ev_test = ds.evaluate(pmap, Wf, 'test') 695 | ev_all = ds.evaluate(pmap, Wf, 'all') 696 | 697 | L.addlog(" Train Error = {} , {}".format(ev_train[0], ev_train[1])) 698 | L.addlog(" Test Error = {} , {}".format(ev_test[0], ev_test[1])) 699 | L.addlog(" All Error = {} , {}".format(ev_all[0], ev_all[1])) 700 | 701 | 702 | pmapname = PMAP_DIR + '{}_Pmamp_'.format(Wf)+ args.callback + '_on_{}_'.format(T[:5]) + '.npz' 703 | np.savez(pmapname, matrix=pmap) 704 | 705 | 706 | elif work == "evaluate_pmap": 707 | T = args.prepprefix 708 | ds_fname = DSDIR + T 709 | ds = DATASET(ds_fname) 710 | 711 | Train_E = [[],[]] 712 | Test_E = [[], []] 713 | All_E = [[], []] 714 | 715 | for Wf in range(9,57,4): 716 | 717 | if DEBUG_MODE: 718 | print("- Evaluating {} ------- W = {}".format(T, Wf)) 719 | 720 | 721 | # if args.callback == 'zeros': 722 | # pmap = np.zeros_like(ds.OUTPUT) 723 | # elif args.callback == 'ones': 724 | # pmap = np.ones_like(ds.OUTPUT) 725 | # else: 726 | # pmap = ds.expandBy(Wf, epsilon=0.9, set=False) 727 | 728 | pmapname = PMAP_DIR + '{}_Pmamp_'.format(Wf) + "{}_".format(Wf) + args.callback + '_on_{}_'.format(T[:5]) + '.npz' 729 | pmap = np.load(pmapname)['matrix'] 730 | 731 | 732 | eval_type = 'loss' 733 | 734 | [pos, neg] = ds.evaluate(pmap, Wf, 'train', eval_type) 735 | Train_E[0] += [pos] 736 | Train_E[1] += [neg] 737 | 738 | [pos, neg] = ds.evaluate(pmap, Wf, 'test', eval_type) 739 | Test_E[0] += [pos] 740 | Test_E[1] += [neg] 741 | 742 | [pos, neg] = ds.evaluate(pmap, Wf, 'all', eval_type) 743 | All_E[0] += [pos] 744 | All_E[1] += [neg] 745 | 746 | errors = {'TrainE':Train_E , 'TestE':Test_E, 'AllE':All_E} 747 | sio.savemat('loss_'+ args.callback + "_" + T + "_eval.mat", errors) 748 | 749 | 750 | 751 | 752 | else: 753 | print(globals()) 754 | print("No job is defined!") 755 | -------------------------------------------------------------------------------- /Utility.py: -------------------------------------------------------------------------------- 1 | 2 | # Utility file: Contains functions to show matrices, normalizations, .... 
3 | __author__ = "Amin Aghaee" 4 | __copyright__ = "Copyright 2018, Amin Aghaee" 5 | import numpy as np 6 | from PIL import Image, ImageDraw 7 | from globalVariables import * 8 | 9 | 10 | 11 | def slideBar(pct = 10.0, totalLength = 30): 12 | [p1,p2] = [(pct*totalLength)//100 , ((100-pct)*totalLength)//100] 13 | return '{'+ '='*int(p1) +'#'+ '-'*int(p2) +'}' 14 | 15 | 16 | 17 | def myNormalizer(matrix): 18 | xmax, xmin = matrix.max(), matrix.min() 19 | 20 | if xmax == xmin: 21 | if xmax == 0: 22 | return np.zeros(np.array(matrix).shape) 23 | else: 24 | return np.ones(np.array(matrix).shape) 25 | 26 | [XMAX, XMIN] = [xmax, xmin] 27 | 28 | if xmin < -10000.0: 29 | idxMin = matrix == xmin 30 | matrix[idxMin] = 0.0 31 | XMIN = matrix.min() 32 | if xmax > 10000.0: 33 | idxMax = matrix == xmax 34 | matrix[idxMax] = 0.0 35 | XMAX = matrix.max() 36 | 37 | matrix = (matrix - XMIN) / (XMAX - XMIN) 38 | 39 | if xmin < -10000.0: 40 | matrix[idxMin] = -100.0 41 | if xmax > 10000.0: 42 | matrix[idxMax] = 100.0 43 | 44 | return matrix 45 | 46 | 47 | 48 | def rotateWithMap(mat, rmap, map_type = 'r2m', dim = 1): 49 | 50 | MODE_VALUE = 100000 51 | matrix = np.array(mat) 52 | newMat = np.zeros(matrix.shape) 53 | 54 | if dim == 1: 55 | [_x, _y] = matrix.shape 56 | else: 57 | [_x, _y] = matrix[:,:,0].shape 58 | 59 | 60 | if map_type.__eq__('r2m'): 61 | 62 | if dim == 1: 63 | flagMat = np.zeros(matrix.shape) 64 | else: 65 | flagMat = np.zeros(matrix[:, :, 1].shape) 66 | 67 | 68 | for i in range(_x): 69 | for j in range(_y): 70 | val = rmap[i][j] 71 | x0 = val // MODE_VALUE 72 | y0 = val % MODE_VALUE 73 | 74 | if x0 >= _x or y0 >= _y or x0 < 0 or y0 <0: 75 | continue 76 | 77 | if dim == 1: 78 | newMat[x0][y0] = matrix[i][j] 79 | else: 80 | newMat[x0,y0,:] = matrix[i,j,:] 81 | 82 | flagMat[x0][y0] = 1 83 | 84 | for i in range(1,_x-1): 85 | for j in range(1, _y - 1): 86 | if flagMat[i][j] == 0: 87 | if dim == 1: 88 | newMat[i][j] = (newMat[i + 1][j] + newMat[i - 1][j] + newMat[i][j + 1] + newMat[i][j - 1]) // 4 89 | else: 90 | newMat[i,j,:] = (newMat[i-1,j,:] + newMat[i+1,j-1,:] + newMat[i,j+1,:] + newMat[i,j,:])//4 91 | 92 | 93 | elif map_type.__eq__('m2r'): 94 | for i in range(_x): 95 | for j in range(_y): 96 | val = rmap[i][j] 97 | 98 | x0 = val // MODE_VALUE 99 | y0 = val % MODE_VALUE 100 | 101 | if x0 >= _x or y0 >= _y or x0 < 0 or y0 < 0: 102 | continue 103 | 104 | if dim == 1: 105 | newMat[i][j] = matrix[x0][y0] 106 | else: 107 | newMat[i,j,:] = matrix[x0,y0,:] 108 | 109 | return newMat 110 | 111 | 112 | 113 | def showMatrix(matrix , dim = 3, fname = FG+'DEFAULT.png', show = True): 114 | a = np.array(matrix) 115 | 116 | if a.min() < 0: 117 | a[np.where(a == a.min())] = 0 118 | 119 | if a.max() > 1: 120 | a[np.where(a == a.max())] = 1 121 | 122 | a = a * 255 123 | a = np.uint8(a) 124 | if dim == 3: 125 | img = Image.fromarray(a[:,:,0] , 'L') 126 | elif dim == 2: 127 | img = Image.fromarray(a[:, :], 'L') 128 | 129 | img.save(fname) 130 | if show==True: 131 | img.show() 132 | 133 | return img 134 | 135 | 136 | def markPredictions(matrix, pmap, WIDTH = 3 , FILL = 128, fname = FG+'Default.png'): 137 | im = Image.fromarray(matrix) 138 | idx = np.where(pmap == 1) 139 | draw = ImageDraw.Draw(im) 140 | 141 | for k in range(len(idx[0])): 142 | [i,j] = [idx[0][k] , idx[1][k]] 143 | draw.line((j , i , j , i ), fill = FILL, width=WIDTH) 144 | 145 | im.save(fname) 146 | return im 147 | 148 | 149 | 150 | def drawLines(matrix, idx , Y, WIDTH = 3 , FILL = 128, ws = 50, fname = FG+'lines.png', threshold = 0.51): 151 | # ws = 
window size, how many pixels go left or right in x-axis 152 | 153 | im = Image.fromarray(matrix) 154 | draw = ImageDraw.Draw(im) 155 | 156 | for k in range(len(idx[0])): 157 | [i,j] = [idx[0][k] , idx[1][k]] 158 | if Y[k] >= threshold: 159 | draw.line((j , i - ws , j , i + ws ), fill = FILL, width=WIDTH) 160 | 161 | 162 | im.save(fname) 163 | return np.asanyarray(im) 164 | 165 | 166 | def probMap(shape,idx, Y): 167 | pmap = np.zeros(shape) 168 | for k in range(len(idx[0])): 169 | pmap[idx[0][k] , idx[1][k]] = Y[k] 170 | return pmap 171 | 172 | def pmapCutoff(pmap, threshold = 0.5): 173 | p = np.zeros(pmap.shape) 174 | p[ np.where(pmap >= threshold) ] = 1 175 | return p 176 | 177 | 178 | def modeIndex(M): 179 | '''Gets N 2D probability maps and 180 | returns maximum index of those values''' 181 | matrix = np.array(M) 182 | nonIndex = np.where(matrix[:,:,0] == 0) 183 | 184 | result = -np.ones((matrix.shape[0], matrix.shape[1])) 185 | mx = np.array(result) 186 | 187 | for d in range(matrix.shape[2]): 188 | mx = np.maximum(mx, matrix[:,:,d]) 189 | 190 | for d in range(matrix.shape[2]): 191 | idx = np.where(mx == matrix[:,:,d]) 192 | result[idx] = d 193 | 194 | result[nonIndex] = -1 195 | return result 196 | 197 | 198 | def drawLinesSlope(matrix, idx , sloopes, WIDTH = 3 , FILL = 128, ws = 50, fname = FG+'Slopes.png', prelative = False, parray=None): 199 | '''ws = window size, how many pixels go left or right in x-axis''' 200 | 201 | slopes = np.tan(sloopes) 202 | im = Image.fromarray(matrix) 203 | draw = ImageDraw.Draw(im) 204 | 205 | if prelative==False: 206 | 207 | for k in range(len(idx[0])): 208 | [i,j] = [idx[0][k] , idx[1][k]] 209 | S = slopes[k] 210 | 211 | if np.abs(S) <= 1.0: 212 | [x1, y1] = [-ws, np.floor(-S * ws)] 213 | [x2, y2] = [ ws, np.floor( S * ws)] 214 | elif np.abs(S > 4.5): 215 | [x1, y1] = [np.floor(-ws / S), -ws] 216 | [x2, y2] = [np.floor( ws / S), ws] 217 | else: 218 | continue 219 | #[x1,x2] = [0,0] 220 | #[y1,y2] = [0-ws, ws] 221 | 222 | draw.line((j + x1, i - y1, j + x2, i - y2), fill = FILL, width=WIDTH) 223 | 224 | else: 225 | 226 | parray = np.ndarray.flatten(parray) 227 | 228 | for k in range(len(idx[0])): 229 | [i, j] = [idx[0][k], idx[1][k]] 230 | S = slopes[k] 231 | _ws = int(np.ceil(ws * parray[k])) + 1 232 | 233 | if np.abs(S) <= 1.0: 234 | [x1, y1] = [-_ws, np.floor(-S * _ws)] 235 | [x2, y2] = [_ws, np.floor(S * _ws)] 236 | elif np.abs(S > 4.5): 237 | [x1, y1] = [np.floor(-_ws / S), -_ws] 238 | [x2, y2] = [np.floor(_ws / S), _ws] 239 | else: 240 | [x1, x2] = [0, 0] 241 | [y1, y2] = [0 - _ws, _ws] 242 | 243 | draw.line((j + x1, i - y1, j + x2, i - y2), fill=FILL, width=WIDTH) 244 | 245 | 246 | im.save(fname) 247 | return np.asanyarray(im) 248 | 249 | 250 | def drawLinesWithEndingPoints(bg, lines, fname=FG+'lines.png', _width=5): 251 | # Format of lines: array of pairs [P1,P2] 252 | # P1 = [x1,y1] , P2=[x2,y2] 253 | bg = np.uint8(bg) 254 | im = Image.fromarray(bg) 255 | draw = ImageDraw.Draw(im) 256 | 257 | for l in lines: 258 | draw.line((l[0][1], l[0][0], l[1][1], l[1][0]), fill=128, width=_width) 259 | 260 | 261 | im.save(fname) 262 | return im 263 | 264 | 265 | def drawCurves(bg, curves, fname=FG+'curves.png', _width=5): 266 | # Each curve contains two lists [Xset, Yset] 267 | # Xset = [x1,x2,....] , Yset = [y1, y2, ...] 
268 | 269 | bg = np.uint8(bg) 270 | im = Image.fromarray(bg) 271 | draw = ImageDraw.Draw(im) 272 | 273 | for c in curves: 274 | x = c[0] 275 | y = c[1] 276 | 277 | for i in range(len(x)-1): 278 | draw.line( (y[i], x[i], y[i+1], x[i+1]) , fill=128, width=_width ) 279 | 280 | im.save(fname) 281 | return im 282 | 283 | 284 | 285 | 286 | 287 | def colour2vec(colour = 'red'): 288 | if colour.__eq__('red'): 289 | return np.array([1,0,0]) 290 | elif colour.__eq__('green'): 291 | return np.array([0, 1, 0]) 292 | elif colour.__eq__('blue'): 293 | return np.array([0, 0, 1]) 294 | elif colour.__eq__('yellow'): 295 | return np.array([1, 1, 0]) 296 | elif colour.__eq__('white'): 297 | return np.array([1, 1, 1]) 298 | elif colour.__eq__('blue'): 299 | return np.array([0, 0, 1]) 300 | else: 301 | return np.array([0, 0, 0]) 302 | 303 | 304 | 305 | def getRandomColour(channel=3, tint = 'default'): 306 | 307 | if tint == 'red': 308 | r = np.random.choice(range(200, 255)) 309 | g = np.random.choice(range(10, 100)) 310 | b = np.random.choice(range(10, 100)) 311 | return [r,g,b] 312 | 313 | elif tint == 'green': 314 | g = np.random.choice(range(200, 255)) 315 | r = np.random.choice(range(10, 100)) 316 | b = np.random.choice(range(10, 100)) 317 | return [r,g,b] 318 | 319 | elif tint == 'blue': 320 | b = np.random.choice(range(200, 255)) 321 | r = np.random.choice(range(10, 100)) 322 | g = np.random.choice(range(10, 100)) 323 | return [r,g,b] 324 | 325 | else: 326 | return np.random.choice(range(10, 255), channel) 327 | 328 | 329 | 330 | def circular_mask(width = 5 , R = None): 331 | radius = (width - 1) / 2 332 | 333 | if R is None: 334 | R = radius 335 | 336 | Y, X = np.ogrid[:width, :width] 337 | distance = np.sqrt((Y - radius) ** 2 + (X - radius) ** 2) 338 | 339 | return distance <= R 340 | -------------------------------------------------------------------------------- /applet.json: -------------------------------------------------------------------------------- 1 | { 2 | "dataset":{ 3 | "link" : "./Dataset/Australia/Rotations/Australia_strip.mat" 4 | }, 5 | "model1":{ 6 | "link" : "./CallBacks/Rotate/FaultDetection.hdf5", 7 | "w": "45" 8 | }, 9 | "model2": { 10 | "link" : "./CallBacks/Rotate/Rotate_choosy.hdf5", 11 | "w": "45" 12 | }, 13 | "pmap": { 14 | "link" : "./PMAP.npz", 15 | "plink": "./Results/NewTrainingRandom_strip_mixed/Pmamp_Fault_Australia.hdf5Australia_strip.mat.npz", 16 | "alink": "./Results/TrainOnRandomSelection_w35_angel/PMAP_angel.npz", 17 | "trained" : "1", 18 | "lnumber" : "0" 19 | }, 20 | "filter":{ 21 | "link" : "./Filters/Filters_w45_36.mat" 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /applet_images/graybar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aminrd/LineamentLearning/3d5ecaef46bdc3a86532a6d5e41f3a316553c566/applet_images/graybar.png -------------------------------------------------------------------------------- /globalVariables.py: -------------------------------------------------------------------------------- 1 | # Define all global variables here: 2 | __author__ = "Amin Aghaee" 3 | __copyright__ = "Copyright 2018, Amin Aghaee" 4 | 5 | import numpy as np 6 | 7 | DEBUG_MODE = True # [DB=T][DB=F] 8 | 9 | # -------------------------------------------------- 10 | WindowSize = 45 # Size of window for learning slopes 11 | Layers = 8 # Layer number which running our model on 12 | fileNumber = 36 # 36 for only using the rotations, 108 for flipping as 
well 13 | 14 | 15 | 16 | maskTh = 0.9 # For TH = 0.9 , means when 90% of a window is inside a mask, it is acceptable 17 | radianTH = np.pi / 12.0 18 | ITERATIONS = 150 # Maximum number of Iterations on learning procedures 19 | # Number of different models for Degrees 20 | # E.g. each of those models are designed to predict different angels 21 | NUMBER_OF_DEGREE_MODELS = 6 22 | 23 | 24 | 25 | MATLAB_DATASET_FILE = 'PYDataset.mat' 26 | 27 | 28 | # -------------------------------------------------- 29 | # Directories: 30 | 31 | CB = './CallBacks/Rotate/' 32 | FG = './Figures/Rotate/' 33 | DSDIR = './Dataset/Australia/Rotations/' 34 | DSREADY = './Dataset/DSREADY/' 35 | FILTERDIR = './Filters/' 36 | PMAP_DIR = './Pmaps/' 37 | 38 | # -------------------------------------------------- 39 | 40 | 41 | # APPLET global variables: 42 | MAX_WINDOW_SIZE = 1000 43 | LOAD_MODELS = False 44 | -------------------------------------------------------------------------------- /temp.py: -------------------------------------------------------------------------------- 1 | __author__ = "Amin Aghaee" 2 | __copyright__ = "Copyright 2018, Amin Aghaee" 3 | 4 | #from Prob2Line import * 5 | #pmap = np.load('./Results/NewTrainingRandom_strip_mixed/Pmamp_Fault_Australia.hdf5Australia_strip.mat.npz') 6 | #pmap = pmap['matrix'] 7 | #p2l = prob2map(pmap) 8 | #p2l.runMethod(coeff=0.66, eps = 3, iteration=350) 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | from DATASET import * 17 | import scipy.io as sio 18 | # -------------------------------------- 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | testList = ['Australia_strip.mat', 'QUEST_strip.mat'] 27 | for T in testList: 28 | ds_fname = DSDIR + T 29 | ds = DATASET(ds_fname) 30 | 31 | Z = np.zeros_like(ds.OUTPUT) 32 | O = np.ones_like(ds.OUTPUT) 33 | R = np.random.random(ds.OUTPUT.shape) 34 | 35 | z = {} 36 | o = {} 37 | r = {} 38 | 39 | pmap_list = [Z,R,O] 40 | output_list = [z,r,o] 41 | for i in range(3): 42 | m = output_list[i] 43 | 44 | m['Train_p'] = [] 45 | m['Train_n'] = [] 46 | m['Test_p'] = [] 47 | m['Test_n'] = [] 48 | m['All_p'] = [] 49 | m['All_n'] = [] 50 | 51 | pmap = pmap_list[i] 52 | 53 | for w in range(9, 57, 4): 54 | print("Teste: {} ----- W = {}".format(T, w)) 55 | 56 | [pos, neg] = ds.evaluate(pmap, w, 'train', etype='our') 57 | m['Train_p'] += [pos] 58 | m['Train_n'] += [neg] 59 | 60 | [pos, neg] = ds.evaluate(pmap, w, 'test', etype='our') 61 | m['Test_p'] += [pos] 62 | m['Test_n'] += [neg] 63 | 64 | 65 | [pos, neg] = ds.evaluate(pmap, w, 'all', etype='our') 66 | m['All_p'] += [pos] 67 | m['All_n'] += [neg] 68 | 69 | 70 | sio.savemat(T[0:5]+'_extreme.mat' , [z,r,o] ) 71 | 72 | 73 | 74 | pmapname = '45_Pmamp_45_Fault_Quest.hdf5_on_QUEST_.npz' 75 | outputname = 'Quest_on_Quest' 76 | 77 | pmap = np.load(pmapname)['matrix'] 78 | 79 | png = np.uint8(pmap * 255) 80 | im = Image.fromarray(png) 81 | im.save(outputname + '.png') 82 | 83 | im = Image.fromarray(pmap) 84 | im.save(outputname + '.tiff') 85 | --------------------------------------------------------------------------------