├── .gitignore
├── LICENSE
├── README.md
├── main.py
├── main_webcam.py
├── pySaliencyMap.py
├── pySaliencyMapDefs.py
├── test.jpg
├── test2.png
├── test3.jpg
└── test3_gt.jpg

/.gitignore:
--------------------------------------------------------------------------------
*.pyc

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2020 Akisato Kimura

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
pySaliencyMap
=============

Python implementation for extracting a saliency map [1] from a still image.

Requirements:

    Python (>= 2.7 is preferable)
    numpy (>= 1.7 is preferable)
    OpenCV (>= 2.4 is preferable)
    matplotlib (if you would like to run main.py)

Usage:

    If you would like to test this package, please try
        python main.py
    This provides a simple example of how to use the class pySaliencyMap.

    We also provide sample code for testing this package with your own webcam. Please try
        python main_webcam.py

References:

    [1] L. Itti, C. Koch, and E. Niebur, "A Model of Saliency-Based Visual Attention for Rapid Scene Analysis," IEEE Transactions on Pattern Analysis and Machine Intelligence, Vol. 20, No. 11, pp. 1254-1259, Nov. 1998.
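
Minimal example:

    The snippet below is a sketch distilled from main.py; it shows the core calls of the class pySaliencyMap:

        import cv2
        import pySaliencyMap

        img = cv2.imread('test3.jpg')
        height, width = img.shape[:2]
        sm = pySaliencyMap.pySaliencyMap(width, height)
        saliency_map = sm.SMGetSM(img)               # float saliency map, roughly in [0, 1]
        binarized_map = sm.SMGetBinarizedSM(img)     # uint8 map binarized with Otsu's threshold
        salient_region = sm.SMGetSalientRegion(img)  # input image masked by a GrabCut segmentation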

--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Name: main
# Purpose: Testing the package pySaliencyMap
#
# Author: Akisato Kimura
#
# Created: May 4, 2014
# Copyright: (c) Akisato Kimura 2014-
# Licence: All rights reserved
#-------------------------------------------------------------------------------

import cv2
import matplotlib.pyplot as plt
import pySaliencyMap

# main
if __name__ == '__main__':
    # read
    img = cv2.imread('test3.jpg')
    # initialize
    imgsize = img.shape
    img_width = imgsize[1]
    img_height = imgsize[0]
    sm = pySaliencyMap.pySaliencyMap(img_width, img_height)
    # computation
    saliency_map = sm.SMGetSM(img)
    binarized_map = sm.SMGetBinarizedSM(img)
    salient_region = sm.SMGetSalientRegion(img)
    # visualize
    # plt.subplot(2,2,1), plt.imshow(img, 'gray')
    plt.subplot(2,2,1), plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
    plt.title('Input image')
    # cv2.imshow("input", img)
    plt.subplot(2,2,2), plt.imshow(saliency_map, 'gray')
    plt.title('Saliency map')
    # cv2.imshow("output", map)
    plt.subplot(2,2,3), plt.imshow(binarized_map)
    plt.title('Binarized saliency map')
    # cv2.imshow("Binarized", binarized_map)
    plt.subplot(2,2,4), plt.imshow(cv2.cvtColor(salient_region, cv2.COLOR_BGR2RGB))
    plt.title('Salient region')
    # cv2.imshow("Segmented", segmented_map)

    plt.show()
    # cv2.waitKey(0)
    cv2.destroyAllWindows()

--------------------------------------------------------------------------------
/main_webcam.py:
--------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Name: main_webcam
# Purpose: Testing the package pySaliencyMap with your own webcam
#
# Author: Akisato Kimura
#
# Created: May 14, 2016
# Copyright: (c) Akisato Kimura 2016-
# Licence: All rights reserved
#-------------------------------------------------------------------------------

import cv2
import matplotlib.pyplot as plt
import pySaliencyMap

# main
if __name__ == '__main__':
    # set up the webcam
    capture = cv2.VideoCapture(0)
    # repeat until the key "q" is pressed
    while(True):
        # capture
        retval, frame = capture.read()
        # initialize
        frame_size = frame.shape
        frame_width = frame_size[1]
        frame_height = frame_size[0]
        sm = pySaliencyMap.pySaliencyMap(frame_width, frame_height)
        # computation
        saliency_map = sm.SMGetSM(frame)
        # binarized_map = sm.SMGetBinarizedSM(frame)
        # salient_region = sm.SMGetSalientRegion(frame)
        # visualize
        cv2.imshow('Input image', cv2.flip(frame, 1))
        cv2.imshow('Saliency map', cv2.flip(saliency_map, 1))
        # cv2.imshow('Binarized saliency map', cv2.flip(binarized_map, 1))
        # cv2.imshow('Salient region', cv2.flip(salient_region, 1))
        # exit if the key "q" is pressed
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break

    cv2.destroyAllWindows()

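# NOTE: because pySaliencyMap is re-constructed inside the loop above, prev_frame
# starts as None on every iteration, so the motion channel of SMGetSM() never sees
# two consecutive frames and contributes nothing here.  A sketch of an alternative
# (the property names assume OpenCV 3 or later) is to build the object once, before
# the loop:
#
#     frame_width = int(capture.get(cv2.CAP_PROP_FRAME_WIDTH))
#     frame_height = int(capture.get(cv2.CAP_PROP_FRAME_HEIGHT))
#     sm = pySaliencyMap.pySaliencyMap(frame_width, frame_height)
#
# and then call sm.SMGetSM(frame) for every captured frame.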
--------------------------------------------------------------------------------
/pySaliencyMap.py:
--------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Name: pySaliencyMap
# Purpose: Extracting a saliency map from a single still image
#
# Author: Akisato Kimura
#
# Created: April 24, 2014
# Copyright: (c) Akisato Kimura 2014-
# Licence: All rights reserved
#-------------------------------------------------------------------------------

import cv2
import numpy as np
import pySaliencyMapDefs

class pySaliencyMap:
    # initialization
    def __init__(self, width, height):
        self.width = width
        self.height = height
        self.prev_frame = None
        self.SM = None
        self.GaborKernel0 = np.array(pySaliencyMapDefs.GaborKernel_0)
        self.GaborKernel45 = np.array(pySaliencyMapDefs.GaborKernel_45)
        self.GaborKernel90 = np.array(pySaliencyMapDefs.GaborKernel_90)
        self.GaborKernel135 = np.array(pySaliencyMapDefs.GaborKernel_135)

    # extracting color channels
    def SMExtractRGBI(self, inputImage):
        # convert scale of array elements
        src = np.float32(inputImage) * 1./255
        # split
        (B, G, R) = cv2.split(src)
        # extract an intensity image
        I = cv2.cvtColor(src, cv2.COLOR_BGR2GRAY)
        # return
        return R, G, B, I

    # feature maps
    ## constructing a Gaussian pyramid
    def FMCreateGaussianPyr(self, src):
        dst = list()
        dst.append(src)
        for i in range(1,9):
            nowdst = cv2.pyrDown(dst[i-1])
            dst.append(nowdst)
        return dst
    ## taking center-surround differences
    def FMCenterSurroundDiff(self, GaussianMaps):
        dst = list()
        for s in range(2,5):
            now_size = GaussianMaps[s].shape
            now_size = (now_size[1], now_size[0])  ## (width, height)
            tmp = cv2.resize(GaussianMaps[s+3], now_size, interpolation=cv2.INTER_LINEAR)
            nowdst = cv2.absdiff(GaussianMaps[s], tmp)
            dst.append(nowdst)
            tmp = cv2.resize(GaussianMaps[s+4], now_size, interpolation=cv2.INTER_LINEAR)
            nowdst = cv2.absdiff(GaussianMaps[s], tmp)
            dst.append(nowdst)
        return dst
    ## constructing a Gaussian pyramid + taking center-surround differences
    def FMGaussianPyrCSD(self, src):
        GaussianMaps = self.FMCreateGaussianPyr(src)
        dst = self.FMCenterSurroundDiff(GaussianMaps)
        return dst
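    ## NOTE: FMCenterSurroundDiff() follows the Itti-Koch-Niebur scheme: from the
    ## 9-level Gaussian pyramid (levels 0-8), each "center" level c in {2, 3, 4} is
    ## compared with the "surround" levels s = c+3 and s = c+4 (the coarser surround
    ## map is first resized to the center's resolution), giving 6 center-surround
    ## difference maps per feature channel.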
    ## intensity feature maps
    def IFMGetFM(self, I):
        return self.FMGaussianPyrCSD(I)
    ## color feature maps
    def CFMGetFM(self, R, G, B):
        # max(R,G,B)
        tmp1 = cv2.max(R, G)
        RGBMax = cv2.max(B, tmp1)
        RGBMax[RGBMax <= 0] = 0.0001  # prevent dividing by 0
        # min(R,G)
        RGMin = cv2.min(R, G)
        # RG = (R-G)/max(R,G,B)
        RG = (R - G) / RGBMax
        # BY = (B-min(R,G))/max(R,G,B)
        BY = (B - RGMin) / RGBMax
        # clamp negative values to 0
        RG[RG < 0] = 0
        BY[BY < 0] = 0
        # obtain feature maps in the same way as intensity
        RGFM = self.FMGaussianPyrCSD(RG)
        BYFM = self.FMGaussianPyrCSD(BY)
        # return
        return RGFM, BYFM
    ## orientation feature maps
    def OFMGetFM(self, src):
        # creating a Gaussian pyramid
        GaussianI = self.FMCreateGaussianPyr(src)
        # convolving the intensity pyramid with Gabor filters to extract orientation features
        GaborOutput0 = [ np.empty((1,1)), np.empty((1,1)) ]  # dummy data: any kind of np.array() is OK
        GaborOutput45 = [ np.empty((1,1)), np.empty((1,1)) ]
        GaborOutput90 = [ np.empty((1,1)), np.empty((1,1)) ]
        GaborOutput135 = [ np.empty((1,1)), np.empty((1,1)) ]
        for j in range(2,9):
            GaborOutput0.append( cv2.filter2D(GaussianI[j], cv2.CV_32F, self.GaborKernel0) )
            GaborOutput45.append( cv2.filter2D(GaussianI[j], cv2.CV_32F, self.GaborKernel45) )
            GaborOutput90.append( cv2.filter2D(GaussianI[j], cv2.CV_32F, self.GaborKernel90) )
            GaborOutput135.append( cv2.filter2D(GaussianI[j], cv2.CV_32F, self.GaborKernel135) )
        # calculating center-surround differences for every orientation
        CSD0 = self.FMCenterSurroundDiff(GaborOutput0)
        CSD45 = self.FMCenterSurroundDiff(GaborOutput45)
        CSD90 = self.FMCenterSurroundDiff(GaborOutput90)
        CSD135 = self.FMCenterSurroundDiff(GaborOutput135)
        # concatenate
        dst = list(CSD0)
        dst.extend(CSD45)
        dst.extend(CSD90)
        dst.extend(CSD135)
        # return
        return dst
    ## motion feature maps
    def MFMGetFM(self, src):
        # convert scale
        I8U = np.uint8(255 * src)
        cv2.waitKey(10)
        # calculating optical flow
        if self.prev_frame is not None:
            farne_pyr_scale = pySaliencyMapDefs.farne_pyr_scale
            farne_levels = pySaliencyMapDefs.farne_levels
            farne_winsize = pySaliencyMapDefs.farne_winsize
            farne_iterations = pySaliencyMapDefs.farne_iterations
            farne_poly_n = pySaliencyMapDefs.farne_poly_n
            farne_poly_sigma = pySaliencyMapDefs.farne_poly_sigma
            farne_flags = pySaliencyMapDefs.farne_flags
            flow = cv2.calcOpticalFlowFarneback(
                prev = self.prev_frame,
                next = I8U,
                pyr_scale = farne_pyr_scale,
                levels = farne_levels,
                winsize = farne_winsize,
                iterations = farne_iterations,
                poly_n = farne_poly_n,
                poly_sigma = farne_poly_sigma,
                flags = farne_flags,
                flow = None
            )
            flowx = flow[...,0]
            flowy = flow[...,1]
        else:
            flowx = np.zeros(I8U.shape)
            flowy = np.zeros(I8U.shape)
        # create Gaussian pyramids
        dst_x = self.FMGaussianPyrCSD(flowx)
        dst_y = self.FMGaussianPyrCSD(flowy)
        # update the current frame
        self.prev_frame = np.uint8(I8U)
        # return
        return dst_x, dst_y

    # conspicuity maps
    ## standard range normalization
    def SMRangeNormalize(self, src):
        minn, maxx, dummy1, dummy2 = cv2.minMaxLoc(src)
        if maxx != minn:
            dst = src/(maxx-minn) + minn/(minn-maxx)
        else:
            dst = src - minn
        return dst
    ## computing an average of local maxima
    def SMAvgLocalMax(self, src):
        # size
        stepsize = pySaliencyMapDefs.default_step_local
        width = src.shape[1]
        height = src.shape[0]
        # find local maxima
        numlocal = 0
        lmaxmean = 0
        for y in range(0, height-stepsize, stepsize):
            for x in range(0, width-stepsize, stepsize):
                localimg = src[y:y+stepsize, x:x+stepsize]
                lmin, lmax, dummy1, dummy2 = cv2.minMaxLoc(localimg)
                lmaxmean += lmax
                numlocal += 1
        # averaging over all the local regions
        return lmaxmean / numlocal
    ## normalization specific to the saliency map model
    def SMNormalization(self, src):
        dst = self.SMRangeNormalize(src)
        lmaxmean = self.SMAvgLocalMax(dst)
        normcoeff = (1-lmaxmean)*(1-lmaxmean)
        return dst * normcoeff
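    ## NOTE: SMNormalization() implements the map-normalization operator N(.) of the
    ## Itti-Koch model: the map is first rescaled to [0, 1] (SMRangeNormalize) and
    ## then multiplied by (1 - m)^2, where m is the average of the local maxima found
    ## by SMAvgLocalMax().  A map with a single dominant peak keeps its amplitude,
    ## while a map with many peaks of similar height (m close to the global maximum
    ## of 1) is suppressed before the maps are summed.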
    ## normalizing feature maps
    def normalizeFeatureMaps(self, FM):
        NFM = list()
        for i in range(0,6):
            normalizedImage = self.SMNormalization(FM[i])
            nownfm = cv2.resize(normalizedImage, (self.width, self.height), interpolation=cv2.INTER_LINEAR)
            NFM.append(nownfm)
        return NFM
    ## intensity conspicuity map
    def ICMGetCM(self, IFM):
        NIFM = self.normalizeFeatureMaps(IFM)
        ICM = sum(NIFM)
        return ICM
    ## color conspicuity map
    def CCMGetCM(self, CFM_RG, CFM_BY):
        # extracting a conspicuity map for every color opponent pair
        CCM_RG = self.ICMGetCM(CFM_RG)
        CCM_BY = self.ICMGetCM(CFM_BY)
        # merge
        CCM = CCM_RG + CCM_BY
        # return
        return CCM
    ## orientation conspicuity map
    def OCMGetCM(self, OFM):
        OCM = np.zeros((self.height, self.width))
        for i in range(0,4):
            # slicing
            nowofm = OFM[i*6:(i+1)*6]  # angle = i*45
            # extracting a conspicuity map for every angle
            NOFM = self.ICMGetCM(nowofm)
            # normalize
            NOFM2 = self.SMNormalization(NOFM)
            # accumulate
            OCM += NOFM2
        return OCM
    ## motion conspicuity map
    def MCMGetCM(self, MFM_X, MFM_Y):
        return self.CCMGetCM(MFM_X, MFM_Y)

    # core
    def SMGetSM(self, src):
        # definitions
        size = src.shape
        width = size[1]
        height = size[0]
        # check
        # if(width != self.width or height != self.height):
        #     sys.exit("size mismatch")
        # extracting individual color channels
        R, G, B, I = self.SMExtractRGBI(src)
        # extracting feature maps
        IFM = self.IFMGetFM(I)
        CFM_RG, CFM_BY = self.CFMGetFM(R, G, B)
        OFM = self.OFMGetFM(I)
        MFM_X, MFM_Y = self.MFMGetFM(I)
        # extracting conspicuity maps
        ICM = self.ICMGetCM(IFM)
        CCM = self.CCMGetCM(CFM_RG, CFM_BY)
        OCM = self.OCMGetCM(OFM)
        MCM = self.MCMGetCM(MFM_X, MFM_Y)
        # adding all the conspicuity maps to form a saliency map
        wi = pySaliencyMapDefs.weight_intensity
        wc = pySaliencyMapDefs.weight_color
        wo = pySaliencyMapDefs.weight_orientation
        wm = pySaliencyMapDefs.weight_motion
        SMMat = wi*ICM + wc*CCM + wo*OCM + wm*MCM
        # normalize
        normalizedSM = self.SMRangeNormalize(SMMat)
        normalizedSM2 = normalizedSM.astype(np.float32)
        smoothedSM = cv2.bilateralFilter(normalizedSM2, 7, 3, 1.55)
        self.SM = cv2.resize(smoothedSM, (width,height), interpolation=cv2.INTER_NEAREST)
        # return
        return self.SM

    def SMGetBinarizedSM(self, src):
        # get a saliency map
        if self.SM is None:
            self.SM = self.SMGetSM(src)
        # convert scale
        SM_I8U = np.uint8(255 * self.SM)
        # binarize
        thresh, binarized_SM = cv2.threshold(SM_I8U, thresh=0, maxval=255, type=cv2.THRESH_BINARY+cv2.THRESH_OTSU)
        return binarized_SM

    def SMGetSalientRegion(self, src):
        # get a binarized saliency map
        binarized_SM = self.SMGetBinarizedSM(src)
        # GrabCut
        img = src.copy()
        mask = np.where((binarized_SM != 0), cv2.GC_PR_FGD, cv2.GC_PR_BGD).astype('uint8')
        bgdmodel = np.zeros((1,65), np.float64)
        fgdmodel = np.zeros((1,65), np.float64)
        rect = (0,0,1,1)  # dummy
        iterCount = 1
        cv2.grabCut(img, mask=mask, rect=rect, bgdModel=bgdmodel, fgdModel=fgdmodel, iterCount=iterCount, mode=cv2.GC_INIT_WITH_MASK)
        # post-processing
        mask_out = np.where((mask == cv2.GC_FGD) + (mask == cv2.GC_PR_FGD), 255, 0).astype('uint8')
        output = cv2.bitwise_and(img, img, mask=mask_out)
        return output

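# NOTE on SMGetSalientRegion(): the binarized saliency map is only used to seed
# GrabCut.  Pixels that are non-zero in the binarized map are marked as probable
# foreground (cv2.GC_PR_FGD) and all others as probable background (cv2.GC_PR_BGD);
# since the mode is cv2.GC_INIT_WITH_MASK, the rect argument is ignored, which is
# why a dummy rectangle (0,0,1,1) is passed.  After one GrabCut iteration, pixels
# labelled GC_FGD or GC_PR_FGD are kept and the rest of the input image is masked out.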
--------------------------------------------------------------------------------
/pySaliencyMapDefs.py:
--------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Name: pySaliencyMapDefs
# Purpose: Definitions for class pySaliencyMap
#
# Author: Akisato Kimura
#
# Created: April 24, 2014
# Copyright: (c) Akisato Kimura 2014-
# Licence: All rights reserved
#-------------------------------------------------------------------------------

# parameters for computing optical flow using Gunnar Farneback's algorithm
farne_pyr_scale = 0.5
farne_levels = 3
farne_winsize = 15
farne_iterations = 3
farne_poly_n = 5
farne_poly_sigma = 1.2
farne_flags = 0

# parameters for detecting local maxima
default_step_local = 16

# feature weights
weight_intensity = 0.30
weight_color = 0.30
weight_orientation = 0.20
weight_motion = 0.20
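
# NOTE: the four weights above sum to 1.0; SMGetSM() in pySaliencyMap.py uses them
# to combine the intensity, color, orientation and motion conspicuity maps linearly
# (wi*ICM + wc*CCM + wo*OCM + wm*MCM) into the final saliency map.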

# coefficients of Gabor filters
GaborKernel_0 = [
    [ 1.85212E-06, 1.28181E-05, -0.000350433, -0.000136537, 0.002010422, -0.000136537, -0.000350433, 1.28181E-05, 1.85212E-06 ],
    [ 2.80209E-05, 0.000193926, -0.005301717, -0.002065674, 0.030415784, -0.002065674, -0.005301717, 0.000193926, 2.80209E-05 ],
    [ 0.000195076, 0.001350077, -0.036909595, -0.014380852, 0.211749204, -0.014380852, -0.036909595, 0.001350077, 0.000195076 ],
    [ 0.000624940, 0.004325061, -0.118242318, -0.046070008, 0.678352526, -0.046070008, -0.118242318, 0.004325061, 0.000624940 ],
    [ 0.000921261, 0.006375831, -0.174308068, -0.067914552, 1.000000000, -0.067914552, -0.174308068, 0.006375831, 0.000921261 ],
    [ 0.000624940, 0.004325061, -0.118242318, -0.046070008, 0.678352526, -0.046070008, -0.118242318, 0.004325061, 0.000624940 ],
    [ 0.000195076, 0.001350077, -0.036909595, -0.014380852, 0.211749204, -0.014380852, -0.036909595, 0.001350077, 0.000195076 ],
    [ 2.80209E-05, 0.000193926, -0.005301717, -0.002065674, 0.030415784, -0.002065674, -0.005301717, 0.000193926, 2.80209E-05 ],
    [ 1.85212E-06, 1.28181E-05, -0.000350433, -0.000136537, 0.002010422, -0.000136537, -0.000350433, 1.28181E-05, 1.85212E-06 ]
]
GaborKernel_45 = [
    [ 4.04180E-06, 2.25320E-05, -0.000279806, -0.001028923, 3.79931E-05, 0.000744712, 0.000132863, -9.04408E-06, -1.01551E-06 ],
    [ 2.25320E-05, 0.000925120, 0.002373205, -0.013561362, -0.022947700, 0.000389916, 0.003516954, 0.000288732, -9.04408E-06 ],
    [ -0.000279806, 0.002373205, 0.044837725, 0.052928748, -0.139178011, -0.108372072, 0.000847346, 0.003516954, 0.000132863 ],
    [ -0.001028923, -0.013561362, 0.052928748, 0.460162150, 0.249959607, -0.302454279, -0.108372072, 0.000389916, 0.000744712 ],
    [ 3.79931E-05, -0.022947700, -0.139178011, 0.249959607, 1.000000000, 0.249959607, -0.139178011, -0.022947700, 3.79931E-05 ],
    [ 0.000744712, 0.003899160, -0.108372072, -0.302454279, 0.249959607, 0.460162150, 0.052928748, -0.013561362, -0.001028923 ],
    [ 0.000132863, 0.003516954, 0.000847346, -0.108372072, -0.139178011, 0.052928748, 0.044837725, 0.002373205, -0.000279806 ],
    [ -9.04408E-06, 0.000288732, 0.003516954, 0.000389916, -0.022947700, -0.013561362, 0.002373205, 0.000925120, 2.25320E-05 ],
    [ -1.01551E-06, -9.04408E-06, 0.000132863, 0.000744712, 3.79931E-05, -0.001028923, -0.000279806, 2.25320E-05, 4.04180E-06 ]
]
GaborKernel_90 = [
    [ 1.85212E-06, 2.80209E-05, 0.000195076, 0.000624940, 0.000921261, 0.000624940, 0.000195076, 2.80209E-05, 1.85212E-06 ],
    [ 1.28181E-05, 0.000193926, 0.001350077, 0.004325061, 0.006375831, 0.004325061, 0.001350077, 0.000193926, 1.28181E-05 ],
    [ -0.000350433, -0.005301717, -0.036909595, -0.118242318, -0.174308068, -0.118242318, -0.036909595, -0.005301717, -0.000350433 ],
    [ -0.000136537, -0.002065674, -0.014380852, -0.046070008, -0.067914552, -0.046070008, -0.014380852, -0.002065674, -0.000136537 ],
    [ 0.002010422, 0.030415784, 0.211749204, 0.678352526, 1.000000000, 0.678352526, 0.211749204, 0.030415784, 0.002010422 ],
    [ -0.000136537, -0.002065674, -0.014380852, -0.046070008, -0.067914552, -0.046070008, -0.014380852, -0.002065674, -0.000136537 ],
    [ -0.000350433, -0.005301717, -0.036909595, -0.118242318, -0.174308068, -0.118242318, -0.036909595, -0.005301717, -0.000350433 ],
    [ 1.28181E-05, 0.000193926, 0.001350077, 0.004325061, 0.006375831, 0.004325061, 0.001350077, 0.000193926, 1.28181E-05 ],
    [ 1.85212E-06, 2.80209E-05, 0.000195076, 0.000624940, 0.000921261, 0.000624940, 0.000195076, 2.80209E-05, 1.85212E-06 ]
]
GaborKernel_135 = [
    [ -1.01551E-06, -9.04408E-06, 0.000132863, 0.000744712, 3.79931E-05, -0.001028923, -0.000279806, 2.2532E-05, 4.0418E-06 ],
    [ -9.04408E-06, 0.000288732, 0.003516954, 0.000389916, -0.022947700, -0.013561362, 0.002373205, 0.00092512, 2.2532E-05 ],
    [ 0.000132863, 0.003516954, 0.000847346, -0.108372072, -0.139178011, 0.052928748, 0.044837725, 0.002373205, -0.000279806 ],
    [ 0.000744712, 0.000389916, -0.108372072, -0.302454279, 0.249959607, 0.46016215, 0.052928748, -0.013561362, -0.001028923 ],
    [ 3.79931E-05, -0.022947700, -0.139178011, 0.249959607, 1.000000000, 0.249959607, -0.139178011, -0.0229477, 3.79931E-05 ],
    [ -0.001028923, -0.013561362, 0.052928748, 0.460162150, 0.249959607, -0.302454279, -0.108372072, 0.000389916, 0.000744712 ],
    [ -0.000279806, 0.002373205, 0.044837725, 0.052928748, -0.139178011, -0.108372072, 0.000847346, 0.003516954, 0.000132863 ],
    [ 2.25320E-05, 0.000925120, 0.002373205, -0.013561362, -0.022947700, 0.000389916, 0.003516954, 0.000288732, -9.04408E-06 ],
    [ 4.04180E-06, 2.25320E-05, -0.000279806, -0.001028923, 3.79931E-05, 0.000744712, 0.000132863, -9.04408E-06, -1.01551E-06 ]
]

--------------------------------------------------------------------------------
/test.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/akisatok/pySaliencyMap/b7aee8af04c77375fc8c286801a641efae7cdee4/test.jpg
--------------------------------------------------------------------------------
/test2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/akisatok/pySaliencyMap/b7aee8af04c77375fc8c286801a641efae7cdee4/test2.png
--------------------------------------------------------------------------------
/test3.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/akisatok/pySaliencyMap/b7aee8af04c77375fc8c286801a641efae7cdee4/test3.jpg
--------------------------------------------------------------------------------
/test3_gt.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/akisatok/pySaliencyMap/b7aee8af04c77375fc8c286801a641efae7cdee4/test3_gt.jpg
--------------------------------------------------------------------------------
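
Note on the Gabor kernels: comparable 9x9 oriented kernels can also be generated with OpenCV's built-in Gabor factory. The sketch below only illustrates that route; the parameter values are assumptions and are not the ones that produced the hard-coded tables in pySaliencyMapDefs.py:

    import cv2
    import numpy as np

    # hypothetical Gabor parameters -- tune them to taste
    ksize, sigma, lambd, gamma, psi = (9, 9), 2.0, 5.0, 0.5, np.pi / 2
    kernels = {}
    for angle in (0, 45, 90, 135):
        k = cv2.getGaborKernel(ksize, sigma, np.deg2rad(angle), lambd, gamma, psi, ktype=cv2.CV_32F)
        kernels[angle] = k / np.abs(k).max()   # peak-normalize, like the hard-coded kernels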