├── Dev
│   ├── __init__.py
│   ├── blend_factor_estimation.py
│   ├── common.py
│   ├── evaluation_script.py
│   ├── image_watermark_decomposition.py
│   ├── initial_estimation.py
│   ├── matte_update.py
│   ├── single_image_matting.py
│   └── watermark_removal.py
├── README.md
├── Report.pdf
└── images
    ├── 0imagenet_gt.png
    ├── 0imagenet_rc.png
    ├── 0imagenet_wm.png
    ├── 1fotolia_gt.png
    ├── 1fotolia_rc.png
    ├── 1fotolia_wm.png
    ├── 1imagenet_gt.png
    ├── 1imagenet_rc.png
    ├── 1imagenet_wm.png
    ├── 2fotolia_gt.png
    ├── 2fotolia_rc.png
    └── 2fotolia_wm.png
/Dev/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/Dev/blend_factor_estimation.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import numpy as np
3 |
4 |
5 | def get_gradients(image, ksize):
6 |     gx = cv2.Sobel(image, cv2.CV_64F, 1, 0, ksize=ksize)
7 |     gy = cv2.Sobel(image, cv2.CV_64F, 0, 1, ksize=ksize)
8 |     return np.array([gx, gy])
9 |
10 |
11 | # Estimates "C", the image blend factor used to scale the initial alpha matte
12 | def blend_factor_estimation(images, poisson_est, matte):
13 |     # Adapted from:
14 |     # https://github.com/rohitrango/automatic-watermark-detection/blob/master/src/watermark_reconstruct.py
15 |     # (free to use for academic/research purposes)
16 |     # Changed to work for grayscale images
17 |
18 |     num_images = images.shape[0]
19 |     Jm = (images - poisson_est)
20 |     gx_jm = np.zeros(images.shape)
21 |     gy_jm = np.zeros(images.shape)
22 |
23 |     for i in range(num_images):
24 |         gx_jm[i], gy_jm[i] = get_gradients(Jm[i], ksize=1)
25 |
26 |     Jm_grad = np.sqrt(np.add(np.square(gx_jm), np.square(gy_jm)))
27 |     est_Ik = matte * np.median(images, axis=0)
28 |     gx_estIk, gy_estIk = get_gradients(est_Ik, ksize=3)
29 |     estIk_grad = np.sqrt(np.add(np.square(gx_estIk), np.square(gy_estIk)))
30 |
31 |     C = np.sum(Jm_grad[:, :, :] * estIk_grad[:, :]) / np.sum(np.square(estIk_grad[:, :])) / num_images
32 |
33 |     return C, est_Ik
--------------------------------------------------------------------------------
/Dev/common.py:
--------------------------------------------------------------------------------
1 | from matplotlib import pyplot as plt
2 |
3 | DEBUG_MODE = True
4 |
5 |
6 | def debug(function, *args, **kwargs):
7 |     if DEBUG_MODE:
8 |         function(*args, **kwargs)
9 |
10 |
11 | def show_image(image):
12 |     plt.imshow(image, cmap='gray')
--------------------------------------------------------------------------------
/Dev/evaluation_script.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import os
3 | import cv2
4 | import numpy as np
5 | from skimage.metrics import structural_similarity as ssim  # formerly skimage.measure.compare_ssim (removed in scikit-image >= 0.18)
6 | from skimage.metrics import peak_signal_noise_ratio as psnr  # formerly skimage.measure.compare_psnr
7 |
8 |
9 | def load_image(image_path, color=cv2.IMREAD_GRAYSCALE):
10 |     img = cv2.imread(image_path, color)
11 |     return img
12 |
13 |
14 | def load_images(directory):
15 |     image_paths = [os.path.join(directory, f) for f in sorted(os.listdir(directory))]  # sorted so both directories pair up by index
16 |     images = np.array([load_image(i) for i in image_paths])
17 |     return images
18 |
19 |
20 | def calculate_PSNR(gt, rc):
21 |     num_images = rc.shape[0]
22 |     PSNR_values = np.zeros([num_images])
23 |
24 |     for i in range(num_images):
25 |         PSNR = psnr(gt[i], rc[i])
26 |         PSNR_values[i] = PSNR
27 |     return np.mean(PSNR_values)
28 |
29 |
30 | def calculate_DSSIM(gt, rc):
31 |     num_images = rc.shape[0]
32 |     DSSIM_values = np.zeros([num_images])
33 |
34 |     for i in range(num_images):
35 |         DSSIM = 0.5 * (1 - ssim(gt[i], rc[i]))
36 |         DSSIM_values[i] = DSSIM
37 |     return np.mean(DSSIM_values)
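# Example invocation (the paths below are placeholders): both directories are expected to
# hold the same number of images with matching filenames, so that the sorted listings pair
# ground truth i with reconstruction i:
#
#   python evaluation_script.py ../Results/fotolia/ground_truth ../Results/fotolia/reconstructed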
38 |
39 |
40 | def parse_args():
41 |     parser = argparse.ArgumentParser()
42 |     parser.add_argument("groundtruths", help="Path to the ground truth image directory", type=str)
43 |     parser.add_argument("reconstructed", help="Path to the reconstructed image directory", type=str)
44 |     args = parser.parse_args()
45 |     return args
46 |
47 |
48 | if __name__ == '__main__':
49 |     args = parse_args()
50 |     ground_truths = load_images(args.groundtruths)
51 |     reconstructed_images = load_images(args.reconstructed)
52 |     PSNR = calculate_PSNR(ground_truths, reconstructed_images)
53 |     DSSIM = calculate_DSSIM(ground_truths, reconstructed_images)
54 |     print('PSNR: \t{}\nDSSIM: \t{}'.format(PSNR, DSSIM))
55 |
--------------------------------------------------------------------------------
/Dev/image_watermark_decomposition.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import numpy.linalg
3 | from common import *
4 | import time
5 | import cv2
6 |
7 |
8 | # The "Derivative Operator"
9 | # Adapted from:
10 | # https://stackoverflow.com/questions/9567882/sobel-filter-kernel-of-large-size
11 | # Changed to allow even sized kernels to support the algorithm
12 | def derivative(shape, axis):
13 |     """
14 |     shape is preferably odd, e.g. (5, 5); even dimensions are handled by building the next odd size and dropping the centre row/column
15 |     axis is the direction: 0 for positive x, 1 for positive y
16 |     """
17 |     # Round even dimensions up to the closest higher odd size (the flags record which axes were even)
18 |     odd_y = False
19 |     odd_x = False
20 |     if shape[0] % 2 == 0:
21 |         odd_x = True
22 |         shape = (int((shape[0] / 2.) + 0.1) * 2 + 1, shape[1])
23 |     if shape[1] % 2 == 0:
24 |         odd_y = True
25 |         shape = (shape[0], int((shape[1] / 2.) + 0.1) * 2 + 1)
26 |
27 |     k = np.zeros(shape)
28 |
29 |     p = [(j, i) for j in range(shape[0])
30 |          for i in range(shape[1])
31 |          if not (i == (shape[1] - 1) / 2. and j == (shape[0] - 1) / 2.)]
32 |
33 |     for j, i in p:
34 |         j_ = int(j - (shape[0] - 1) / 2.)
35 |         i_ = int(i - (shape[1] - 1) / 2.)
36 |         k[j, i] = (i_ if axis == 0 else j_) / float(i_ * i_ + j_ * j_)
37 |
38 |     if odd_x:
39 |         k = np.delete(k, int(k.shape[0] / 2), 0)
40 |
41 |     if odd_y:
42 |         k = np.delete(k, int(k.shape[1] / 2), 1)
43 |
44 |     k = k.astype(np.float32)
45 |     return k
46 |
47 |
48 | def _phi(s, epsilon=0.001):
49 |     res = np.sqrt(s + epsilon ** 2)
50 |     return res
51 |
52 |
53 | def _phi_prime(s, epsilon=0.001):
54 |     res = 1 / (2 * _phi(s, epsilon))
55 |     return res
56 |
57 |
58 | def get_gradients(image):
59 |     gx = cv2.Sobel(image, cv2.CV_64F, 1, 0, ksize=1)
60 |     gy = cv2.Sobel(image, cv2.CV_64F, 0, 1, ksize=1)
61 |     return np.array([gx, gy])
62 |
63 |
64 | # Input:
65 | # J: Watermarked image
66 | # I_k: The estimated original image for this frame
67 | # W_k: The image specific estimated watermark
68 | # W: Estimated watermark that changes with iterations
69 | # W_m: The initial estimated watermark from the previous step
70 | # alpha: The estimated alpha matte of the image
71 | # lambda_I: weight of the image gradient term, range [0, 1]
72 | # lambda_w: weight of the watermark gradient term, range [0.001, 0.1]
73 | # lambda_alpha: 0.01
74 | # beta: weight of the fidelity term tying alpha * W_k to W_m, range [0.001, 0.01]
75 | # gamma: weight of the auxiliary term keeping W_k close to the global W (set to 1 here)
76 | # Output: 77 | # M and b where M * [W, I] = b for irls optimization 78 | def _decomp(J, I_k, W_k, W, W_m, alpha, D_x, D_y, lambda_I=1, lambda_w=0.005, lambda_alpha=0.01, beta=1, gamma=1): 79 | # From Supplementary Material 80 | # The original image estimate is the watermarked image minus the watermark 81 | I_kx, I_ky = get_gradients(I_k) 82 | W_x, W_y = get_gradients(W) 83 | W_kx, W_ky = get_gradients(W_k) 84 | alpha_x, alpha_y = get_gradients(alpha) 85 | abs_alpha_x = np.absolute(alpha_x) 86 | abs_alpha_y = np.absolute(alpha_y) 87 | alpha_diag = np.diag(alpha.ravel()) 88 | alpha_diag_bar = 1 - alpha_diag 89 | # Error between The actual watermarked image and the estimated watermarked image 90 | phi_prime_data = np.diag(_phi_prime((alpha * W_k + (1 - alpha) * I_k - J) ** 2).ravel()) 91 | 92 | phi_prime_W = np.diag(_phi_prime( ( abs_alpha_x * W_kx + abs_alpha_y * W_ky )**2 ).ravel() ) 93 | phi_prime_I = np.diag(_phi_prime( ( abs_alpha_x * I_kx + abs_alpha_y * I_ky )**2 ).ravel() ) 94 | 95 | # Error between the initial watermark guess and the current guess, used to keep them "close" 96 | # Here there is a mistake in the supplementary material, brackets 97 | phi_prime_f = np.diag( 98 | _phi_prime(numpy.linalg.norm(get_gradients(alpha * W_k) - get_gradients(W_m), axis=0) ** 2).ravel()) 99 | 100 | phi_prime_aux = np.diag(_phi_prime((W_k - W) ** 2).reshape(-1)) 101 | phi_prime_rI = np.diag(_phi_prime(abs_alpha_x * I_kx ** 2 + abs_alpha_y * I_ky ** 2).ravel()) 102 | phi_prime_rW = np.diag(_phi_prime(abs_alpha_x * W_x ** 2 + abs_alpha_y * W_y ** 2).ravel()) 103 | 104 | c_x = np.diag(abs_alpha_x.ravel()) 105 | c_y = np.diag(abs_alpha_y.ravel()) 106 | 107 | L_f = D_y.T.dot(phi_prime_f).dot(D_y) + D_x.T.dot(phi_prime_f).dot(D_x) 108 | L_I = D_x.T.dot(c_x * phi_prime_rI).dot(D_x) + D_y.T.dot(c_y * phi_prime_rI).dot(D_y) 109 | L_w = D_x.T.dot(c_x * phi_prime_rW).dot(D_x) + D_y.T.dot(c_y * phi_prime_rW).dot(D_y) 110 | A_f = alpha_diag.T.dot(L_f * alpha_diag) + gamma * phi_prime_aux 111 | 112 | b_w = alpha_diag.T.dot(phi_prime_data).dot(J.ravel()) + beta * L_f.dot(W_m.ravel()) + gamma * phi_prime_aux.dot( 113 | W.ravel()) 114 | b_I = alpha_diag_bar.T.dot(phi_prime_data).dot(J.ravel()) 115 | 116 | b = np.vstack((b_w, b_I)).reshape(W.shape[0] ** 2 * 2, 1) 117 | 118 | upper_left = (alpha_diag ** 2).dot(phi_prime_data) + lambda_w * L_w + beta * A_f 119 | upper_right = alpha_diag.dot(alpha_diag_bar).dot(phi_prime_data) # Bottleneck 120 | bottom_left = upper_right 121 | bottom_right = (alpha_diag_bar ** 2).dot(phi_prime_data) + lambda_I * L_I 122 | 123 | M = np.vstack((np.hstack((upper_left, upper_right)), np.hstack((bottom_left, bottom_right)))) 124 | 125 | return M, b 126 | 127 | 128 | # Input: 129 | # J: Image with the watermark 130 | # I_k: The previously estimated alpha 131 | # W_k: The previously estimated watermark 132 | # W_hat: Original estimate 133 | # alpha: Global alpha estimate 134 | # Output: 135 | # W: The watermark estimate 136 | # I: The image estimate estimate 137 | def image_watermark_decomposition(J, I_k, W_k, W, W_hat, alpha, D_x, D_y): 138 | debug(print, "Starting Image-Watermark Decomposition...") 139 | t1 = time.time() 140 | M, b = _decomp(J, I_k, W_k, W, W_hat, alpha, D_x, D_y) 141 | debug(print, "Decomposition Time: {}".format(time.time() - t1)) 142 | 143 | t1 = time.time() 144 | x, res, rank, s = numpy.linalg.lstsq(M, b, rcond=None) 145 | debug(print, "Least Square Regressor Time: {}".format(time.time() - t1)) 146 | 147 | W = x[:J.shape[0] * J.shape[1]] 148 | I = x[J.shape[0] * 
J.shape[1]:]
149 |     W = W.reshape(J.shape)
150 |     I = I.reshape(J.shape)
151 |     debug(cv2.imwrite, "test_I.png", I)
152 |     debug(cv2.imwrite, "test_W.png", W)
153 |     return W, I
154 |
155 |
156 | if __name__ == "__main__":
157 |     pass
158 |
--------------------------------------------------------------------------------
/Dev/initial_estimation.py:
--------------------------------------------------------------------------------
1 | from common import *
2 | import numpy as np
3 | import cv2
4 |
5 | BINARY_THRESH = 0.5
6 | CANNY_THRESH = 0.5
7 | MAX_UINT8_PIXEL = 255
8 |
9 | """
10 | def _extract_bounding_box(watermark):
11 |     white_pixels = np.where(watermark == MAX_UINT8_PIXEL)
12 |     x1 = min(white_pixels[0])
13 |     x2 = max(white_pixels[0])
14 |     y1 = min(white_pixels[1])
15 |     y2 = max(white_pixels[1])
16 |     return (x1, y1, x2, y2)  # Rectangle coordinates
17 |
18 |
19 | def _debug_print_bounding_box(images, bounding_box):
20 |     for i, img in enumerate(images):
21 |         image = img.copy()
22 |         image[bounding_box[0]:bounding_box[2], bounding_box[1]] = MAX_UINT8_PIXEL
23 |         image[bounding_box[0]:bounding_box[2], bounding_box[3]] = MAX_UINT8_PIXEL
24 |         image[bounding_box[0], bounding_box[1]:bounding_box[3]] = MAX_UINT8_PIXEL
25 |         image[bounding_box[2], bounding_box[1]:bounding_box[3]] = MAX_UINT8_PIXEL
26 |         save_image("boundingBox_{}.png".format(i), image)
27 | """
28 |
29 |
30 | def get_gradients(image):
31 |     gx = cv2.Sobel(image, cv2.CV_64F, 1, 0, ksize=1)
32 |     gy = cv2.Sobel(image, cv2.CV_64F, 0, 1, ksize=1)
33 |     return gx, gy
34 |
35 |
36 | def poisson_reconstruction(gradx, grady, kernel_size=3, num_iters=5000, h=0.75):
37 |     # Adapted from:
38 |     # https://github.com/rohitrango/automatic-watermark-detection/blob/master/src/estimate_watermark.py
39 |     # (free to use for academic/research purposes)
40 |     # Changed to work for grayscale images
41 |     fxx = cv2.Sobel(gradx, cv2.CV_64F, 1, 0, ksize=kernel_size)
42 |     fyy = cv2.Sobel(grady, cv2.CV_64F, 0, 1, ksize=kernel_size)
43 |
44 |     laplacian = fxx + fyy
45 |     m, n = laplacian.shape
46 |
47 |     est = np.zeros(laplacian.shape)
48 |
49 |     est[1:-1, 1:-1] = np.random.random((m - 2, n - 2))
50 |
51 |     for i in range(num_iters):
52 |         est[1:-1, 1:-1] = 0.25 * (
53 |             est[0:-2, 1:-1] + est[1:-1, 0:-2] + est[2:, 1:-1] + est[1:-1, 2:] - h * h * laplacian[1:-1, 1:-1])
54 |
55 |     return est
56 |
57 |
58 | def extract_watermark_outline(images, outline='binary'):
59 |     # Get x and y gradients
60 |     gradients_x = [0] * len(images)
61 |     gradients_y = [0] * len(images)
62 |     for i, img in enumerate(images):
63 |         gradients_x[i], gradients_y[i] = get_gradients(img)
64 |
65 |     # Get the absolute value of the gradients and combine
66 |     wm_gradients_x = np.median(gradients_x, axis=0)
67 |     wm_gradients_y = np.median(gradients_y, axis=0)
68 |     wm_gradients_x_abs = np.absolute(wm_gradients_x)
69 |     wm_gradients_y_abs = np.absolute(wm_gradients_y)
70 |     watermark = np.sqrt(np.add(np.power(wm_gradients_x_abs, 2), np.power(wm_gradients_y_abs, 2)))
71 |
72 |     # Normalize
73 |     watermark = MAX_UINT8_PIXEL * (watermark - np.min(watermark)) / (np.max(watermark) - np.min(watermark))
74 |     watermark = watermark.astype(np.uint8)
75 |
76 |     # Outline Detection
77 |     if outline == 'canny':
78 |         watermark = cv2.Canny(watermark, MAX_UINT8_PIXEL * CANNY_THRESH, MAX_UINT8_PIXEL * CANNY_THRESH)
79 |     elif outline == 'binary':  # elif so that a valid 'canny' choice does not fall through to the error below
80 |         _, watermark = cv2.threshold(watermark, BINARY_THRESH, 1, cv2.THRESH_BINARY)
81 |     else:
82 |         raise ValueError("Outline argument invalid")
83 |
84 |     return watermark, wm_gradients_x, wm_gradients_y
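# Optional sanity check for the solver above (a minimal sketch under stated assumptions, not
# part of the original pipeline): feed it the gradients of a synthetic image that is close to
# zero on its border and verify that the reconstruction correlates strongly with the original.
# The helper name, blob shape and iteration count below are illustrative choices only.
def _poisson_sanity_check(size=64, num_iters=3000):
    yy, xx = np.mgrid[0:size, 0:size]
    blob = np.exp(-((xx - size / 2) ** 2 + (yy - size / 2) ** 2) / (2 * (size / 8) ** 2))
    gx, gy = get_gradients(blob)
    rec = poisson_reconstruction(gx, gy, num_iters=num_iters)
    # Correlation is used because the Sobel-based Laplacian introduces an overall scale factor;
    # the value should approach 1.0 as num_iters grows.
    return np.corrcoef(blob[1:-1, 1:-1].ravel(), rec[1:-1, 1:-1].ravel())[0, 1]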
85 |
86 |
87 | def initial_estimation(images):
88 |     debug(print, "Starting initial estimation...")
89 |     outline, gradx, grady = extract_watermark_outline(images)
90 |     debug(print, "Starting poisson reconstruction...")
91 |     W = poisson_reconstruction(gradx, grady)
92 |     return W
93 |
--------------------------------------------------------------------------------
/Dev/matte_update.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import numpy as np
3 | from tqdm import tqdm
4 | from common import *
5 |
6 |
7 | def phi(s, epsilon=0.001):
8 |     res = np.sqrt(s + epsilon ** 2)
9 |     return res
10 |
11 |
12 | def phi_prime(s, epsilon=0.001):
13 |     res = 1 / (2 * phi(s, epsilon))
14 |     return res
15 |
16 |
17 | def get_gradients(image):
18 |     gx = cv2.Sobel(image, cv2.CV_64F, 1, 0, ksize=1)
19 |     gy = cv2.Sobel(image, cv2.CV_64F, 0, 1, ksize=1)
20 |     return np.array([gx, gy])
21 |
22 |
23 | def matte_update(J, I_k, W_k, W_med, W_hat, alpha, D_x, D_y, lambda_alpha=0.01, beta=0.005):
24 |     # From Supplementary Material
25 |     debug(print, "Starting matte update...")
26 |     alpha_x, alpha_y = get_gradients(alpha)
27 |     alpha_diag = np.diag(alpha.ravel())
28 |
29 |     W_diag = np.diag(W_med.ravel())
30 |
31 |     for i in tqdm(range(J.shape[0])):
32 |         alphaWk = alpha * W_k[i]
33 |         alphaWk_gx, alphaWk_gy = get_gradients(alphaWk)
34 |         phi_prime_f = np.diag(phi_prime(
35 |             np.linalg.norm(get_gradients(alpha * W_k[i]) - get_gradients(W_hat), axis=0) ** 2).ravel())  # atten
36 |         phi_k_a = np.diag(
37 |             ((phi_prime(((alpha * W_k[i] + (1 - alpha) * I_k[i] - J[i]) ** 2))) * ((W_med - I_k[i]) ** 2)).ravel())
38 |         phi_k_b = (((phi_prime(((alpha * W_k[i] + (1 - alpha) * I_k[i] - J[i]) ** 2))) * (
39 |             (W_med - I_k[i]) * (J[i] - I_k[i]))).ravel())  # data-term right hand side: phi' * (W - I) * (J - I)
40 |         phi_alpha = np.diag(phi_prime(alpha_x ** 2 + alpha_y ** 2).ravel())
41 |         L_alpha = D_x.T.dot(phi_alpha.dot(D_x)) + D_y.T.dot(phi_alpha.dot(D_y))
42 |         L_f = D_y.T.dot(phi_prime_f).dot(D_y) + D_x.T.dot(phi_prime_f).dot(D_x)
43 |         A_f = W_diag.T.dot(L_f).dot(W_diag)
44 |
45 |         if i == 0:
46 |             A1 = phi_k_a + lambda_alpha * L_alpha + beta * A_f
47 |             b1 = phi_k_b + beta * W_diag.dot(L_f).dot(W_hat.ravel())
48 |         else:
49 |             A1 += (phi_k_a + lambda_alpha * L_alpha + beta * A_f)
50 |             b1 += (phi_k_b + beta * W_diag.T.dot(L_f).dot(W_hat.ravel()))
51 |
52 |     x, res, rank, s = np.linalg.lstsq(A1, b1, rcond=None)
53 |     alpha = x.reshape(J.shape[1:])
54 |
55 |     return alpha
56 |
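# A small numeric illustration of the robust penalty used above (for intuition only; the
# inputs are arbitrary): phi(s) = sqrt(s + eps^2) approximates |r| for a squared residual
# s = r ** 2, and phi_prime(s) = 1 / (2 * phi(s)) is the corresponding IRLS weight, so large
# residuals are down-weighted in each re-weighted least-squares solve:
#
#   >>> phi_prime(np.array([1e-6, 1e-2, 1.0]))     # residuals of roughly 1e-3, 0.1 and 1.0
#   roughly [353.6, 5.0, 0.5]
#
# i.e. once |r| is well above eps, the weight behaves like 1 / (2 * |r|).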
--------------------------------------------------------------------------------
/Dev/single_image_matting.py:
--------------------------------------------------------------------------------
1 | """Implementation of Closed-Form Matting.
2 | This module implements natural image matting method described in:
3 | Levin, Anat, Dani Lischinski, and Yair Weiss. "A closed-form solution to natural image matting."
4 | IEEE Transactions on Pattern Analysis and Machine Intelligence 30.2 (2008): 228-242.
5 | The code can be used in two ways:
6 | 1. By importing the matting functions in your code:
7 | ```
8 |     import closed_form_matting
9 |     ...
10 |     # For scribbles input
11 |     alpha = closed_form_matting.closed_form_matting_with_scribbles(image, scribbles)
12 |     # For trimap input
13 |     alpha = closed_form_matting.closed_form_matting_with_trimap(image, trimap)
14 |     # For prior with confidence
15 |     alpha = closed_form_matting.closed_form_matting_with_prior(
16 |         image, prior, prior_confidence, optional_const_mask)
17 |     # To get Matting Laplacian for image
18 |     laplacian = compute_laplacian(image, optional_const_mask)
19 | ```
20 | 2. From command line:
21 | ```
22 |     # Scribbles input
23 |     ./closed_form_matting.py input_image.png -s scribbles_image.png -o output_alpha.png
24 |     # Trimap input
25 |     ./closed_form_matting.py input_image.png -t trimap_image.png -o output_alpha.png
26 |     # Add flag --solve-fg to compute foreground color and output RGBA image instead
27 |     # of alpha.
28 | ```
29 | """
30 |
31 | from __future__ import division
32 | from numpy.lib.stride_tricks import as_strided
33 | import logging
34 | import cv2
35 | import numpy as np
36 | import scipy.sparse
37 | import scipy.sparse.linalg
38 | from common import *
39 |
40 |
41 | def _rolling_block(A, block=(3, 3)):
42 |     """Applies sliding window to given matrix."""
43 |     shape = (A.shape[0] - block[0] + 1, A.shape[1] - block[1] + 1) + block
44 |     strides = (A.strides[0], A.strides[1]) + A.strides
45 |     return as_strided(A, shape=shape, strides=strides)
46 |
47 |
48 | def compute_laplacian(img, mask=None, eps=10**(-7), win_rad=1):
49 |     """Computes Matting Laplacian for a given image.
50 |     Args:
51 |         img: 3-dim numpy matrix with input image
52 |         mask: mask of pixels for which Laplacian will be computed.
53 |             If not set Laplacian will be computed for all pixels.
54 |         eps: regularization parameter controlling alpha smoothness
55 |             from Eq. 12 of the original paper. Defaults to 1e-7.
56 |         win_rad: radius of window used to build Matting Laplacian (i.e.
57 |             radius of omega_k in Eq. 12).
58 |     Returns: sparse matrix holding Matting Laplacian.
59 | """ 60 | 61 | win_size = (win_rad * 2 + 1) ** 2 62 | h, w, d = img.shape 63 | # Number of window centre indices in h, w axes 64 | c_h, c_w = h - 2 * win_rad, w - 2 * win_rad 65 | win_diam = win_rad * 2 + 1 66 | 67 | indsM = np.arange(h * w).reshape((h, w)) 68 | ravelImg = img.reshape(h * w, d) 69 | win_inds = _rolling_block(indsM, block=(win_diam, win_diam)) 70 | 71 | win_inds = win_inds.reshape(c_h, c_w, win_size) 72 | if mask is not None: 73 | mask = cv2.dilate( 74 | mask.astype(np.uint8), 75 | np.ones((win_diam, win_diam), np.uint8) 76 | ).astype(np.bool) 77 | win_mask = np.sum(mask.ravel()[win_inds], axis=2) 78 | win_inds = win_inds[win_mask > 0, :] 79 | else: 80 | win_inds = win_inds.reshape(-1, win_size) 81 | 82 | 83 | winI = ravelImg[win_inds] 84 | 85 | win_mu = np.mean(winI, axis=1, keepdims=True) 86 | win_var = np.einsum('...ji,...jk ->...ik', winI, winI) / win_size - np.einsum('...ji,...jk ->...ik', win_mu, win_mu) 87 | 88 | inv = np.linalg.inv(win_var + (eps/win_size)*np.eye(3)) 89 | 90 | X = np.einsum('...ij,...jk->...ik', winI - win_mu, inv) 91 | vals = np.eye(win_size) - (1.0/win_size)*(1 + np.einsum('...ij,...kj->...ik', X, winI - win_mu)) 92 | 93 | nz_indsCol = np.tile(win_inds, win_size).ravel() 94 | nz_indsRow = np.repeat(win_inds, win_size).ravel() 95 | nz_indsVal = vals.ravel() 96 | L = scipy.sparse.coo_matrix((nz_indsVal, (nz_indsRow, nz_indsCol)), shape=(h*w, h*w)) 97 | return L 98 | 99 | 100 | def closed_form_matting_with_prior(image, prior, prior_confidence, consts_map=None): 101 | """Applies closed form matting with prior alpha map to image. 102 | Args: 103 | image: 3-dim numpy matrix with input image. 104 | prior: matrix of same width and height as input image holding apriori alpha map. 105 | prior_confidence: matrix of the same shape as prior hodling confidence of prior alpha. 106 | consts_map: binary mask of pixels that aren't expected to change due to high 107 | prior confidence. 108 | Returns: 2-dim matrix holding computed alpha map. 109 | """ 110 | 111 | assert image.shape[:2] == prior.shape, ('prior must be 2D matrix with height and width equal ' 112 | 'to image.') 113 | assert image.shape[:2] == prior_confidence.shape, ('prior_confidence must be 2D matrix with ' 114 | 'height and width equal to image.') 115 | assert (consts_map is None) or image.shape[:2] == consts_map.shape, ( 116 | 'consts_map must be 2D matrix with height and width equal to image.') 117 | 118 | logging.info('Computing Matting Laplacian.') 119 | laplacian = compute_laplacian(image, ~consts_map if consts_map is not None else None) 120 | 121 | confidence = scipy.sparse.diags(prior_confidence.ravel()) 122 | logging.info('Solving for alpha.') 123 | solution = scipy.sparse.linalg.spsolve( 124 | laplacian + confidence, 125 | prior.ravel() * prior_confidence.ravel() 126 | ) 127 | alpha = np.minimum(np.maximum(solution.reshape(prior.shape), 0), 1) 128 | return alpha 129 | 130 | 131 | def closed_form_matting_with_scribbles(image, scribbles, scribbles_confidence=100.0): 132 | """Apply Closed-Form matting to given image using scribbles image.""" 133 | 134 | assert image.shape == scribbles.shape, 'scribbles must have exactly same shape as image.' 
135 |     prior = np.sign(np.sum(scribbles - image, axis=2)) / 2 + 0.5
136 |     consts_map = prior != 0.5
137 |     return closed_form_matting_with_prior(
138 |         image,
139 |         prior,
140 |         scribbles_confidence * consts_map,
141 |         consts_map
142 |     )
143 |
144 | ###########
145 | # OWN WORK
146 | ###########
147 |
148 |
149 | def single_image_matting(images, W):
150 |     debug(print, "Starting single image matting...")
151 |     W = (W * 255).astype(np.uint8)
152 |     _, scribbles = cv2.threshold(W, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
153 |     scribbles = (scribbles / 255).astype(np.float64)
154 |     scribbles_3d = np.stack((scribbles, scribbles, scribbles), axis=-1)
155 |     si_mattes = np.array([
156 |         closed_form_matting_with_scribbles(np.stack((images[k], images[k], images[k]), axis=-1), scribbles_3d)
157 |         for k in range(images.shape[0])
158 |     ])
159 |     return np.median(si_mattes, axis=0)
160 |
161 |
162 | ###########
163 | # OWN WORK
164 | ###########
--------------------------------------------------------------------------------
/Dev/watermark_removal.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import os
3 | import argparse
4 | import numpy as np
5 | from tqdm import tqdm
6 |
7 | import initial_estimation
8 | import single_image_matting
9 | import image_watermark_decomposition
10 | import blend_factor_estimation
11 | import matte_update
12 |
13 | from common import *
14 | from image_watermark_decomposition import derivative
15 |
16 |
17 | def load_image(image_path, color=cv2.IMREAD_GRAYSCALE):
18 |     img = cv2.imread(image_path, color)
19 |     return img
20 |
21 |
22 | def load_images(directory):
23 |     image_paths = [os.path.join(directory, f) for f in sorted(os.listdir(directory))]  # sorted for a deterministic image order
24 |     images = np.array([load_image(i) for i in image_paths])
25 |     return images
26 |
27 |
28 | def save_image(image_path, image):
29 |     cv2.imwrite(image_path, image)
30 |
31 |
32 | def show_image(image):
33 |     # im_255 = 255 * (image - np.min(image)) / (np.max(image) - np.min(image))
34 |     plt.imshow(image, cmap="gray")
35 |     plt.show()
36 |
37 |
38 | def normalize(image):
39 |     return 1 * (image - np.min(image)) / (np.max(image) - np.min(image))
40 |
41 |
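# The per-image formation model assumed throughout this pipeline (following the Dekel et al.
# paper referenced in the README) is
#
#     J_k = alpha * W + (1 - alpha) * I_k
#
# where J_k is the k-th watermarked image, W the shared watermark, alpha the shared blending
# matte and I_k the clean image. For a single pixel this is just a convex blend, e.g. with
# alpha = 0.4, W = 0.9 and I_k = 0.2 the observed value is 0.4 * 0.9 + 0.6 * 0.2 = 0.48.
# optimization() below alternates three steps per iteration: (1) decompose every image into
# per-image estimates (W_k, I_k) given the current global W and alpha, (2) update the global
# W as the per-pixel median of the W_k, and (3) update the matte alpha given the new W.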
42 | def optimization(W, alpha, images, itterations):
43 |     W_hat = np.array(W)
44 |     W_k = np.empty([images.shape[0], W.shape[0], W.shape[1]])  # Per-image Watermark estimation
45 |     I_k = np.empty([images.shape[0], W.shape[0], W.shape[1]])  # Per-image original image estimation
46 |
47 |     for i in range(W_k.shape[0]):
48 |         W_k[i] = W.copy()
49 |         I_k[i] = images[i] - W
50 |
51 |     debug(print, "Computing derivative...")
52 |     D_x = derivative(np.diag(alpha.ravel()).shape, 0)
53 |     D_y = derivative(np.diag(alpha.ravel()).shape, 1)
54 |
55 |     for itt in range(itterations):
56 |         # 1. Image Watermark Decomposition
57 |         for i in tqdm(range(images.shape[0]), desc="Iteration {}".format(itt + 1), leave=False):
58 |             J = images[i]
59 |             W_k[i], I_k[i] = image_watermark_decomposition.image_watermark_decomposition(J, I_k[i], W_k[i], W, W_hat,
60 |                                                                                          alpha, D_x, D_y)
61 |
62 |         # 2. Watermark Update
63 |         W = np.median(W_k, axis=0)
64 |
65 |         # 3. Matte Update
66 |         alpha = matte_update.matte_update(images, I_k, W_k, W, W_hat, alpha, D_x, D_y)
67 |     return W, alpha
68 |
69 |
70 | def subtract_watermarks(path, images, watermark, alpha, before):
71 |     if before:
72 |         save_path = path + '/subtraction/before_optimization/'
73 |     else:
74 |         save_path = path + '/subtraction/after_optimization/'
75 |
76 |     for i in range(images.shape[0]):
77 |         save_image(save_path + "{0:05}.png".format(i), (images[i] - watermark * alpha) * 255)
78 |
79 |
80 | def invert_watermark(path, images, watermark, alpha, before):
81 |     if before:
82 |         save_path = path + '/invertion/before_optimization/'
83 |     else:
84 |         save_path = path + '/invertion/after_optimization/'
85 |
86 |     for i in range(images.shape[0]):
87 |         save_image(save_path + "{0:05}.png".format(i), ((images[i] - watermark * alpha) / (1 - alpha)) * 255)
88 |
89 |
90 | def remove_watermarks(images, itterations=1):
91 |     # determine poisson estimate (initial watermark guess)
92 |     poisson_est = initial_estimation.initial_estimation(images)
93 |     poisson_est = normalize(poisson_est)
94 |
95 |     # determine the matte (alpha)
96 |     matte = single_image_matting.single_image_matting(images, poisson_est)
97 |     # calculates blend factor (C)
98 |     blend_factor, est_IK = blend_factor_estimation.blend_factor_estimation(images, poisson_est, matte)
99 |     matte = matte * blend_factor
100 |
101 |     # before optimization results for subtraction and invertion
102 |     subtract_watermarks(save_path, images, poisson_est, matte, before=True)
103 |     invert_watermark(save_path, images, poisson_est, matte, before=True)
104 |
105 |     debug(show_image, matte)
106 |     debug(show_image, poisson_est)
107 |
108 |     images = images[0:50]
109 |     watermark, alpha = optimization(poisson_est, matte, images, itterations)  # watermark estimate first, then the alpha matte
110 |
111 |     debug(show_image, watermark)
112 |     debug(show_image, alpha)
113 |
114 |     # after optimization results for subtraction and invertion
115 |     subtract_watermarks(save_path, images, watermark, alpha, before=False)
116 |     invert_watermark(save_path, images, watermark, alpha, before=False)
117 |
118 |
119 | def parse_args():
120 |     parser = argparse.ArgumentParser()
121 |     parser.add_argument("dataset", help="Path to a watermarked image dataset", type=str)
122 |     args = parser.parse_args()
123 |     return args
124 |
125 |
126 | def mkdirs(dataset):
127 |     dir_to_create = '../Results/' + dataset.split('/')[2] + '/' + dataset.split('/')[3]
128 |     subdir1 = dir_to_create + '/subtraction/before_optimization'
129 |     subdir2 = dir_to_create + '/subtraction/after_optimization'
130 |     subdir3 = dir_to_create + '/invertion/before_optimization'
131 |     subdir4 = dir_to_create + '/invertion/after_optimization'
132 |     if not os.path.exists(subdir1):
133 |         os.makedirs(subdir1)
134 |     if not os.path.exists(subdir2):
135 |         os.makedirs(subdir2)
136 |     if not os.path.exists(subdir3):
137 |         os.makedirs(subdir3)
138 |     if not os.path.exists(subdir4):
139 |         os.makedirs(subdir4)
140 |     return dir_to_create
141 |
142 |
143 | if __name__ == "__main__":
144 |     args = parse_args()
145 |     save_path = mkdirs(args.dataset)
146 |     images = load_images(args.dataset)
147 |     images = normalize(images)
148 |     remove_watermarks(images)
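# Example invocation (the dataset path is a placeholder): mkdirs() builds the results
# directory from dataset.split('/')[2] and [3], so the dataset argument is expected to look
# like ../<data_dir>/<collection>/<subset>, e.g.
#
#   python watermark_removal.py ../datasets/fotolia/train
#
# Outputs are then written under ../Results/<collection>/<subset>/{subtraction,invertion}/.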
149 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## Exploiting Watermark Consistency
2 |
3 | #### *Attempt* at replicating Dekel et al.: [On the Effectiveness of Visible Watermarks](http://openaccess.thecvf.com/content_cvpr_2017/papers/Dekel_On_the_Effectiveness_CVPR_2017_paper.pdf)
4 |
5 | ##### This repository contains our source code and implementation details, along with a thorough quantitative and qualitative evaluation of the algorithm, presented in an IEEE conference-style report. While the algorithm does not perform as desired, it was a great project to work on and a great learning experience!
6 |
7 | ___
8 | ### Sample Results
9 |
10 | Watermarked Images | Our Results | Expected Results
11 | :-------------------------:|:-------------------------:|:-------------------------:
12 | ![](./images/0imagenet_wm.png) | ![](./images/0imagenet_rc.png) | ![](./images/0imagenet_gt.png)
13 | ![](./images/1imagenet_wm.png) | ![](./images/1imagenet_rc.png) | ![](./images/1imagenet_gt.png)
14 | ![](./images/1fotolia_wm.png) | ![](./images/1fotolia_rc.png) | ![](./images/1fotolia_gt.png)
15 | ![](./images/2fotolia_wm.png) | ![](./images/2fotolia_rc.png) | ![](./images/2fotolia_gt.png)
16 |
17 | ___
18 | ###### *please see code + report for citations of work we used during development :)*
19 |
--------------------------------------------------------------------------------
/Report.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sukhdip-Sandhu/Automatic-Watermark-Removal/0b856161e48082c17976806a423b24d5ca2bd63f/Report.pdf
--------------------------------------------------------------------------------
/images/0imagenet_gt.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sukhdip-Sandhu/Automatic-Watermark-Removal/0b856161e48082c17976806a423b24d5ca2bd63f/images/0imagenet_gt.png
--------------------------------------------------------------------------------
/images/0imagenet_rc.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sukhdip-Sandhu/Automatic-Watermark-Removal/0b856161e48082c17976806a423b24d5ca2bd63f/images/0imagenet_rc.png
--------------------------------------------------------------------------------
/images/0imagenet_wm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sukhdip-Sandhu/Automatic-Watermark-Removal/0b856161e48082c17976806a423b24d5ca2bd63f/images/0imagenet_wm.png
--------------------------------------------------------------------------------
/images/1fotolia_gt.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sukhdip-Sandhu/Automatic-Watermark-Removal/0b856161e48082c17976806a423b24d5ca2bd63f/images/1fotolia_gt.png
--------------------------------------------------------------------------------
/images/1fotolia_rc.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sukhdip-Sandhu/Automatic-Watermark-Removal/0b856161e48082c17976806a423b24d5ca2bd63f/images/1fotolia_rc.png
--------------------------------------------------------------------------------
/images/1fotolia_wm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sukhdip-Sandhu/Automatic-Watermark-Removal/0b856161e48082c17976806a423b24d5ca2bd63f/images/1fotolia_wm.png
--------------------------------------------------------------------------------
/images/1imagenet_gt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Sukhdip-Sandhu/Automatic-Watermark-Removal/0b856161e48082c17976806a423b24d5ca2bd63f/images/1imagenet_gt.png -------------------------------------------------------------------------------- /images/1imagenet_rc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Sukhdip-Sandhu/Automatic-Watermark-Removal/0b856161e48082c17976806a423b24d5ca2bd63f/images/1imagenet_rc.png -------------------------------------------------------------------------------- /images/1imagenet_wm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Sukhdip-Sandhu/Automatic-Watermark-Removal/0b856161e48082c17976806a423b24d5ca2bd63f/images/1imagenet_wm.png -------------------------------------------------------------------------------- /images/2fotolia_gt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Sukhdip-Sandhu/Automatic-Watermark-Removal/0b856161e48082c17976806a423b24d5ca2bd63f/images/2fotolia_gt.png -------------------------------------------------------------------------------- /images/2fotolia_rc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Sukhdip-Sandhu/Automatic-Watermark-Removal/0b856161e48082c17976806a423b24d5ca2bd63f/images/2fotolia_rc.png -------------------------------------------------------------------------------- /images/2fotolia_wm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Sukhdip-Sandhu/Automatic-Watermark-Removal/0b856161e48082c17976806a423b24d5ca2bd63f/images/2fotolia_wm.png --------------------------------------------------------------------------------