├── images
│   ├── cat_dog.png
│   ├── doberman.png
│   ├── cat_dog_viz.png
│   └── doberman_viz.png
├── utils.py
├── integrated_gradients.py
├── README.md
├── visual_backprop.py
├── saliency.py
└── guided_backprop.py
/images/cat_dog.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/experiencor/deep-viz-keras/HEAD/images/cat_dog.png
--------------------------------------------------------------------------------
/images/doberman.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/experiencor/deep-viz-keras/HEAD/images/doberman.png
--------------------------------------------------------------------------------
/images/cat_dog_viz.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/experiencor/deep-viz-keras/HEAD/images/cat_dog_viz.png
--------------------------------------------------------------------------------
/images/doberman_viz.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/experiencor/deep-viz-keras/HEAD/images/doberman_viz.png
--------------------------------------------------------------------------------
/utils.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import PIL.Image
3 | import matplotlib.pyplot as plt
4 | 
5 | def show_image(image, grayscale=True, ax=None, title=''):
6 |     if ax is None:
7 |         plt.figure()
8 |     plt.axis('off')
9 | 
10 |     if len(image.shape) == 2 or grayscale:
11 |         if len(image.shape) == 3:
12 |             # collapse the channels so the mask can be rendered in grayscale
13 |             image = np.sum(np.abs(image), axis=2)
14 | 
15 |         vmax = np.percentile(image, 99)
16 |         vmin = np.min(image)
17 | 
18 |         plt.imshow(image, cmap=plt.cm.gray, vmin=vmin, vmax=vmax)
19 |         plt.title(title)
20 |     else:
21 |         # undo the centering applied in load_image before displaying
22 |         image = image + 127.5
23 |         image = image.astype('uint8')
24 | 
25 |         plt.imshow(image)
26 |         plt.title(title)
27 | 
28 | def load_image(file_path):
29 |     im = PIL.Image.open(file_path)
30 |     im = np.asarray(im)
31 | 
32 |     # centre pixel values around zero; show_image(grayscale=False) undoes this
33 |     return im - 127.5
--------------------------------------------------------------------------------
/integrated_gradients.py:
--------------------------------------------------------------------------------
1 | # Copyright 2017 Google Inc. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """Utilities to compute an IntegratedGradients SaliencyMask."""
16 |
17 | import numpy as np
18 | from saliency import GradientSaliency
19 |
20 | class IntegratedGradients(GradientSaliency):
21 |     """A SaliencyMask class that implements the integrated gradients method.
22 | 
23 |     https://arxiv.org/abs/1703.01365
24 |     """
25 | 
26 |     def GetMask(self, input_image, input_baseline=None, nsamples=100):
27 |         """Returns an integrated gradients mask."""
28 |         if input_baseline is None:
29 |             input_baseline = np.zeros_like(input_image)
30 | 
31 |         assert input_baseline.shape == input_image.shape
32 | 
33 |         input_diff = input_image - input_baseline
34 | 
35 |         total_gradients = np.zeros_like(input_image)
36 | 
37 |         # Riemann approximation of the path integral from the baseline to the input
38 |         for alpha in np.linspace(0, 1, nsamples):
39 |             input_step = input_baseline + alpha * input_diff
40 |             total_gradients += super(IntegratedGradients, self).get_mask(input_step)
41 | 
42 |         return total_gradients * input_diff / nsamples  # (input - baseline) * average gradient
43 | 
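44 | # Illustrative usage sketch: `model` and `image` are placeholders for a compiled
45 | # Keras model and an input array shaped like model.input (see the README).
46 | #
47 | #   ig = IntegratedGradients(model)
48 | #   mask = ig.GetMask(image, nsamples=100)   # attribution with the same shape as image
49 | #   show_image(mask)                         # helper from utils.py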
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | This repository contains Keras implementations of several methods for understanding the predictions of a convolutional neural network. The implemented methods are:
3 |
4 | * Vanilla gradient [https://arxiv.org/abs/1312.6034]
5 | * Guided backprop [https://arxiv.org/abs/1412.6806]
6 | * Integrated gradients [https://arxiv.org/abs/1703.01365]
7 | * Visual backprop [https://arxiv.org/abs/1611.05418]
8 |
9 | Each method is accompanied by its SmoothGrad variant [https://arxiv.org/abs/1706.03825], which sharpens any baseline saliency map by averaging the masks of several noise-perturbed copies of the input (a short example appears at the end of the Usage section).
10 |
11 | Courtesy of https://github.com/tensorflow/saliency and https://github.com/mbojarski/VisualBackProp.
12 |
13 | # Examples
14 |
15 | * Dog
16 | 
17 | ![Dog](images/doberman_viz.png)
18 | 
19 | * Dog and Cat
20 | 
21 | ![Dog and Cat](images/cat_dog_viz.png)
22 | 
23 | 
24 | # Usage
25 |
26 | Clone the repository, `cd deep-viz-keras`, and run:
27 |
28 | ```python
29 | from guided_backprop import GuidedBackprop
30 | from utils import *
31 | from keras.applications.vgg16 import VGG16
32 |
33 | # Load the pretrained VGG16 model and make the guided backprop operator
34 | vgg16_model = VGG16(weights='imagenet')
35 | vgg16_model.compile(loss='categorical_crossentropy', optimizer='adam')
36 | guided_bprop = GuidedBackprop(vgg16_model)
37 |
38 | # Load the image and compute the guided gradient
39 | image = load_image('/path/to/image')
40 | mask = guided_bprop.get_mask(image) # compute the gradients
41 | show_image(mask)                    # display the grayscale mask
42 | ```
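43 | 
44 | Every mask class also inherits `get_smoothed_mask` from `SaliencyMask` (see saliency.py), so the SmoothGrad variant of any method is obtained the same way. A minimal sketch, reusing `guided_bprop` and `image` from the example above:
45 | 
46 | ```python
47 | # SmoothGrad: average the masks of several noise-perturbed copies of the input
48 | smoothed_mask = guided_bprop.get_smoothed_mask(image)
49 | show_image(smoothed_mask)
50 | ```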
51 | 
52 | The notebook examples.ipynb demonstrates all of the implemented methods using the VGG16 model built into Keras.
53 | 
54 | # Notes
55 | 
56 | + To compute the gradient of any output w.r.t. any input, see https://github.com/experiencor/deep-viz-keras/issues/5#issuecomment-376452683 (a rough sketch of the idea is shown below).
57 | 
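58 | A minimal sketch of that idea, assuming the TensorFlow backend, a compiled Keras model `model`, and an input batch `x_batch` (all placeholder names); the layer and indices picked here are arbitrary:
59 | 
60 | ```python
61 | import keras.backend as K
62 | 
63 | # any scalar built from any layer's output works as the differentiation target
64 | target = model.layers[-1].output[0, 0]
65 | grads = K.gradients(target, model.input)                         # d(target) / d(input)
66 | compute = K.function([model.input, K.learning_phase()], grads)
67 | gradient = compute([x_batch, 0])[0]                              # 0 = test mode
68 | ```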
--------------------------------------------------------------------------------
/visual_backprop.py:
--------------------------------------------------------------------------------
1 | from saliency import SaliencyMask
2 | import numpy as np
3 | import keras.backend as K
4 | from keras.layers import Input, Conv2DTranspose
5 | from keras.models import Model
6 | from keras.initializers import Ones, Zeros
7 |
8 | class VisualBackprop(SaliencyMask):
9 |     """A SaliencyMask class that computes saliency masks with VisualBackprop (https://arxiv.org/abs/1611.05418).
10 |     """
11 | 
12 |     def __init__(self, model, output_index=0):
13 |         """Constructs a VisualBackprop SaliencyMask."""
14 |         inps = [model.input, K.learning_phase()]         # input placeholder
15 |         outs = [layer.output for layer in model.layers]  # all layer outputs
16 |         self.forward_pass = K.function(inps, outs)       # evaluation function
17 | 
18 |         self.model = model
19 | 
20 |     def get_mask(self, input_image):
21 |         """Returns a VisualBackprop mask."""
22 |         x_value = np.expand_dims(input_image, axis=0)
23 | 
24 |         visual_bpr = None
25 |         layer_outs = self.forward_pass([x_value, 0])
26 | 
27 |         # Walk the layers from top to bottom, combining the normalised average feature maps
28 |         for i in range(len(self.model.layers)-1, -1, -1):
29 |             if 'Conv2D' in str(type(self.model.layers[i])):
30 |                 layer = np.mean(layer_outs[i], axis=3, keepdims=True)
31 |                 layer = layer - np.min(layer)
32 |                 layer = layer/(np.max(layer)-np.min(layer)+1e-6)
33 | 
34 |                 if visual_bpr is not None:
35 |                     if visual_bpr.shape != layer.shape:
36 |                         visual_bpr = self._deconv(visual_bpr)
37 |                     visual_bpr = visual_bpr * layer
38 |                 else:
39 |                     visual_bpr = layer
40 | 
41 |         return visual_bpr[0]
42 | 
43 |     def _deconv(self, feature_map):
44 |         """The deconvolution operation to upsample the averaged feature map downstream."""
45 |         x = Input(shape=(None, None, 1))
46 |         y = Conv2DTranspose(filters=1,
47 |                             kernel_size=(3, 3),
48 |                             strides=(2, 2),
49 |                             padding='same',
50 |                             kernel_initializer=Ones(),
51 |                             bias_initializer=Zeros())(x)
52 | 
53 |         deconv_model = Model(inputs=[x], outputs=[y])
54 | 
55 |         inps = [deconv_model.input, K.learning_phase()]   # input placeholder
56 |         outs = [deconv_model.layers[-1].output]           # output placeholder
57 |         deconv_func = K.function(inps, outs)              # evaluation function
58 | 
59 |         return deconv_func([feature_map, 0])[0]
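60 | 
61 | # Illustrative usage sketch: `model` and `image` are placeholders; the model needs
62 | # Conv2D layers, since only convolutional feature maps are averaged and combined.
63 | #
64 | #   vbp = VisualBackprop(model)
65 | #   mask = vbp.get_mask(image)   # (H, W, 1) relevance map built down through the conv stack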
--------------------------------------------------------------------------------
/saliency.py:
--------------------------------------------------------------------------------
1 | # Copyright 2017 Google Inc. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """Utilities to compute SaliencyMasks."""
16 | import numpy as np
17 | import keras.backend as K
18 |
19 | class SaliencyMask(object):
20 |     """Base class for saliency masks. Alone, this class doesn't do anything."""
21 |     def __init__(self, model, output_index=0):
22 |         """Constructs a SaliencyMask.
23 | 
24 |         Args:
25 |             model: the Keras model used to make predictions
26 |             output_index: the index of the node in the last layer to take the derivative of
27 |         """
28 |         pass
29 | 
30 |     def get_mask(self, input_image):
31 |         """Returns an unsmoothed mask.
32 | 
33 |         Args:
34 |             input_image: input image with shape (H, W, 3).
35 |         """
36 |         pass
37 | 
38 |     def get_smoothed_mask(self, input_image, stdev_spread=.2, nsamples=50):
39 |         """Returns a mask that is smoothed with the SmoothGrad method.
40 | 
41 |         Args:
42 |             input_image: input image with shape (H, W, 3).
43 |         """
44 |         stdev = stdev_spread * (np.max(input_image) - np.min(input_image))
45 | 
46 |         total_gradients = np.zeros_like(input_image)
47 |         for i in range(nsamples):
48 |             noise = np.random.normal(0, stdev, input_image.shape)
49 |             x_value_plus_noise = input_image + noise
50 | 
51 |             total_gradients += self.get_mask(x_value_plus_noise)
52 | 
53 |         return total_gradients / nsamples
54 | 
55 | class GradientSaliency(SaliencyMask):
56 |     r"""A SaliencyMask class that computes saliency masks with a gradient."""
57 | 
58 |     def __init__(self, model, output_index=0):
59 |         # Define the gradient function (the model must be compiled, since the gradients come from its optimizer)
60 |         input_tensors = [model.input,        # placeholder for the input image tensor
61 |                          K.learning_phase(), # placeholder for the mode (train or test)
62 |                         ]
63 |         gradients = model.optimizer.get_gradients(model.output[0][output_index], model.input)
64 |         self.compute_gradients = K.function(inputs=input_tensors, outputs=gradients)
65 | 
66 |     def get_mask(self, input_image):
67 |         """Returns a vanilla gradient mask.
68 | 
69 |         Args:
70 |             input_image: input image with shape (H, W, 3).
71 |         """
72 | 
73 |         # Execute the function to compute the gradient on a single-image batch
74 |         x_value = np.expand_dims(input_image, axis=0)
75 |         gradients = self.compute_gradients([x_value, 0])[0][0]
76 | 
77 |         return gradients
78 | 
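79 | # Illustrative usage sketch: `model` is a placeholder for a compiled Keras model
80 | # (compilation is required because the gradient is taken from model.optimizer)
81 | # and `image` for an input array shaped like model.input.
82 | #
83 | #   vanilla = GradientSaliency(model)
84 | #   mask = vanilla.get_mask(image)               # gradient of output node 0 w.r.t. the input
85 | #   smoothed = vanilla.get_smoothed_mask(image)  # SmoothGrad: average over noisy copies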
--------------------------------------------------------------------------------
/guided_backprop.py:
--------------------------------------------------------------------------------
1 | # Copyright 2017 Google Inc. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """Utilites to computed GuidedBackprop SaliencyMasks"""
16 |
17 | from saliency import SaliencyMask
18 | import numpy as np
19 | import tensorflow as tf
20 | import keras.backend as K
21 | from keras.models import load_model
22 |
23 | class GuidedBackprop(SaliencyMask):
24 |     """A SaliencyMask class that computes saliency masks with GuidedBackprop.
25 | 
26 |     This implementation copies the TensorFlow graph to a new graph with the ReLU
27 |     gradient overwritten as in the paper:
28 |     https://arxiv.org/abs/1412.6806
29 |     """
30 | 
31 |     GuidedReluRegistered = False
32 | 
33 |     def __init__(self, model, output_index=0, custom_loss=None):
34 |         """Constructs a GuidedBackprop SaliencyMask."""
35 | 
36 |         if GuidedBackprop.GuidedReluRegistered is False:
37 |             @tf.RegisterGradient("GuidedRelu")
38 |             def _GuidedReluGrad(op, grad):
39 |                 # pass the gradient only where both the incoming gradient and the ReLU output are positive
40 |                 gate_g = tf.cast(grad > 0, "float32")
41 |                 gate_y = tf.cast(op.outputs[0] > 0, "float32")
42 |                 return gate_y * gate_g * grad
43 |             GuidedBackprop.GuidedReluRegistered = True
44 | 
45 |         """
46 |         Create a dummy session to set the learning phase to 0 (test mode in Keras) without
47 |         interfering with the session in the original Keras model. This is a workaround
48 |         for the problem that tf.gradients raises an error with Keras models that contain
49 |         Dropout or BatchNormalization.
50 | 
51 |         Basic idea: save the Keras model => create a new Keras model with the learning phase set to 0 =>
52 |         save the TensorFlow graph => create a new TensorFlow graph with ReLU replaced by GuidedReLU.
53 |         """
54 |         model.save('/tmp/gb_keras.h5')
55 |         with tf.Graph().as_default():
56 |             with tf.Session().as_default():
57 |                 K.set_learning_phase(0)
58 |                 load_model('/tmp/gb_keras.h5', custom_objects={"custom_loss": custom_loss})
59 |                 session = K.get_session()
60 |                 tf.train.export_meta_graph()
61 | 
62 |                 saver = tf.train.Saver()
63 |                 saver.save(session, '/tmp/guided_backprop_ckpt')
64 | 
65 |         self.guided_graph = tf.Graph()
66 |         with self.guided_graph.as_default():
67 |             self.guided_sess = tf.Session(graph=self.guided_graph)
68 | 
69 |             with self.guided_graph.gradient_override_map({'Relu': 'GuidedRelu'}):
70 |                 saver = tf.train.import_meta_graph('/tmp/guided_backprop_ckpt.meta')
71 |                 saver.restore(self.guided_sess, '/tmp/guided_backprop_ckpt')
72 | 
73 |                 self.imported_y = self.guided_graph.get_tensor_by_name(model.output.name)[0][output_index]
74 |                 self.imported_x = self.guided_graph.get_tensor_by_name(model.input.name)
75 | 
76 |                 self.guided_grads_node = tf.gradients(self.imported_y, self.imported_x)
77 | 
78 |     def get_mask(self, input_image):
79 |         """Returns a GuidedBackprop mask."""
80 |         x_value = np.expand_dims(input_image, axis=0)
81 |         guided_feed_dict = {}
82 |         guided_feed_dict[self.imported_x] = x_value
83 | 
84 |         gradients = self.guided_sess.run(self.guided_grads_node, feed_dict=guided_feed_dict)[0][0]
85 | 
86 |         return gradients
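87 | 
88 | # Worked illustration of the GuidedRelu rule registered above (made-up values):
89 | #   forward ReLU output y = [0.0, 2.0, 3.0], incoming gradient g = [0.5, -1.0, 0.7]
90 | #   gate_y = [0, 1, 1], gate_g = [1, 0, 1]  =>  guided gradient = [0.0, 0.0, 0.7]
91 | # i.e. gradients flow back only where both the activation and the gradient are positive.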
--------------------------------------------------------------------------------