[WIP] Grad-CAM in Keras
import numpy as np
import tensorflow as tf

K = tf.keras.backend


class GradCam(object):
    def __init__(self, model, loss=tf.keras.losses.CategoricalCrossentropy()):
        self._model = model
        # Placeholder for the one-hot target class vector, fed in at call time.
        self._loss_model_truth = tf.keras.layers.Input((model.layers[-1].output.shape[-1],))
        # Symbolic loss between the fed target vector and the model's prediction.
        self._loss_model = loss(self._loss_model_truth, model.layers[-1].output)

    def run(self, X, target_class_vector, target_layer):
        # K.gradients / K.function are graph-mode APIs; on TF 2.x they require
        # eager execution to be disabled.
        conv_output = self._model.get_layer(name=target_layer).output
        grads = K.gradients(self._loss_model, conv_output)[0]
        gradient_function = K.function(
            [self._model.layers[0].input, self._loss_model_truth],
            [conv_output, grads]
        )

        output, grads_val = gradient_function(
            [X[np.newaxis, :], target_class_vector[np.newaxis, :]]
        )
        output, grads_val = output[0], grads_val[0]

        # Global average pooling of the gradients gives one weight per channel.
        gradient_weight = np.mean(grads_val, axis=(0, 1))
        # Weight the feature maps by those gradients and apply ReLU.
        score = output * gradient_weight
        score = score * (score > 0).astype(np.float32)
        return score
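
A minimal usage sketch under assumptions not in the gist: a pretrained ResNet50 from tf.keras.applications stands in for the model, its last convolutional block output 'conv5_block3_out' is used as target_layer, and a random array stands in for a preprocessed 224x224 RGB image. Because K.gradients and K.function are graph-mode APIs, eager execution is disabled before the model is built on TF 2.x.

import numpy as np
import tensorflow as tf

tf.compat.v1.disable_eager_execution()  # required for K.gradients / K.function on TF 2.x

# Hypothetical example model and inputs, not part of the gist.
model = tf.keras.applications.ResNet50(weights='imagenet')
x = np.random.rand(224, 224, 3).astype(np.float32)   # stand-in for a preprocessed image
target = np.zeros(1000, dtype=np.float32)
target[282] = 1.0                                     # one-hot vector for the class of interest

cam = GradCam(model)
score = cam.run(x, target, target_layer='conv5_block3_out')
heatmap = score.sum(axis=-1)                          # collapse channels to a 2D Grad-CAM map
print(heatmap.shape)                                  # (7, 7) for ResNet50's last conv block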