guided_relu.py — @oarriaga, forked from falcondai/guided_relu.py (June 20, 2017)
TensorFlow implementation of guided backpropagation through ReLU
import tensorflow as tf
from tensorflow.python.framework import ops
from tensorflow.python.ops import gen_nn_ops

# Guided backpropagation: let the gradient flow only where both the
# incoming gradient and the ReLU activation are positive.
@ops.RegisterGradient("GuidedRelu")
def _GuidedReluGrad(op, grad):
    # relu_grad zeroes the gradient wherever the forward ReLU output
    # (op.outputs[0]) is zero; the surrounding tf.where additionally zeroes
    # it wherever the incoming gradient is negative.
    # Note: tf.select was removed in TF 1.0 in favor of tf.where, and the
    # private gen_nn_ops._relu_grad became gen_nn_ops.relu_grad in later
    # 1.x releases.
    return tf.where(0. < grad,
                    gen_nn_ops.relu_grad(grad, op.outputs[0]),
                    tf.zeros_like(grad))

if __name__ == '__main__':
    with tf.Session() as sess:
        g = tf.get_default_graph()
        x = tf.constant([10., 2.])
        # Swap in the guided gradient for every Relu op created in this scope.
        with g.gradient_override_map({'Relu': 'GuidedRelu'}):
            y = tf.nn.relu(x)
            z = tf.reduce_sum(-y ** 2)
        tf.global_variables_initializer().run()
        print(x.eval(), y.eval(), z.eval(), tf.gradients(z, x)[0].eval())
        # dz/dy = -2y is negative everywhere, so guided backprop zeroes it:
        # > [ 10.  2.] [ 10.  2.] -104.0 [ 0.  0.]
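
For reference, here is a minimal sketch of the same guided-ReLU gradient in TensorFlow 2, where gradient_override_map no longer exists and tf.custom_gradient is the usual mechanism. This is an assumed modernization, not part of the original gist:

import tensorflow as tf

@tf.custom_gradient
def guided_relu(x):
    # Forward pass is a plain ReLU.
    y = tf.nn.relu(x)
    def grad(dy):
        # Pass the gradient only where both the upstream gradient and the
        # ReLU input are positive (guided backpropagation).
        mask = tf.cast(dy > 0., dy.dtype) * tf.cast(x > 0., x.dtype)
        return dy * mask
    return y, grad

x = tf.constant([10., 2.])
with tf.GradientTape() as tape:
    tape.watch(x)
    z = tf.reduce_sum(-guided_relu(x) ** 2)
print(tape.gradient(z, x))  # expected: [0. 0.], matching the TF1 example above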