import tensorflow as tf


def prelu(x, scope, decoder=False):
    '''
    Performs the parametric ReLU (prelu) operation. This implementation is based on:
    https://stackoverflow.com/questions/39975676/how-to-implement-prelu-activation-in-tensorflow
    For the decoder portion, prelu becomes just a normal relu.

    INPUTS:
    - x (Tensor): a 4D Tensor that undergoes prelu.
    - scope (str): the string used to name the prelu operation's alpha variable.
    - decoder (bool): if True, prelu becomes a normal relu.

    OUTPUTS:
    - pos + neg or relu(x) (Tensor): the prelu output, or a plain relu output
      when decoder is True.
    '''
    # If decoder, perform a plain relu and return the output.
    if decoder:
        return tf.nn.relu(x, name=scope)

    # One learnable slope per channel, initialized to 0 (so prelu starts out as relu).
    alpha = tf.expand_dims(tf.get_variable(scope + 'alpha', x.get_shape()[-1],
                                           initializer=tf.constant_initializer(0.0),
                                           dtype=tf.float32), axis=0)
    pos = tf.nn.relu(x)
    # Alternative formulations, kept for reference. The Movidius toolkit does
    # not support tf.abs, log, pow, greater, elementwise division (realdiv),
    # reciprocal, or gather_nd, which rules out the first two:
    #   neg = alpha * (x - abs(x)) * 0.5
    #   neg = alpha * tf.cast(tf.greater(0.0, x), tf.float32) * x
    # A Newton-iteration approximation of abs(x) = sqrt(x*x) also fails,
    # since tf.div lowers to the unsupported realdiv:
    #   squared = x * x
    #   sqrted = squared * 0.25 + 1
    #   for i in range(10):
    #       sqrted = 0.5 * (sqrted + tf.div(squared, sqrted))
    #   neg = alpha * (x - sqrted) * 0.5
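    # The formulation below uses only relu and multiply, both supported on the
    # Movidius hardware. Note that tf.nn.relu(-x) equals -min(0, x), so alpha
    # here learns the negation of the standard PReLU slope; since alpha starts
    # at 0, training simply absorbs the sign flip.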
    neg = alpha * tf.nn.relu(tf.constant(-1.0) * x)
    return pos + neg
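

# Minimal usage sketch, assuming TensorFlow 1.x graph mode (matching the
# tf.get_variable / tf.constant_initializer calls above). The input shape
# and scope prefixes here are illustrative assumptions.
if __name__ == '__main__':
    import numpy as np

    inputs = tf.placeholder(tf.float32, shape=[None, 32, 32, 64])
    enc = prelu(inputs, scope='enc1_')                # learnable per-channel slope
    dec = prelu(inputs, scope='dec1_', decoder=True)  # plain relu, no variable created

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        batch = np.random.randn(1, 32, 32, 64).astype(np.float32)
        out = sess.run(enc, feed_dict={inputs: batch})
        print(out.shape)  # (1, 32, 32, 64)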