@valgur · created April 15, 2019
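This patch renames the custom LeakyRelu op in lmbspecialops to LeakyReluLmb, presumably to avoid an op-registration clash with the LeakyRelu op that newer TensorFlow releases provide out of the box. The C++ side (src/leakyrelu.cc) updates the op registrations, kernel classes, and kernel-builder macros; the Python side (python/lmbspecialops.py) updates the module alias and the gradient registration to match, while keeping the public name leaky_relu unchanged.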
--- src/leakyrelu.cc (revision 830d5ef7a5b1f33de2aa95405787c03e4de77306)
+++ src/leakyrelu.cc (date 1554811334000)
@@ -22,7 +22,7 @@
 using namespace tensorflow;
-REGISTER_OP("LeakyRelu")
+REGISTER_OP("LeakyReluLmb")
     .Attr("T: {float, double}")
     .Attr("leak: float = 0.1")
     .Input("input: T")
@@ -48,10 +48,10 @@
 template <class T>
-class LeakyReluOp : public OpKernel
+class LeakyReluLmbOp : public OpKernel
 {
 public:
-    explicit LeakyReluOp(OpKernelConstruction* construction)
+    explicit LeakyReluLmbOp(OpKernelConstruction* construction)
         :OpKernel(construction)
     {
         float leak_tmp;
@@ -87,10 +87,10 @@
 #define REG_KB(type) \
     REGISTER_KERNEL_BUILDER( \
-        Name("LeakyRelu") \
+        Name("LeakyReluLmb") \
             .Device(DEVICE_CPU) \
             .TypeConstraint<type>("T"), \
-        LeakyReluOp<type>);
+        LeakyReluLmbOp<type>);
 REG_KB(float)
 REG_KB(double)
 #undef REG_KB
@@ -98,7 +98,7 @@
-REGISTER_OP("LeakyReluGrad")
+REGISTER_OP("LeakyReluLmbGrad")
     .Attr("T: {float, double}")
     .Attr("leak: float")
     .Input("gradients: T")
@@ -110,15 +110,15 @@
         return Status::OK();
     })
     .Doc(R"doc(
-This computes the gradient for the op 'LeakyRelu'.
+This computes the gradient for the op 'LeakyReluLmb'.
 )doc");
 template <class T>
-class LeakyReluGradOp : public OpKernel
+class LeakyReluLmbGradOp : public OpKernel
 {
 public:
-    explicit LeakyReluGradOp(OpKernelConstruction* construction)
+    explicit LeakyReluLmbGradOp(OpKernelConstruction* construction)
         :OpKernel(construction)
     {
         float leak_tmp;
@@ -162,10 +162,10 @@
 #define REG_KB(type) \
     REGISTER_KERNEL_BUILDER( \
-        Name("LeakyReluGrad") \
+        Name("LeakyReluLmbGrad") \
             .Device(DEVICE_CPU) \
             .TypeConstraint<type>("T"), \
-        LeakyReluGradOp<type>);
+        LeakyReluLmbGradOp<type>);
 REG_KB(float)
 REG_KB(double)
 #undef REG_KB
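The Compute() bodies are unchanged by the patch and do not appear in the hunks above. For orientation only, here is a NumPy sketch of standard leaky-ReLU semantics consistent with the declared attributes (default leak of 0.1); it illustrates the expected behavior and is not code from the repository:

import numpy as np

def leaky_relu_reference(x, leak=0.1):
    # Identity for positive inputs, scaled by `leak` otherwise:
    # the behavior implied by Attr("leak: float = 0.1") above.
    return np.where(x > 0, x, leak * x)

def leaky_relu_grad_reference(gradients, x, leak=0.1):
    # Mirrors the LeakyReluLmbGrad inputs: incoming gradients pass through
    # where the input was positive and are scaled by `leak` elsewhere.
    return np.where(x > 0, gradients, leak * gradients)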
--- python/lmbspecialops.py (revision 830d5ef7a5b1f33de2aa95405787c03e4de77306)
+++ python/lmbspecialops.py (date 1554815516000)
@@ -35,7 +35,7 @@
 depth_to_flow = lmbspecialopslib.depth_to_flow
 depth_to_normals = lmbspecialopslib.depth_to_normals
 flow_to_depth2 = lmbspecialopslib.flow_to_depth2
-leaky_relu = lmbspecialopslib.leaky_relu
+leaky_relu = lmbspecialopslib.leaky_relu_lmb
 median3x3_downsample = lmbspecialopslib.median3x3_downsample
 replace_nonfinite = lmbspecialopslib.replace_nonfinite
 scale_invariant_gradient = lmbspecialopslib.scale_invariant_gradient
@@ -316,9 +316,9 @@
         input=op.inputs[0])
-@ops.RegisterGradient("LeakyRelu")
-def _leaky_relu_grad(op, grad):
-    return lmbspecialopslib.leaky_relu_grad(
+@ops.RegisterGradient("LeakyReluLmb")
+def _leaky_relu_lmb_grad(op, grad):
+    return lmbspecialopslib.leaky_relu_lmb_grad(
         gradients=grad,
         input=op.inputs[0],
         leak=op.get_attr('leak'))
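Because the module-level alias leaky_relu is preserved, downstream code is unaffected by the rename. A hypothetical usage sketch, assuming lmbspecialops has been built and is importable under graph-mode TensorFlow 1.x:

import tensorflow as tf
import lmbspecialops as sops

x = tf.constant([-2.0, 0.0, 3.0])
y = sops.leaky_relu(x, leak=0.1)  # forward pass, now backed by "LeakyReluLmb"
dy_dx = tf.gradients(y, x)[0]     # resolved via the "LeakyReluLmb" gradient above

with tf.Session() as sess:
    print(sess.run([y, dy_dx]))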