@imenurok
Last active November 28, 2019 05:14
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import chainer
import chainer.functions as F
import chainer.links as L
import numpy as np
# Filter Response Normalization (FRN) and Thresholded Linear Unit (TLU),
# as described in "Filter Response Normalization Layer" (Singh & Krishnan, 2019).
class FRN(chainer.Chain):

    def __init__(self, in_c):
        # small constant added to the learned epsilon for numerical stability
        self.eps = 1e-6
        super(FRN, self).__init__(
            _gamma=L.Parameter(np.ones((1, in_c, 1, 1), dtype=np.float32)),
            _beta=L.Parameter(np.zeros((1, in_c, 1, 1), dtype=np.float32)),
            _eps=L.Parameter(np.zeros((1, 1, 1, 1), dtype=np.float32)),
        )

    def __call__(self, x):
        # nu^2: mean squared activation per channel over the spatial axes (H, W)
        nu2 = F.mean(x * x, axis=(2, 3), keepdims=True)
        # learned epsilon, kept positive via the absolute value
        _eps = F.broadcast_to(self.eps + F.absolute(self._eps()), nu2.shape)
        # normalize by the root mean square, then apply the per-channel affine transform
        x_hat = x / F.broadcast_to(F.sqrt(nu2 + _eps), x.shape)
        _gamma = F.broadcast_to(self._gamma(), x_hat.shape)
        _beta = F.broadcast_to(self._beta(), x_hat.shape)
        h = _gamma * x_hat + _beta
        return h


class TLU(chainer.Chain):

    def __init__(self, in_c):
        super(TLU, self).__init__(
            _tau=L.Parameter(np.zeros((1, in_c, 1, 1), dtype=np.float32)),
        )

    def __call__(self, x):
        # thresholded linear unit: max(x, tau) with a learned per-channel threshold tau
        _tau = F.broadcast_to(self._tau(), x.shape)
        h = F.relu(x - _tau) + _tau
        return h
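

# ------------------------------------------------------------------
# Minimal usage sketch (not part of the original gist): FRN + TLU are
# intended to replace a BatchNorm + ReLU pair after a convolution.
# `ConvFRNBlock` and the 3->16 channel sizes below are illustrative
# assumptions, not something defined by the original code.
class ConvFRNBlock(chainer.Chain):

    def __init__(self, in_c, out_c):
        super(ConvFRNBlock, self).__init__(
            conv=L.Convolution2D(in_c, out_c, ksize=3, pad=1),
            frn=FRN(out_c),
            tlu=TLU(out_c),
        )

    def __call__(self, x):
        return self.tlu(self.frn(self.conv(x)))


if __name__ == '__main__':
    # NCHW input: batch of 2, 3 channels, 32x32 spatial size
    x = np.random.randn(2, 3, 32, 32).astype(np.float32)
    block = ConvFRNBlock(3, 16)
    y = block(x)
    print(y.shape)  # (2, 16, 32, 32)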