@khanhnamle1994 · Created May 21, 2018
Implementation of RNN Layers
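
This layer implements a single time step of a vanilla RNN: the new hidden state is s_t = tanh(U·x_t + W·s_{t-1}), and the (pre-softmax) output is o_t = V·s_t. The forward pass caches each intermediate result; the backward pass replays the forward computation and then pushes gradients back through each gate in reverse order, returning the gradient for the previous hidden state together with the parameter gradients dU, dW, dV.
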
# MultiplyGate, AddGate, and Tanh are defined elsewhere in the project
# (minimal sketches consistent with this usage are given below).
mulGate = MultiplyGate()
addGate = AddGate()
activation = Tanh()

class RNNLayer:
    def forward(self, x, prev_s, U, W, V):
        # One time step of a vanilla RNN:
        #   s = tanh(U.x + W.prev_s), output pre-activation mulv = V.s
        self.mulu = mulGate.forward(U, x)
        self.mulw = mulGate.forward(W, prev_s)
        self.add = addGate.forward(self.mulw, self.mulu)
        self.s = activation.forward(self.add)
        self.mulv = mulGate.forward(V, self.s)

    def backward(self, x, prev_s, U, W, V, diff_s, dmulv):
        # Recompute the forward pass to repopulate the cached
        # intermediates, then backpropagate through each gate in reverse.
        self.forward(x, prev_s, U, W, V)
        dV, dsv = mulGate.backward(V, self.s, dmulv)
        # Hidden-state gradient: the output branch (dsv) plus the gradient
        # flowing back from the next time step (diff_s).
        ds = dsv + diff_s
        dadd = activation.backward(self.add, ds)
        dmulw, dmulu = addGate.backward(self.mulw, self.mulu, dadd)
        dW, dprev_s = mulGate.backward(W, prev_s, dmulw)
        dU, dx = mulGate.backward(U, x, dmulu)
        return (dprev_s, dU, dW, dV)
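
The gate objects are not defined in this gist. Below is a minimal sketch, assuming each gate follows the calling convention used above (forward(W, x) as a matrix-vector product, backward taking the cached inputs plus the upstream gradient and returning gradients in the same argument order); the original project's definitions may differ in detail:

import numpy as np

class MultiplyGate:
    def forward(self, W, x):
        # Matrix-vector product W.x
        return np.dot(W, x)

    def backward(self, W, x, dz):
        # dz is the upstream gradient; return (dW, dx)
        dW = np.outer(dz, x)
        dx = np.dot(np.transpose(W), dz)
        return dW, dx

class AddGate:
    def forward(self, x1, x2):
        return x1 + x2

    def backward(self, x1, x2, dz):
        # Addition routes the gradient through unchanged to both inputs
        return dz * np.ones_like(x1), dz * np.ones_like(x2)

class Tanh:
    def forward(self, x):
        return np.tanh(x)

    def backward(self, x, top_diff):
        # d/dx tanh(x) = 1 - tanh(x)^2, evaluated at the cached input
        output = self.forward(x)
        return (1.0 - output ** 2) * top_diff
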
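As a usage sketch with made-up dimensions and random data (hidden_dim, in_dim, out_dim and the toy sequence below are illustrative assumptions, not part of the gist), the layer can be unrolled over a sequence by feeding each step's hidden state s into the next step as prev_s:

import numpy as np

np.random.seed(0)
hidden_dim, in_dim, out_dim = 4, 3, 2               # illustrative sizes
U = 0.1 * np.random.randn(hidden_dim, in_dim)       # input-to-hidden weights
W = 0.1 * np.random.randn(hidden_dim, hidden_dim)   # hidden-to-hidden weights
V = 0.1 * np.random.randn(out_dim, hidden_dim)      # hidden-to-output weights

xs = [np.random.randn(in_dim) for _ in range(5)]    # toy input sequence
prev_s = np.zeros(hidden_dim)
layers = []
for x in xs:
    layer = RNNLayer()
    layer.forward(x, prev_s, U, W, V)
    prev_s = layer.s    # carry the hidden state to the next time step
    layers.append(layer)

print(layers[-1].mulv)  # output pre-activation at the final step

Keeping one RNNLayer object per time step mirrors how backpropagation through time is usually driven from here: iterate over the layers in reverse, passing each step's dprev_s back as the next call's diff_s.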