@davidmarek
Created June 20, 2012 13:01
Gradient checking
import numpy as np


def check_grad(f, fprime, x0):
    """Compare a one-sided finite-difference gradient of f against fprime."""
    eps = 1e-5
    f0 = f(x0)  # baseline value, reused for every coordinate
    approx = np.zeros(len(x0))
    for i in range(len(x0)):
        # Perturb one coordinate at a time and take a forward difference.
        x0_ = x0.copy()
        x0_[i] += eps
        approx[i] = (f(x0_) - f0) / eps
    return np.linalg.norm(approx - fprime(x0).ravel())


def test_backprop_manually():
    # One training example through a tiny network: two inputs feeding a
    # single tanh hidden unit, followed by a tanh output unit.
    X = np.asarray([[0.0, 1.0]])
    y = np.asarray([[1.0]])
    weights = np.asarray([0.3, 0.7, 0.6])

    def function(w):
        w_h = w[:2]  # hidden-layer weights
        w_o = w[2:]  # output-layer weights
        o = np.tanh(np.dot(w_o, np.tanh(np.dot(w_h, X.T)).T))
        loss = 0.5 * (y - o) ** 2
        return loss.item()  # scalar loss for the finite-difference check

    def gradient(w):
        w_h = w[:2]
        w_o = w[2:]
        h = np.tanh(np.dot(w_h, X.T))            # hidden activation
        o = np.tanh(np.dot(w_o, h.T))            # network output
        d_o = (o - y) * (1 - o * o)              # dL/dz_o for L = 0.5 * (y - o)**2
        dwo = np.dot(h.T, d_o)                   # dL/dw_o
        d_h = np.dot(d_o, w_o.T) * (1 - h * h)   # dL/dz_h, backpropagated
        dwh = np.dot(X.T, d_h)                   # dL/dw_h
        return np.append(dwh, dwo)               # flatten into one vector

    print(check_grad(function, gradient, weights))


if __name__ == '__main__':
    test_backprop_manually()
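
The one-sided difference above carries O(eps) truncation error, so even a correct analytic gradient leaves a residual norm on the order of eps. A central-difference variant is O(eps**2)-accurate and typically shrinks the reported norm by several orders of magnitude. The sketch below is illustrative rather than part of the gist; check_grad_central is a hypothetical name, and it can be dropped into test_backprop_manually in place of check_grad.

import numpy as np

def check_grad_central(f, fprime, x0, eps=1e-5):
    # Central differences: perturb each coordinate symmetrically, so the
    # O(eps) term of the Taylor expansion cancels and only O(eps**2) remains.
    approx = np.zeros(len(x0))
    for i in range(len(x0)):
        step = np.zeros_like(x0, dtype=float)
        step[i] = eps
        approx[i] = (f(x0 + step) - f(x0 - step)) / (2 * eps)
    return np.linalg.norm(approx - np.asarray(fprime(x0)).ravel())

As an independent cross-check, scipy.optimize.check_grad(function, gradient, weights) performs the same forward-difference comparison and should report a similarly small norm, since function returns a scalar as SciPy requires.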