Skip to content

Instantly share code, notes, and snippets.

@srikarplus
Created October 9, 2018 01:53
Show Gist options
  • Save srikarplus/8ded43dc79736c559dced7dbdff5ecbc to your computer and use it in GitHub Desktop.
gradient checking
def checkGradient(nn_initial_params,nn_backprop_Params,input_layer_size, hidden_layer_size, num_labels,myX,myy,mylambda=0.):
    """Spot-check backprop gradients against numerical finite-difference gradients.

    Randomly samples elements of the flattened parameter vector, perturbs each
    by +/- eps, and compares the central-difference estimate of the cost
    gradient with the corresponding backprop gradient. Results are printed
    side by side for visual inspection; nothing is returned.

    Parameters
    ----------
    nn_initial_params : 1-D array of flattened network parameters (theta).
    nn_backprop_Params : 1-D array of flattened gradients from backprop (D's).
    input_layer_size, hidden_layer_size, num_labels :
        network architecture, forwarded unchanged to nnCostFunc.
    myX, myy : training inputs and labels, forwarded unchanged to nnCostFunc.
    mylambda : regularization strength forwarded to nnCostFunc (default 0).
    """
    myeps = 0.0001
    flattened = nn_initial_params
    flattenedDs = nn_backprop_Params
    n_elems = len(flattened)
    # Pick ten random elements, compute numerical gradient, compare to respective D's
    for i in range(10):
        # np.random.randint draws a uniform index directly; clearer than the
        # original int(np.random.rand()*n_elems) idiom.
        x = np.random.randint(n_elems)
        # One-hot perturbation vector: only element x moves by +/- eps.
        epsvec = np.zeros((n_elems,1))
        epsvec[x] = myeps
        cost_high = nnCostFunc(flattened + epsvec.flatten(),input_layer_size, hidden_layer_size, num_labels,myX,myy,mylambda)
        cost_low  = nnCostFunc(flattened - epsvec.flatten(),input_layer_size, hidden_layer_size, num_labels,myX,myy,mylambda)
        # Central difference: (J(theta+eps) - J(theta-eps)) / (2*eps)
        mygrad = (cost_high - cost_low) / float(2*myeps)
        print("Element: {0}. Numerical Gradient = {1:.9f}. BackProp Gradient = {2:.9f}.".format(x,mygrad,flattenedDs[x]))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment