@muhammadgaffar
Created October 24, 2018 15:41
import numpy as np

# Relies on helpers defined elsewhere in the assignment: dictionary_to_vector,
# gradients_to_vector, vector_to_dictionary, L_model_forward, compute_cost.

def gradient_check_n(parameters, gradients, X, Y, epsilon=1e-7):
    """Compare backprop gradients against a centered finite-difference estimate."""
    # Flatten the parameter and gradient dictionaries into column vectors.
    parameters_values, _ = dictionary_to_vector(parameters)
    grad = gradients_to_vector(gradients)
    num_parameters = parameters_values.shape[0]
    J_plus = np.zeros((num_parameters, 1))
    J_minus = np.zeros((num_parameters, 1))
    gradapprox = np.zeros((num_parameters, 1))

    # Compute gradapprox: perturb each parameter by +/- epsilon and take the
    # centered difference (J_plus - J_minus) / (2 * epsilon).
    for i in range(num_parameters):
        thetaplus = np.copy(parameters_values)
        thetaplus[i][0] = thetaplus[i][0] + epsilon
        AL, caches = L_model_forward(X, vector_to_dictionary(thetaplus, parameters))
        J_plus[i] = compute_cost(AL, Y)

        thetaminus = np.copy(parameters_values)
        thetaminus[i][0] = thetaminus[i][0] - epsilon
        AL, caches = L_model_forward(X, vector_to_dictionary(thetaminus, parameters))
        J_minus[i] = compute_cost(AL, Y)

        gradapprox[i] = (J_plus[i] - J_minus[i]) / (2 * epsilon)

    # Normalized distance: ||gradapprox - grad|| / (||gradapprox|| + ||grad||).
    numerator = np.linalg.norm(gradapprox - grad)
    denominator = np.linalg.norm(gradapprox) + np.linalg.norm(grad)
    difference = numerator / denominator

    if difference > 2e-7:
        print("\033[93m" + "There is a mistake in the backward propagation! difference = " + str(difference) + "\033[0m")
    else:
        print("\033[92m" + "Your backward propagation works perfectly fine! difference = " + str(difference) + "\033[0m")
    return difference
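
For context, a minimal usage sketch follows. It assumes the surrounding assignment defines the helpers this gist calls, plus a backprop routine; L_model_backward is an assumed name for that routine, and the call shapes here are illustrative, not confirmed by this gist.

# Hypothetical usage (L_model_backward is an assumed helper name):
AL, caches = L_model_forward(X, parameters)                  # forward pass
gradients = L_model_backward(AL, Y, caches)                  # analytic gradients
difference = gradient_check_n(parameters, gradients, X, Y, epsilon=1e-7)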
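
As a sanity check of the technique itself, here is a self-contained sketch of the same two-sided gradient check applied to a single logistic-regression unit. It needs none of the course helpers; every name, shape, and value in it is illustrative.

import numpy as np

def cost(theta, X, Y):
    # Binary cross-entropy for a linear model with sigmoid activation.
    A = 1 / (1 + np.exp(-(theta.T @ X)))
    return float(-np.mean(Y * np.log(A) + (1 - Y) * np.log(1 - A)))

def grad(theta, X, Y):
    # Analytic gradient of the cross-entropy cost above: X (A - Y)^T / m.
    A = 1 / (1 + np.exp(-(theta.T @ X)))
    return (X @ (A - Y).T) / X.shape[1]

rng = np.random.default_rng(0)
X = rng.standard_normal((3, 10))                 # 3 features, 10 examples
Y = (rng.random((1, 10)) > 0.5).astype(float)    # random binary labels
theta = rng.standard_normal((3, 1))

eps = 1e-7
approx = np.zeros_like(theta)
for i in range(theta.shape[0]):
    tp, tm = theta.copy(), theta.copy()
    tp[i, 0] += eps
    tm[i, 0] -= eps
    approx[i, 0] = (cost(tp, X, Y) - cost(tm, X, Y)) / (2 * eps)

g = grad(theta, X, Y)
diff = np.linalg.norm(approx - g) / (np.linalg.norm(approx) + np.linalg.norm(g))
print(diff)  # expected to be well below the 2e-7 threshold used above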