@muhammadgaffar · Created October 24, 2018
import numpy as np
import matplotlib.pyplot as plt

def NN_model(X, Y, layers_dims, learning_rate=0.0075, num_iterations=3000, print_cost=False):
    costs = []  # keep track of the cost over training
    parameters = initialize_parameters_deep(layers_dims)
    # Loop (gradient descent)
    for i in range(num_iterations):
        # Forward propagation: [LINEAR -> RELU]*(L-1) -> LINEAR -> SIGMOID
        AL, caches = L_model_forward(X, parameters)
        # Compute the cost
        cost = compute_cost(AL, Y)
        # Backward propagation
        grads = L_model_backward(AL, Y, caches)
        # Update parameters with one gradient-descent step
        parameters = update_parameters(parameters, grads, learning_rate)
        # Record (and optionally print) the cost every 100 iterations
        if i % 100 == 0:
            costs.append(cost)
            if print_cost:
                print("Cost after iteration %i: %f" % (i, cost))
    # plot the cost
    plt.plot(np.squeeze(costs))
    plt.ylabel('cost')
    plt.xlabel('iterations (per hundreds)')
    plt.title("Learning rate = " + str(learning_rate))
    plt.show()
    return parameters
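A minimal sketch of how the function might be called, assuming the helper functions it uses (initialize_parameters_deep, L_model_forward, compute_cost, L_model_backward, update_parameters) are defined elsewhere, as in the deeplearning.ai-style exercises this follows. The data shapes, layer sizes, and random inputs here are illustrative placeholders, not the gist author's data.

# Hypothetical usage: binary classification on 12288-dimensional inputs
# (e.g. 64x64x3 images flattened to column vectors); shapes are assumptions.
train_x = np.random.randn(12288, 209)        # (n_x, m): features x examples
train_y = np.random.randint(0, 2, (1, 209))  # (1, m): binary labels
layers_dims = [12288, 20, 7, 5, 1]           # input layer, 3 hidden layers, output
parameters = NN_model(train_x, train_y, layers_dims,
                      num_iterations=2500, print_cost=True)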
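For reference, update_parameters is not defined in this gist; a plain gradient-descent step consistent with the course's parameter-dictionary convention might look like the following sketch. This is an assumption about the helper, not the gist's actual implementation.

def update_parameters(parameters, grads, learning_rate):
    # Sketch of a vanilla gradient-descent update, assuming the course
    # convention of keys "W1", "b1", ..., "WL", "bL" in parameters and
    # matching "dW1", "db1", ... in grads. Not the gist's own code.
    L = len(parameters) // 2  # number of layers
    for l in range(1, L + 1):
        parameters["W" + str(l)] = parameters["W" + str(l)] - learning_rate * grads["dW" + str(l)]
        parameters["b" + str(l)] = parameters["b" + str(l)] - learning_rate * grads["db" + str(l)]
    return parameters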