Skip to content

Instantly share code, notes, and snippets.

@ImadDabbura
Created September 20, 2018 15:27
Show Gist options
  • Save ImadDabbura/363e134327e9dae0ed2906681fed4d28 to your computer and use it in GitHub Desktop.
def model(X, Y, layers_dims, learning_rate=0.01, num_iterations=1000,
          print_cost=True, hidden_layers_activation_fn="relu",
          initialization_method="he"):
    """Train an L-layer fully-connected neural network with gradient descent.

    Parameters
    ----------
    X : array
        Input data (presumably shaped (n_features, n_examples) — verify
        against `L_model_forward`'s expectations).
    Y : array
        True labels matching `X`'s example axis.
    layers_dims : list of int
        Sizes of each layer, input layer first.
    learning_rate : float, optional
        Gradient-descent step size.
    num_iterations : int, optional
        Number of full-batch gradient-descent iterations.
    print_cost : bool, optional
        If True, print the cost every 100 iterations.
    hidden_layers_activation_fn : str, optional
        Activation used in hidden layers (e.g. "relu").
    initialization_method : str, optional
        "zeros", "random", or a he/xavier variant handled by
        `initialize_parameters_he_xavier`.

    Returns
    -------
    dict
        Learned parameters after training.
    """
    # Fixed seed so repeated runs (and different init methods) are comparable.
    np.random.seed(1)

    # Costs sampled every 100 iterations, for the plot below.
    cost_list = []

    # Initialize parameters according to the requested scheme; anything
    # other than "zeros"/"random" is forwarded to the he/xavier initializer.
    if initialization_method == "zeros":
        parameters = initialize_parameters_zeros(layers_dims)
    elif initialization_method == "random":
        parameters = initialize_parameters_random(layers_dims)
    else:
        parameters = initialize_parameters_he_xavier(
            layers_dims, initialization_method)

    # Full-batch gradient descent: forward pass -> cost -> backward pass
    # -> parameter update, repeated num_iterations times.
    for i in range(num_iterations):
        AL, caches = L_model_forward(
            X, parameters, hidden_layers_activation_fn)
        cost = compute_cost(AL, Y)
        grads = L_model_backward(AL, Y, caches, hidden_layers_activation_fn)
        parameters = update_parameters(parameters, grads, learning_rate)

        # Report progress every 100 iterations (1-indexed for readability).
        if (i + 1) % 100 == 0 and print_cost:
            print("The cost after {} iterations is: {}".format(i + 1, cost))
        # Sample the cost every 100 iterations (0-indexed) for plotting.
        if i % 100 == 0:
            cost_list.append(cost)

    # Plot the sampled cost curve; the figure is created but not shown or
    # saved here — callers (e.g. a notebook) are expected to render it.
    plt.figure(figsize=(12, 8))
    plt.plot(cost_list)
    plt.xlabel("Iterations (per hundreds)", fontsize=14)
    plt.ylabel("Cost", fontsize=14)
    plt.title(
        "Cost curve: learning rate = {} and {} initialization method".format(
            learning_rate, initialization_method), y=1.05, fontsize=16)

    return parameters
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment