@ImadDabbura
Created September 20, 2018 15:08
import matplotlib.pyplot as plt
import numpy as np


# Define the multi-layer model using all the helper functions we wrote before
def L_layer_model(
        X, y, layers_dims, learning_rate=0.01, num_iterations=3000,
        print_cost=True, hidden_layers_activation_fn="relu"):
    # seed the RNG so weight initialization is reproducible
    np.random.seed(1)

    # initialize parameters
    parameters = initialize_parameters(layers_dims)

    # initialize cost list
    cost_list = []

    # iterate over num_iterations
    for i in range(num_iterations):
        # iterate over L-layers to get the final output and the cache
        AL, caches = L_model_forward(
            X, parameters, hidden_layers_activation_fn)

        # compute cost to plot it
        cost = compute_cost(AL, y)

        # iterate over L-layers backward to get gradients
        grads = L_model_backward(AL, y, caches, hidden_layers_activation_fn)

        # update parameters
        parameters = update_parameters(parameters, grads, learning_rate)

        # print the cost every 100 iterations
        if (i + 1) % 100 == 0 and print_cost:
            print(f"The cost after {i + 1} iterations is: {cost:.4f}")

        # record every 100th cost for plotting
        if i % 100 == 0:
            cost_list.append(cost)

    # plot the cost curve
    plt.figure(figsize=(10, 6))
    plt.plot(cost_list)
    plt.xlabel("Iterations (per hundreds)")
    plt.ylabel("Loss")
    plt.title(f"Loss curve for the learning rate = {learning_rate}")

    return parameters
# Compute the classification accuracy of the trained model on (X, y)
def accuracy(X, parameters, y, activation_fn="relu"):
    # forward pass to get the predicted probabilities
    probs, caches = L_model_forward(X, parameters, activation_fn)

    # threshold the probabilities at 0.5 to get binary labels
    labels = (probs >= 0.5) * 1

    # percentage of predictions that match the true labels
    accuracy = np.mean(labels == y) * 100

    return f"The accuracy rate is: {accuracy:.2f}%."