Imad Dabbura (ImadDabbura) - GitHub Gists
def model(X, Y, layers_dims, learning_rate=0.01, num_iterations=1000,
          print_cost=True, hidden_layers_activation_fn="relu",
          initialization_method="he"):
    np.random.seed(1)
    # initialize cost list
    cost_list = []
    # initialize parameters with the requested scheme
    if initialization_method == "zeros":
        parameters = initialize_parameters_zeros(layers_dims)
    # ("he" branch inferred from the default argument above; the preview
    # truncates here -- see the sketch after the zeros initializer below)
    elif initialization_method == "he":
        parameters = initialize_parameters_he(layers_dims)
def initialize_parameters_zeros(layers_dims):
    np.random.seed(1)
    parameters = {}
    L = len(layers_dims)

    for l in range(1, L):
        parameters["W" + str(l)] = np.zeros(
            (layers_dims[l], layers_dims[l - 1]))
        parameters["b" + str(l)] = np.zeros((layers_dims[l], 1))

    return parameters
# Loading packages
import sys

import h5py
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns

sys.path.append("../scripts/")
# (import list truncated in the preview; names below are inferred from usage)
from coding_neural_network_from_scratch import (initialize_parameters,
                                                L_model_forward,
                                                compute_cost,
                                                update_parameters)
def model_with_regularization(
        X, y, layers_dims, learning_rate=0.01, num_epochs=3000,
        print_cost=False, hidden_layers_activation_fn="relu", lambd=0):
    # get number of examples
    m = X.shape[1]

    # to get consistent output
    np.random.seed(1)

    # initialize parameters
    parameters = initialize_parameters(layers_dims)
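    # The preview truncates here. A minimal sketch of how the training loop
    # presumably continues; helper names such as L_model_backward_reg are
    # assumptions based on the regularized helpers defined below:
    for i in range(num_epochs):
        # forward propagation
        AL, caches = L_model_forward(
            X, parameters, hidden_layers_activation_fn)
        # compute the L2-regularized cost
        reg_cost = compute_cost_reg(AL, y, parameters, lambd)
        # backward propagation with the L2 term
        grads = L_model_backward_reg(
            AL, y, caches, hidden_layers_activation_fn, lambd)
        # update parameters with vanilla gradient descent
        parameters = update_parameters(parameters, grads, learning_rate)

        if (i + 1) % 100 == 0 and print_cost:
            print(f"The cost after {i + 1} epochs: {reg_cost:.4f}")

    return parameters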
def linear_backword_reg(dZ, cache, lambd=0):
    A_prev, W, b = cache
    m = A_prev.shape[1]

    # L2 regularization adds (lambd / m) * W to the weight gradient
    dW = (1 / m) * np.dot(dZ, A_prev.T) + (lambd / m) * W
    db = (1 / m) * np.sum(dZ, axis=1, keepdims=True)
    dA_prev = np.dot(W.T, dZ)

    assert dA_prev.shape == A_prev.shape
    assert dW.shape == W.shape
    assert db.shape == b.shape

    return dA_prev, dW, db
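
# Quick shape check for the regularized backward step (illustrative toy
# arrays, not part of the original gist):
dZ = np.random.randn(3, 4)               # upstream gradient, (n_l, m)
cache = (np.random.randn(5, 4),          # A_prev, (n_{l-1}, m)
         np.random.randn(3, 5),          # W, (n_l, n_{l-1})
         np.zeros((3, 1)))               # b, (n_l, 1)
dA_prev, dW, db = linear_backword_reg(dZ, cache, lambd=0.1)
print(dA_prev.shape, dW.shape, db.shape)  # (5, 4) (3, 5) (3, 1)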
def compute_cost_reg(AL, y, parameters, lambd=0):
    # number of examples
    m = y.shape[1]
    # compute traditional cross-entropy cost
    cross_entropy_cost = compute_cost(AL, y)
    # convert parameters dictionary to a single column vector
    parameters_vector = dictionary_to_vector(parameters)
    # add the L2 penalty: (lambd / 2m) * sum of squared parameters
    L2_regularization_penalty = (
        lambd / (2 * m)) * np.sum(np.square(parameters_vector))
    return cross_entropy_cost + L2_regularization_penalty
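
# dictionary_to_vector is used above but not shown in this preview; a minimal
# sketch of what it presumably does (flatten every W and b into one long
# column vector). Note that squaring this vector penalizes biases too; many
# implementations regularize only the weights.
def dictionary_to_vector(parameters):
    return np.concatenate(
        [param.reshape(-1, 1) for param in parameters.values()])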
# Loading packages
import sys

import h5py
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns

sys.path.append("../scripts/")
# (import list truncated in the preview; names below are inferred from usage)
from coding_neural_network_from_scratch import (initialize_parameters,
                                                L_model_forward,
                                                compute_cost,
                                                L_model_backward,
                                                update_parameters)
# Define the multi-layer model using all the helper functions we wrote before
def L_layer_model(
        X, y, layers_dims, learning_rate=0.01, num_iterations=3000,
        print_cost=True, hidden_layers_activation_fn="relu"):
    np.random.seed(1)

    # initialize parameters
    parameters = initialize_parameters(layers_dims)

    # initialize cost list
    cost_list = []
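    # The preview truncates here; a plausible continuation of the training
    # loop, sketched from the helpers imported above (treat as an assumption):
    for i in range(num_iterations):
        # one gradient-descent iteration: forward, cost, backward, update
        AL, caches = L_model_forward(
            X, parameters, hidden_layers_activation_fn)
        cost = compute_cost(AL, y)
        grads = L_model_backward(AL, y, caches, hidden_layers_activation_fn)
        parameters = update_parameters(parameters, grads, learning_rate)

        if (i + 1) % 100 == 0:
            if print_cost:
                print(f"The cost after {i + 1} iterations: {cost:.4f}")
            cost_list.append(cost)

    return parameters, cost_list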
# Import training dataset
train_dataset = h5py.File("../data/train_catvnoncat.h5", "r")
X_train = np.array(train_dataset["train_set_x"])
y_train = np.array(train_dataset["train_set_y"])

test_dataset = h5py.File("../data/test_catvnoncat.h5", "r")
X_test = np.array(test_dataset["test_set_x"])
y_test = np.array(test_dataset["test_set_y"])

# print the shape of input data and label vector
print(f"Training: X {X_train.shape}, y {y_train.shape}")
print(f"Test: X {X_test.shape}, y {y_test.shape}")
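
# The models above expect X with shape (features, examples) and y with shape
# (1, examples) -- see m = X.shape[1]. The flattening / scaling step is not
# shown in this preview; a standard version for this image dataset:
X_train = X_train.reshape(X_train.shape[0], -1).T / 255
X_test = X_test.reshape(X_test.shape[0], -1).T / 255
y_train = y_train.reshape(1, -1)
y_test = y_test.reshape(1, -1)

# Illustrative usage (hidden-layer sizes and learning rate are arbitrary
# choices here, not taken from the original gist):
layers_dims = [X_train.shape[0], 5, 5, 1]
parameters, cost_list = L_layer_model(
    X_train, y_train, layers_dims, learning_rate=0.03, num_iterations=3000)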
def update_parameters(parameters, grads, learning_rate):
    # each layer contributes one W and one b entry, so the layer count
    # is half the dictionary size
    L = len(parameters) // 2

    for l in range(1, L + 1):
        parameters["W" + str(l)] = parameters[
            "W" + str(l)] - learning_rate * grads["dW" + str(l)]
        parameters["b" + str(l)] = parameters[
            "b" + str(l)] - learning_rate * grads["db" + str(l)]

    return parameters
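
# Toy example (not from the original gist): one gradient-descent step on a
# single-layer parameter set.
params = {"W1": np.ones((2, 3)), "b1": np.zeros((2, 1))}
grads = {"dW1": 0.5 * np.ones((2, 3)), "db1": np.ones((2, 1))}
params = update_parameters(params, grads, learning_rate=0.1)
print(params["W1"][0, 0])  # 1 - 0.1 * 0.5 = 0.95
print(params["b1"][0, 0])  # 0 - 0.1 * 1.0 = -0.1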