import numpy as np


def get_loss(y, y_hat, metric='mse'):
    if metric == 'mse':
        individual_loss = 0.5 * (y_hat - y) ** 2
        return np.mean([np.linalg.norm(individual_loss[:, col], 2) for col in range(individual_loss.shape[1])])
    else:
        raise Exception('Loss metric is not defined.')


def get_dZ_from_loss(y, y_hat, metric):
    if metric == 'mse':
        return y_hat - y
    else:
        raise Exception('Loss metric is not defined.')


def get_dactivation(A, act_func):
    if act_func == 'relu':
        return np.maximum(np.sign(A), np.zeros(A.shape))  # 1 if backward input > 0, 0 otherwise; then diagonalize
    elif act_func == 'linear':
        return np.ones(A.shape)
    else:
        raise Exception('Activation function is not defined.')


def backward_prop(y, y_hat, metric='mse', layers_dim=layers_dim, neural_net=neural_net, num_train_datum=num_train_datum):
    for layer_index in range(len(layers_dim)-1, 0, -1):  # from the output layer back to the first hidden layer
        if layer_index+1 == len(layers_dim):  # if output layer
            dZ = get_dZ_from_loss(y, y_hat, metric)
        else:
            dZ = np.multiply(np.dot(neural_net[layer_index+1].W.T, dZ),
                             get_dactivation(neural_net[layer_index].A, neural_net[layer_index].activation))
        dW = np.dot(dZ, neural_net[layer_index-1].A.T) / num_train_datum
        db = np.sum(dZ, axis=1, keepdims=True) / num_train_datum
        neural_net[layer_index].dW = dW
        neural_net[layer_index].db = db
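

# --- Usage sketch (not part of the original gist) ----------------------------
# backward_prop() relies on module-level globals -- layers_dim, neural_net, and
# num_train_datum -- plus activations cached during a forward pass, all of which
# are defined in earlier parts of the series. Because the default arguments bind
# at definition time, those globals must exist before backward_prop is defined.
# The `layer` container, the layer sizes, and the naive forward pass below are
# assumptions, shown only to illustrate how the functions above fit together;
# columns are individual training examples.

class layer:
    def __init__(self, n_in, n_out, activation='relu'):
        self.W = 0.1 * np.random.randn(n_out, n_in)    # weights
        self.b = np.zeros((n_out, 1))                  # biases
        self.activation = activation
        self.A = None                                  # activations cached by the forward pass
        self.dW, self.db = None, None                  # gradients filled in by backward_prop


layers_dim = [3, 4, 1]                                 # input, hidden, output sizes
num_train_datum = 10

neural_net = [layer(1, layers_dim[0])]                 # index 0: input "layer"; only its .A is read
neural_net += [layer(layers_dim[i - 1], layers_dim[i],
                     'linear' if i == len(layers_dim) - 1 else 'relu')
               for i in range(1, len(layers_dim))]

X = np.random.randn(layers_dim[0], num_train_datum)
y = np.random.randn(layers_dim[-1], num_train_datum)

# Naive forward pass so every layer caches its activations in .A
neural_net[0].A = X
for i in range(1, len(layers_dim)):
    Z = np.dot(neural_net[i].W, neural_net[i - 1].A) + neural_net[i].b
    neural_net[i].A = np.maximum(Z, 0) if neural_net[i].activation == 'relu' else Z

y_hat = neural_net[-1].A
print(get_loss(y, y_hat))                              # scalar training loss
backward_prop(y, y_hat, 'mse', layers_dim, neural_net, num_train_datum)
print(neural_net[1].dW.shape, neural_net[-1].db.shape)  # (4, 3) and (1, 1)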