Backpropagation code for deep neural net
import numpy as np

def backprop_this_layer(self, da_curr, z_curr, W_curr, b_curr, A_prev, activation_function):
    # Select the derivative of this layer's activation; use '==' (not 'is') to
    # compare strings, since identity checks on string literals are unreliable.
    if activation_function == 'sigmoid':
        activation_back = self.sigmoid_backward
    elif activation_function == 'relu':
        activation_back = self.relu_backward
    else:
        raise ValueError('Unsupported activation: ' + str(activation_function))
    m = A_prev.shape[1]  # number of training examples (one per column)
    dz_curr = activation_back(da_curr, z_curr)             # dZ = dA * g'(Z)
    dw_curr = np.dot(dz_curr, A_prev.T) / m                # dW = dZ . A_prev^T / m
    db_curr = np.sum(dz_curr, axis=1, keepdims=True) / m   # db = mean of dZ over examples
    da_prev = np.dot(W_curr.T, dz_curr)                    # gradient flowing to the previous layer
    return da_prev, dw_curr, db_curr
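
# The dispatch above relies on sigmoid_backward and relu_backward, which the
# gist does not show. The two helpers below are a minimal sketch of what they
# would plausibly look like (assumed implementations, not the author's own):
def sigmoid_backward(self, da, z):
    # Chain rule through the sigmoid: g'(z) = sig(z) * (1 - sig(z)).
    sig = 1.0 / (1.0 + np.exp(-z))
    return da * sig * (1 - sig)

def relu_backward(self, da, z):
    # ReLU passes the gradient through only where the pre-activation was positive.
    dz = np.array(da, copy=True)
    dz[z <= 0] = 0
    return dz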
def backward(self, ytrue, ypred, cache):
    grads = {}
    # Gradient of binary cross-entropy with respect to the network output:
    # dA = (1 - y) / (1 - y_hat) - y / y_hat
    da_prev = np.divide(1 - ytrue, 1 - ypred) - np.divide(ytrue, ypred)
    # Walk the layers back to front; prev_layer_id is 0-based, layer_id is 1-based.
    for prev_layer_id, layer in reversed(list(enumerate(self.architecture))):
        layer_id = prev_layer_id + 1
        activation = layer['activation']
        da_curr = da_prev
        A_prev = cache['A' + str(prev_layer_id)]  # activations feeding this layer
        Z_curr = cache['Z' + str(layer_id)]       # this layer's pre-activations
        W_curr = self.params['W' + str(layer_id)]
        b_curr = self.params['b' + str(layer_id)]
        da_prev, dw_curr, db_curr = self.backprop_this_layer(
            da_curr, Z_curr, W_curr, b_curr, A_prev, activation)
        grads['dw' + str(layer_id)] = dw_curr
        grads['db' + str(layer_id)] = db_curr
    return grads
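
The grads dictionary returned by backward pairs naturally with a plain gradient-descent step. A minimal sketch, assuming a model instance (here called net, a name not in the gist) that exposes the architecture and params attributes used above, with a hypothetical learning rate lr:

# Hypothetical update step; 'net' and 'lr' are illustrative names.
def update_params(net, grads, lr=0.01):
    for layer_id in range(1, len(net.architecture) + 1):
        net.params['W' + str(layer_id)] -= lr * grads['dw' + str(layer_id)]
        net.params['b' + str(layer_id)] -= lr * grads['db' + str(layer_id)]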