
@97k
Created March 19, 2019 12:04
Forward Propagation
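The code below relies on relu and sigmoid helpers that the gist does not show, plus NumPy. A minimal sketch of what those helpers might look like, assuming plain element-wise NumPy activations:

import numpy as np

def sigmoid(z):
    # Element-wise logistic function, squashes values into (0, 1)
    return 1 / (1 + np.exp(-z))

def relu(z):
    # Element-wise rectified linear unit: max(0, z)
    return np.maximum(0, z)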
def forward_prop_this_layer(self, A_prev, W_curr, b_curr, activation_function):
    # Linear step for one layer: z = W . A_prev + b
    z_curr = np.dot(W_curr, A_prev) + b_curr
    # Select the activation for this layer ('==' compares string values)
    if activation_function == 'relu':
        activation = relu
    elif activation_function == 'sigmoid':
        activation = sigmoid
    else:
        raise ValueError(f"{activation_function} is currently not supported; only sigmoid and relu are supported")
    # Return both the activated output and the pre-activation (needed for backprop)
    return activation(z_curr), z_curr
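Since np.dot(W_curr, A_prev) is a matrix product, this code implicitly uses a column-per-example layout: W_curr has shape (n_curr, n_prev), A_prev has shape (n_prev, m) for a batch of m examples, and broadcasting adds the (n_curr, 1) bias to every column. A quick shape check under that assumption:

W = np.random.randn(2, 3)       # layer with 3 inputs and 2 units
b = np.random.randn(2, 1)       # one bias per unit
A_prev = np.random.randn(3, 4)  # batch of 4 examples, one per column
z = np.dot(W, A_prev) + b       # b broadcasts across columns; z has shape (2, 4)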
def forward(self, X):
    # Cache per-layer activations and pre-activations for use in backpropagation
    cache = {}
    A_current = X
    for layer_id_prev, layer in enumerate(self.architecture):
        current_layer_id = layer_id_prev + 1
        A_previous = A_current
        activation = layer['activation']
        W_curr = self.params['W' + str(current_layer_id)]
        b_curr = self.params['b' + str(current_layer_id)]
        A_current, Z_curr = self.forward_prop_this_layer(A_previous, W_curr,
                                                         b_curr, activation)
        cache['A' + str(layer_id_prev)] = A_previous
        cache['Z' + str(current_layer_id)] = Z_curr
    # Output of the final layer, plus everything backprop will need
    return A_current, cache
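For context, here is one way to exercise forward. The host class and its initialization are not part of the gist, so everything below is a hypothetical sketch: the class name NeuralNetwork, the 'input_dim' and 'output_dim' keys, and the weight initialization are all assumptions; only the 'activation' key and the 'W1'/'b1'-style params naming are confirmed by the code above.

class NeuralNetwork:
    # Hypothetical host class for the two methods above
    def __init__(self, architecture, seed=0):
        self.architecture = architecture
        rng = np.random.default_rng(seed)
        self.params = {}
        for idx, layer in enumerate(architecture, start=1):
            # Small random weights, zero biases, shaped to match each layer
            self.params['W' + str(idx)] = 0.1 * rng.standard_normal(
                (layer['output_dim'], layer['input_dim']))
            self.params['b' + str(idx)] = np.zeros((layer['output_dim'], 1))

    # forward_prop_this_layer and forward as defined above

architecture = [
    {'input_dim': 3, 'output_dim': 4, 'activation': 'relu'},
    {'input_dim': 4, 'output_dim': 1, 'activation': 'sigmoid'},
]
nn = NeuralNetwork(architecture)
X = np.random.randn(3, 5)      # batch of 5 examples, one per column
y_hat, cache = nn.forward(X)   # y_hat has shape (1, 5)
print(sorted(cache.keys()))    # ['A0', 'A1', 'Z1', 'Z2']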