import numpy as np

def activation(input_, act_func):
    if act_func == 'relu':
        return np.maximum(input_, np.zeros(input_.shape))
    elif act_func == 'linear':
        return input_
    else:
        raise Exception('Activation function is not defined.')
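# Illustrative check of the activation helper (values chosen for the example only):
# activation(np.array([-1.0, 0.0, 2.0]), 'relu')   -> array([0., 0., 2.])
# activation(np.array([-1.0, 0.0, 2.0]), 'linear') -> array([-1., 0., 2.])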
def forward_prop(input_vec, layers_dim=layers_dim, neural_net=neural_net):
    neural_net[0].A = input_vec  # define A in input layer for for-loop convenience
    for layer_index in range(1, len(layers_dim)):  # W, b, Z, A are undefined in input layer
        neural_net[layer_index].Z = np.add(np.dot(neural_net[layer_index].W, neural_net[layer_index-1].A), neural_net[layer_index].b)
        neural_net[layer_index].A = activation(neural_net[layer_index].Z, neural_net[layer_index].activation)
    return neural_net[layer_index].A  # activations of the output layer
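# A minimal usage sketch. It assumes each entry of neural_net is an object exposing
# W, b, and activation (with Z and A filled in during the pass); the Layer class and
# random initialisation below are illustrative stand-ins, not part of the original gist.

class Layer:  # hypothetical layer container for the sketch
    def __init__(self, n_in, n_out, act_func):
        self.W = np.random.randn(n_out, n_in) * 0.1  # weights: (n_out, n_in)
        self.b = np.zeros((n_out, 1))                # biases:  (n_out, 1)
        self.activation = act_func
        self.Z = None
        self.A = None

layers_dim = [3, 4, 1]                               # input, hidden, output sizes
neural_net = [Layer(1, layers_dim[0], 'linear')]     # index 0: placeholder input layer
for i in range(1, len(layers_dim)):
    act = 'relu' if i < len(layers_dim) - 1 else 'linear'
    neural_net.append(Layer(layers_dim[i - 1], layers_dim[i], act))

x = np.random.randn(layers_dim[0], 1)                # one input column vector
y_hat = forward_prop(x, layers_dim, neural_net)      # shape: (layers_dim[-1], 1)
print(y_hat.shape)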