"""
Jeraldy Deus | deusjeraldy@gmail.com
Implementing an Artificial Neural Network in numpy
BSD License
"""
import numpy as np
# XOR truth table: inputs X and targets Y
X = np.array([
    [0, 0],
    [0, 1],
    [1, 0],
    [1, 1],
])
Y = np.array([
    [0],
    [1],
    [1],
    [0],
])
m = X.shape[0]   # number of training examples
num_nodes = 400  # hidden layer size

# Layer 1: (num_nodes, 2) weights, (num_nodes, 1) bias
W1 = np.random.randn(num_nodes, X.shape[1])
b1 = np.zeros((num_nodes, 1))
# Layer 2: (1, num_nodes) weights and a single (1, 1) bias that broadcasts
# across examples (the original allocated one bias column per example)
W2 = np.random.randn(1, num_nodes)
b2 = np.zeros((1, 1))

# Arrange data as (features, examples) and (outputs, examples)
X = X.T
Y = Y.T
costs = []
lr = 0.01  # learning rate
for i in range(4000):
    # Forward prop
    # Layer 1
    Z1 = np.dot(W1, X) + b1
    A1 = 1/(1 + np.exp(-Z1))  # sigmoid
    # Layer 2
    Z2 = np.dot(W2, A1) + b2
    A2 = 1/(1 + np.exp(-Z2))  # sigmoid

    # Back prop
    dZ2 = A2 - Y
    dW2 = (1/m)*np.dot(dZ2, A1.T)
    db2 = (1/m)*np.sum(dZ2, axis=1, keepdims=True)
    # A1 is a sigmoid activation, so its derivative is A1*(1 - A1);
    # the original used 1 - A1**2, which is the tanh derivative
    dZ1 = np.dot(W2.T, dZ2) * A1 * (1 - A1)
    dW1 = (1/m)*np.dot(dZ1, X.T)
    db1 = (1/m)*np.sum(dZ1, axis=1, keepdims=True)

    # Gradient descent
    W2 = W2 - lr*dW2
    b2 = b2 - lr*db2
    W1 = W1 - lr*dW1
    b1 = b1 - lr*db1

    # Cross-entropy loss
    L = (-1/m)*np.sum(Y*np.log(A2) + (1 - Y)*np.log(1 - A2))
    L = np.squeeze(L)
    costs.append(L)
    if i % 500 == 0:
        print("=======================================")
        print("Loss =", L)
        print("Predictions =", A2)