Simple neural network in Python that learns the AND gate: two inputs, one hidden layer of four sigmoid units, and a single sigmoid output, trained by plain gradient descent on a squared-error loss.
import numpy as np


def sigmoid(x, derivative=False):
    # When derivative=True, x is expected to already be a sigmoid output,
    # so the derivative simplifies to x * (1 - x).
    return x * (1 - x) if derivative else 1 / (1 + np.exp(-x))


class NeuralNetwork:
    def __init__(self, x, y):
        self.input = x
        # One hidden layer of 4 units feeding a single output unit.
        self.weights1 = np.random.rand(self.input.shape[1], 4)
        self.weights2 = np.random.rand(4, 1)
        self.y = y
        self.output = np.zeros(self.y.shape)

    def feedforward(self):
        self.layer1 = sigmoid(np.dot(self.input, self.weights1))
        self.output = sigmoid(np.dot(self.layer1, self.weights2))

    def backprop(self):
        # Application of the chain rule to find the derivative of the
        # squared-error loss with respect to weights2 and weights1.
        d_weights2 = np.dot(self.layer1.T,
                            2 * (self.y - self.output) * sigmoid(self.output, derivative=True))
        d_weights1 = np.dot(self.input.T,
                            np.dot(2 * (self.y - self.output) * sigmoid(self.output, derivative=True),
                                   self.weights2.T) * sigmoid(self.layer1, derivative=True))
        # Update the weights with the derivative (slope) of the loss function.
        self.weights1 += d_weights1
        self.weights2 += d_weights2


if __name__ == "__main__":
    # Training data for the AND gate: the target is 1 only when both inputs are 1.
    X = np.array([[0, 0],
                  [0, 1],
                  [1, 0],
                  [1, 1]])
    y = np.array([[0], [0], [0], [1]])
    nn = NeuralNetwork(X, y)
    for i in range(1500):
        nn.feedforward()
        nn.backprop()
    print(nn.output)
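    # A small optional check (not part of the original gist): round each
    # sigmoid output to 0 or 1 and print it next to its input pair, which
    # should reproduce the AND truth table once training has converged.
    predictions = np.round(nn.output).astype(int)
    for inputs, prediction in zip(X, predictions):
        print(f"{inputs[0]} AND {inputs[1]} -> {prediction[0]}")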