@Jalalx
Created October 20, 2019 19:16
XOR Neural Network in Python using MLP Back-Propagation
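A minimal 2-3-1 multilayer perceptron trained on the XOR truth table with plain NumPy: a sigmoid hidden layer, a linear output unit, and hand-rolled back-propagation via gradient descent.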
import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def sigmoid_prime(x):
    # Derivative of the sigmoid written in terms of an already-activated
    # value: if x = sigmoid(z), then d(sigmoid)/dz = x * (1 - x)
    return x * (1.0 - x)
epochs = 5000
input_size, hidden_size, output_size = 2, 3, 1
learning_rate = 0.1
# XOR truth table: inputs X and expected outputs Y
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
Y = np.array([[0], [1], [1], [0]])
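# (Optional, not in the original gist: fix NumPy's RNG seed so runs are
# reproducible while experimenting.)
np.random.seed(42)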
# Initialize the hidden- and output-layer weight matrices with random values in [0, 1)
w_hidden = np.random.uniform(size=(input_size, hidden_size))
w_output = np.random.uniform(size=(hidden_size, output_size))
# Training loop
for epoch in range(epochs):
    # Forward pass: sigmoid hidden layer, linear output unit
    actual_hidden = sigmoid(np.dot(X, w_hidden))
    output = np.dot(actual_hidden, w_output)
    # Error (expected output - predicted output); with a linear output unit
    # and squared error, this is proportional to the output-layer delta
    error = Y - output
    # Backward pass: compute both deltas before updating any weights, so the
    # hidden-layer gradient uses the same w_output as the forward pass
    dZ = error * learning_rate
    dH = dZ.dot(w_output.T) * sigmoid_prime(actual_hidden)
    w_output += actual_hidden.T.dot(dZ)
    w_hidden += X.T.dot(dH)
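# (Sketch, not in the original gist: report the mean squared error over the
# four XOR patterns after training, to confirm the loss actually went down.)
mse = np.mean((Y - np.dot(sigmoid(np.dot(X, w_hidden)), w_output)) ** 2)
print('final MSE:', mse)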
# Evaluate the trained network on every input pattern
for x in [[0, 0], [0, 1], [1, 0], [1, 1]]:
    actual_hidden = sigmoid(np.dot(x, w_hidden))
    actual_output = np.dot(actual_hidden, w_output)
    print(x, actual_output)
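# (Sketch, assuming a 0.5 decision threshold, which is not in the original
# gist: round the raw outputs to 0/1 and compare against the XOR labels.)
raw = np.dot(sigmoid(np.dot(X, w_hidden)), w_output)
print('predicted:', (raw > 0.5).astype(int).ravel())
print('expected: ', Y.ravel())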