@Zamony
Created August 3, 2018 11:02
Python neural network implementation for the XOR problem: a tiny two-layer sigmoid network with no external dependencies, trained by plain backpropagation.
import random
import math


class NeuralNetwork:

    class NMath:
        """Minimal matrix helpers so the network needs no external dependencies."""

        @staticmethod
        def mulMatrix(a, b):
            # Multiply an m x n matrix by an n x p matrix (nested lists, row-major).
            m, n, p = len(a), len(a[0]), len(b[0])
            if n != len(b):
                raise ValueError("Cannot multiply these matrices")
            matrix = [[0 for _ in range(p)] for _ in range(m)]
            for i in range(m):
                for k in range(p):
                    for j in range(n):
                        matrix[i][k] += a[i][j] * b[j][k]
            return matrix

        @staticmethod
        def sigmoid(x, deriv=False):
            # Logistic function; with deriv=True, its derivative at x.
            s = 1 / (1 + math.exp(-x))
            if deriv:
                return s * (1 - s)
            return s

        @staticmethod
        def applyToMatrix(func, matrix):
            return [[func(elem) for elem in row] for row in matrix]

        @staticmethod
        def transposeMatrix(m):
            return [[row[i] for row in m] for i in range(len(m[0]))]
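    # For instance, with the helpers above:
    #   mulMatrix([[1, 2]], [[3], [4]])   == [[1*3 + 2*4]] == [[11]]
    #   transposeMatrix([[1, 2], [3, 4]]) == [[1, 3], [2, 4]]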
    def __init__(self, num_of_input=3, num_of_output=1, learn_rate=0.01):
        assert num_of_input > 0 and learn_rate > 0 and num_of_output > 0
        # The hidden layer has as many neurons as the input layer.
        self.num_of_input = self.num_of_hidden = num_of_input
        self.num_of_output = num_of_output
        self.learn_rate = learn_rate
        self.hidden = self.output = self.input_vals = None
        # Input-to-hidden weights (square, since the hidden layer mirrors the input size).
        self.input_weights = [
            [random.random() for _ in range(num_of_input)]
            for _ in range(self.num_of_hidden)
        ]
        # Hidden-to-output weights (num_of_hidden x num_of_output).
        self.output_weights = [
            [random.random() for _ in range(num_of_output)]
            for _ in range(self.num_of_hidden)
        ]
    def forward(self, input_vals):
        # input_vals is a 1 x num_of_input row matrix, e.g. [[0, 1]].
        self.input_vals = input_vals
        # Hidden layer: weighted sums, then sigmoid activations.
        self.hidden = dict()
        self.hidden["net"] = self.NMath.mulMatrix(
            input_vals, self.input_weights
        )
        self.hidden["out"] = self.NMath.applyToMatrix(
            self.NMath.sigmoid, self.hidden["net"]
        )
        # Output layer: weighted sums of the hidden activations, then sigmoid.
        self.output = dict()
        self.output["net"] = self.NMath.mulMatrix(
            self.hidden["out"], self.output_weights
        )
        self.output["out"] = self.NMath.applyToMatrix(
            self.NMath.sigmoid, self.output["net"]
        )
        return self.output["out"][0][0]
    def backward(self, target):
        # Output-layer deltas: derivative of the squared error times the
        # sigmoid derivative, expressed through the cached activations.
        output_deltas = [0] * len(target)
        for j in range(len(target)):
            out_j = self.output["out"][0][j]
            output_deltas[j] = -(target[j] - out_j) * out_j * (1 - out_j)
        # Propagate the deltas back through the output weights before updating them.
        hidden_deltas = self.NMath.mulMatrix(
            [output_deltas], self.NMath.transposeMatrix(self.output_weights)
        )
        # Gradient step for the hidden-to-output weights.
        for i in range(len(self.output_weights)):
            for j in range(len(self.output_weights[i])):
                self.output_weights[i][j] -= (
                    self.learn_rate * output_deltas[j] * self.hidden["out"][0][i]
                )
        # Gradient step for the input-to-hidden weights.
        for i in range(len(self.input_weights)):
            for j in range(len(self.input_weights[i])):
                out_j = self.hidden["out"][0][j]
                hidden_delta = hidden_deltas[0][j] * out_j * (1 - out_j)
                self.input_weights[i][j] -= (
                    self.learn_rate * hidden_delta * self.input_vals[0][i]
                )
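    # The updates in backward() are plain gradient descent on the squared error
    # E = 0.5 * (target - out)**2 of a single example:
    #   output delta:  d_j = -(target_j - out_j) * out_j * (1 - out_j)
    #   weight update: w  -= learn_rate * delta * activation_of_the_source_neuron
    # where out * (1 - out) is the sigmoid derivative expressed via its own output.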
    def train(self, epochs, sets):
        # One forward/backward pass per training example, repeated for `epochs` rounds.
        for _ in range(epochs):
            for train_set in sets:
                self.forward([train_set["input"]])
                self.backward(train_set["target"])
if __name__ == "__main__":
    # XOR truth table used as the training data.
    sets = [
        {"input": [0, 0], "target": [0]},
        {"input": [0, 1], "target": [1]},
        {"input": [1, 0], "target": [1]},
        {"input": [1, 1], "target": [0]},
    ]
    nn = NeuralNetwork(num_of_input=2, num_of_output=1, learn_rate=0.7)
    nn.train(1000000, sets)
    print(nn.forward([[0, 0]]))
    print(nn.forward([[0, 1]]))
    print(nn.forward([[1, 0]]))
    print(nn.forward([[1, 1]]))
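
    # A minimal sketch of an automatic check; it assumes the run above converged,
    # which is not guaranteed for every random initialisation (the model has no
    # bias terms), so treat it as illustrative rather than definitive.
    for train_set in sets:
        prediction = nn.forward([train_set["input"]])
        print(train_set["input"], "->", round(prediction), "expected:", train_set["target"][0])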