"""Using backpropagation and genetic algorithms to train a simple neural network :)"""
import numpy as np
import itertools
import random
# set up hyperparameters
pop_size = 10      # number of networks in the GA population
learn_rate = 3     # gradient descent step size
epochs = 500       # GA generations
train_epochs = 25  # backprop steps each network takes per generation
in_width = 2       # input features
l1_width = 10      # hidden layer 1 units
l2_width = 10      # hidden layer 2 units
out_width = 1      # output unit
# set up data: the target is 1 when the two inputs are equal (an XNOR-style task)
X = np.array([[0.0, 0.0],
              [0.0, 1.0],
              [1.0, 0.0],
              [1.0, 1.0],
              [0.5, 0.5],
              [0.0, 0.5],
              [1.5, 1.5],
              [0.0, 1.5]])

Y = np.array([[1],
              [0],
              [0],
              [1],
              [1],
              [0],
              [1],
              [0]])
def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def sigmoid_dash(x):
    return x * (1 - x)
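# Note: sigmoid_dash takes an *activation* a = sigmoid(z), not the raw
# pre-activation z, since d/dz sigmoid(z) = sigmoid(z) * (1 - sigmoid(z)).
# Worked example: sigmoid(0.0) = 0.5 and sigmoid_dash(0.5) = 0.25, the
# sigmoid's maximum slope, which occurs at z = 0.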
def feed_forward(x, weights):
    return sigmoid(np.dot(x, weights))
def cost(Y, Y_pred):
    # mean squared error over the m training examples
    m = len(Y)
    return np.sum((1 / m) * np.square(Y_pred - Y))
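# Worked example: Y = [[1], [0]] and Y_pred = [[0.9], [0.2]] give
# ((0.9 - 1)^2 + (0.2 - 0)^2) / 2 = (0.01 + 0.04) / 2 = 0.025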
def random_w_init(in_width, width):
    return np.random.normal(0, 1, size=[in_width, width])
def init_pop(pop_size):
    # build the initial population of randomly weighted networks
    nets = []
    for i in range(pop_size):
        l1_w = random_w_init(in_width, l1_width)
        l2_w = random_w_init(l1_width, l2_width)
        out_w = random_w_init(l2_width, out_width)
        nets.append(NN(X, Y, l1_w, l2_w, out_w))
    return nets
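# NN is defined further down the file; Python only resolves the name when
# init_pop is actually called, so the forward reference is fine.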
def mutate(gene, multiplier=0.01):
    # randomly overwrite roughly half of the gene's entries
    mask = np.random.randint(0, 2, size=gene.shape).astype(bool)
    rand_gene = np.random.rand(*gene.shape) * (np.max(gene) * multiplier)
    gene[mask] = rand_gene[mask]
    return gene
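# Mutation sketch (hypothetical values): for gene = [0.2, -0.4, 1.0, 0.6],
# np.max(gene) * multiplier = 0.01, so masked entries are replaced with
# uniform draws from [0, 0.01) -- near-zero values rather than small
# perturbations of the existing weights.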
def breed(parent1, parent2):
    # two-point crossover on each weight matrix, followed by mutation
    children = []
    for _ in range(3):
        child = []
        for i in range(len(parent1)):
            original_shape = parent1[i].shape
            p1_flat = list(parent1[i].flatten())
            p2_flat = list(parent2[i].flatten())
            crossover1 = random.randint(1, len(p1_flat) - 2)
            crossover2 = random.randint(1, len(p1_flat) - 2)
            if crossover1 > crossover2:
                crossover1, crossover2 = crossover2, crossover1
            gene = p1_flat[:crossover1] + p2_flat[crossover1:crossover2] + p1_flat[crossover2:]
            child.append(mutate(np.array(gene).reshape(original_shape)))
        children.append(child)
    return children
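# Crossover sketch (hypothetical 5-element genes): with
#   p1_flat = [a0, a1, a2, a3, a4] and p2_flat = [b0, b1, b2, b3, b4]
# and cut points 1 and 3, the child gene is [a0, b1, b2, a3, a4]:
# the middle slice comes from parent2 and the ends from parent1,
# before mutate() randomly overwrites some of the entries.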
# the neural network class
class NN(object):
    def __init__(self, X, Y, l1_w, l2_w, out_w):
        self.X = X
        self.Y = Y
        self.l1_w = l1_w
        self.l2_w = l2_w
        self.out_w = out_w

    def run(self, epochs=train_epochs):
        for i in range(epochs):
            # feed forward
            self.l1 = feed_forward(self.X, self.l1_w)
            self.l2 = feed_forward(self.l1, self.l2_w)
            self.out = feed_forward(self.l2, self.out_w)
            # calculate error
            out_error = self.out - self.Y
            out_delta = out_error * sigmoid_dash(self.out)
            l2_error = out_delta.dot(self.out_w.T)
            l2_delta = l2_error * sigmoid_dash(self.l2)
            l1_error = l2_delta.dot(self.l2_w.T)
            l1_delta = l1_error * sigmoid_dash(self.l1)
            # adjust weights
            self.out_w -= learn_rate * np.dot(self.l2.T, out_delta)
            self.l2_w -= learn_rate * np.dot(self.l1.T, l2_delta)
            self.l1_w -= learn_rate * np.dot(self.X.T, l1_delta)
        self.cost = cost(self.Y, self.out)
        return (self.cost, self.get_genes())

    def get_genes(self):
        return (self.l1_w, self.l2_w, self.out_w)
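# Gradient check on the updates above: with E = (1/m) * sum((out - Y)^2),
# dE/dout is proportional to (out - Y), and the sigmoid's derivative at the
# output is out * (1 - out), so
#   out_delta ~ (out - Y) * out * (1 - out)
#   dE/d(out_w) ~ l2.T . out_delta
# The constant factor 2/m is absorbed into learn_rate; each earlier layer
# gets its error by pushing the next layer's delta back through its weights.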
if __name__ == '__main__':
    # create networks
    nets = init_pop(pop_size)
    best_net = [1000000, ([0], [0], [0])]
    for epoch in range(epochs):
        # run them
        results = []
        for net in nets:
            results.append(net.run())
            #print(net.cost)
        # get the best 3 for breeding
        results.sort(key=lambda x: x[0])
        parents = [x[1] for x in results[0:3]]
        # store best net
        if best_net[0] > results[0][0]:
            best_net = results[0]
        # breed/mutate: 3 parent pairs x 3 children each = 9 nets per generation
        nets = []
        for breed_pair in itertools.combinations(parents, 2):
            children = breed(*breed_pair)
            for child in children:
                nets.append(NN(X, Y, *child))
        print("end of epoch {}".format(epoch))
        print("Best net so far:")
        print(best_net)
    # retrain the best genome found and compare its predictions to the targets
    final_NN = NN(X, Y, *best_net[1])
    final_NN.run(100)
    print(Y)
    print("Vs.")
    print(final_NN.out)