Milo Spencer-Harper (miloharper)
import math
import random
import pickle
import os

class NeuralNetwork():
    def __init__(self):
        # Seed the random number generator, so we get the same random numbers each time
        random.seed(1)
miloharper / matrices-training-method.py
Created August 17, 2015 05:42
Training the neural network using matrices.
# We train the neural network through a process of trial and error,
# adjusting the synaptic weights each time.
def train(self, training_set_inputs, training_set_outputs, number_of_training_iterations):
    for iteration in xrange(number_of_training_iterations):
        # Pass the training set through our neural network
        output_from_layer_1, output_from_layer_2 = self.think(training_set_inputs)

        # Calculate the error for layer 2 (the difference between the desired output
        # and the predicted output).
        layer2_error = training_set_outputs - output_from_layer_2
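The preview cuts off after the layer 2 error. A self-contained sketch of how one matrix-based training step of such a two-layer network typically continues, written here as a standalone function; the helper names and the use of plain NumPy weight matrices are assumptions beyond what the snippet shows:

from numpy import exp, dot

def sigmoid(x):
    return 1 / (1 + exp(-x))

def sigmoid_derivative(x):
    # x is assumed to already be a sigmoid output, so the gradient is x * (1 - x)
    return x * (1 - x)

def train_step(inputs, targets, layer1_weights, layer2_weights):
    # Forward pass through both layers
    output_from_layer_1 = sigmoid(dot(inputs, layer1_weights))
    output_from_layer_2 = sigmoid(dot(output_from_layer_1, layer2_weights))

    # Error and delta for layer 2 (the output layer)
    layer2_error = targets - output_from_layer_2
    layer2_delta = layer2_error * sigmoid_derivative(output_from_layer_2)

    # Back-propagate: layer 1's error is layer 2's delta weighted by layer 2's weights
    layer1_error = dot(layer2_delta, layer2_weights.T)
    layer1_delta = layer1_error * sigmoid_derivative(output_from_layer_1)

    # Adjust the weights in place, in proportion to each layer's input and delta
    layer1_weights += dot(inputs.T, layer1_delta)
    layer2_weights += dot(output_from_layer_1.T, layer2_delta)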
miloharper / network-train.py
Created August 17, 2015 03:51
Snippet of code which shows how the neural network trains.
def train(self, example):
    self.reset_errors()
    # The error is the difference between the desired output and the network's prediction
    error = example.output - self.think(example.inputs)
    self.layers[-1].neurons[0].error = error
    # Work backwards from the output layer, letting each neuron propagate its error
    # down to the layer below and adjust its synaptic weights
    for l in range(len(self.layers) - 1, 0, -1):
        for neuron in self.layers[l].neurons:
            self.layers[l - 1] = neuron.train(self.layers[l - 1])
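The preview does not show reset_errors() or think(). A guess at what reset_errors() likely does on this layers/neurons structure (attribute names mirror the snippet above; this is an assumption, not the gist's code):

def reset_errors(self):
    # Clear every neuron's accumulated error before training on the next example
    for layer in self.layers:
        for neuron in layer.neurons:
            neuron.error = 0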
miloharper / train.py
Created August 17, 2015 03:46
Snippet of code showing the training function for a Neuron.
def train(self, previous_layer):
    for synapse in self.synapses:
        # Propagate the error back down the synapse to the neuron in the layer below
        previous_layer.neurons[synapse.input_neuron_index].error += self.error * sigmoid_derivative(self.output) * synapse.weight
        # Adjust the synapse weight
        synapse.weight += synapse.signal * self.error * sigmoid_derivative(self.output)
    return previous_layer
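The snippet relies on a sigmoid_derivative() helper that the preview does not include. A minimal sketch of the usual pair of helpers, assuming (as the call site sigmoid_derivative(self.output) suggests) that the derivative is evaluated on a value that is already a sigmoid output:

import math

def sigmoid(x):
    # Squash a weighted sum into the range 0..1
    return 1 / (1 + math.exp(-x))

def sigmoid_derivative(x):
    # Gradient of the sigmoid expressed in terms of its output: y * (1 - y)
    return x * (1 - x)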
miloharper / main.py
Created August 11, 2015 12:19
Generates a video of a neural network learning
from neural_network import NeuralNetwork
from formulae import calculate_average_error
from video import generate_writer, new_frame, annotate_frame, take_still
import parameters

class TrainingExample():
    def __init__(self, inputs, output):
        self.inputs = inputs
        self.output = output
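A hypothetical usage example: the 3-input training set used elsewhere in these gists, wrapped in TrainingExample objects (the real script presumably builds its examples from the parameters module instead):

training_examples = [
    TrainingExample([0, 0, 1], 0),
    TrainingExample([1, 1, 1], 1),
    TrainingExample([1, 0, 1], 1),
    TrainingExample([0, 1, 1], 0),
]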
miloharper / console_output.txt
Last active August 29, 2015 14:25
The console output from a multi-layer neural network.
Stage 1) Random starting synaptic weights:
Layer 1 (4 neurons, each with 3 inputs):
[[-0.16595599  0.44064899 -0.99977125 -0.39533485]
 [-0.70648822 -0.81532281 -0.62747958 -0.30887855]
 [-0.20646505  0.07763347 -0.16161097  0.370439  ]]
Layer 2 (1 neuron, with 4 inputs):
[[-0.5910955 ]
 [ 0.75623487]
 [-0.94522481]
 [ 0.34093502]]
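A sketch of the kind of helper that would print this output, assuming the NeuronLayer and NeuralNetwork classes from the two-layer gist below; the method name print_weights is an assumption:

def print_weights(self):
    print("Layer 1 (4 neurons, each with 3 inputs):")
    print(self.layer1.synaptic_weights)
    print("Layer 2 (1 neuron, with 4 inputs):")
    print(self.layer2.synaptic_weights)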
miloharper / main.py
Last active May 30, 2022 23:54
A two layer neural network written in Python, which trains itself to solve a variation of the XOR problem.
from numpy import exp, array, random, dot

class NeuronLayer():
    def __init__(self, number_of_neurons, number_of_inputs_per_neuron):
        self.synaptic_weights = 2 * random.random((number_of_inputs_per_neuron, number_of_neurons)) - 1

class NeuralNetwork():
    def __init__(self, layer1, layer2):
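The preview stops at the constructor. A minimal sketch of how such a two-layer network can store its layers and run a forward pass; the constructor body, the think() name and the private __sigmoid helper are assumptions beyond what the preview shows:

from numpy import exp, dot

class NeuralNetwork():
    def __init__(self, layer1, layer2):
        self.layer1 = layer1
        self.layer2 = layer2

    def __sigmoid(self, x):
        # Squash the weighted sum of inputs into the range 0..1
        return 1 / (1 + exp(-x))

    def think(self, inputs):
        # Feed the inputs through layer 1, then layer 1's output through layer 2
        output_from_layer1 = self.__sigmoid(dot(inputs, self.layer1.synaptic_weights))
        output_from_layer2 = self.__sigmoid(dot(output_from_layer1, self.layer2.synaptic_weights))
        return output_from_layer1, output_from_layer2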
miloharper / short_version.py
Created July 20, 2015 15:57
A neural network in 9 lines of Python code.
from numpy import exp, array, random, dot
training_set_inputs = array([[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1]])
training_set_outputs = array([[0, 1, 1, 0]]).T
random.seed(1)
synaptic_weights = 2 * random.random((3, 1)) - 1
for iteration in xrange(10000):
    output = 1 / (1 + exp(-(dot(training_set_inputs, synaptic_weights))))
    synaptic_weights += dot(training_set_inputs.T, (training_set_outputs - output) * output * (1 - output))
print 1 / (1 + exp(-(dot(array([1, 0, 0]), synaptic_weights))))
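The nine-line snippet above is Python 2 (xrange and the print statement). A direct Python 3 equivalent, unchanged apart from range and the print() call:

from numpy import exp, array, random, dot
training_set_inputs = array([[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1]])
training_set_outputs = array([[0, 1, 1, 0]]).T
random.seed(1)
synaptic_weights = 2 * random.random((3, 1)) - 1
for iteration in range(10000):
    output = 1 / (1 + exp(-(dot(training_set_inputs, synaptic_weights))))
    synaptic_weights += dot(training_set_inputs.T, (training_set_outputs - output) * output * (1 - output))
print(1 / (1 + exp(-(dot(array([1, 0, 0]), synaptic_weights)))))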
miloharper / training_set.py
Created July 20, 2015 12:49
How to express the training set as a matrix in Python.
training_set_inputs = array([[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1]])
training_set_outputs = array([[0, 1, 1, 0]]).T
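For clarity, the .T transpose turns the single row of outputs into a 4 x 1 column, so each row of the input matrix lines up with its expected output. A small self-contained check (the shape printing is just for illustration):

from numpy import array

training_set_inputs = array([[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1]])
training_set_outputs = array([[0, 1, 1, 0]]).T

print(training_set_inputs.shape)   # (4, 3): four examples, three inputs each
print(training_set_outputs.shape)  # (4, 1): one expected output per example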
miloharper / main.py
Created July 20, 2015 12:21
A simple neural network written in Python.
from numpy import exp, array, random, dot

class NeuralNetwork():
    def __init__(self):
        # Seed the random number generator, so it generates the same numbers
        # every time the program runs.
        random.seed(1)

        # We model a single neuron, with 3 input connections and 1 output connection.
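The preview stops at the comment above. A minimal sketch of how the single-neuron class can be completed, reusing the weight initialisation and sigmoid from the nine-line version earlier in this listing; the think() method name is an assumption:

from numpy import exp, random, dot

class NeuralNetwork():
    def __init__(self):
        random.seed(1)
        # A single neuron with 3 inputs: a 3 x 1 weight matrix with values from -1 to 1
        self.synaptic_weights = 2 * random.random((3, 1)) - 1

    def think(self, inputs):
        # Pass the weighted sum of the inputs through the sigmoid function
        return 1 / (1 + exp(-dot(inputs, self.synaptic_weights)))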