Basic Neural Net For Number Recognition
import random
import math
import numpy as np

### Could also use tanh or any other smooth/differentiable function in place of sigmoid here
def sigmoid(t):
    return 1 / (1 + math.exp(-t))

def neuron_output(weights, inputs):
    # Each neuron takes a weighted sum of its inputs and squashes it with sigmoid
    return sigmoid(np.dot(weights, inputs))
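
### A hedged sketch, not in the original gist: as the comment above says, any
### smooth, differentiable squashing function would work. A tanh-based neuron
### could look like the function below; its outputs lie in (-1, 1) instead of
### (0, 1), so the delta terms in backpropagate would need the matching
### derivative 1 - output**2 rather than output * (1 - output).
def tanh_neuron_output(weights, inputs):
    return math.tanh(np.dot(weights, inputs))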
def feed_forward(neural_network, input_vector):
    # The network is a list of layers; each layer is a list of weight vectors.
    # Returns the outputs of every layer (the last entry is the final output).
    outputs = []
    for layer in neural_network:
        input_with_bias = input_vector + [1]              # append the bias input
        output = [neuron_output(neuron, input_with_bias)  # one output per neuron
                  for neuron in layer]
        outputs.append(output)
        input_vector = output                             # this layer's output feeds the next
    return outputs
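
### A quick sanity check, not part of the original gist: a hand-built two-layer
### network that computes XOR, just to illustrate what feed_forward returns.
### The weights are large so the sigmoids saturate near 0 or 1.
xor_network = [# hidden layer
               [[20, 20, -30],    # 'and' neuron
                [20, 20, -10]],   # 'or' neuron
               # output layer
               [[-60, 60, -30]]]  # 'or but not and' neuron

for x in [0, 1]:
    for y in [0, 1]:
        # feed_forward returns every layer's outputs; [-1] is the output layer
        print(x, y, feed_forward(xor_network, [x, y])[-1])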
#### The key to neural nets. Without backprop we'll never get anywhere interesting.
def backpropagate(network, input_vector, targets):
    hidden_outputs, outputs = feed_forward(network, input_vector)

    # output * (1 - output) comes from the derivative of the sigmoid
    output_deltas = [output * (1 - output) * (output - target)
                     for output, target in zip(outputs, targets)]

    # adjust the weights of the output layer, one neuron at a time
    for i, output_neuron in enumerate(network[-1]):
        for j, hidden_output in enumerate(hidden_outputs + [1]):
            output_neuron[j] -= output_deltas[i] * hidden_output

    # propagate the errors back to the hidden layer
    # (use network[-1] rather than the global output_layer so the function
    #  works for any network that is passed in)
    hidden_deltas = [hidden_output * (1 - hidden_output) *
                         np.dot(output_deltas, [n[i] for n in network[-1]])
                     for i, hidden_output in enumerate(hidden_outputs)]

    # adjust the weights of the hidden layer, one neuron at a time
    for i, hidden_neuron in enumerate(network[0]):
        for j, input_value in enumerate(input_vector + [1]):
            hidden_neuron[j] -= hidden_deltas[i] * input_value
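
### For reference, a sketch of the math the updates above implement, assuming a
### squared-error loss (the gist never states the loss explicitly): with
### output o = sigmoid(z) and target t,
###     dE/dz = (o - t) * o * (1 - o)
### because sigmoid'(z) = sigmoid(z) * (1 - sigmoid(z)). Each weight then moves
### by -delta * input, i.e. one gradient-descent step with step size 1.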
#### 5x5 binary "images" of the digits 0-3, flattened row by row
zero_digit = [1,1,1,1,1,
              1,0,0,0,1,
              1,0,0,0,1,
              1,0,0,0,1,
              1,1,1,1,1]

one_digit = [0,0,1,0,0,
             0,0,1,0,0,
             0,0,1,0,0,
             0,0,1,0,0,
             0,0,1,0,0]

two_digit = [1,1,1,1,1,
             0,0,0,0,1,
             1,1,1,1,1,
             1,0,0,0,0,
             1,1,1,1,1]

three_digit = [1,1,1,1,1,
               0,0,0,0,1,
               1,1,1,1,1,
               0,0,0,0,1,
               1,1,1,1,1]

inputs = [zero_digit, one_digit, two_digit, three_digit]

# One-hot target vectors: the jth target is 1 only at position j
targets = [[1 if i == j else 0 for i in range(4)]
           for j in range(4)]
random.seed(0)    # so the results are reproducible
input_size = 25   # each input is a flattened 5x5 grid of pixels
num_hidden = 5    # number of neurons in the hidden layer
output_size = 4   # one output per digit 0-3

# Start from random weights; each neuron also gets a bias weight (the + 1)
hidden_layer = [[random.random() for _ in range(input_size + 1)]
                for _ in range(num_hidden)]
output_layer = [[random.random() for _ in range(num_hidden + 1)]
                for _ in range(output_size)]
network = [hidden_layer, output_layer]

# Train by repeatedly backpropagating over the four examples
for _ in range(10000):
    for input_vector, target_vector in zip(inputs, targets):
        backpropagate(network, input_vector, target_vector)
def predict(input_vector):
    return feed_forward(network, input_vector)[-1]

#### Make a "squishy" one (a slightly distorted one_digit)
test_number = [0,1,1,0,0,
               0,0,1,0,0,
               0,0,1,0,0,
               0,0,1,0,0,
               0,0,1,0,0]

prediction = predict(test_number)

## Should hopefully predict one_digit, i.e. the second output should be close to 1
print(prediction)
####
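
### A small follow-up sketch, not in the original gist: the prediction is a list
### of four sigmoid activations, one per digit, so np.argmax picks the most
### likely class, which should be 1 for the test image above.
print("predicted digit:", np.argmax(prediction))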