Skip to content

Instantly share code, notes, and snippets.

@cookiengineer
Last active January 4, 2017 17:22
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save cookiengineer/0a529c115f7cbeaa91934a1b7ae7ad0f to your computer and use it in GitHub Desktop.
Quick 'n dirty Backpropagated Neural Network implementation
const _LEARNING_RATE = 0.3;
const _LEARNING_MOMENTUM = 0.9;

// Uniform random value in the half-open interval [-1, 1).
const _random = function() {
  return (Math.random() * 2) - 1;
};

/*
 * Feed-forward neural network with one input layer, a configurable
 * list of hidden layers and one output layer.
 *
 * network: [ inputCount, [ hidden1Count, hidden2Count, ... ], outputCount ]
 *          defaults to [6, [6, 6], 1] when omitted.
 *
 * Each neuron is a plain record:
 * - bias:    additive bias term (starts at 1)
 * - delta:   error gradient, filled in by learn()
 * - value:   activation, filled in by compute()
 * - history: previous weight deltas (used for momentum)
 * - weights: one weight per neuron of the previous layer
 *            (empty on the input layer)
 */
const Brain = function(network) {

  if (network === undefined) {
    network = [6, [6, 6], 1];
  }

  // Builds one neuron connected to `incoming` neurons of the previous
  // layer; the input layer passes 0 and therefore gets no weights.
  const make_neuron = function(incoming) {

    const weights = [];

    for (let w = 0; w < incoming; w++) {
      weights.push(_random());
    }

    return {
      bias: 1,
      delta: 0,
      value: _random(),
      history: new Array(weights.length).fill(0),
      weights: weights
    };

  };

  this.layers = [];

  // Input layer: values are set directly by compute(), so no weights.
  const input_layer = [];

  for (let i = 0; i < network[0]; i++) {
    input_layer.push(make_neuron(0));
  }

  this.layers.push(input_layer);

  // Hidden layers: every neuron is fully connected to the layer before.
  let prev_layer = input_layer;

  for (let h = 0; h < network[1].length; h++) {

    const hidden_layer = [];

    for (let n = 0; n < network[1][h]; n++) {
      hidden_layer.push(make_neuron(prev_layer.length));
    }

    this.layers.push(hidden_layer);
    prev_layer = hidden_layer;

  }

  // Output layer: fully connected to the last hidden layer.
  const output_layer = [];

  for (let o = 0; o < network[2]; o++) {
    output_layer.push(make_neuron(prev_layer.length));
  }

  this.layers.push(output_layer);

};
Brain.prototype = {

  /*
   * Forward pass: writes `inputs` into the input layer, then
   * propagates activations layer by layer.
   *
   * inputs:  one number per input neuron.
   * Returns the output layer's activations as a plain array.
   */
  compute: function(inputs) {

    const layers = this.layers;
    const input_layer = layers[0];

    for (let i = 0, il = inputs.length; i < il; i++) {
      input_layer[i].value = inputs[i];
    }

    let prev_layer = layers[0];

    for (let l = 1, ll = layers.length; l < ll; l++) {

      const current_layer = layers[l];

      for (let n = 0; n < current_layer.length; n++) {

        const neuron = current_layer[n];
        let value = 0;

        // Weighted sum over the previous layer's activations.
        for (let p = 0, pl = prev_layer.length; p < pl; p++) {
          value += prev_layer[p].value * neuron.weights[p];
        }

        value += neuron.bias;

        // Hardcoded Activation Function (logistic sigmoid).
        neuron.value = (1 / (1 + Math.exp((-1 * value) / 1)));

      }

      prev_layer = layers[l];

    }

    const outputs = [];
    const output_layer = layers[layers.length - 1];

    for (let o = 0, ol = output_layer.length; o < ol; o++) {
      outputs.push(output_layer[o].value);
    }

    return outputs;

  },

  /*
   * One step of online backpropagation:
   * 1. forward pass via compute()
   * 2. deltas (error gradients) from the output layer backwards
   * 3. bias/weight updates with learning rate plus momentum
   *
   * inputs:  training sample, one number per input neuron.
   * outputs: expected values, one number per output neuron.
   */
  learn: function(inputs, outputs) {

    this.compute(inputs);

    const layers = this.layers;

    // Gradient for the Output Layer: sigmoid derivative * error.
    const output_layer = layers[layers.length - 1];

    for (let o = 0, ol = output_layer.length; o < ol; o++) {
      const neuron = output_layer[o];
      const value = neuron.value;
      neuron.delta = value * (1 - value) * (outputs[o] - value);
    }

    // Gradients for Hidden Layers and Input Layer: back-propagate the
    // next layer's deltas through the connecting weights.
    for (let l = layers.length - 2; l >= 0; l--) {

      const current_layer = layers[l];
      const next_layer = layers[l + 1];

      for (let n = 0, nl = current_layer.length; n < nl; n++) {

        const neuron = current_layer[n];
        const value = neuron.value;
        let error = 0.0;

        for (let x = 0; x < next_layer.length; x++) {
          const next_neuron = next_layer[x];
          error += next_neuron.weights[n] * next_neuron.delta;
        }

        neuron.delta = value * (1 - value) * error;

      }

    }

    // Update the Input Layer.
    // BUGFIX: the original read the undefined name `input[w]` and did
    // `neuron.weights += delta * ...` (no index, `*` instead of `+`).
    // Both lines were dead — input neurons have empty `weights` — so
    // behavior is unchanged, but they are corrected here to match the
    // hidden/output update below.
    // NOTE(review): compute() never reads the input layer's bias or
    // weights, so this whole section is effectively a no-op; kept for
    // symmetry with the update below.
    const input_layer = layers[0];

    for (let i = 0, il = input_layer.length; i < il; i++) {

      const neuron = input_layer[i];
      neuron.bias += _LEARNING_RATE * neuron.delta;

      for (let w = 0, wl = neuron.weights.length; w < wl; w++) {
        const delta = _LEARNING_RATE * neuron.delta * inputs[w];
        neuron.weights[w] += delta + _LEARNING_MOMENTUM * neuron.history[w];
        neuron.history[w] = delta;
      }

    }

    // Update Hidden Layers and Output Layer: gradient step plus
    // momentum from the previous step's stored delta.
    for (let l = 1, ll = layers.length; l < ll; l++) {

      const current_layer = layers[l];
      const prev_layer = layers[l - 1];

      for (let n = 0, nl = current_layer.length; n < nl; n++) {

        const neuron = current_layer[n];
        neuron.bias += _LEARNING_RATE * neuron.delta;

        for (let w = 0, wl = neuron.weights.length; w < wl; w++) {
          const delta = _LEARNING_RATE * neuron.delta * prev_layer[w].value;
          neuron.weights[w] += delta + _LEARNING_MOMENTUM * neuron.history[w];
          neuron.history[w] = delta;
        }

      }

    }

  }

};
// XOR: the classic non-linearly-separable training set.
const brain = new Brain([2, [2], 1]);
const training = [
  { inputs: [ 1, 1 ], outputs: [ 0 ] },
  { inputs: [ 0, 0 ], outputs: [ 0 ] },
  { inputs: [ 1, 0 ], outputs: [ 1 ] },
  { inputs: [ 0, 1 ], outputs: [ 1 ] }
];

// Training Phase
for (let i = 0; i < 20000; i++) {
  for (const dataset of training) {
    brain.learn(dataset.inputs, dataset.outputs);
  }
}

// Testing Phase
for (const dataset of training) {
  const result = brain.compute(dataset.inputs);
  console.log(dataset.inputs.join(',') + ' ===> ' + result);
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment