@Elyx0
Created April 26, 2017 07:21
// Simplified from: https://github.com/Elyx0/rosenblattperceptronjs/blob/master/src/Perceptron.js
class Perceptron {
  constructor(bias = 1, learningRate = 0.1, weights = []) {
    this.bias = bias;
    this.learningRate = learningRate;
    this.weights = weights;
    this.trainingSet = [];
  }

  // Initialize one random weight per input and append the bias weight
  init(inputs, bias = this.bias) {
    this.weights = [...inputs.map(() => Math.random()), bias];
  }

  adjustWeights(inputs, expected) {
    // ...
    const actual = this.evaluate(inputs);
    if (actual === expected) return true; // Correct prediction: leave the weights untouched.
    // Otherwise nudge every weight by error * learningRate, scaled by its input.
    // The bias weight (last entry) is trained against a constant input of 1.
    const withBias = [...inputs, 1];
    this.weights = this.weights.map((w, i) => w + this.delta(actual, expected, withBias[i]));
    // ...
  }

  // Weight correction for one input: (expected - actual) * learningRate * input
  delta(actual, expected, input, learningRate = this.learningRate) {
    const error = expected - actual; // How far off were we
    return error * learningRate * input;
  }

  // Dot product of the inputs (plus the constant bias input) with the weights
  weightedSum(inputs = this.inputs, weights = this.weights) {
    return [...inputs, 1].map((inp, i) => inp * weights[i]).reduce((x, y) => x + y, 0);
  }

  // Evaluate an input vector using the current weights
  evaluate(inputs) {
    return this.activate(this.weightedSum(inputs));
  }

  // Heaviside step as the activation function
  activate(value) {
    return value >= 0 ? 1 : 0;
  }
}
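
Below is a minimal usage sketch that is not part of the original gist: it trains the Perceptron class above on the OR truth table and prints the learned predictions. The dataset, the epoch cap, and the logging are illustrative assumptions.

// Usage sketch (assumption: learn the OR gate, which is linearly separable)
const p = new Perceptron();
const data = [
  { inputs: [0, 0], expected: 0 },
  { inputs: [0, 1], expected: 1 },
  { inputs: [1, 0], expected: 1 },
  { inputs: [1, 1], expected: 1 },
];

p.init(data[0].inputs); // two random input weights plus the bias weight

// One pass over the data per epoch; stop once a full pass needs no correction.
let converged = false;
for (let epoch = 0; epoch < 100 && !converged; epoch++) {
  converged = data
    .map(({ inputs, expected }) => p.adjustWeights(inputs, expected) === true)
    .every(Boolean);
}

data.forEach(({ inputs }) => console.log(inputs, '->', p.evaluate(inputs)));
// Once converged: [0,0] -> 0, [0,1] -> 1, [1,0] -> 1, [1,1] -> 1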