@leefreemanxyz
Created October 23, 2017 10:05
// Training set of (x, y) pairs.
const training = [
  [0, 3], [1, 6], [2, 8], [3, 7], [9, 100]
]

// Learning rate (alpha): step size for each gradient-descent update.
const learningRate = 0.05

// Linear hypothesis h(x) = theta1 * x + theta0, with params = [theta0, theta1].
const hypothesis = (x, params) => params[1] * x + params[0]
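// Quick illustrative check: hypothesis(2, [1, 3]) === 3 * 2 + 1 === 7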
// Mean squared error cost J(theta) = (1 / 2m) * sum over i of (h(x_i) - y_i)^2.
const cost = (training, params) => {
  let sum = 0
  for (let i = 0; i < training.length; i++) {
    sum += Math.pow(hypothesis(training[i][0], params) - training[i][1], 2)
  }
  const j = sum / (2 * training.length)
  console.log(j) // logged on every call so convergence can be watched
  return j
}
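// Worked example: with params = [0, 0] every prediction is 0, so
// cost(training, [0, 0]) === (9 + 36 + 64 + 49 + 10000) / 10 === 1015.8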
// Partial derivative of J with respect to theta0: (1 / m) * sum of (h(x_i) - y_i).
const summedError1 = (training, params) => {
  let sum1 = 0
  for (let i = 0; i < training.length; i++) {
    sum1 += hypothesis(training[i][0], params) - training[i][1]
  }
  return sum1 / training.length
}
// Partial derivative of J with respect to theta1: (1 / m) * sum of (h(x_i) - y_i) * x_i.
const summedError2 = (training, params) => {
  let sum2 = 0
  for (let i = 0; i < training.length; i++) {
    sum2 += (hypothesis(training[i][0], params) - training[i][1]) * training[i][0]
  }
  return sum2 / training.length
}
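// Both gradients follow from differentiating
// J(theta) = (1 / 2m) * sum((theta1 * x_i + theta0 - y_i)^2)
// with respect to theta0 and theta1; the factor of 2 cancels the 1/2 in J.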
// Batch gradient descent: take `iterations` steps of
//   theta_j := theta_j - alpha * dJ/dtheta_j,
// updating theta0 and theta1 simultaneously via temporaries.
const gradientDescent = (training, params, learningRate, iterations) => {
  let count = 0
  while (count < iterations) {
    const temp0 = params[0] - learningRate * summedError1(training, params)
    const temp1 = params[1] - learningRate * summedError2(training, params)
    params[0] = temp0
    params[1] = temp1
    count++
    cost(training, params) // log the cost after each step
  }
  console.log(params)
}
// Fit the line starting from theta = [0, 0].
gradientDescent(training, [0, 0], learningRate, 1000)
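// A minimal verification sketch using the closed-form least-squares solution
// for a single feature:
//   theta1 = sum((x - xMean) * (y - yMean)) / sum((x - xMean)^2)
//   theta0 = yMean - theta1 * xMean
// `leastSquaresFit` is an illustrative helper, not part of the training code
// above. For this data it gives roughly [-9.46, 11.42], which the descent
// above should approach after 1000 iterations at alpha = 0.05.
const leastSquaresFit = (data) => {
  const m = data.length
  const xMean = data.reduce((s, [x]) => s + x, 0) / m
  const yMean = data.reduce((s, [, y]) => s + y, 0) / m
  let num = 0
  let den = 0
  for (const [x, y] of data) {
    num += (x - xMean) * (y - yMean)
    den += (x - xMean) * (x - xMean)
  }
  const theta1 = num / den
  return [yMean - theta1 * xMean, theta1]
}
console.log(leastSquaresFit(training)) // ~[-9.46, 11.42]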