Skip to content

Instantly share code, notes, and snippets.

@hyyking
Last active July 24, 2019 11:16
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 1 You must be signed in to fork a gist
  • Save hyyking/5bb2051383845cf7064b5cf611d00286 to your computer and use it in GitHub Desktop.
Save hyyking/5bb2051383845cf7064b5cf611d00286 to your computer and use it in GitHub Desktop.
Go linear regression
package main;
import(
"fmt"
)
// LinearFunction models the line f(x) = a*x + b, with parameters fitted
// by gradient descent on the mean squared error.
type LinearFunction struct {
	a float64 // slope
	b float64 // intercept
}

// aPartialDerivative returns the contribution of the sample (xi, yi) to the
// derivative of the squared error with respect to a:
// d/da (yi - (a*xi + b))^2 = -2*xi*(yi - (a*xi + b)).
func (f *LinearFunction) aPartialDerivative(xi, yi float64) float64 {
	return -2 * xi * (yi - (f.a*xi + f.b))
}

// bPartialDerivative returns the contribution of the sample (xi, yi) to the
// derivative of the squared error with respect to b:
// d/db (yi - (a*xi + b))^2 = -2*(yi - (a*xi + b)).
func (f *LinearFunction) bPartialDerivative(xi, yi float64) float64 {
	return -2 * (yi - (f.a*xi + f.b))
}

// GradientDescent performs one gradient-descent step on the mean squared
// error MSE = (1/N) * sum_i (y_i - (a*x_i + b))^2, updating f.a and f.b
// in place. alpha is the learning rate; x and y are assumed to have equal
// length. An empty data set is a no-op (previously this divided by zero
// and poisoned the parameters with NaN/Inf).
func (f *LinearFunction) GradientDescent(x, y []float64, alpha float64) {
	n := len(x)
	if n == 0 {
		return // nothing to fit; guards the 1/N below
	}
	// Sum the per-sample partial derivatives over the data set.
	var aa, bb float64
	for i := 0; i < n; i++ {
		aa += f.aPartialDerivative(x[i], y[i])
		bb += f.bPartialDerivative(x[i], y[i])
	}
	// Step size of the gradient descent: learning rate scaled by 1/N.
	step := alpha / float64(n)
	f.a -= step * aa
	f.b -= step * bb
}

// Train runs epochs iterations of gradient descent over the data set.
func (f *LinearFunction) Train(x, y []float64, epochs int, alpha float64) {
	for e := 0; e < epochs; e++ {
		f.GradientDescent(x, y, alpha)
	}
}

// Predict evaluates the fitted line at v: f(v) = a*v + b.
func (f *LinearFunction) Predict(v float64) float64 {
	return f.a*v + f.b
}
// main fits a line to a small spending/sales data set and prints the
// model's prediction for a spending value of 8.
func main() {
	x := []float64{1, 4, 15, 5, 8, 2, 13, 9, 6, 19}
	y := []float64{1, 2, 10, 5, 9, 16, 4, 13, 8, 7}

	model := &LinearFunction{} // zero-valued a and b, same as {a: 0, b: 0}
	model.Train(x, y, 5000, 0.01)
	fmt.Println(model.Predict(8))
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment