# simple linear regression via gradient descent, by @kballenegger (February 19, 2012)
# training examples as [x, y] pairs
training_set = [[3, 2],
                [1, 2],
                [0, 1],
                [4, 3]]
# machine learning: hypothesis and cost function

# hypothesis: a straight line h(x) = theta0 + theta1 * x
def h x, theta0, theta1
  theta0 + theta1 * x
end

# squared-error cost J(theta0, theta1) = 1/(2m) * sum of (h(x_i) - y_i)^2
# over the m training examples [x_i, y_i]
def J theta0, theta1, training_set
  sum = 0
  training_set.each do |e|
    sum += (h(e[0], theta0, theta1) - e[1]) ** 2
  end
  1.0 / (2 * training_set.count) * sum
end
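
# quick sanity check (worked by hand, not part of the original gist): with
# theta0 = 1 and theta1 = 0 the hypothesis is the constant line y = 1, so
# J(1, 0, training_set) = (1 + 1 + 0 + 4) / (2 * 4) = 0.75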
# calculus: numerically approximate f'(x) with a forward difference,
# using a step size of dx = 10^(-precision_magnitude)
def derivative precision_magnitude, &f
  raise "can only create derivatives of single-argument functions" unless f.arity == 1
  dx = 10.0 ** -precision_magnitude
  lambda { |x| (f.call(x + dx) - f.call(x)) / dx }
end
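
# example usage (not part of the original gist): the derivative of x^2 at
# x = 3 should come out close to 6
#
#   square_prime = derivative(6) { |x| x * x }
#   square_prime.call(3.0)  #=> roughly 6.000001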
# gradient descent: starting from all-zero thetas, repeatedly step each theta_j
# against its partial derivative of f until every partial falls below the tolerance
def gradient_descent learning_rate, precision_magnitude, &f
  thetas = Array.new(f.arity, 0.0)
  tolerance = 10.0 ** -precision_magnitude
  good = false
  until good
    new_thetas = thetas.dup
    good = true
    thetas.each_index do |j|
      # partial derivative of f with respect to theta_j, holding the others fixed
      prime = (derivative(precision_magnitude) { |x| tmp = thetas.dup; tmp[j] = x; f.call(tmp) }).call(thetas[j])
      new_thetas[j] = thetas[j] - learning_rate * prime
      good = false if prime.abs > tolerance
    end
    thetas = new_thetas
  end
  thetas
end
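
# example usage (not part of the original gist): minimizing
# (a - 1)^2 + (b + 2)^2 should land close to [1.0, -2.0]
#
#   gradient_descent(0.1, 6) { |a, b| (a - 1) ** 2 + (b + 2) ** 2 }
#   #=> [~1.0, ~-2.0]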
# use case: fit a line to the training set by minimizing J with gradient descent
thetas = gradient_descent 0.1, 10 do |theta0, theta1|
  J theta0, theta1, training_set
end
thetas.map! { |e| e.round(6) }
p thetas
# the fitted hypothesis as a function of x
solution = lambda { |x|
  h x, thetas[0], thetas[1]
}
puts solution.call(3)
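
# for reference (computed by hand, not part of the original gist): the exact
# least-squares fit for this training set is theta0 = 1.2, theta1 = 0.4, so the
# value printed for x = 3 should be close to 1.2 + 0.4 * 3 = 2.4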