# gradient_descent.rb: fits a one-variable linear regression
# h(x) = theta0 + theta1 * x to a tiny data set with batch gradient descent.

# Initial parameters [theta0, theta1], both starting at zero.
def params
  [0, 0]
end
# Training examples; both points lie exactly on the line y = 1 + 2x.
def examples
  [
    { :x => 1, :y => 3 },
    { :x => 2, :y => 5 }
  ]
end
# Linear hypothesis: h(x) = theta0 + theta1 * x.
def hypothesis(x, params)
  params[0] + params[1] * x
end
# Mean squared error cost: J = (1/2m) * sum((h(x_i) - y_i)**2).
# fdiv forces floating-point division so integer inputs are not truncated.
def cost_function(examples, params)
  examples.inject(0) do |result, example|
    result + (hypothesis(example[:x], params) - example[:y])**2
  end.fdiv(2 * examples.count)
end
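
# Batch gradient descent: repeatedly apply the simultaneous update
#   theta_j := theta_j - alpha * dJ/d(theta_j)
# where alpha is the learning rate. The temp variables keep the update
# simultaneous: both gradients are evaluated at the old parameter values
# before either parameter is overwritten.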
def gradient_descent(examples, params, learning_rate)
  1000.times do
    temp0 = params[0] - learning_rate * summed_error_0(examples, params)
    temp1 = params[1] - learning_rate * summed_error_1(examples, params)
    params[0] = temp0
    params[1] = temp1
  end
  params
end
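
# Differentiating J gives the two gradient components computed below
# (the "summed_error" names come from the gist; each is really a mean
# over the m examples):
#   dJ/d(theta0) = (1/m) * sum(h(x_i) - y_i)
#   dJ/d(theta1) = (1/m) * sum((h(x_i) - y_i) * x_i)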
def summed_error_0(examples, params)
  examples.inject(0) do |result, example|
    result + (hypothesis(example[:x], params) - example[:y])
  end.fdiv(examples.count)
end
def summed_error_1(examples, params)
  examples.inject(0) do |result, example|
    result + (hypothesis(example[:x], params) - example[:y]) * example[:x]
  end.fdiv(examples.count)
end
optimized_params = gradient_descent(examples, params, 0.5)
puts "optimized params: #{optimized_params.inspect}"
puts "final cost: #{cost_function(examples, optimized_params)}"
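
# A quick sanity check: the line y = 1 + 2x passes through both training
# examples exactly, so after 1000 iterations at learning rate 0.5 the
# parameters should land very close to [1, 2] and the cost close to zero.
if optimized_params.zip([1.0, 2.0]).all? { |got, want| (got - want).abs < 1e-3 }
  puts "converged to y = 1 + 2x as expected"
else
  puts "warning: parameters did not converge to [1, 2]"
end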