Skip to content

Instantly share code, notes, and snippets.

@leefreemanxyz
Created October 23, 2017 10:05
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save leefreemanxyz/c7ae0319270b2760625a8004d36d8da3 to your computer and use it in GitHub Desktop.
# Initial model parameters as [intercept, slope], both starting at zero.
def params
  Array.new(2, 0)
end
# Training set: each example pairs a feature value :x with its target :y.
# The points (1, 3) and (2, 8) lie exactly on the line y = 5x - 2.
def examples
  [
    { x: 1, y: 3 },
    { x: 2, y: 8 }
  ]
end
# Linear model prediction for input x: intercept + slope * x.
#
# x      - the feature value.
# params - two-element Array [intercept, slope].
#
# Returns the predicted y value.
def hypothesis(x, params)
  intercept, slope = params
  intercept + slope * x
end
# Halved mean squared error of the hypothesis over all examples:
# (1 / 2m) * sum((h(x_i) - y_i)^2).
#
# examples - Array of Hashes with :x (feature) and :y (target) keys.
# params   - two-element Array [intercept, slope].
#
# Returns a Float cost.
def cost_function(examples, params)
  squared_errors = examples.inject(0) do |sum, example|
    sum + (hypothesis(example[:x], params) - example[:y])**2
  end
  # fdiv avoids Ruby's integer floor division when all inputs are Integers
  # (e.g. 73 / 4 == 18, but 73.fdiv(4) == 18.25).
  squared_errors.fdiv(2 * examples.count)
end
# Batch gradient descent for simple linear regression.
#
# examples      - training data, Array of {x:, y:} Hashes.
# params        - starting [intercept, slope]; the caller's array is NOT
#                 mutated (the original implementation wrote back into it).
# learning_rate - step size alpha.
# iterations    - number of update steps (default 1000, matching the
#                 original hard-coded count).
#
# Returns a new [intercept, slope] Array with the optimized parameters.
def gradient_descent(examples, params, learning_rate, iterations = 1000)
  theta0, theta1 = params
  iterations.times do
    # Compute both gradients from the SAME current parameters, then update
    # simultaneously — updating theta0 first would bias theta1's gradient.
    grad0 = summed_error_0(examples, [theta0, theta1])
    grad1 = summed_error_1(examples, [theta0, theta1])
    theta0 -= learning_rate * grad0
    theta1 -= learning_rate * grad1
  end
  [theta0, theta1]
end
# Partial derivative of the cost with respect to the intercept (theta0):
# (1/m) * sum(h(x_i) - y_i).
#
# examples - Array of {x:, y:} Hashes.
# params   - two-element Array [intercept, slope].
#
# Returns a Float gradient component.
def summed_error_0(examples, params)
  total = examples.inject(0) do |sum, example|
    sum + (hypothesis(example[:x], params) - example[:y])
  end
  # fdiv prevents integer floor division on the first iterations when
  # params are still Integers (e.g. -11 / 2 == -6, but -11.fdiv(2) == -5.5).
  total.fdiv(examples.count)
end
# Partial derivative of the cost with respect to the slope (theta1):
# (1/m) * sum((h(x_i) - y_i) * x_i).
#
# examples - Array of {x:, y:} Hashes.
# params   - two-element Array [intercept, slope].
#
# Returns a Float gradient component.
def summed_error_1(examples, params)
  total = examples.inject(0) do |sum, example|
    sum + (hypothesis(example[:x], params) - example[:y]) * example[:x]
  end
  # fdiv prevents integer floor division on the first iterations when
  # params are still Integers (e.g. -19 / 2 == -10, but -19.fdiv(2) == -9.5).
  total.fdiv(examples.count)
end
# Fit the line to the training data, then report the learned parameters
# and the final cost they achieve.
best_fit = gradient_descent(examples, params, 0.5)
puts best_fit
puts cost_function(examples, best_fit)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment