Last active
March 1, 2016 12:55
-
-
Save dongguosheng/dc2b01aa1768c71c65fd to your computer and use it in GitHub Desktop.
Toy code demonstrating gradient descent and Newton's method.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# -*- coding: gbk -*- | |
import numpy as np | |
def foo(x):
    """Objective function: the quadratic x^2 + 2x + 1 = (x + 1)^2, minimized at x = -1."""
    return x * x + 2 * x + 1
def g(x):
    """First derivative of foo: d/dx (x^2 + 2x + 1) = 2x + 2."""
    return x * 2 + 2
def h(x):
    """Second derivative of foo: the constant 2 (returned as a float so
    Newton's update g(x)/h(x) is always true division, even on Python 2)."""
    return 2.0
def gradient_descent(x0, lr=0.1, tol=0.001, max_iter=100):
    """Minimize foo by fixed-step gradient descent.

    Repeatedly steps against the gradient, x <- x - lr * g(x), starting
    from x0, until two successive iterates differ by less than tol or
    max_iter steps have been taken. For this quadratic the iteration
    converges geometrically toward the minimizer -1.

    Args:
        x0: starting point.
        lr: fixed step size (learning rate); a line search would choose
            this adaptively, but here it is constant.
        tol: convergence threshold on |x_new - x_old|.
        max_iter: hard cap on the number of iterations.

    Returns:
        The final iterate (approximately -1 when converged).
    """
    # Offset x_last by 1 so the first convergence test cannot trigger
    # before any step has been taken.
    x_last = x0 + 1
    print('start x0: %f' % x0)
    # range/print() instead of xrange/print-statement: runs on Python 2 and 3.
    for cnt in range(max_iter):
        x0 -= lr * g(x0)
        print('%d, %f' % (cnt, x0))
        if abs(x0 - x_last) < tol:
            break
        x_last = x0
    return x0
def newton_method(x0, tol=0.001, max_iter=100):
    """Minimize foo by Newton's method: x <- x - g(x) / h(x).

    No learning rate is needed: the curvature h(x) scales the step. For
    this quadratic objective a single Newton step lands exactly on the
    minimizer -1; a second iteration then detects convergence.

    Args:
        x0: starting point.
        tol: convergence threshold on |x_new - x_old|.
        max_iter: hard cap on the number of iterations.

    Returns:
        The final iterate.
    """
    # Offset x_last by 1 so the first convergence test cannot trigger
    # before any step has been taken.
    x_last = x0 + 1
    print('start x0: %f' % x0)
    # range/print() instead of xrange/print-statement: runs on Python 2 and 3.
    for cnt in range(max_iter):
        x0 -= g(x0) / h(x0)
        print('%d, %f' % (cnt, x0))
        if abs(x0 - x_last) < tol:
            break
        x_last = x0
    return x0
def main():
    """Run both optimizers from the same random starting point so their
    convergence behavior can be compared side by side."""
    start = np.random.rand()
    gradient_descent(start)
    newton_method(start)


if __name__ == '__main__':
    main()
Need to use line search to decide the step size (learning rate).
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
There is no learning rate in Newton's method.