Skip to content

Instantly share code, notes, and snippets.

View wbaek's full-sized avatar
🤔

Woonhyuk (clint) Baek wbaek

🤔
  • 17:07 (UTC +09:00)
View GitHub Profile
@wbaek
wbaek / numerical_optimization_univariate_elimination_fibonacci.py
Last active August 29, 2015 14:13
NumericalOptimization/univariate/elimination/fibonacci.py
#-*- coding: utf-8 -*-
def _verbose(a, b, x1, x2, f1, f2):
print 'a=%.5f, b=%.5f, x1=%.5f, f(x1)=%.5f, x2=%.5f, f(x2)=%.5f'%(a, b, x1, f1, x2, f2)
def minimize(function, a, b, N, epsilon=1e-5, verbose=False):
fibonacci_numubers = [1.0 for i in range(N+1)]
for i in range(N+1):
fibonacci_numubers[i] = (fibonacci_numubers[i-1] + fibonacci_numubers[i-2]) if i >= 2 else 1.0
L = b-a
@wbaek
wbaek / numerical_optimization_univariate_elimination_golden_section.py
Created January 16, 2015 15:49
NumericalOptimization/univariate/elimination/golden_section.py
#-*- coding: utf-8 -*-
import math
def _verbose(a, b, x1, x2, f1, f2):
print 'a=%.5f, b=%.5f, x1=%.5f, f(x1)=%.5f, x2=%.5f, f(x2)=%.5f'%(a, b, x1, f1, x2, f2)
def minimize(function, a, b, epsilon=1e-5, verbose=False, golden_ratio=(-1.0+math.sqrt(5.0))/2.0):
L = b-a
while L > epsilon:
L = abs(b-a)
@wbaek
wbaek / numerical_optimization_univariate_root-finding_newton.py
Created January 16, 2015 15:55
NumericalOptimization/univariate/root-finding/newton.py
#-*- coding: utf-8 -*-
def _verbose(param, score):
print 'x=%.5f, f(x)=%.5f'%(param, score)
def root_finding(function, derivate, x, epsilon=1e-5, verbose=False):
score, delta = 1, 1
while (abs(score) > epsilon) and (abs(delta) > epsilon):
score = function(x)
delta = score / derivate(x)
if verbose: _verbose(x, score)
@wbaek
wbaek / numerical_optimization_univariate_root-finding_secant.py
Created January 16, 2015 15:57
NumericalOptimization/univariate/root-finding/secant.py
#-*- coding: utf-8 -*-
def _verbose(params, score):
print 'a=%.5f, b=%.5f, f(x)=%.5f'%(params[0], params[1], score)
def approximate_derivate(function, a, b):
    """Return the secant (finite-difference) slope of *function* between a and b."""
    rise = function(a) - function(b)
    run = a - b
    return rise / run
def root_finding(function, a, b, epsilon=1e-5, verbose=False):
params = [a, b]
score, delta = 1, 1
@wbaek
wbaek / numerical_optimization_univariate_root-finding_bisection.py
Created January 16, 2015 15:59
NumericalOptimization/univariate/root-finding/bisection.py
#-*- coding: utf-8 -*-
def _verbose(params, scores):
print 'a=%.5f, b=%.5f, x=%.5f, f(x)=%.5f'%(params[0], params[1], params[2], scores[-1])
def root_finding(function, a, b, epsilon=1e-5, verbose=False):
params = [a, b, (a+b)/2.0]
scores = [0, 0, 1]
while (abs(scores[-1]) > epsilon) and (b-a > epsilon):
params = [a, b, (a+b)/2.0]
scores = [function(param) for param in params]
@wbaek
wbaek / numerical_optimization_univariate_elimination_seek_bound.py
Created January 16, 2015 16:49
NumericalOptimization/univariate/elimination/seek_bound.py
#-*- coding: utf-8 -*-
def _verbose(params, scores):
print 'params=' + ','.join(['%.2f'%p for p in params]) + ', scores=' + ','.join(['%.2f'%s for s in scores])
import random
def seek_bound(function, x=random.random(), d=random.random(), verbose=False):
params = [x-d, x, x+d]
scores = [function(p) for p in params]
if verbose: _verbose(params, scores)
@wbaek
wbaek / numerical_optimization_multivariate_melder_mead.py
Created January 18, 2015 13:27
NumericalOptimization/multivariate/melder_mead.py
#-*- coding: utf-8 -*-
def _verbose(i, params, scores):
params_str = ', '.join(['(%s)'%(','.join(['%.2f'%p for p in param])) for param in params])
scores_str = ', '.join(['%.2f'%s for s in scores])
print 'iter=%03d, params=[%s], scores=[%s]'%(i, params_str, scores_str)
def minimize(function, initial, alpha=1.0, beta=2.0, gamma=0.5, epsilon=1e-5, repeat=int(1e6), verbose=False):
import numpy
def center_of_params(params):
return tuple(numpy.average(numpy.array(params[:-1]).transpose(), 1))
@wbaek
wbaek / numerical_optimization_multivariate_powell.py
Created January 20, 2015 16:09
NumericalOptimization/multivariate/powell.py
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../')
from univariate.elimination import seek_bound
from univariate.elimination import golden_section
def minimize(function, initial, epsilon=1e-6, repeat=int(1e4), verbose=False):
import numpy
dimension = len(initial)
unit_vector_list = [ numpy.array([1.0 if i == j else 0.0 for j in range(dimension)])
@wbaek
wbaek / numerical_optimization_multivariate_steepest_descent.py
Last active August 29, 2015 14:13
NumericalOptimization/multivariate/steepest_descent.py
import numpy
def check_wolfe_condition(score_pt, score_alpha_pt, gradient_pt, gradient_alpha_pt, alpha, direction,
                          constant1=0.6, constant2=0.8):
    """Return True when step length *alpha* satisfies both Wolfe conditions.

    score_pt / score_alpha_pt: f at the current point and at the stepped point.
    gradient_pt / gradient_alpha_pt: gradients at those two points.
    direction: search direction vector.
    constant1 / constant2: sufficient-decrease and curvature constants.
    """
    # Hoist the directional derivative at the current point; the original
    # computed this dot product twice.
    slope = numpy.dot(gradient_pt, direction)
    # Armijo / sufficient-decrease condition.
    sufficient_decrease = score_alpha_pt - score_pt <= constant1 * alpha * slope
    # Curvature condition.
    curvature = numpy.dot(gradient_alpha_pt, direction) >= constant2 * slope
    # bool() keeps the return type a plain Python bool (as the original's
    # `return True/False` did), not a numpy.bool_.
    return bool(sufficient_decrease and curvature)
def find_step_length(function, derivate, pt, direction,
@wbaek
wbaek / numerical_optimization_multivariate_newton.py
Last active August 29, 2015 14:14
NumericalOptimization/multivariate/newton.py
#-*- coding: utf-8 -*-
def minimize(function, derivate, derivate_2nd, initial, epsilon=1e-6, repeat=int(1e4), verbose=False):
pt = numpy.array( initial )
step_length = 1.0
for i in range(repeat):
gradient = numpy.array(derivate( pt ))
hessian = numpy.array(derivate_2nd( pt ))
inverted_hessian = numpy.linalg.inv( hessian )
direction = - numpy.dot( inverted_hessian, gradient)