Skip to content

Instantly share code, notes, and snippets.

@ebenolson
Last active December 15, 2015 22:06
Show Gist options
  • Save ebenolson/931e879ed38f257253d2 to your computer and use it in GitHub Desktop.
-- Benchmark fixture: minimise f(x) = sum_i a_i * x_i^2 (convex, minimum at 0)
-- with several torch/optim algorithms and print the final iterates as JSON.
require 'optim'
local cj = require('cjson')

-- FIX: these were accidental globals; declare them local. As chunk-level
-- locals they remain visible (as upvalues) to the functions defined below.
-- a  : per-dimension quadratic coefficients
-- x0 : shared starting point for every optimiser run
local a = torch.Tensor({0.1, 0.2, 0.3})
local x0 = torch.Tensor({1, 1, 1})
--- Objective: f(x) = sum_i a_i * x_i^2 (elementwise-weighted quadratic).
-- @param x torch.Tensor, same shape as `a`
-- @return number, the scalar objective value
function f(x)
  local xsq = torch.pow(x, 2)
  return torch.cmul(a, xsq):sum()
end
--- Gradient of f: df/dx_i = 2 * a_i * x_i.
-- @param x torch.Tensor, same shape as `a`
-- @return torch.Tensor, gradient with the same shape as x
function df(x)
  local ax = torch.cmul(a, x)
  return ax * 2
end
--- Combined objective/gradient evaluator, in the opfunc form that the
-- optim.* routines expect: opfunc(x) -> f(x), df/dx.
function fdf(x)
  local fx = f(x)
  local dfdx = df(x)
  return fx, dfdx
end
-- Eight copy-pasted run blocks collapsed into one helper. Output lines are
-- identical in content and order to the original script.
--
-- NOTE: each run gets a fresh config table because optim.* methods cache
-- mutable state (gradient buffers, evalCounter, ...) inside the config.
local NUM_ITERATIONS = 10

--- Run one optimiser for NUM_ITERATIONS steps starting from x0 and print
-- the final iterate as JSON, labelled by `name`.
-- @param name   string label used in the printed output
-- @param method an optim.* function, e.g. optim.sgd
-- @param cfg    fresh config table for this optimiser
local function run(name, method, cfg)
  local xr = x0:clone()
  for _ = 1, NUM_ITERATIONS do
    method(fdf, xr, cfg)
  end
  print("'" .. name .. "': " .. cj.encode(torch.totable(xr)))
end

run('sgd', optim.sgd, { learningRate = 0.1 })
run('momentum', optim.sgd,
    { learningRate = 0.1, momentum = 0.5, dampening = 0 })
-- nesterov requires momentum > 0 and dampening == 0 in optim.sgd.
run('nesterov_momentum', optim.sgd,
    { learningRate = 0.1, momentum = 0.5, dampening = 0, nesterov = 1 })
run('adagrad', optim.adagrad, { learningRate = 0.1 })
run('rmsprop', optim.rmsprop,
    { learningRate = 0.01, alpha = 0.9, epsilon = 1e-6 })
run('adadelta', optim.adadelta,
    { learningRate = 0.5, rho = 0.95, epsilon = 1e-6 })
run('adam', optim.adam, { learningRate = 0.01, lambda = 1 })
run('adamax', optim.adamax,
    { learningRate = 0.01, beta1 = 0.9, beta2 = 0.999, epsilon = 1e-8 })
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment