@chiral
Created December 2, 2015 15:19
Example of a trivial optimization problem on the 2-D unit circle, for the purpose of comparing ceres-solver and TensorFlow.
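Both programs solve the same problem: find the angle theta whose point (cos theta, sin theta) on the unit circle is closest to the target (1, 1). Stated as an optimization (a restatement added for reference, not part of the original gist):

    \min_{\theta} \, (1 - \cos\theta)^2 + (1 - \sin\theta)^2, \qquad \theta^{*} = \frac{\pi}{4}

The minimizer is pi/4 because the nearest point of the unit circle to (1, 1) lies along the direction of (1, 1); this is the value printed as ans_x in the Ceres program below.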
#include "ceres/ceres.h"
#include "glog/logging.h"
using ceres::CostFunction;
using ceres::AutoDiffCostFunction;
using ceres::Problem;
using ceres::Solver;
using ceres::Solve;
struct CostFunctor {
template <typename T>
bool operator()(const T* const x, T* residual) const {
residual[0] = T(1.0) - cos(x[0]);
residual[1] = T(1.0) - sin(x[0]);
return true;
}
};
int main(int argc, char** argv) {
google::InitGoogleLogging(argv[0]);
const double pi = 3.1415926535;
double x[1] = {0};
Problem problem;
CostFunction* cost_function =
new AutoDiffCostFunction<CostFunctor, 2, 1>(new CostFunctor);
problem.AddResidualBlock(cost_function, NULL, x);
Solver::Options options;
options.minimizer_progress_to_stdout = true;
Solver::Summary summary;
Solve(options, &problem, &summary);
std::cout << summary.BriefReport() << "\n";
std::cout << "opt_x:" << x[0] << "\n";
std::cout << "ans_x:" << pi/4 << "\n";
return 0;
}
import tensorflow as tf
import numpy as np

# Target point (1, 1); the closest point on the unit circle is at theta = pi/4.
o = np.array([1, 1])

theta = tf.Variable(tf.zeros([1]))                  # initial guess: theta = 0
xy = tf.concat(0, [tf.cos(theta), tf.sin(theta)])   # point on the unit circle
loss = tf.reduce_mean(tf.square(xy - o))            # mean squared distance to (1, 1)

optimizer = tf.train.AdagradOptimizer(0.5)
train = optimizer.minimize(loss)

init = tf.initialize_all_variables()
sess = tf.Session()
sess.run(init)

for step in xrange(201):
    sess.run(train)
    if step % 20 == 0:
        print step, sess.run(theta)
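For parity with the Ceres program, which prints both the optimized angle (opt_x) and the analytic answer (ans_x), one could append a similar comparison after the training loop. A minimal sketch using the session and variable defined above; these two lines are an addition, not part of the original gist:

# Final angle found by Adagrad vs. the analytic answer pi/4 (Python 2 print, as above).
print 'opt_theta:', sess.run(theta)[0]
print 'ans_theta:', np.pi / 4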