Skip to content

Instantly share code, notes, and snippets.

@unnonouno

unnonouno/adagrad_test.cpp

Last active Aug 29, 2015
Embed
What would you like to do?
AdaGrad with SSE (no intrinsics — vectorization comes from the compiler via -Ofast -march=native; compare the -O2 timing below)
#include <sys/time.h>
#include <cmath>
#include <ctime>
#include <cstdlib>
#include <iostream>
#include <vector>
// One AdaGrad step over every coordinate:
//   sum[i] += g[i]^2                      (running sum of squared gradients)
//   x[i]   += eta / sqrt(sum[i]) * g[i]   (per-coordinate scaled update)
// Caller must ensure sum[i] > 0 (e.g. seed with a tiny epsilon) so the
// square root never divides by zero.
void ada_grad(
float eta,
const std::vector<float>& g,
std::vector<float>& sum,
std::vector<float>& x) {
const std::size_t n = x.size();
for (std::size_t idx = 0; idx != n; ++idx) {
  const float grad = g[idx];
  sum[idx] += grad * grad;
  // Same evaluation order as the original (eta / sqrt, then * grad)
  // to keep the float rounding bit-identical.
  x[idx] += eta / std::sqrt(sum[idx]) * grad;
}
}
// Benchmark driver: applies the AdaGrad update 1e6 times to a fixed random
// gradient vector, then prints wall-clock time and one result element
// (printing r[0] keeps the optimizer from discarding the loop entirely).
int main() {
const std::size_t dimension = 1000;
const float learning_rate = 0.1f;  // 0.1f, not 0.1: avoid double->float narrowing
const std::size_t iteration = 1000000;
std::srand(0);  // fixed seed -> reproducible gradient, comparable runs
std::vector<float> g;
g.reserve(dimension);  // one allocation instead of repeated regrowth
for (std::size_t i = 0; i < dimension; ++i) {
  g.push_back(static_cast<float>(std::rand()));
}
// Seed the accumulators with a tiny epsilon so sqrt(sum) never divides by 0.
std::vector<float> sums(dimension, 1.0e-10f);
std::vector<float> r(dimension);
// steady_clock: portable and monotonic, unlike POSIX gettimeofday.
const auto begin = std::chrono::steady_clock::now();
for (std::size_t i = 0; i < iteration; ++i) {
  ada_grad(learning_rate, g, sums, r);
}
const auto end = std::chrono::steady_clock::now();
const double elapsed =
    std::chrono::duration<double, std::milli>(end - begin).count();
std::cout << elapsed << " msec" << std::endl;
std::cout << r[0] << std::endl;
return 0;
}
% g++ -Ofast adagrad_test.cpp -march=native
% ./a.out
540.679 msec
200.099
% g++ -O2 adagrad_test.cpp -march=native
% ./a.out
7744.53 msec
200.099
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment