Last active
January 26, 2018 14:41
-
-
Save fabiovila/4c7f609544b288dba4e35bff6e7ff1ea to your computer and use it in GitHub Desktop.
Another neural network for the iris dataset: a simplified version with a fixed number of layers, although you can change their sizes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#include <algorithm>
#include <cctype>
#include <cstdio>
#include <fstream>
#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

#include "armadillo"

using namespace arma;
using namespace std;

// g++ -c -g -std=c++14 -march=native -I. -O2 -fPIC `pkg-config --cflags --libs Qt5Core` --pipe nn-iris.cpp -o nn-iris.o
// g++ -g -std=c++14 -march=native -I. -O2 -fPIC -lpthread -lm -larmadillo -lstdc++ `pkg-config --cflags --libs Qt5Core` -o nn-iris nn-iris.o -I.
class sgd { | |
private: | |
mat wx, wi, wo; | |
mat bx, bi, bo; | |
double alpha; | |
double momentum; | |
public: | |
sgd() { alpha = 0.01; momentum = 0.9;} | |
void layers(int i, int h, int o) { | |
wx = mat(i, h).randn(); bx = mat(1,h).ones(); | |
wo = mat(h, o).randn(); bo = mat(1,o).ones(); | |
}; | |
inline mat sigmoid (const mat &f) { return 1.0 / (1.0 + trunc_exp(-f)); } | |
inline mat dsigmoid (const mat &f) { return f % (1.0 - f); } | |
inline mat tanh (mat &f) { return arma::tanh(f); } | |
inline mat dtanh (mat &f) { return 1.0 - arma::pow(f,2); } | |
rowvec prev (const rowvec &x){ | |
mat xh = sigmoid( x * wx + bx); | |
mat ho = sigmoid( xh * wo + bo); | |
return ho; | |
} | |
double trainstep (const mat &iX, const mat &iY) { | |
vec list = linspace(0,iX.n_rows - 1, iX.n_rows); | |
list = shuffle(list); | |
double esum = 0; | |
for (auto xy: list) { | |
mat x = iX.row(xy); mat y = iY.row(xy); | |
// forward step | |
mat xh = sigmoid( x * wx + bx); | |
mat ho = sigmoid( xh * wo + bo); | |
// output error | |
mat e = ho - y; | |
esum += accu(pow(e,2)); | |
rowvec beta; | |
mat d; | |
// backward step | |
beta = e % dsigmoid(ho); | |
d = xh.t() * beta; | |
wo -= d * alpha; | |
bo -= beta * alpha; | |
e = e * wo.t(); // backpropagate error | |
beta = e % dsigmoid(xh); | |
d = x.t() * beta; | |
wx -= d * alpha; | |
bx -= beta * alpha; | |
}; | |
return esum; | |
} | |
}; | |
void printm(const rowvec& M) { | |
for(uint i = 0; i < M.n_cols; i++) { | |
printf ("%02.4f ", M(i)); | |
//cout << M(i) << ' '; | |
} | |
} | |
int main(int argc, char** argv) { | |
arma_rng::set_seed(9); | |
sgd s; | |
// Iris data Test | |
FILE *fp = fopen("iris.data.csv", "r"); | |
vector<vector<double>> vY; | |
vector<double> vA,vB,vC,vD; | |
unordered_map<string, int> uLabel; | |
string Label; | |
float a,b,c,d; | |
char l[255]; | |
int yhot = 0; | |
while (fscanf(fp,"%f,%f,%f,%f,%s",&a,&b,&c,&d,l) == 5){ | |
vA.push_back(a); | |
vB.push_back(b); | |
vC.push_back(c); | |
vD.push_back(d); | |
Label = l; | |
if ( uLabel.find(Label) == uLabel.end()) { | |
uLabel[Label] = yhot; | |
yhot = yhot + 1; | |
} | |
vector<double> onehot = {0,0,0}; | |
onehot[uLabel[Label]] = 1; | |
vY.push_back(onehot); | |
} | |
cout << "Read: " << vA.size() << " lines" << endl; | |
mat X(vA.size(),4); | |
mat Y(vY.size(),3); | |
uint i; | |
for (i = 0; i < X.n_rows; i++) { | |
X.row(i) = rowvec ({vA[i],vB[i],vC[i],vD[i]}); | |
Y.row(i) = rowvec ({vY[i]}); | |
} | |
s.layers(4,8,3); | |
double e = 1.0; | |
uint epochs = 60000; | |
for (i = 0; i < epochs && e > 0.001; i++) { | |
e = s.trainstep(X,Y); | |
if ( i % 1000 == 0) { cout << "Epoch: " << i << " Error: " << e << endl << flush;} | |
} | |
cout << "Epoch: " << i << " Error: " << e << endl << flush; | |
double acc = 0; | |
for (i = 0; i < X.n_rows; i++){ | |
rowvec p = s.prev(X.row(i)); | |
acc += accu(pow(p - Y.row(i),2)); | |
printm(Y.row(i)); cout << " | "; | |
printm(p); | |
cout << "\n"; | |
} | |
return 0; | |
} | |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment