@SinghKislay
Created March 11, 2019 01:14
#include <mlpack/core.hpp>
#include <mlpack/core/data/split_data.hpp>
#include <mlpack/core/optimizers/sgd/sgd.hpp>
#include <mlpack/core/optimizers/adam/adam_update.hpp>
#include <mlpack/methods/ann/layer/layer.hpp>
#include <"ffn_hack.hpp">
#include "utils.hpp"
using namespace mlpack;
using namespace mlpack::ann;
using namespace mlpack::optimization;
using namespace arma;
using namespace std;
template<typename OutputLayer, typename InitializationRule, typename OptimizerType>
class ResNet
{
 private:
  double RATIO;
  int ITERATIONS_PER_CYCLE;
  int CYCLES;
  double STEP_SIZE;
  int BATCH_SIZE;
  FFN<OutputLayer, InitializationRule> model;
  OptimizerType optimizer;
  mat tempDataset;
  mat train, valid;
  mat prevInput; // Input most recently fed to the network; set by conv_forward().
  int iter = 0;

 public:
  mat output;

  ResNet(double RATIO, int ITERATIONS_PER_CYCLE, int CYCLES, double STEP_SIZE,
         int BATCH_SIZE);

  mat LoadData(const char* path);

  // Add a layer to the network, then run the forward pass through just that
  // layer (indices iter..iter), i.e. go one step further through the model.
  template<typename Layer, typename... Args>
  mat conv_forward(mat input, Args... args)
  {
    model.Add<Layer>(args...);
    prevInput = input;
    // Partial forward pass over the newly added layer only; this
    // Forward(input, output, begin, end) overload comes from ffn_hack.hpp.
    model.Forward(input, output, iter, iter);
    iter = iter + 1;
    return output;
  }

  // One optimizer step; AdamUpdate-style Update(iterate, stepSize, gradient).
  // Here the stored network output is passed in the gradient slot.
  void Train(double stepSize, mat target)
  {
    mat params = model.Parameters();
    optimizer.Initialize(params.n_rows, params.n_cols);
    optimizer.Update(params, stepSize, this->output);
  }
};
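
For context, a minimal usage sketch follows. It is not part of the original gist: the layer choice, constructor values, and file path are illustrative assumptions, and it presumes ffn_hack.hpp supplies the partial Forward(input, output, begin, end) overload used in conv_forward and that the ResNet constructor and LoadData are defined elsewhere.

// Usage sketch (assumptions noted above); mlpack 3.x-era types.
int main()
{
  ResNet<NegativeLogLikelihood<>, RandomInitialization, AdamUpdate> net(
      0.7 /* RATIO */, 100 /* ITERATIONS_PER_CYCLE */, 10 /* CYCLES */,
      0.001 /* STEP_SIZE */, 32 /* BATCH_SIZE */);

  // Hypothetical dataset path.
  mat data = net.LoadData("./dataset.csv");

  // Add a 3x3 convolution (1 input map, 8 output maps, 28x28 images) and run
  // the forward pass through that single layer.
  mat activations = net.conv_forward<Convolution<>>(
      data, 1, 8, 3, 3, 1, 1, 0, 0, 28, 28);

  return 0;
}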