// Gist by @kris-singh, created July 21, 2017
#include <mlpack/core.hpp>
#include <mlpack/core/optimizers/rmsprop/rmsprop.hpp>
#include <mlpack/methods/ann/init_rules/gaussian_init.hpp>
#include <mlpack/methods/rbm/binary_layer.hpp>
#include <mlpack/methods/rbm/spike_slab_layer.hpp>
#include <mlpack/methods/rbm/rbm.hpp>
#include <mlpack/methods/rbm/binary_rbm.hpp>
#include <mlpack/methods/rbm/ssRBM.hpp>
#include <mlpack/methods/softmax_regression/softmax_regression.hpp>
#include <mlpack/core/optimizers/minibatch_sgd/minibatch_sgd.hpp>
#include <mlpack/core/optimizers/sgd/sgd.hpp>
#include <mlpack/core/optimizers/lbfgs/lbfgs.hpp>
using namespace mlpack;
using namespace mlpack::rbm;
using namespace mlpack::optimization;
using namespace mlpack::regression;
int main()
{
// Hidden layer size
int hiddenLayerSize = 100;
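// Load the patch data and resize it to 192 dimensions x 800 points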
arma::mat trainData;
trainData.load("batch1patches.txt");
trainData.resize(192, 800);
std::cout << "batch rows = " << trainData.n_rows << std::endl;
std::cout << "batch columns = " << trainData.n_cols << std::endl;
// Initialise the weights with a zero-mean Gaussian of scale 1e-4
GaussianInitialization gaussian(0, 1e-4);
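// Find the largest column norm in the training data; this is passed as the
// radius parameter of the spike-and-slab layers below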
double radius = 0;
double tempRadius = 0;
for (size_t i = 0; i < trainData.n_cols; i++)
{
tempRadius = arma::norm(trainData.col(i));
if (radius < tempRadius)
radius = tempRadius;
}
// Set pool size
size_t poolSize = 1;
std::cout << "n rows = " << trainData.n_rows << std::endl;
std::cout << "poolSize = " << poolSize << std::endl;
BinaryLayer<> visible(64, 100, 1);
BinaryLayer<> hidden(100, 64, 0);
BinaryRBM binary_rbm(visible, hidden);
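// Spike-and-slab visible and hidden layers, built with the radius computed
// above, and the corresponding ssRBM policy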
SpikeSlabLayer<> spikeVisible(64, 100, 3, radius, 1);
SpikeSlabLayer<> spikeHidden(100, 64, 3, radius, 0);
ssRBM ss_rbm(spikeVisible, spikeHidden);
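// Keep only the first 100 rows (dimensions) of each training point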
trainData = trainData.rows(0, 99);
std::cout << arma::size(trainData) << std::endl;
// Build the RBM model with the ssRBM policy
RBM<GaussianInitialization, ssRBM> modelssRBM(trainData, gaussian, ss_rbm,
    2, true, true);
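// Mini-batch SGD: batch size 10, step size 0.06, a maximum iteration count
// proportional to the data size, zero tolerance, and shuffling enabled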
MiniBatchSGD msgd(10, 0.06, trainData.n_cols * 20, 0, true);
std::cout << "Here" << std::endl;
modelssRBM.Reset();
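// Bias initialisation and training are left disabled below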
/*
// Lambda bias = 0
modelssRBM.Policy().VisibleLayer().LambdaBias().fill(0);
// Spike Bias = -1
modelssRBM.Policy().VisibleLayer().SpikeBias().fill(-1);
// slab bias fixed at 1.5
modelssRBM.Policy().VisibleLayer().SlabBias().fill(1.5);
// Train the models
std::cout << "Train" << std::endl;
modelssRBM.Train(trainData, msgd);
*/
return 0;
}