Skip to content

Instantly share code, notes, and snippets.

@palashahuja
Last active March 8, 2016 20:42
Show Gist options
  • Save palashahuja/2c527d518f1e133c4d28 to your computer and use it in GitHub Desktop.
Save palashahuja/2c527d518f1e133c4d28 to your computer and use it in GitHub Desktop.
DropConnect layer test implementation for an mlpack feed-forward network (FFN).
/**
 * Build, train, and evaluate a feed forward network containing a
 * DropConnect layer, then require that its classification error on the
 * test set stays under a given threshold (Boost.Test assertion).
 *
 * @tparam PerformanceFunction Activation function used by the base layers.
 * @tparam OutputLayerType Type of the class-output layer.
 * @tparam PerformanceFunctionType Performance function used by the FFN.
 * @tparam MatType Matrix type (defaults to arma::mat).
 *
 * @param trainData Training inputs, one column per sample.
 * @param trainLabels Training labels, one column per sample.
 * @param testData Test inputs, one column per sample.
 * @param testLabels Test labels, one column per sample.
 * @param hiddenLayerSize Number of nodes in the hidden layer.
 * @param maxEpochs Maximum number of training epochs for RMSprop.
 * @param classificationErrorThreshold Maximum acceptable error rate.
 */
template<
typename PerformanceFunction,
typename OutputLayerType,
typename PerformanceFunctionType,
typename MatType = arma::mat
>
void BuildDropConnectNetwork(MatType& trainData,
MatType& trainLabels,
MatType& testData,
MatType& testLabels,
const size_t hiddenLayerSize,
const size_t maxEpochs,
const double classificationErrorThreshold)
{
/*
 * Construct a feed forward network with trainData.n_rows input nodes,
 * hiddenLayerSize hidden nodes and trainLabels.n_rows output nodes. The
 * network structure looks like:
 *
 * Input    Hidden     DropConnect  Output
 * Layer    Layer      Layer        Layer
 * +-----+  +-----+    +-----+      +-----+
 * |     |  |     |    |     |      |     |
 * |     +->|     +--->|     +----->|     |
 * |     | +>|    |    |     |      |     |
 * +-----+ | +--+-+    +-----+      +-----+
 *         |
 *  Bias   |
 *  Layer  |
 * +-----+ |
 * |     | |
 * |     +-+
 * |     |
 * +-----+
 */
LinearLayer<> inputLayer(trainData.n_rows, hiddenLayerSize);
BiasLayer<> biasLayer(hiddenLayerSize);
BaseLayer<PerformanceFunction> hiddenLayer0;

// hiddenLayer1 is the linear layer wrapped by DropConnect; its weights
// are randomly masked during training with probability 0.1.
LinearLayer<> hiddenLayer1(hiddenLayerSize, trainLabels.n_rows);
DropConnectLayer<> dropConnectLayer0(hiddenLayer1, 0.1, true);

BaseLayer<PerformanceFunction> outputLayer;
OutputLayerType classOutputLayer;

// NOTE(review): leftover debug output — dumps the DropConnect weights
// before and after training. Kept for output compatibility, but it
// should probably be removed from a unit test.
std::cout << dropConnectLayer0.Weights() << std::endl;

auto modules = std::tie(inputLayer, biasLayer, hiddenLayer0,
dropConnectLayer0, outputLayer);
FFN<decltype(modules), decltype(classOutputLayer), RandomInitialization,
PerformanceFunctionType> net(modules, classOutputLayer);

// RMSprop(net, stepSize, alpha, eps, maxIterations, tolerance); one
// "iteration" per training column, so maxEpochs * n_cols total.
RMSprop<decltype(net)> opt(net, 0.01, 0.88, 1e-8,
maxEpochs * trainData.n_cols, 1e-18);
net.Train(trainData, trainLabels, opt);
std::cout << dropConnectLayer0.Weights() << std::endl;

MatType prediction;
net.Predict(testData, prediction);

// Count the exactly-matching prediction columns. Renamed from `error`:
// the old name was misleading, since this counts CORRECT classifications
// (the error rate is derived from it below).
size_t correct = 0;
for (size_t i = 0; i < testData.n_cols; i++)
{
if (arma::sum(arma::sum(
arma::abs(prediction.col(i) - testLabels.col(i)))) == 0)
{
correct++;
}
}

const double classificationError = 1 - double(correct) / testData.n_cols;
BOOST_REQUIRE_LE(classificationError, classificationErrorThreshold);
}  // Dropped the stray trailing ';' that followed the function body.
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment