@lorenzob
Created March 15, 2017 13:23
// DeepLearning4j (circa 0.8.x API) feed-forward classifier configuration.
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;          // used by the training sketch below
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener; // used by the training sketch below
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;         // used by the training sketch below
import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction;

// seed and numInputs are referenced but never defined in the original gist;
// the values below are assumed placeholders.
long seed = 123;
int numInputs = 128;

double learningRate = 0.01;
int numHiddenNodes = 64 * 2;
int numInnerNodes = 32;
int numOutputs = 2;
double regularization = 0.1;

MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
        .seed(seed)
        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
        .learningRate(learningRate)
        .updater(Updater.NESTEROVS).momentum(0.9)
        .regularization(true).l2(regularization)
        .list()
        // Two wide leaky-ReLU hidden layers (128 units each)
        .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                .weightInit(WeightInit.RELU_UNIFORM)
                .activation(Activation.LEAKYRELU)
                //.dropOut(0.9)
                .build())
        .layer(1, new DenseLayer.Builder().nIn(numHiddenNodes).nOut(numHiddenNodes)
                .weightInit(WeightInit.RELU_UNIFORM)
                .activation(Activation.LEAKYRELU)
                //.dropOut(0.5)
                .build())
        // Narrowing to three 32-unit inner layers
        .layer(2, new DenseLayer.Builder().nIn(numHiddenNodes).nOut(numInnerNodes)
                .weightInit(WeightInit.RELU_UNIFORM)
                .activation(Activation.LEAKYRELU)
                //.dropOut(0.5)
                .build())
        .layer(3, new DenseLayer.Builder().nIn(numInnerNodes).nOut(numInnerNodes)
                .weightInit(WeightInit.XAVIER)
                .activation(Activation.LEAKYRELU)
                .build())
        .layer(4, new DenseLayer.Builder().nIn(numInnerNodes).nOut(numInnerNodes)
                .weightInit(WeightInit.XAVIER)
                .activation(Activation.TANH)
                .build())
        // Softmax output over the 2 classes. Note: MSE loss paired with softmax
        // is unusual; MCXENT (multi-class cross entropy) is the more common
        // choice for classification in DL4J.
        .layer(5, new OutputLayer.Builder(LossFunction.MSE)
                .weightInit(WeightInit.XAVIER)
                .activation(Activation.SOFTMAX)
                .nIn(numInnerNodes).nOut(numOutputs).build())
        .pretrain(false).backprop(true).build();
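
// A minimal training sketch, not part of the original gist: it builds a
// MultiLayerNetwork from the configuration above and fits it for a few epochs.
// trainIter is an assumed DataSetIterator supplying examples with numInputs
// features and numOutputs one-hot labels; numEpochs is an assumed placeholder.
static void train(MultiLayerConfiguration conf, DataSetIterator trainIter, int numEpochs) {
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    model.setListeners(new ScoreIterationListener(100)); // print the score every 100 iterations
    for (int epoch = 0; epoch < numEpochs; epoch++) {
        model.fit(trainIter);
        trainIter.reset(); // rewind the iterator for the next epoch
    }
}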