@ChrisHayduk
Created May 1, 2016 20:12
// Deeplearning4j 0.4.x-era API. `iterations` and `num_people` are assumed to be
// defined in the surrounding code.
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.*;
import org.deeplearning4j.nn.conf.layers.*;
import org.deeplearning4j.nn.conf.layers.setup.ConvolutionLayerSetup;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.lossfunctions.LossFunctions;

MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
    .seed(123)
    .iterations(iterations)
    .regularization(true).l2(0.0005)    // arbitrary L2 strength
    .learningRate(0.005)
    .weightInit(WeightInit.XAVIER)
    .gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
    .updater(Updater.NESTEROVS).momentum(0.5)
    .list(6)    // 6 layers follow
    .layer(0, new ConvolutionLayer.Builder(5, 5)    // 5x5 kernel, 20 feature maps
        // nIn is the number of input channels (1 for the single-channel 11x200 input),
        // not width*height; ConvolutionLayerSetup below fills in the remaining nIn values.
        .nIn(1).nOut(20).activation("identity")
        .padding(2, 2).stride(1, 1).dropOut(0.5).build())
    .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG)
        .kernelSize(4, 4).stride(1, 1).build())
    .layer(2, new ConvolutionLayer.Builder(5, 5)
        .nOut(20).activation("identity")
        .padding(2, 2).stride(1, 1).dropOut(0.5).build())
    .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG)
        .kernelSize(2, 2).stride(1, 1).build())
    .layer(4, new DenseLayer.Builder().activation("relu").nOut(120).build())
    .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
        .nOut(num_people).activation("softmax").build())
    .backprop(true).pretrain(false);

// Sets the nIn values and input pre-processors for an 11x200, single-channel input.
new ConvolutionLayerSetup(builder, 11, 200, 1);
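
// --- Sketch (not part of the original gist): one plausible way to turn the builder
// --- into a trained network with the same 0.4.x-era DL4J API. `trainData` is a
// --- hypothetical DataSetIterator over flattened 11x200 single-channel examples.
// --- Additional imports assumed: org.deeplearning4j.nn.multilayer.MultiLayerNetwork,
// --- org.deeplearning4j.optimize.listeners.ScoreIterationListener.
MultiLayerConfiguration conf = builder.build();
MultiLayerNetwork model = new MultiLayerNetwork(conf);
model.init();
model.setListeners(new ScoreIterationListener(10));  // log the score every 10 iterations
model.fit(trainData);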