Skip to content

Instantly share code, notes, and snippets.

@MartinSeeler
Created April 6, 2017 13:35
Show Gist options
  • Save MartinSeeler/87fa8278ed23bf3b2bbccda376763a73 to your computer and use it in GitHub Desktop.
NeuralNetConfiguration Upgrade 0.4.2 to 0.8.0
// Network configuration targeting deeplearning4j 0.8.0 (upgraded API:
// Activation enum instead of string activations).
// Layers are built up front and then wired into the list builder by index.
val upgradedInput = new DenseLayer.Builder()
  .nIn(18).nOut(50)
  .weightInit(WeightInit.XAVIER)
  .activation(Activation.RELU)
  .build()

val upgradedHidden1 = new DenseLayer.Builder()
  .nIn(50).nOut(30)
  // NOTE(review): UNIFORM is a placeholder for the removed NORMALIZED init — confirm the intended 0.8.0 equivalent
  .weightInit(WeightInit.UNIFORM)
  .activation(Activation.TANH)
  .build()

val upgradedHidden2 = new DenseLayer.Builder()
  .nIn(30).nOut(10)
  // NOTE(review): UNIFORM is a placeholder for the removed NORMALIZED init — confirm the intended 0.8.0 equivalent
  .weightInit(WeightInit.UNIFORM)
  .activation(Activation.SIGMOID)
  .build()

val upgradedOutput = new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
  // NOTE(review): UNIFORM is a placeholder for the removed NORMALIZED init — confirm the intended 0.8.0 equivalent
  .weightInit(WeightInit.UNIFORM)
  .activation(Activation.SOFTMAX)
  .nIn(10).nOut(3)
  .build()

val conf = new NeuralNetConfiguration.Builder()
  .seed(seed)                      // fixed RNG seed for reproducible init
  .iterations(1)
  .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
  .learningRate(0.01)
  .updater(Updater.NESTEROVS)
  .momentum(0.4)
  .list()
  .layer(0, upgradedInput)
  .layer(1, upgradedHidden1)
  .layer(2, upgradedHidden2)
  .layer(3, upgradedOutput)
  .pretrain(false)
  .backprop(true)
  .build()
// Original configuration as written against deeplearning4j 0.4.2
// (pre-upgrade API: string activation names, WeightInit.NORMALIZED —
// NORMALIZED no longer exists in 0.8.0, hence the upgrade question above).
val legacyInput = new DenseLayer.Builder()
  .nIn(18).nOut(50)
  .weightInit(WeightInit.XAVIER)
  .activation("relu")
  .build()

val legacyHidden1 = new DenseLayer.Builder()
  .nIn(50).nOut(30)
  .weightInit(WeightInit.NORMALIZED)
  .activation("tanh")
  .build()

val legacyHidden2 = new DenseLayer.Builder()
  .nIn(30).nOut(10)
  .weightInit(WeightInit.NORMALIZED)
  .activation("sigmoid")
  .build()

val legacyOutput = new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
  .weightInit(WeightInit.NORMALIZED)
  .activation("softmax")
  .nIn(10).nOut(3)
  .build()

val conf = new NeuralNetConfiguration.Builder()
  .seed(seed)                      // fixed RNG seed for reproducible init
  .iterations(1)
  .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
  .learningRate(0.01)
  .updater(Updater.NESTEROVS)
  .momentum(0.4)
  .list()
  .layer(0, legacyInput)
  .layer(1, legacyHidden1)
  .layer(2, legacyHidden2)
  .layer(3, legacyOutput)
  .pretrain(false)
  .backprop(true)
  .build()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment