
@agibsonccc
Created August 9, 2017 16:47
// Imports assumed for the DL4J 0.8.x/0.9.x API this gist targets:
import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.GlobalPoolingLayer;
import org.deeplearning4j.nn.conf.layers.GravesBidirectionalLSTM;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.conf.layers.PoolingType;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

// Placeholder hyperparameters (not in the original snippet; set for your data):
int inputSize = 100;     // features per time step
int layerSize = 200;     // hidden units per layer
boolean dropout = true;  // enables 0.5 dropout on the LSTM and dense layers

ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
        .updater(Updater.ADAM).adamMeanDecay(0.9).adamVarDecay(0.999)
        .weightInit(WeightInit.XAVIER)
        .learningRate(0.01)
        .regularization(true).l2(0.01).l2Bias(0.01)
        .graphBuilder()
        .addInputs("in")
        // Per-time-step feed-forward projection of the input
        .addLayer("dense0", new DenseLayer.Builder().nIn(inputSize).nOut(layerSize)
                .activation(Activation.TANH)
                .dropOut(0)
                .build(), "in")
        // Two stacked bidirectional LSTM layers
        .addLayer("blstm1", new GravesBidirectionalLSTM.Builder()
                .nIn(layerSize).nOut(layerSize)
                .dropOut(dropout ? 0.5 : 0)
                .activation(Activation.TANH)
                .build(), "dense0")
        .addLayer("blstm2", new GravesBidirectionalLSTM.Builder()
                .nIn(layerSize).nOut(layerSize)
                .dropOut(dropout ? 0.5 : 0)
                .activation(Activation.TANH)
                .build(), "blstm1")
        // Max and average pooling over the time dimension; both feed the next
        // dense layer, which is why it takes nIn(2 * layerSize)
        .addLayer("poolMax", new GlobalPoolingLayer.Builder(PoolingType.MAX).build(), "blstm2")
        .addLayer("poolAvg", new GlobalPoolingLayer.Builder(PoolingType.AVG).build(), "blstm2")
        .addLayer("dense", new DenseLayer.Builder()
                .nIn(2 * layerSize).nOut(layerSize)
                .weightInit(WeightInit.XAVIER)
                .activation(Activation.TANH)
                .dropOut(dropout ? 0.5 : 0)
                .build(), "poolMax", "poolAvg")
        // Single regression output: identity activation with mean absolute error
        .addLayer("out", new OutputLayer.Builder()
                .nIn(layerSize).nOut(1)
                .activation(Activation.IDENTITY)
                .lossFunction(LossFunctions.LossFunction.MEAN_ABSOLUTE_ERROR)
                .build(), "dense")
        .setInputTypes(InputType.recurrent(inputSize))
        .setOutputs("out")
        .build();  // the original snippet stopped before this terminating call
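
For context, a minimal sketch of building and training a network from this configuration. The ComputationGraph constructor, init(), setListeners(), and fit() are standard DL4J calls; trainIterator is a placeholder for your own DataSetIterator, not something defined in the original gist.

import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;

ComputationGraph net = new ComputationGraph(conf);
net.init();
net.setListeners(new ScoreIterationListener(100));  // log the score every 100 iterations
// net.fit(trainIterator);  // trainIterator: your DataSetIterator (placeholder)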