Skip to content

Instantly share code, notes, and snippets.

@Joshuaalbert
Last active June 10, 2017 00:24
Show Gist options
  • Save Joshuaalbert/c34366d5c20413aa29f0699b903d3f55 to your computer and use it in GitHub Desktop.
Save Joshuaalbert/c34366d5c20413aa29f0699b903d3f55 to your computer and use it in GitHub Desktop.
gives a null pointer at some point
package com.tactico.tm.bugs;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.BackpropType;
import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;
public class ValidationError {
public static void main (String[] args){
ComputationGraphConfiguration.GraphBuilder confBuilder = new NeuralNetConfiguration.Builder()
.iterations(1)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
.learningRate(0.1)
.updater(Updater.ADAGRAD)
.weightInit(WeightInit.XAVIER)
.regularization(true).useDropConnect(true)
.dropOut(0.5)
.graphBuilder()
.setInputTypes(InputType.recurrent(1))
.addInputs("input1");
confBuilder.addLayer("output", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE)
.nIn(1)
.nOut(1)
.activation(Activation.SIGMOID)
.build(), "input1");
confBuilder.setOutputs("output");
ComputationGraphConfiguration cgconf = confBuilder.pretrain(false)
.backprop(true).backpropType(BackpropType.Standard)
.build();
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment