Created
March 22, 2018 10:13
-
-
Save PreetiAsh/401bc64cd29fe717bb642b971133983d to your computer and use it in GitHub Desktop.
I have the Gradle build and source code, and I added the required dependencies to the Gradle file, but I am still getting a ClassNotFoundException for the InputSplit class.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Gradle build for my project
apply plugin: 'java-library'

dependencies {
    implementation fileTree(include: ['*.jar'], dir: 'libs')
    implementation group: 'org.deeplearning4j', name: 'deeplearning4j-core', version: '0.9.1'
    implementation group: 'org.datavec', name: 'datavec-api', version: '0.9.1'
    // FIX: the ND4J backend must be on the main compile/runtime classpath.
    // It was previously declared with 'testCompile', which puts it on the
    // test classpath only — the likely cause of the runtime
    // ClassNotFoundException (datavec/dl4j classes such as InputSplit need
    // the backend's transitive dependencies at runtime).
    implementation group: 'org.nd4j', name: 'nd4j-native-platform', version: '0.9.1'
}

sourceCompatibility = "1.7"
targetCompatibility = "1.7"
//+++++++++++++++code of my project============= | |
package com.ncs; | |
import org.datavec.api.records.reader.RecordReader; | |
import org.datavec.api.records.reader.impl.csv.CSVRecordReader; | |
import org.datavec.api.split.FileSplit; | |
import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator; | |
import org.deeplearning4j.eval.Evaluation; | |
import org.deeplearning4j.eval.EvaluationBinary; | |
import org.deeplearning4j.nn.api.OptimizationAlgorithm; | |
import org.deeplearning4j.nn.api.Updater; | |
import org.deeplearning4j.nn.conf.MultiLayerConfiguration; | |
import org.deeplearning4j.nn.conf.NeuralNetConfiguration; | |
import org.deeplearning4j.nn.conf.layers.DenseLayer; | |
import org.deeplearning4j.nn.conf.layers.OutputLayer; | |
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; | |
import org.deeplearning4j.nn.weights.WeightInit; | |
import org.deeplearning4j.optimize.listeners.ScoreIterationListener; | |
import org.nd4j.linalg.activations.Activation; | |
import org.nd4j.linalg.api.ndarray.INDArray; | |
import org.nd4j.linalg.dataset.DataSet; | |
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; | |
import org.nd4j.linalg.lossfunctions.LossFunctions; | |
import java.io.File; | |
import java.io.IOException; | |
public class JavaClass { | |
public static void main(String arhg[]) throws IOException, InterruptedException { | |
int seed = 123; | |
double learningRate = 0.01; | |
int batchSize = 50; | |
int nEpochs = 50; | |
int numInputs =3; | |
int numOutputs =3; | |
int numHiddenNodes = 20; | |
//load the training data1 | |
RecordReader r1 = new CSVRecordReader(); | |
r1.initialize(new FileSplit(new File("D:\\anuragBySagar.csv"))); | |
DataSetIterator train1 = new RecordReaderDataSetIterator(r1,batchSize,0,3); | |
//"E:\\dl4j-examples-master\\dl4j-examples\\src\\main\\java\\MLPLinearClassifier\\linear_data_eval.csv" | |
//"F:\\testing\\hello2.csv" | |
RecordReader rrTest = new CSVRecordReader(); | |
rrTest.initialize(new FileSplit(new File("D:\\anuragBySagar.csv"))); | |
DataSetIterator testIter = new RecordReaderDataSetIterator(rrTest,batchSize,0,3); | |
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() | |
.seed(seed) | |
.iterations(1) | |
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) | |
.learningRate(learningRate) | |
.updater(org.deeplearning4j.nn.conf.Updater.NESTEROVS) | |
.list() | |
.layer(0,new DenseLayer.Builder() | |
.nIn(numInputs) | |
.nOut(numHiddenNodes) | |
.weightInit(WeightInit.XAVIER) | |
.activation(Activation.RELU) | |
.build()) | |
.layer(1,new DenseLayer.Builder() | |
.nIn(numHiddenNodes) | |
.nOut(numHiddenNodes) | |
.weightInit(WeightInit.XAVIER) | |
.activation(Activation.RELU | |
) | |
.build()) | |
.layer(2,new DenseLayer.Builder() | |
.nIn(numHiddenNodes) | |
.nOut(numHiddenNodes) | |
.weightInit(WeightInit.XAVIER) | |
.activation(Activation.RELU | |
) | |
.build()) | |
.layer(3,new DenseLayer.Builder() | |
.nIn(numHiddenNodes) | |
.nOut(numHiddenNodes) | |
.weightInit(WeightInit.XAVIER) | |
.activation(Activation.RELU | |
) | |
.build()) | |
.layer(4,new DenseLayer.Builder() | |
.nIn(numHiddenNodes) | |
.nOut(numHiddenNodes) | |
.weightInit(WeightInit.XAVIER) | |
.activation(Activation.RELU | |
) | |
.build()) | |
.layer(5,new DenseLayer.Builder() | |
.nIn(numHiddenNodes) | |
.nOut(numHiddenNodes) | |
.weightInit(WeightInit.XAVIER) | |
.activation(Activation.RELU) | |
.build()) | |
.layer(6, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)//XENT | |
.weightInit(WeightInit.XAVIER) | |
.activation(Activation.SOFTMAX) | |
.nIn(numHiddenNodes) | |
.nOut(numOutputs) | |
.build() | |
) | |
.pretrain(false).backprop(true).build(); | |
// System.out.println(conf.toJson()); | |
MultiLayerNetwork model = new MultiLayerNetwork(conf); | |
model.init(); | |
model.setListeners(new ScoreIterationListener(10)); | |
for (int n = 0; n < nEpochs; n++) { | |
model.fit(train1); | |
} | |
System.out.println("Evaluate model......."); | |
// EvaluationBinary eval = new EvaluationBinary(); | |
Evaluation eval = new Evaluation(numOutputs); | |
while(testIter.hasNext()){ | |
DataSet t = testIter.next(); | |
INDArray features = t.getFeatureMatrix(); | |
INDArray lables = t.getLabels(); | |
INDArray predicted = model.output(features,false); | |
eval.eval(lables,predicted); | |
} | |
System.out.println(">>>>>>>>>>>>>>>>>>>>>>>>>>"+eval.stats()); | |
System.out.println(eval.accuracy()); | |
// here match the calculated accuracy with range ( accuracy > = 0.9950 < = 1 ) | |
if(eval.accuracy()>0.9950){ | |
System.out.println(true); | |
} | |
else System.out.println(false); | |
System.out.println("<<<<<<<<<<<<<<<<<<<<<<<<<<"); | |
} | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment