/* *****************************************************************************
 * Copyright (c) 2020 Konduit K.K.
 * Copyright (c) 2015-2019 Skymind, Inc.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/
package org.example;

import java.io.File;
import java.util.Date;

import org.datavec.api.records.reader.RecordReader;
import org.datavec.api.records.reader.impl.csv.CSVRecordReader;
import org.datavec.api.split.FileSplit;
import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
import org.deeplearning4j.datasets.iterator.AsyncShieldDataSetIterator;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.PerformanceListener;
import org.nd4j.evaluation.classification.Evaluation;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.learning.config.Nesterovs;
import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction;

/**
 * "Linear" Data Classification Example, adapted here to the "scs" CSV data
 * (7 feature columns, 6 classes).
 * <p>
 * Originally based on the data from Jason Baldridge:
 * https://github.com/jasonbaldridge/try-tf/tree/master/simdata
 *
 * @author Josh Patterson
 * @author Alex Black (added plots)
 */
@SuppressWarnings("DuplicatedCode")
public class scs {

    public static boolean visualize = true;
    public static String dataLocalPath;
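    //visualize and dataLocalPath appear to be leftovers from the original
    //plotting version of this example: visualize is never read, and
    //dataLocalPath is only referenced in a commented-out assignment below.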
    public static void main(String[] args) throws Exception {
        int seed = 123;             //Random seed for reproducibility
        double learningRate = 0.01;
        int batchSize = 1000;
        int nEpochs = 20;

        int numInputs = 7;          //Number of feature columns in the CSV
        int numOutputs = 6;         //Number of classes
        int numHiddenNodes = 20;
        //dataLocalPath = "";
        //Load the training data. Columns 0-6 are the features; column index 7
        //(the 8th column) holds the class label.
        RecordReader rr = new CSVRecordReader();
        rr.initialize(new FileSplit(new File("scs_TRAIN.csv")));
        DataSetIterator trainIter = new RecordReaderDataSetIterator(rr, batchSize, numInputs, numOutputs);
        DataSetIterator trainI = new AsyncShieldDataSetIterator(trainIter);
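        //The AsyncShield wrapper above reports asyncSupported() == false, which
        //prevents the network from placing an asynchronous prefetch thread in
        //front of this iterator during fit().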
        //Load the test/evaluation data (same column layout as the training file):
        RecordReader rrTest = new CSVRecordReader();
        rrTest.initialize(new FileSplit(new File("scs_TEST.csv")));
        DataSetIterator testIter = new RecordReaderDataSetIterator(rrTest, batchSize, numInputs, numOutputs);
        DataSetIterator testI = new AsyncShieldDataSetIterator(testIter);
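        //Network architecture: numInputs -> numHiddenNodes (ReLU + dropout) ->
        //numOutputs (softmax), trained with SGD + Nesterov momentum (0.9) and
        //Xavier weight initialization. Note: DL4J's dropOut(0.25) below is the
        //probability of *retaining* each activation, not of dropping it.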
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .weightInit(WeightInit.XAVIER)
            .updater(new Nesterovs(learningRate, 0.9))
            .list()
            .layer(new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                .activation(Activation.RELU)
                .dropOut(0.25)
                .build())
            //.layer(new DropoutLayer(0.5))
            .layer(new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                .activation(Activation.SOFTMAX)
                .nIn(numHiddenNodes).nOut(numOutputs).build())
            .build();
        Date dateStart = new Date();
        System.out.println(dateStart.toString());

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new PerformanceListener(10000, true, true)); //Report performance stats every 10,000 iterations
        for (int i = 0; i < nEpochs; i++) {
            System.out.println("Begin epoch " + i);
            model.fit(trainI);
            System.out.println("End epoch " + i);
        }
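        //fit(DataSetIterator) trains for one epoch (resetting the iterator
        //first if it is exhausted and supports resetting), so the loop above
        //performs nEpochs full passes over the training data.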
System.out.println("Evaluate model....");
Evaluation eval = new Evaluation(numOutputs);
while (testI.hasNext()) {
DataSet t = testI.next();
INDArray features = t.getFeatures();
INDArray labels = t.getLabels();
INDArray predicted = model.output(features, false);
eval.eval(labels, predicted);
}
        //An alternate way to do the above loop:
        //Evaluation evalResults = model.evaluate(testIter);
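        //(Depending on the DL4J version, testIter may need a testIter.reset()
        //first, since the loop above has already consumed it.)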
        //Print the evaluation statistics
        System.out.println(eval.stats());
        Date dateEnd = new Date();
        System.out.println("\n****************Example finished********************");
        System.out.println("\n******Time start******\n");
        System.out.println(dateStart.toString());
        System.out.println("\n******Time end******\n");
        System.out.println(dateEnd.toString());

        //Training is complete. (In the original example, plotting code for the
        //data & predictions followed here.)
    }
}