@AlexDBlack
Last active April 30, 2019
package org.deeplearning4j;

import org.deeplearning4j.datasets.iterator.ExistingDataSetIterator;
import org.deeplearning4j.nn.conf.ConvolutionMode;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
import org.deeplearning4j.nn.conf.layers.GlobalPoolingLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.conf.layers.PoolingType;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.junit.Test;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.learning.config.Adam;
import org.nd4j.linalg.lossfunctions.LossFunctions;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class Temp {

    @Test
    public void test() {
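        // A small CNN for variable-length sequence input: the 3x50 kernel with
        // stride 1x50 spans the full feature width (effectively a 1D convolution
        // over time), global max pooling collapses the time dimension to a
        // fixed-size vector of 100 activations, and a softmax output layer
        // produces 10 classes.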
        MultiLayerConfiguration config = new NeuralNetConfiguration.Builder()
                .weightInit(WeightInit.RELU)
                .activation(Activation.LEAKYRELU)
                .updater(new Adam(0.01))
                .convolutionMode(ConvolutionMode.Same)
                .l2(0.001)
                .list()
                .layer(new ConvolutionLayer.Builder()
                        .kernelSize(3, 50)
                        .stride(1, 50)
                        .nIn(1)
                        .nOut(100)
                        .build())
                .layer(new GlobalPoolingLayer.Builder()
                        .poolingType(PoolingType.MAX)
                        .dropOut(0.7)
                        .build())
                .layer(new OutputLayer.Builder()
                        .lossFunction(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX)
                        .nIn(100)
                        .nOut(10)
                        .build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(config);
        net.init();
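        // Minibatch shapes: [batchSize, channels, sequenceLength, featureSize].
        // The sequence length (third dimension) varies between batches;
        // ConvolutionMode.Same plus global pooling means the network accepts
        // these differing input sizes without reconfiguration.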
        List<long[]> shapes = Arrays.asList(
                new long[]{64, 1, 368, 50},
                new long[]{64, 1, 512, 50},
                new long[]{64, 1, 512, 50},
                new long[]{64, 1, 512, 50},
                new long[]{64, 1, 512, 50},
                new long[]{64, 1, 512, 50},
                new long[]{64, 1, 368, 50},
                new long[]{64, 1, 443, 50},
                new long[]{64, 1, 436, 50},
                new long[]{64, 1, 469, 50},
                new long[]{64, 1, 376, 50},
                new long[]{64, 1, 403, 50},
                new long[]{64, 1, 350, 50},
                new long[]{64, 1, 419, 50},
                new long[]{64, 1, 441, 50},
                new long[]{64, 1, 512, 50},
                new long[]{64, 1, 402, 50});
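        // Build one DataSet per shape: random features paired with random
        // one-hot labels. TestUtils.randomOneHot is a DL4J test utility,
        // resolved from the same org.deeplearning4j package.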
        List<DataSet> l = new ArrayList<>();
        for (long[] s : shapes) {
            l.add(new DataSet(Nd4j.rand(DataType.FLOAT, s), TestUtils.randomOneHot(s[0], 10).castTo(DataType.FLOAT)));
        }
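        // Train for 100 epochs over the in-memory datasets;
        // ScoreIterationListener(1) prints the score after every iteration.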
        net.setListeners(new ScoreIterationListener(1));
        for (int i = 0; i < 100; i++) {
            net.fit(new ExistingDataSetIterator(l));
            System.out.println("EPOCH: " + i);
        }
    }
}