Skip to content

Instantly share code, notes, and snippets.

bikashg

Block or report user

Report or block bikashg

Hide content and notifications from this user.

Learn more about blocking users

Contact Support about this user’s behavior.

Learn more about reporting abuse

Report abuse
View GitHub Profile
View gist:08481dbe322520f56e324c58ad9d0e16
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>bikash</groupId>
<artifactId>MYExamples</artifactId>
<version>0.1-SNAPSHOT</version>
View gist:434f6d63441ecb352792689bed29ccb8
ArrayList<Thread> myThreads = new ArrayList<Thread>();
for each epoch {
net.fit(trainSplitDataSetIterator);
Thread t = new Thread(new MYClass(net)); // The MYClass will use net to calculate some scores; it doesn't modify/update net
myThreads.add(t);
t.start();
}
for (Thread t:myThreads) {
View gist:84ddeb0eb07b28a3b5961eaeb9d77e28
ArrayList<Thread> myThreads = new ArrayList<Thread>();
for each epoch {
net.fit(trainSplitDataSetIterator);
Thread t = new Thread(new MYClass(net)); // The MYClass will use net to calculate some scores; it doesn't modify/update net
myThreads.add(t);
t.start();
}
for (Thread t:myThreads) {
t.join();
View gist:0e1e4b15d4e815c04b7d1be79892d477
ArrayList<Thread> myThreads = new ArrayList<Thread>();
for each epoch {
net.fit(trainSplitDataSetIterator);
Thread t = new Thread(new MYClass(net)); // The MYClass will use net to calculate some scores; it doesn't modify/update net
myThreads.add(t);
t.start();
}
for (Thread t:myThreads) {
t.join();
View gist:115eef8c92e2643748a4b697ce09f91f
21:41:35.355 [Thread-261] WARN org.nd4j.jita.allocator.context.impl.LimitedContextPool - Can't allocate new context, sleeping...
21:41:38.984 [Thread-261] WARN org.nd4j.jita.allocator.context.impl.LimitedContextPool - Can't allocate new context, sleeping...
21:41:42.706 [Thread-261] WARN org.nd4j.jita.allocator.context.impl.LimitedContextPool - Can't allocate new context, sleeping...
21:41:46.377 [Thread-261] WARN org.nd4j.jita.allocator.context.impl.LimitedContextPool - Can't allocate new context, sleeping...
21:41:50.107 [Thread-261] WARN org.nd4j.jita.allocator.context.impl.LimitedContextPool - Can't allocate new context, sleeping...
21:41:53.820 [Thread-261] WARN org.nd4j.jita.allocator.context.impl.LimitedContextPool - Can't allocate new context, sleeping...
21:41:57.583 [Thread-261] WARN org.nd4j.jita.allocator.context.impl.LimitedContextPool - Can't allocate new context, sleeping...
21:42:01.345 [Thread-261] WARN org.nd4j.jita.allocator.context.impl.LimitedContextPool - Can't allocate new context, sleeping...
View gist:c76436121e719269174f47cf20ffea89
public static void main(String[] args) throws IOException, InterruptedException {
int vocab_size = 14; // I set it manually here
String datasetBaseDir = "x_DataSet";
int numLinesToSkip = 1; // I used the first line in input/output files to place my own comment; so don't want to read it.
String fileDelimiter = ","; // All my input/output timesteps are single values for now.
int minCountExamples = 0;
int maxCountExamples = 1; // For now, saying that we have only 2 examples.
View gist:436bfa48a8803bbc25f87820b25c1833
MultiDataSetIterator iterator;
RecordReaderMultiDataSetIterator.Builder datasetBuilder = new RecordReaderMultiDataSetIterator.Builder(miniBatchSize)
.addReader("encoderInput", encoder_input_reader)
.addReader("decoderInput", decoder_input_Reader)
.addReader("decoderOutput", decoder_output_Reader);
datasetBuilder.addInputOneHot("encoderInput", 0, vocab_size);
datasetBuilder.addInputOneHot("decoderInput", 0, vocab_size);
datasetBuilder.addOutputOneHot("decoderOutput", 0, vocab_size);
View gist:132bd63011636d07719fbd6fd97ef534
ComputationGraphConfiguration configuration = new NeuralNetConfiguration.Builder()
.weightInit(WeightInit.XAVIER)
.learningRate(0.25)
.updater(Updater.RMSPROP)
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1) // Nb. of iterations = 1
.seed(123)
.graphBuilder()
//These are the names of the two inputs to the computation graph.
.addInputs("nlPhrase", "owlAxiom")
.setInputTypes(InputType.recurrent(vocab.size()), InputType.recurrent(vocab.size())) // The size of the vocab.
View gist:2edda31ee73249269e50fd2f952c21a7
Exception in thread "main" java.lang.IllegalStateException: Cannot do backward pass: all epsilons not set
at org.deeplearning4j.nn.graph.vertex.impl.LayerVertex.doBackward(LayerVertex.java:98)
at org.deeplearning4j.nn.graph.ComputationGraph.calcBackpropGradients(ComputationGraph.java:1203)
at org.deeplearning4j.nn.graph.ComputationGraph.computeGradientAndScore(ComputationGraph.java:969)
at org.deeplearning4j.optimize.solvers.BaseOptimizer.gradientAndScore(BaseOptimizer.java:151)
at org.deeplearning4j.optimize.solvers.StochasticGradientDescent.optimize(StochasticGradientDescent.java:54)
at org.deeplearning4j.optimize.Solver.optimize(Solver.java:51)
at org.deeplearning4j.nn.graph.ComputationGraph.fit(ComputationGraph.java:833)
at org.deeplearning4j.nn.graph.ComputationGraph.fit(ComputationGraph.java:740)
at My_Examples.RNN.Seq2Seq.LSTM.NL2OWL.main(NL2OWL.java:212)
View gist:19ccbac2e03bc0c7e3239fbd2d716fc1
Running for epochs : 0
09:11:00.135 [main] INFO org.nd4j.nativeblas.Nd4jBlas - Number of threads used for BLAS: 4
09:11:01.558 [main] INFO o.d.o.l.ScoreIterationListener - Score at iteration 0 is 72.24593505859374
09:11:02.996 [main] INFO o.d.o.l.ScoreIterationListener - Score at iteration 1 is 13.998971557617187
09:11:03.688 [main] INFO o.d.o.l.ScoreIterationListener - Score at iteration 2 is 1.2472177505493165
Running for epochs : 1
09:11:05.214 [main] INFO o.d.o.l.ScoreIterationListener - Score at iteration 3 is 71.99242553710937
09:11:06.477 [main] INFO o.d.o.l.ScoreIterationListener - Score at iteration 4 is 13.928001403808594
09:11:07.151 [main] INFO o.d.o.l.ScoreIterationListener - Score at iteration 5 is 1.23978271484375
Running for epochs : 2
You can’t perform that action at this time.