Skip to content

Instantly share code, notes, and snippets.

@dan-lind
Last active August 7, 2016 19:59
Show Gist options
  • Save dan-lind/4ed924a90b7841ac26d85c95d5b88826 to your computer and use it in GitHub Desktop.
package kaggle;
import org.datavec.api.records.reader.RecordReader;
import org.datavec.api.records.reader.impl.csv.CSVRecordReader;
import org.datavec.api.split.FileSplit;
import org.deeplearning4j.arbiter.DL4JConfiguration;
import org.deeplearning4j.arbiter.MultiLayerSpace;
import org.deeplearning4j.arbiter.data.DataSetIteratorProvider;
import org.deeplearning4j.arbiter.layers.DenseLayerSpace;
import org.deeplearning4j.arbiter.layers.OutputLayerSpace;
import org.deeplearning4j.arbiter.optimize.api.CandidateGenerator;
import org.deeplearning4j.arbiter.optimize.api.OptimizationResult;
import org.deeplearning4j.arbiter.optimize.api.ParameterSpace;
import org.deeplearning4j.arbiter.optimize.api.data.DataProvider;
import org.deeplearning4j.arbiter.optimize.api.saving.ResultReference;
import org.deeplearning4j.arbiter.optimize.api.saving.ResultSaver;
import org.deeplearning4j.arbiter.optimize.api.score.ScoreFunction;
import org.deeplearning4j.arbiter.optimize.api.termination.MaxCandidatesCondition;
import org.deeplearning4j.arbiter.optimize.api.termination.MaxTimeCondition;
import org.deeplearning4j.arbiter.optimize.api.termination.TerminationCondition;
import org.deeplearning4j.arbiter.optimize.candidategenerator.RandomSearchGenerator;
import org.deeplearning4j.arbiter.optimize.config.OptimizationConfiguration;
import org.deeplearning4j.arbiter.optimize.parameter.continuous.ContinuousParameterSpace;
import org.deeplearning4j.arbiter.optimize.parameter.integer.IntegerParameterSpace;
import org.deeplearning4j.arbiter.optimize.runner.IOptimizationRunner;
import org.deeplearning4j.arbiter.optimize.runner.LocalOptimizationRunner;
import org.deeplearning4j.arbiter.optimize.runner.listener.runner.LoggingOptimizationRunnerStatusListener;
import org.deeplearning4j.arbiter.optimize.ui.ArbiterUIServer;
import org.deeplearning4j.arbiter.optimize.ui.listener.UIOptimizationRunnerStatusListener;
import org.deeplearning4j.arbiter.saver.local.multilayer.LocalMultiLayerNetworkSaver;
import org.deeplearning4j.arbiter.scoring.multilayer.TestSetAccuracyScoreFunction;
import org.deeplearning4j.arbiter.task.MultiLayerNetworkTaskCreator;
import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
import org.deeplearning4j.datasets.iterator.IteratorDataSetIterator;
import org.deeplearning4j.datasets.iterator.MultipleEpochsIterator;
import org.deeplearning4j.datasets.iterator.impl.ListDataSetIterator;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.SplitTestAndTrain;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.dataset.api.iterator.TestDataSetIterator;
import org.nd4j.linalg.lossfunctions.LossFunctions;
import java.io.File;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* Created by danlin on 2016-07-15.
*/
public class TitanicOptimization {
    /**
     * Basic hyperparameter optimization example using Arbiter to conduct a random search
     * over a single network hyperparameter (the learning rate) for a small multi-layer
     * perceptron trained on a pre-processed (one-hot encoded) Kaggle Titanic data set.
     *
     * Note that this example has a UI, but it (currently) does not start automatically.
     * By default, the UI is accessible at http://localhost:8080/arbiter
     *
     * Adapted from the Arbiter MNIST example by Alex Black.
     */
    public static void main(String[] args) throws Exception {
        // Learning-rate search space: values are generated uniformly at random in [0.01, 0.1].
        ParameterSpace<Double> learningRateHyperparam = new ContinuousParameterSpace(0.01, 0.1);

        MultiLayerSpace hyperparameterSpace = new MultiLayerSpace.Builder()
                // These next few options: fixed values for all candidate models
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .iterations(1)
                .regularization(true)
                .l2(1e-4)
                .weightInit(WeightInit.XAVIER)
                .activation("relu")
                // Learning rate: the one hyperparameter we are searching over
                .learningRate(learningRateHyperparam)
                .addLayer(new DenseLayerSpace.Builder()
                        .nIn(12) // Fixed input: 12 features per example (columns 1..12 of the CSV)
                        .nOut(8)
                        .build())
                .addLayer(new DenseLayerSpace.Builder()
                        .nIn(8)
                        .nOut(4)
                        .build())
                .addLayer(new OutputLayerSpace.Builder()
                        .nIn(4)
                        .nOut(2)
                        .activation("softmax")
                        .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                        .build())
                .pretrain(false).backprop(true).build();

        // Now: We need to define a few configuration options
        // (a) How are we going to generate candidates? (random search or grid search)
        CandidateGenerator<DL4JConfiguration> candidateGenerator = new RandomSearchGenerator<>(hyperparameterSpace); //Alternatively: new GridSearchCandidateGenerator<>(hyperparameterSpace, 5, GridSearchCandidateGenerator.Mode.RandomOrder);

        // (b) How are we going to provide data? For now, we'll use a simple built-in data provider for DataSetIterators.
        //     The CSV path may be overridden via the first program argument; otherwise the original default is used.
        String csvPath = args.length > 0
                ? args[0]
                : "/Users/danlin/git/dl4j-lab/dl4j/src/main/resources/updated/train0.csv";
        RecordReader recordReader = new CSVRecordReader(0, ",");
        recordReader.initialize(new FileSplit(new File(csvPath)));

        // The RecordReaderDataSetIterator handles conversion to DataSet objects, ready for use in a neural network.
        int labelIndex = 0;  // 13 values per CSV row: the label (survived) is the 1st value (index 0), followed by 12 features
        int numClasses = 2;  // 2 classes: survived (1) or did not survive (0)
        int batchSize = 891; // 891 examples total in the Titanic training set; we load all of them into one DataSet
        DataSetIterator iterator = new RecordReaderDataSetIterator(recordReader, batchSize, labelIndex, numClasses);
        DataSet allData = iterator.next();
        allData.shuffle();
        SplitTestAndTrain testAndTrain = allData.splitTestAndTrain(0.65); // Use 65% of data for training

        DataSetIterator titanicTrain = new MultipleEpochsIterator(5, new ListDataSetIterator(testAndTrain.getTrain().asList()));
        DataSetIterator titanicTest = new ListDataSetIterator(testAndTrain.getTest().asList());
        DataProvider<DataSetIterator> dataProvider = new DataSetIteratorProvider(titanicTrain, titanicTest);

        // (c) How are we going to save the models that are generated and tested?
        //     In this example, let's save them to disk in the working directory.
        //     This will result in examples being saved to arbiterExample/0/, arbiterExample/1/, arbiterExample/2/, ...
        String baseSaveDirectory = "arbiterExample/";
        File f = new File(baseSaveDirectory);
        // Note: File.delete() cannot remove a non-empty directory; if a previous run left results
        // behind, the existing directory is reused and candidates are written into it.
        if (f.exists() && !f.delete()) {
            System.out.println("Could not delete existing directory " + baseSaveDirectory + "; reusing it");
        }
        f.mkdir();
        ResultSaver<DL4JConfiguration, MultiLayerNetwork, Object> modelSaver = new LocalMultiLayerNetworkSaver<>(baseSaveDirectory);

        // (d) What are we actually trying to optimize?
        //     In this example, classification accuracy on the test set.
        ScoreFunction<MultiLayerNetwork, DataSetIterator> scoreFunction = new TestSetAccuracyScoreFunction();

        // (e) When should we stop searching? Specify this with termination conditions.
        //     We stop at 15 minutes or 30 candidates - whichever comes first.
        TerminationCondition[] terminationConditions = {new MaxTimeCondition(15, TimeUnit.MINUTES), new MaxCandidatesCondition(30)};

        // Given these configuration options, let's put them all together:
        OptimizationConfiguration<DL4JConfiguration, MultiLayerNetwork, DataSetIterator, Object> configuration
                = new OptimizationConfiguration.Builder<DL4JConfiguration, MultiLayerNetwork, DataSetIterator, Object>()
                .candidateGenerator(candidateGenerator)
                .dataProvider(dataProvider)
                .modelSaver(modelSaver)
                .scoreFunction(scoreFunction)
                .terminationConditions(terminationConditions)
                .build();

        // And set up execution locally on this machine:
        IOptimizationRunner<DL4JConfiguration, MultiLayerNetwork, Object> runner
                = new LocalOptimizationRunner<>(configuration, new MultiLayerNetworkTaskCreator<>());

        // Start the UI
        ArbiterUIServer server = ArbiterUIServer.getInstance();
        runner.addListeners(new UIOptimizationRunnerStatusListener(server));

        // Start the hyperparameter optimization
        runner.execute();

        // Print out some basic stats regarding the optimization procedure
        StringBuilder sb = new StringBuilder();
        sb.append("Best score: ").append(runner.bestScore()).append("\n")
                .append("Index of model with best score: ").append(runner.bestScoreCandidateIndex()).append("\n")
                .append("Number of configurations evaluated: ").append(runner.numCandidatesCompleted()).append("\n");
        System.out.println(sb.toString());

        // Get all results, and print out details of the best result:
        int indexOfBestResult = runner.bestScoreCandidateIndex();
        List<ResultReference<DL4JConfiguration, MultiLayerNetwork, Object>> allResults = runner.getResults();

        OptimizationResult<DL4JConfiguration, MultiLayerNetwork, Object> bestResult = allResults.get(indexOfBestResult).getResult();
        MultiLayerNetwork bestModel = bestResult.getResult();

        System.out.println("\n\nConfiguration of best model:\n");
        System.out.println(bestModel.getLayerWiseConfigurations().toJson());

        // Note: the UI server will shut down once execution is complete, as the JVM will exit.
        // So sleep for a minute to keep the JVM alive, so that network configurations can be viewed.
        Thread.sleep(60000);
        System.exit(0);
    }
}
0,0,0,1,1,0,22,1,0,7.25,0,0,1
1,1,0,0,0,1,38,1,0,71.2833,1,0,0
1,0,0,1,0,1,26,0,0,7.925,0,0,1
1,1,0,0,0,1,35,1,0,53.1,0,0,1
0,0,0,1,1,0,35,0,0,8.05,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,8.4583,0,1,0
0,1,0,0,1,0,54,0,0,51.8625,0,0,1
0,0,0,1,1,0,2,3,1,21.075,0,0,1
1,0,0,1,0,1,27,0,2,11.1333,0,0,1
1,0,1,0,0,1,14,1,0,30.0708,1,0,0
1,0,0,1,0,1,4,1,1,16.7,0,0,1
1,1,0,0,0,1,58,0,0,26.55,0,0,1
0,0,0,1,1,0,20,0,0,8.05,0,0,1
0,0,0,1,1,0,39,1,5,31.275,0,0,1
0,0,0,1,0,1,14,0,0,7.8542,0,0,1
1,0,1,0,0,1,55,0,0,16,0,0,1
0,0,0,1,1,0,2,4,1,29.125,0,1,0
1,0,1,0,1,0,29.69911764705882,0,0,13,0,0,1
0,0,0,1,0,1,31,1,0,18,0,0,1
1,0,0,1,0,1,29.69911764705882,0,0,7.225,1,0,0
0,0,1,0,1,0,35,0,0,26,0,0,1
1,0,1,0,1,0,34,0,0,13,0,0,1
1,0,0,1,0,1,15,0,0,8.0292,0,1,0
1,1,0,0,1,0,28,0,0,35.5,0,0,1
0,0,0,1,0,1,8,3,1,21.075,0,0,1
1,0,0,1,0,1,38,1,5,31.3875,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.225,1,0,0
0,1,0,0,1,0,19,3,2,263,0,0,1
1,0,0,1,0,1,29.69911764705882,0,0,7.8792,0,1,0
0,0,0,1,1,0,29.69911764705882,0,0,7.8958,0,0,1
0,1,0,0,1,0,40,0,0,27.7208,1,0,0
1,1,0,0,0,1,29.69911764705882,1,0,146.5208,1,0,0
1,0,0,1,0,1,29.69911764705882,0,0,7.75,0,1,0
0,0,1,0,1,0,66,0,0,10.5,0,0,1
0,1,0,0,1,0,28,1,0,82.1708,1,0,0
0,1,0,0,1,0,42,1,0,52,0,0,1
1,0,0,1,1,0,29.69911764705882,0,0,7.2292,1,0,0
0,0,0,1,1,0,21,0,0,8.05,0,0,1
0,0,0,1,0,1,18,2,0,18,0,0,1
1,0,0,1,0,1,14,1,0,11.2417,1,0,0
0,0,0,1,0,1,40,1,0,9.475,0,0,1
0,0,1,0,0,1,27,1,0,21,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.8958,1,0,0
1,0,1,0,0,1,3,1,2,41.5792,1,0,0
1,0,0,1,0,1,19,0,0,7.8792,0,1,0
0,0,0,1,1,0,29.69911764705882,0,0,8.05,0,0,1
0,0,0,1,1,0,29.69911764705882,1,0,15.5,0,1,0
1,0,0,1,0,1,29.69911764705882,0,0,7.75,0,1,0
0,0,0,1,1,0,29.69911764705882,2,0,21.6792,1,0,0
0,0,0,1,0,1,18,1,0,17.8,0,0,1
0,0,0,1,1,0,7,4,1,39.6875,0,0,1
0,0,0,1,1,0,21,0,0,7.8,0,0,1
1,1,0,0,0,1,49,1,0,76.7292,1,0,0
1,0,1,0,0,1,29,1,0,26,0,0,1
0,1,0,0,1,0,65,0,1,61.9792,1,0,0
1,1,0,0,1,0,29.69911764705882,0,0,35.5,0,0,1
1,0,1,0,0,1,21,0,0,10.5,0,0,1
0,0,0,1,1,0,28.5,0,0,7.2292,1,0,0
1,0,1,0,0,1,5,1,2,27.75,0,0,1
0,0,0,1,1,0,11,5,2,46.9,0,0,1
0,0,0,1,1,0,22,0,0,7.2292,1,0,0
1,1,0,0,0,1,38,0,0,80,0,0,1
0,1,0,0,1,0,45,1,0,83.475,0,0,1
0,0,0,1,1,0,4,3,2,27.9,0,0,1
0,1,0,0,1,0,29.69911764705882,0,0,27.7208,1,0,0
1,0,0,1,1,0,29.69911764705882,1,1,15.2458,1,0,0
1,0,1,0,0,1,29,0,0,10.5,0,0,1
0,0,0,1,1,0,19,0,0,8.1583,0,0,1
1,0,0,1,0,1,17,4,2,7.925,0,0,1
0,0,0,1,1,0,26,2,0,8.6625,0,0,1
0,0,1,0,1,0,32,0,0,10.5,0,0,1
0,0,0,1,0,1,16,5,2,46.9,0,0,1
0,0,1,0,1,0,21,0,0,73.5,0,0,1
0,0,0,1,1,0,26,1,0,14.4542,1,0,0
1,0,0,1,1,0,32,0,0,56.4958,0,0,1
0,0,0,1,1,0,25,0,0,7.65,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.8958,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,8.05,0,0,1
1,0,1,0,1,0,0.83,0,2,29,0,0,1
1,0,0,1,0,1,30,0,0,12.475,0,0,1
0,0,0,1,1,0,22,0,0,9,0,0,1
1,0,0,1,1,0,29,0,0,9.5,0,0,1
1,0,0,1,0,1,29.69911764705882,0,0,7.7875,0,1,0
0,1,0,0,1,0,28,0,0,47.1,0,0,1
1,0,1,0,0,1,17,0,0,10.5,0,0,1
1,0,0,1,0,1,33,3,0,15.85,0,0,1
0,0,0,1,1,0,16,1,3,34.375,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,8.05,0,0,1
1,1,0,0,0,1,23,3,2,263,0,0,1
0,0,0,1,1,0,24,0,0,8.05,0,0,1
0,0,0,1,1,0,29,0,0,8.05,0,0,1
0,0,0,1,1,0,20,0,0,7.8542,0,0,1
0,1,0,0,1,0,46,1,0,61.175,0,0,1
0,0,0,1,1,0,26,1,2,20.575,0,0,1
0,0,0,1,1,0,59,0,0,7.25,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,8.05,0,0,1
0,1,0,0,1,0,71,0,0,34.6542,1,0,0
1,1,0,0,1,0,23,0,1,63.3583,1,0,0
1,0,1,0,0,1,34,0,1,23,0,0,1
0,0,1,0,1,0,34,1,0,26,0,0,1
0,0,0,1,0,1,28,0,0,7.8958,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.8958,0,0,1
0,1,0,0,1,0,21,0,1,77.2875,0,0,1
0,0,0,1,1,0,33,0,0,8.6542,0,0,1
0,0,0,1,1,0,37,2,0,7.925,0,0,1
0,0,0,1,1,0,28,0,0,7.8958,0,0,1
1,0,0,1,0,1,21,0,0,7.65,0,0,1
1,0,0,1,1,0,29.69911764705882,0,0,7.775,0,0,1
0,0,0,1,1,0,38,0,0,7.8958,0,0,1
1,0,0,1,0,1,29.69911764705882,1,0,24.15,0,1,0
0,1,0,0,1,0,47,0,0,52,0,0,1
0,0,0,1,0,1,14.5,1,0,14.4542,1,0,0
0,0,0,1,1,0,22,0,0,8.05,0,0,1
0,0,0,1,0,1,20,1,0,9.825,0,0,1
0,0,0,1,0,1,17,0,0,14.4583,1,0,0
0,0,0,1,1,0,21,0,0,7.925,0,0,1
0,0,0,1,1,0,70.5,0,0,7.75,0,1,0
0,0,1,0,1,0,29,1,0,21,0,0,1
0,1,0,0,1,0,24,0,1,247.5208,1,0,0
0,0,0,1,0,1,2,4,2,31.275,0,0,1
0,0,1,0,1,0,21,2,0,73.5,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,8.05,0,0,1
0,0,1,0,1,0,32.5,1,0,30.0708,1,0,0
1,0,1,0,0,1,32.5,0,0,13,0,0,1
0,1,0,0,1,0,54,0,1,77.2875,0,0,1
1,0,0,1,1,0,12,1,0,11.2417,1,0,0
0,0,0,1,1,0,29.69911764705882,0,0,7.75,0,1,0
1,0,0,1,1,0,24,0,0,7.1417,0,0,1
1,0,0,1,0,1,29.69911764705882,1,1,22.3583,1,0,0
0,0,0,1,1,0,45,0,0,6.975,0,0,1
0,0,0,1,1,0,33,0,0,7.8958,1,0,0
0,0,0,1,1,0,20,0,0,7.05,0,0,1
0,0,0,1,0,1,47,1,0,14.5,0,0,1
1,0,1,0,0,1,29,1,0,26,0,0,1
0,0,1,0,1,0,25,0,0,13,0,0,1
0,0,1,0,1,0,23,0,0,15.0458,1,0,0
1,1,0,0,0,1,19,0,2,26.2833,0,0,1
0,1,0,0,1,0,37,1,0,53.1,0,0,1
0,0,0,1,1,0,16,0,0,9.2167,0,0,1
0,1,0,0,1,0,24,0,0,79.2,1,0,0
0,0,0,1,0,1,29.69911764705882,0,2,15.2458,1,0,0
1,0,0,1,0,1,22,0,0,7.75,0,0,1
1,0,0,1,0,1,24,1,0,15.85,0,0,1
0,0,0,1,1,0,19,0,0,6.75,0,1,0
0,0,1,0,1,0,18,0,0,11.5,0,0,1
0,0,1,0,1,0,19,1,1,36.75,0,0,1
1,0,0,1,1,0,27,0,0,7.7958,0,0,1
0,0,0,1,0,1,9,2,2,34.375,0,0,1
0,0,1,0,1,0,36.5,0,2,26,0,0,1
0,0,1,0,1,0,42,0,0,13,0,0,1
0,0,1,0,1,0,51,0,0,12.525,0,0,1
1,1,0,0,0,1,22,1,0,66.6,0,0,1
0,0,0,1,1,0,55.5,0,0,8.05,0,0,1
0,0,0,1,1,0,40.5,0,2,14.5,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.3125,0,0,1
0,1,0,0,1,0,51,0,1,61.3792,1,0,0
1,0,0,1,0,1,16,0,0,7.7333,0,1,0
0,0,0,1,1,0,30,0,0,8.05,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,8.6625,0,0,1
0,0,0,1,1,0,29.69911764705882,8,2,69.55,0,0,1
0,0,0,1,1,0,44,0,1,16.1,0,0,1
1,0,1,0,0,1,40,0,0,15.75,0,0,1
0,0,0,1,1,0,26,0,0,7.775,0,0,1
0,0,0,1,1,0,17,0,0,8.6625,0,0,1
0,0,0,1,1,0,1,4,1,39.6875,0,0,1
1,0,0,1,1,0,9,0,2,20.525,0,0,1
1,1,0,0,0,1,29.69911764705882,0,1,55,0,0,1
0,0,0,1,0,1,45,1,4,27.9,0,0,1
0,1,0,0,1,0,29.69911764705882,0,0,25.925,0,0,1
0,0,0,1,1,0,28,0,0,56.4958,0,0,1
0,1,0,0,1,0,61,0,0,33.5,0,0,1
0,0,0,1,1,0,4,4,1,29.125,0,1,0
1,0,0,1,0,1,1,1,1,11.1333,0,0,1
0,0,0,1,1,0,21,0,0,7.925,0,0,1
0,1,0,0,1,0,56,0,0,30.6958,1,0,0
0,0,0,1,1,0,18,1,1,7.8542,0,0,1
0,0,0,1,1,0,29.69911764705882,3,1,25.4667,0,0,1
0,1,0,0,0,1,50,0,0,28.7125,1,0,0
0,0,1,0,1,0,30,0,0,13,0,0,1
0,0,0,1,1,0,36,0,0,0,0,0,1
0,0,0,1,0,1,29.69911764705882,8,2,69.55,0,0,1
0,0,1,0,1,0,29.69911764705882,0,0,15.05,1,0,0
0,0,0,1,1,0,9,4,2,31.3875,0,0,1
1,0,1,0,1,0,1,2,1,39,0,0,1
1,0,0,1,0,1,4,0,2,22.025,0,0,1
0,1,0,0,1,0,29.69911764705882,0,0,50,0,0,1
1,0,0,1,0,1,29.69911764705882,1,0,15.5,0,1,0
1,1,0,0,1,0,45,0,0,26.55,0,0,1
0,0,0,1,1,0,40,1,1,15.5,0,1,0
0,0,0,1,1,0,36,0,0,7.8958,0,0,1
1,0,1,0,0,1,32,0,0,13,0,0,1
0,0,1,0,1,0,19,0,0,13,0,0,1
1,0,0,1,0,1,19,1,0,7.8542,0,0,1
1,0,1,0,1,0,3,1,1,26,0,0,1
1,1,0,0,0,1,44,0,0,27.7208,1,0,0
1,1,0,0,0,1,58,0,0,146.5208,1,0,0
0,0,0,1,1,0,29.69911764705882,0,0,7.75,0,1,0
0,0,0,1,1,0,42,0,1,8.4042,0,0,1
1,0,0,1,0,1,29.69911764705882,0,0,7.75,0,1,0
0,0,1,0,0,1,24,0,0,13,0,0,1
0,0,0,1,1,0,28,0,0,9.5,0,0,1
0,0,0,1,1,0,29.69911764705882,8,2,69.55,0,0,1
0,0,0,1,1,0,34,0,0,6.4958,0,0,1
0,0,0,1,1,0,45.5,0,0,7.225,1,0,0
1,0,0,1,1,0,18,0,0,8.05,0,0,1
0,0,0,1,0,1,2,0,1,10.4625,0,0,1
0,0,0,1,1,0,32,1,0,15.85,0,0,1
1,0,0,1,1,0,26,0,0,18.7875,1,0,0
1,0,0,1,0,1,16,0,0,7.75,0,1,0
1,1,0,0,1,0,40,0,0,31,1,0,0
0,0,0,1,1,0,24,0,0,7.05,0,0,1
1,0,1,0,0,1,35,0,0,21,0,0,1
0,0,0,1,1,0,22,0,0,7.25,0,0,1
0,0,1,0,1,0,30,0,0,13,0,0,1
0,0,0,1,1,0,29.69911764705882,1,0,7.75,0,1,0
1,1,0,0,0,1,31,1,0,113.275,1,0,0
1,0,0,1,0,1,27,0,0,7.925,0,0,1
0,0,1,0,1,0,42,1,0,27,0,0,1
1,1,0,0,0,1,32,0,0,76.2917,1,0,0
0,0,1,0,1,0,30,0,0,10.5,0,0,1
1,0,0,1,1,0,16,0,0,8.05,0,0,1
0,0,1,0,1,0,27,0,0,13,0,0,1
0,0,0,1,1,0,51,0,0,8.05,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.8958,0,0,1
1,1,0,0,1,0,38,1,0,90,0,0,1
0,0,0,1,1,0,22,0,0,9.35,0,0,1
1,0,1,0,1,0,19,0,0,10.5,0,0,1
0,0,0,1,1,0,20.5,0,0,7.25,0,0,1
0,0,1,0,1,0,18,0,0,13,0,0,1
0,0,0,1,0,1,29.69911764705882,3,1,25.4667,0,0,1
1,1,0,0,0,1,35,1,0,83.475,0,0,1
0,0,0,1,1,0,29,0,0,7.775,0,0,1
0,0,1,0,1,0,59,0,0,13.5,0,0,1
1,0,0,1,0,1,5,4,2,31.3875,0,0,1
0,0,1,0,1,0,24,0,0,10.5,0,0,1
0,0,0,1,0,1,29.69911764705882,0,0,7.55,0,0,1
0,0,1,0,1,0,44,1,0,26,0,0,1
1,0,1,0,0,1,8,0,2,26.25,0,0,1
0,0,1,0,1,0,19,0,0,10.5,0,0,1
0,0,1,0,1,0,33,0,0,12.275,0,0,1
0,0,0,1,0,1,29.69911764705882,1,0,14.4542,1,0,0
1,0,0,1,0,1,29.69911764705882,1,0,15.5,0,1,0
0,0,1,0,1,0,29,0,0,10.5,0,0,1
0,0,0,1,1,0,22,0,0,7.125,0,0,1
0,0,0,1,1,0,30,0,0,7.225,1,0,0
0,1,0,0,1,0,44,2,0,90,0,1,0
0,0,0,1,0,1,25,0,0,7.775,0,0,1
1,0,1,0,0,1,24,0,2,14.5,0,0,1
1,1,0,0,1,0,37,1,1,52.5542,0,0,1
0,0,1,0,1,0,54,1,0,26,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.25,0,0,1
0,0,0,1,0,1,29,1,1,10.4625,0,0,1
0,1,0,0,1,0,62,0,0,26.55,0,0,1
0,0,0,1,1,0,30,1,0,16.1,0,0,1
0,0,0,1,0,1,41,0,2,20.2125,0,0,1
1,0,0,1,0,1,29,0,2,15.2458,1,0,0
1,1,0,0,0,1,29.69911764705882,0,0,79.2,1,0,0
1,1,0,0,0,1,30,0,0,86.5,0,0,1
1,1,0,0,0,1,35,0,0,512.3292,1,0,0
1,0,1,0,0,1,50,0,1,26,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.75,0,1,0
1,0,0,1,1,0,3,4,2,31.3875,0,0,1
0,1,0,0,1,0,52,1,1,79.65,0,0,1
0,1,0,0,1,0,40,0,0,0,0,0,1
0,0,0,1,0,1,29.69911764705882,0,0,7.75,0,1,0
0,0,1,0,1,0,36,0,0,10.5,0,0,1
0,0,0,1,1,0,16,4,1,39.6875,0,0,1
1,0,0,1,1,0,25,1,0,7.775,0,0,1
1,1,0,0,0,1,58,0,1,153.4625,0,0,1
1,1,0,0,0,1,35,0,0,135.6333,0,0,1
0,1,0,0,1,0,29.69911764705882,0,0,31,0,0,1
1,0,0,1,1,0,25,0,0,0,0,0,1
1,0,1,0,0,1,41,0,1,19.5,0,0,1
0,1,0,0,1,0,37,0,1,29.7,1,0,0
1,0,0,1,0,1,29.69911764705882,0,0,7.75,0,1,0
1,1,0,0,0,1,63,1,0,77.9583,0,0,1
0,0,0,1,0,1,45,0,0,7.75,0,0,1
0,0,1,0,1,0,29.69911764705882,0,0,0,0,0,1
0,0,0,1,1,0,7,4,1,29.125,0,1,0
1,0,0,1,0,1,35,1,1,20.25,0,0,1
0,0,0,1,1,0,65,0,0,7.75,0,1,0
0,0,0,1,1,0,28,0,0,7.8542,0,0,1
0,0,0,1,1,0,16,0,0,9.5,0,0,1
1,0,0,1,1,0,19,0,0,8.05,0,0,1
0,1,0,0,1,0,29.69911764705882,0,0,26,0,0,1
0,0,0,1,1,0,33,0,0,8.6625,1,0,0
1,0,0,1,1,0,30,0,0,9.5,0,0,1
0,0,0,1,1,0,22,0,0,7.8958,0,0,1
1,0,1,0,1,0,42,0,0,13,0,0,1
1,0,0,1,0,1,22,0,0,7.75,0,1,0
1,1,0,0,0,1,26,0,0,78.85,0,0,1
1,1,0,0,0,1,19,1,0,91.0792,1,0,0
0,0,1,0,1,0,36,0,0,12.875,1,0,0
0,0,0,1,0,1,24,0,0,8.85,0,0,1
0,0,0,1,1,0,24,0,0,7.8958,0,0,1
0,1,0,0,1,0,29.69911764705882,0,0,27.7208,1,0,0
0,0,0,1,1,0,23.5,0,0,7.2292,1,0,0
0,1,0,0,0,1,2,1,2,151.55,0,0,1
1,1,0,0,1,0,29.69911764705882,0,0,30.5,0,0,1
1,1,0,0,0,1,50,0,1,247.5208,1,0,0
1,0,0,1,0,1,29.69911764705882,0,0,7.75,0,1,0
1,0,0,1,1,0,29.69911764705882,2,0,23.25,0,1,0
0,0,0,1,1,0,19,0,0,0,0,0,1
1,0,1,0,0,1,29.69911764705882,0,0,12.35,0,1,0
0,0,0,1,1,0,29.69911764705882,0,0,8.05,0,0,1
1,1,0,0,1,0,0.92,1,2,151.55,0,0,1
1,1,0,0,0,1,29.69911764705882,0,0,110.8833,1,0,0
1,1,0,0,0,1,17,1,0,108.9,1,0,0
0,0,1,0,1,0,30,1,0,24,1,0,0
1,1,0,0,0,1,30,0,0,56.9292,1,0,0
1,1,0,0,0,1,24,0,0,83.1583,1,0,0
1,1,0,0,0,1,18,2,2,262.375,1,0,0
0,0,1,0,0,1,26,1,1,26,0,0,1
0,0,0,1,1,0,28,0,0,7.8958,0,0,1
0,0,1,0,1,0,43,1,1,26.25,0,0,1
1,0,0,1,0,1,26,0,0,7.8542,0,0,1
1,0,1,0,0,1,24,1,0,26,0,0,1
0,0,1,0,1,0,54,0,0,14,0,0,1
1,1,0,0,0,1,31,0,2,164.8667,0,0,1
1,1,0,0,0,1,40,1,1,134.5,1,0,0
0,0,0,1,1,0,22,0,0,7.25,0,0,1
0,0,0,1,1,0,27,0,0,7.8958,0,0,1
1,0,1,0,0,1,30,0,0,12.35,0,1,0
1,0,1,0,0,1,22,1,1,29,0,0,1
0,0,0,1,1,0,29.69911764705882,8,2,69.55,0,0,1
1,1,0,0,0,1,36,0,0,135.6333,1,0,0
0,0,0,1,1,0,61,0,0,6.2375,0,0,1
1,0,1,0,0,1,36,0,0,13,0,0,1
1,0,0,1,0,1,31,1,1,20.525,0,0,1
1,1,0,0,0,1,16,0,1,57.9792,1,0,0
1,0,0,1,0,1,29.69911764705882,2,0,23.25,0,1,0
0,1,0,0,1,0,45.5,0,0,28.5,0,0,1
0,1,0,0,1,0,38,0,1,153.4625,0,0,1
0,0,0,1,1,0,16,2,0,18,0,0,1
1,1,0,0,0,1,29.69911764705882,1,0,133.65,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.8958,0,0,1
0,1,0,0,1,0,29,1,0,66.6,0,0,1
1,1,0,0,0,1,41,0,0,134.5,1,0,0
1,0,0,1,1,0,45,0,0,8.05,0,0,1
0,1,0,0,1,0,45,0,0,35.5,0,0,1
1,0,1,0,1,0,2,1,1,26,0,0,1
1,1,0,0,0,1,24,3,2,263,0,0,1
0,0,1,0,1,0,28,0,0,13,0,0,1
0,0,1,0,1,0,25,0,0,13,0,0,1
0,0,1,0,1,0,36,0,0,13,0,0,1
1,0,1,0,0,1,24,0,0,13,0,0,1
1,0,1,0,0,1,40,0,0,13,0,0,1
1,0,0,1,0,1,29.69911764705882,1,0,16.1,0,0,1
1,0,0,1,1,0,3,1,1,15.9,0,0,1
0,0,0,1,1,0,42,0,0,8.6625,0,0,1
0,0,0,1,1,0,23,0,0,9.225,0,0,1
0,1,0,0,1,0,29.69911764705882,0,0,35,0,0,1
0,0,0,1,1,0,15,1,1,7.2292,1,0,0
0,0,0,1,1,0,25,1,0,17.8,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.225,1,0,0
0,0,0,1,1,0,28,0,0,9.5,0,0,1
1,1,0,0,0,1,22,0,1,55,0,0,1
0,0,1,0,0,1,38,0,0,13,0,0,1
1,0,0,1,0,1,29.69911764705882,0,0,7.8792,0,1,0
1,0,0,1,0,1,29.69911764705882,0,0,7.8792,0,1,0
0,0,0,1,1,0,40,1,4,27.9,0,0,1
0,0,1,0,1,0,29,1,0,27.7208,1,0,0
0,0,0,1,0,1,45,0,1,14.4542,1,0,0
0,0,0,1,1,0,35,0,0,7.05,0,0,1
0,0,0,1,1,0,29.69911764705882,1,0,15.5,0,1,0
0,0,0,1,1,0,30,0,0,7.25,0,0,1
1,1,0,0,0,1,60,1,0,75.25,1,0,0
1,0,0,1,0,1,29.69911764705882,0,0,7.2292,1,0,0
1,0,0,1,0,1,29.69911764705882,0,0,7.75,0,1,0
1,1,0,0,0,1,24,0,0,69.3,1,0,0
1,1,0,0,1,0,25,1,0,55.4417,1,0,0
0,0,0,1,1,0,18,1,0,6.4958,0,0,1
0,0,0,1,1,0,19,0,0,8.05,0,0,1
0,1,0,0,1,0,22,0,0,135.6333,1,0,0
0,0,0,1,0,1,3,3,1,21.075,0,0,1
1,1,0,0,0,1,29.69911764705882,1,0,82.1708,1,0,0
1,0,0,1,0,1,22,0,0,7.25,0,0,1
0,1,0,0,1,0,27,0,2,211.5,1,0,0
0,0,0,1,1,0,20,0,0,4.0125,1,0,0
0,0,0,1,1,0,19,0,0,7.775,0,0,1
1,1,0,0,0,1,42,0,0,227.525,1,0,0
1,0,0,1,0,1,1,0,2,15.7417,1,0,0
0,0,0,1,1,0,32,0,0,7.925,0,0,1
1,1,0,0,0,1,35,1,0,52,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.8958,0,0,1
0,0,1,0,1,0,18,0,0,73.5,0,0,1
0,0,0,1,1,0,1,5,2,46.9,0,0,1
1,0,1,0,0,1,36,0,0,13,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.7292,0,1,0
1,0,1,0,0,1,17,0,0,12,1,0,0
1,1,0,0,1,0,36,1,2,120,0,0,1
1,0,0,1,1,0,21,0,0,7.7958,0,0,1
0,0,0,1,1,0,28,2,0,7.925,0,0,1
1,1,0,0,0,1,23,1,0,113.275,1,0,0
1,0,0,1,0,1,24,0,2,16.7,0,0,1
0,0,0,1,1,0,22,0,0,7.7958,0,0,1
0,0,0,1,0,1,31,0,0,7.8542,0,0,1
0,0,1,0,1,0,46,0,0,26,0,0,1
0,0,1,0,1,0,23,0,0,10.5,0,0,1
1,0,1,0,0,1,28,0,0,12.65,0,0,1
1,0,0,1,1,0,39,0,0,7.925,0,0,1
0,0,0,1,1,0,26,0,0,8.05,0,0,1
0,0,0,1,0,1,21,1,0,9.825,0,0,1
0,0,0,1,1,0,28,1,0,15.85,0,0,1
0,0,0,1,0,1,20,0,0,8.6625,0,0,1
0,0,1,0,1,0,34,1,0,21,0,0,1
0,0,0,1,1,0,51,0,0,7.75,0,0,1
1,0,1,0,1,0,3,1,1,18.75,0,0,1
0,0,0,1,1,0,21,0,0,7.775,0,0,1
0,0,0,1,0,1,29.69911764705882,3,1,25.4667,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.8958,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,6.8583,0,1,0
1,1,0,0,0,1,33,1,0,90,0,1,0
0,0,1,0,1,0,29.69911764705882,0,0,0,0,0,1
1,0,0,1,1,0,44,0,0,7.925,0,0,1
0,0,0,1,0,1,29.69911764705882,0,0,8.05,0,0,1
1,0,1,0,0,1,34,1,1,32.5,0,0,1
1,0,1,0,0,1,18,0,2,13,0,0,1
0,0,1,0,1,0,30,0,0,13,0,0,1
0,0,0,1,0,1,10,0,2,24.15,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.8958,1,0,0
0,0,0,1,1,0,21,0,0,7.7333,0,1,0
0,0,0,1,1,0,29,0,0,7.875,0,0,1
0,0,0,1,0,1,28,1,1,14.4,0,0,1
0,0,0,1,1,0,18,1,1,20.2125,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.25,0,0,1
1,0,1,0,0,1,28,1,0,26,0,0,1
1,0,1,0,0,1,19,0,0,26,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.75,0,1,0
1,0,0,1,1,0,32,0,0,8.05,0,0,1
1,1,0,0,1,0,28,0,0,26.55,0,0,1
1,0,0,1,0,1,29.69911764705882,1,0,16.1,0,0,1
1,0,1,0,0,1,42,1,0,26,0,0,1
0,0,0,1,1,0,17,0,0,7.125,0,0,1
0,1,0,0,1,0,50,1,0,55.9,0,0,1
1,1,0,0,0,1,14,1,2,120,0,0,1
0,0,0,1,0,1,21,2,2,34.375,0,0,1
1,0,1,0,0,1,24,2,3,18.75,0,0,1
0,1,0,0,1,0,64,1,4,263,0,0,1
0,0,1,0,1,0,31,0,0,10.5,0,0,1
1,0,1,0,0,1,45,1,1,26.25,0,0,1
0,0,0,1,1,0,20,0,0,9.5,0,0,1
0,0,0,1,1,0,25,1,0,7.775,0,0,1
1,0,1,0,0,1,28,0,0,13,0,0,1
1,0,0,1,1,0,29.69911764705882,0,0,8.1125,0,0,1
1,1,0,0,1,0,4,0,2,81.8583,0,0,1
1,0,1,0,0,1,13,0,1,19.5,0,0,1
1,1,0,0,1,0,34,0,0,26.55,0,0,1
1,0,0,1,0,1,5,2,1,19.2583,1,0,0
1,1,0,0,1,0,52,0,0,30.5,0,0,1
0,0,1,0,1,0,36,1,2,27.75,0,0,1
0,0,0,1,1,0,29.69911764705882,1,0,19.9667,0,0,1
0,1,0,0,1,0,30,0,0,27.75,1,0,0
1,1,0,0,1,0,49,1,0,89.1042,1,0,0
0,0,0,1,1,0,29.69911764705882,0,0,8.05,0,0,1
1,0,0,1,1,0,29,0,0,7.8958,1,0,0
0,1,0,0,1,0,65,0,0,26.55,0,0,1
1,1,0,0,0,1,29.69911764705882,1,0,51.8625,0,0,1
1,0,1,0,0,1,50,0,0,10.5,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.75,0,1,0
1,1,0,0,1,0,48,0,0,26.55,0,0,1
0,0,0,1,1,0,34,0,0,8.05,0,0,1
0,1,0,0,1,0,47,0,0,38.5,0,0,1
0,0,1,0,1,0,48,0,0,13,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,8.05,0,0,1
0,0,0,1,1,0,38,0,0,7.05,0,0,1
0,0,1,0,1,0,29.69911764705882,0,0,0,0,0,1
0,1,0,0,1,0,56,0,0,26.55,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.725,0,1,0
1,0,0,1,0,1,0.75,2,1,19.2583,1,0,0
0,0,0,1,1,0,29.69911764705882,0,0,7.25,0,0,1
0,0,0,1,1,0,38,0,0,8.6625,0,0,1
1,0,1,0,0,1,33,1,2,27.75,0,0,1
1,0,1,0,0,1,23,0,0,13.7917,1,0,0
0,0,0,1,0,1,22,0,0,9.8375,0,0,1
0,1,0,0,1,0,29.69911764705882,0,0,52,0,0,1
0,0,1,0,1,0,34,1,0,21,0,0,1
0,0,0,1,1,0,29,1,0,7.0458,0,0,1
0,0,0,1,1,0,22,0,0,7.5208,0,0,1
1,0,0,1,0,1,2,0,1,12.2875,0,0,1
0,0,0,1,1,0,9,5,2,46.9,0,0,1
0,0,1,0,1,0,29.69911764705882,0,0,0,0,0,1
0,0,0,1,1,0,50,0,0,8.05,0,0,1
1,0,0,1,0,1,63,0,0,9.5875,0,0,1
1,1,0,0,1,0,25,1,0,91.0792,1,0,0
0,0,0,1,0,1,29.69911764705882,3,1,25.4667,0,0,1
1,1,0,0,0,1,35,1,0,90,0,0,1
0,1,0,0,1,0,58,0,0,29.7,1,0,0
0,0,0,1,1,0,30,0,0,8.05,0,0,1
1,0,0,1,1,0,9,1,1,15.9,0,0,1
0,0,0,1,1,0,29.69911764705882,1,0,19.9667,0,0,1
0,0,0,1,1,0,21,0,0,7.25,0,0,1
0,1,0,0,1,0,55,0,0,30.5,0,0,1
0,1,0,0,1,0,71,0,0,49.5042,1,0,0
0,0,0,1,1,0,21,0,0,8.05,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,14.4583,1,0,0
1,1,0,0,0,1,54,1,0,78.2667,1,0,0
0,0,0,1,1,0,29.69911764705882,0,0,15.1,0,0,1
0,1,0,0,0,1,25,1,2,151.55,0,0,1
0,0,0,1,1,0,24,0,0,7.7958,0,0,1
0,0,0,1,1,0,17,0,0,8.6625,0,0,1
0,0,0,1,0,1,21,0,0,7.75,0,1,0
0,0,0,1,0,1,29.69911764705882,0,0,7.6292,0,1,0
0,0,0,1,0,1,37,0,0,9.5875,0,0,1
1,1,0,0,0,1,16,0,0,86.5,0,0,1
0,1,0,0,1,0,18,1,0,108.9,1,0,0
1,0,1,0,0,1,33,0,2,26,0,0,1
1,1,0,0,1,0,29.69911764705882,0,0,26.55,0,0,1
0,0,0,1,1,0,28,0,0,22.525,0,0,1
1,0,0,1,1,0,26,0,0,56.4958,0,0,1
1,0,0,1,1,0,29,0,0,7.75,0,1,0
0,0,0,1,1,0,29.69911764705882,0,0,8.05,0,0,1
1,1,0,0,1,0,36,0,0,26.2875,0,0,1
1,1,0,0,0,1,54,1,0,59.4,1,0,0
0,0,0,1,1,0,24,0,0,7.4958,0,0,1
0,1,0,0,1,0,47,0,0,34.0208,0,0,1
1,0,1,0,0,1,34,0,0,10.5,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,24.15,0,1,0
1,0,1,0,0,1,36,1,0,26,0,0,1
0,0,0,1,1,0,32,0,0,7.8958,0,0,1
1,1,0,0,0,1,30,0,0,93.5,0,0,1
0,0,0,1,1,0,22,0,0,7.8958,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.225,1,0,0
1,1,0,0,0,1,44,0,1,57.9792,1,0,0
0,0,0,1,1,0,29.69911764705882,0,0,7.2292,1,0,0
0,0,0,1,1,0,40.5,0,0,7.75,0,1,0
1,0,1,0,0,1,50,0,0,10.5,0,0,1
0,1,0,0,1,0,29.69911764705882,0,0,221.7792,0,0,1
0,0,0,1,1,0,39,0,0,7.925,0,0,1
0,0,1,0,1,0,23,2,1,11.5,0,0,1
1,0,1,0,0,1,2,1,1,26,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.2292,1,0,0
0,0,0,1,1,0,17,1,1,7.2292,1,0,0
1,0,0,1,0,1,29.69911764705882,0,2,22.3583,1,0,0
0,0,0,1,0,1,30,0,0,8.6625,0,0,1
1,0,1,0,0,1,7,0,2,26.25,0,0,1
0,1,0,0,1,0,45,0,0,26.55,0,0,1
1,1,0,0,0,1,30,0,0,106.425,1,0,0
0,0,0,1,1,0,29.69911764705882,0,0,14.5,0,0,1
1,1,0,0,0,1,22,0,2,49.5,1,0,0
1,1,0,0,0,1,36,0,2,71,0,0,1
0,0,0,1,0,1,9,4,2,31.275,0,0,1
0,0,0,1,0,1,11,4,2,31.275,0,0,1
1,0,1,0,1,0,32,1,0,26,0,0,1
0,1,0,0,1,0,50,1,0,106.425,1,0,0
0,1,0,0,1,0,64,0,0,26,0,0,1
1,0,1,0,0,1,19,1,0,26,0,0,1
1,0,1,0,1,0,29.69911764705882,0,0,13.8625,1,0,0
0,0,0,1,1,0,33,1,1,20.525,0,0,1
1,0,1,0,1,0,8,1,1,36.75,0,0,1
1,1,0,0,1,0,17,0,2,110.8833,1,0,0
0,0,1,0,1,0,27,0,0,26,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.8292,0,1,0
1,0,0,1,1,0,22,0,0,7.225,1,0,0
1,0,0,1,0,1,22,0,0,7.775,0,0,1
0,1,0,0,1,0,62,0,0,26.55,0,0,1
1,1,0,0,0,1,48,1,0,39.6,1,0,0
0,1,0,0,1,0,29.69911764705882,0,0,227.525,1,0,0
1,1,0,0,0,1,39,1,1,79.65,0,0,1
1,0,0,1,0,1,36,1,0,17.4,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.75,0,1,0
0,0,0,1,1,0,40,0,0,7.8958,0,0,1
0,0,1,0,1,0,28,0,0,13.5,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,8.05,0,0,1
0,0,0,1,0,1,29.69911764705882,0,0,8.05,0,0,1
0,0,0,1,1,0,24,2,0,24.15,0,0,1
0,0,0,1,1,0,19,0,0,7.8958,0,0,1
0,0,0,1,0,1,29,0,4,21.075,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.2292,1,0,0
1,0,0,1,1,0,32,0,0,7.8542,0,0,1
1,0,1,0,1,0,62,0,0,10.5,0,0,1
1,1,0,0,0,1,53,2,0,51.4792,0,0,1
1,1,0,0,1,0,36,0,0,26.3875,0,0,1
1,0,0,1,0,1,29.69911764705882,0,0,7.75,0,1,0
0,0,0,1,1,0,16,0,0,8.05,0,0,1
0,0,0,1,1,0,19,0,0,14.5,0,0,1
1,0,1,0,0,1,34,0,0,13,0,0,1
1,1,0,0,0,1,39,1,0,55.9,0,0,1
0,0,0,1,0,1,29.69911764705882,1,0,14.4583,1,0,0
1,0,0,1,1,0,32,0,0,7.925,0,0,1
1,0,1,0,0,1,25,1,1,30,0,0,1
1,1,0,0,0,1,39,1,1,110.8833,1,0,0
0,0,1,0,1,0,54,0,0,26,0,0,1
0,1,0,0,1,0,36,0,0,40.125,1,0,0
0,0,0,1,1,0,29.69911764705882,0,0,8.7125,1,0,0
1,1,0,0,0,1,18,0,2,79.65,0,0,1
0,0,1,0,1,0,47,0,0,15,0,0,1
1,1,0,0,1,0,60,1,1,79.2,1,0,0
0,0,0,1,1,0,22,0,0,8.05,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,8.05,0,0,1
0,0,0,1,1,0,35,0,0,7.125,0,0,1
1,1,0,0,0,1,52,1,0,78.2667,1,0,0
0,0,0,1,1,0,47,0,0,7.25,0,0,1
0,0,0,1,0,1,29.69911764705882,0,2,7.75,0,1,0
0,0,1,0,1,0,37,1,0,26,0,0,1
0,0,0,1,1,0,36,1,1,24.15,0,0,1
1,0,1,0,0,1,29.69911764705882,0,0,33,0,0,1
0,0,0,1,1,0,49,0,0,0,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.225,1,0,0
1,1,0,0,1,0,49,1,0,56.9292,1,0,0
1,0,1,0,0,1,24,2,1,27,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.8958,0,0,1
0,1,0,0,1,0,29.69911764705882,0,0,42.4,0,0,1
0,0,0,1,1,0,44,0,0,8.05,0,0,1
1,1,0,0,1,0,35,0,0,26.55,1,0,0
0,0,0,1,1,0,36,1,0,15.55,0,0,1
0,0,0,1,1,0,30,0,0,7.8958,0,0,1
1,1,0,0,1,0,27,0,0,30.5,0,0,1
1,0,1,0,0,1,22,1,2,41.5792,1,0,0
1,1,0,0,0,1,40,0,0,153.4625,0,0,1
0,0,0,1,0,1,39,1,5,31.275,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.05,0,0,1
1,0,0,1,0,1,29.69911764705882,1,0,15.5,0,1,0
0,0,0,1,1,0,29.69911764705882,0,0,7.75,0,1,0
0,0,0,1,1,0,35,0,0,8.05,0,0,1
1,0,1,0,0,1,24,1,2,65,0,0,1
0,0,0,1,1,0,34,1,1,14.4,0,0,1
0,0,0,1,0,1,26,1,0,16.1,0,0,1
1,0,1,0,0,1,4,2,1,39,0,0,1
0,0,1,0,1,0,26,0,0,10.5,0,0,1
0,0,0,1,1,0,27,1,0,14.4542,1,0,0
1,1,0,0,1,0,42,1,0,52.5542,0,0,1
1,0,0,1,1,0,20,1,1,15.7417,1,0,0
0,0,0,1,1,0,21,0,0,7.8542,0,0,1
0,0,0,1,1,0,21,0,0,16.1,0,0,1
0,1,0,0,1,0,61,0,0,32.3208,0,0,1
0,0,1,0,1,0,57,0,0,12.35,0,1,0
1,1,0,0,0,1,21,0,0,77.9583,0,0,1
0,0,0,1,1,0,26,0,0,7.8958,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.7333,0,1,0
1,1,0,0,1,0,80,0,0,30,0,0,1
0,0,0,1,1,0,51,0,0,7.0542,0,0,1
1,1,0,0,1,0,32,0,0,30.5,1,0,0
0,1,0,0,1,0,29.69911764705882,0,0,0,0,0,1
0,0,0,1,0,1,9,3,2,27.9,0,0,1
1,0,1,0,0,1,28,0,0,13,0,0,1
0,0,0,1,1,0,32,0,0,7.925,0,0,1
0,0,1,0,1,0,31,1,1,26.25,0,0,1
0,0,0,1,0,1,41,0,5,39.6875,0,0,1
0,0,0,1,1,0,29.69911764705882,1,0,16.1,0,0,1
0,0,0,1,1,0,20,0,0,7.8542,0,0,1
1,1,0,0,0,1,24,0,0,69.3,1,0,0
0,0,0,1,0,1,2,3,2,27.9,0,0,1
1,0,0,1,1,0,29.69911764705882,0,0,56.4958,0,0,1
1,0,0,1,0,1,0.75,2,1,19.2583,1,0,0
1,1,0,0,1,0,48,1,0,76.7292,1,0,0
0,0,0,1,1,0,19,0,0,7.8958,0,0,1
1,1,0,0,1,0,56,0,0,35.5,1,0,0
0,0,0,1,1,0,29.69911764705882,0,0,7.55,0,0,1
1,0,0,1,0,1,23,0,0,7.55,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.8958,0,0,1
1,0,1,0,0,1,18,0,1,23,0,0,1
0,0,0,1,1,0,21,0,0,8.4333,0,0,1
1,0,0,1,0,1,29.69911764705882,0,0,7.8292,0,1,0
0,0,0,1,0,1,18,0,0,6.75,0,1,0
0,0,1,0,1,0,24,2,0,73.5,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.8958,0,0,1
0,0,0,1,0,1,32,1,1,15.5,0,1,0
0,0,1,0,1,0,23,0,0,13,0,0,1
0,1,0,0,1,0,58,0,2,113.275,1,0,0
1,1,0,0,1,0,50,2,0,133.65,0,0,1
0,0,0,1,1,0,40,0,0,7.225,1,0,0
0,1,0,0,1,0,47,0,0,25.5875,0,0,1
0,0,0,1,1,0,36,0,0,7.4958,0,0,1
1,0,0,1,1,0,20,1,0,7.925,0,0,1
0,0,1,0,1,0,32,2,0,73.5,0,0,1
0,0,1,0,1,0,25,0,0,13,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.775,0,0,1
0,0,0,1,1,0,43,0,0,8.05,0,0,1
1,1,0,0,0,1,29.69911764705882,1,0,52,0,0,1
1,0,1,0,0,1,40,1,1,39,0,0,1
0,1,0,0,1,0,31,1,0,52,0,0,1
0,0,1,0,1,0,70,0,0,10.5,0,0,1
1,0,1,0,1,0,31,0,0,13,0,0,1
0,0,1,0,1,0,29.69911764705882,0,0,0,0,0,1
0,0,0,1,1,0,18,0,0,7.775,0,0,1
0,0,0,1,1,0,24.5,0,0,8.05,0,0,1
1,0,0,1,0,1,18,0,0,9.8417,0,0,1
0,0,0,1,0,1,43,1,6,46.9,0,0,1
1,1,0,0,1,0,36,0,1,512.3292,1,0,0
0,0,0,1,0,1,29.69911764705882,0,0,8.1375,0,1,0
1,1,0,0,1,0,27,0,0,76.7292,1,0,0
0,0,0,1,1,0,20,0,0,9.225,0,0,1
0,0,0,1,1,0,14,5,2,46.9,0,0,1
0,0,1,0,1,0,60,1,1,39,0,0,1
0,0,1,0,1,0,25,1,2,41.5792,1,0,0
0,0,0,1,1,0,14,4,1,39.6875,0,0,1
0,0,0,1,1,0,19,0,0,10.1708,0,0,1
0,0,0,1,1,0,18,0,0,7.7958,0,0,1
1,1,0,0,0,1,15,0,1,211.3375,0,0,1
1,1,0,0,1,0,31,1,0,57,0,0,1
1,0,0,1,0,1,4,0,1,13.4167,1,0,0
1,0,0,1,1,0,29.69911764705882,0,0,56.4958,0,0,1
0,0,0,1,1,0,25,0,0,7.225,1,0,0
0,1,0,0,1,0,60,0,0,26.55,0,0,1
0,0,1,0,1,0,52,0,0,13.5,0,0,1
0,0,0,1,1,0,44,0,0,8.05,0,0,1
1,0,0,1,0,1,29.69911764705882,0,0,7.7333,0,1,0
0,1,0,0,1,0,49,1,1,110.8833,1,0,0
0,0,0,1,1,0,42,0,0,7.65,0,0,1
1,1,0,0,0,1,18,1,0,227.525,1,0,0
1,1,0,0,1,0,35,0,0,26.2875,0,0,1
0,0,0,1,0,1,18,0,1,14.4542,1,0,0
0,0,0,1,1,0,25,0,0,7.7417,0,1,0
0,0,0,1,1,0,26,1,0,7.8542,0,0,1
0,0,1,0,1,0,39,0,0,26,0,0,1
1,0,1,0,0,1,45,0,0,13.5,0,0,1
1,1,0,0,1,0,42,0,0,26.2875,0,0,1
1,1,0,0,0,1,22,0,0,151.55,0,0,1
1,0,0,1,1,0,29.69911764705882,1,1,15.2458,1,0,0
1,1,0,0,0,1,24,0,0,49.5042,1,0,0
0,1,0,0,1,0,29.69911764705882,0,0,26.55,0,0,1
1,1,0,0,1,0,48,1,0,52,0,0,1
0,0,0,1,1,0,29,0,0,9.4833,0,0,1
0,0,1,0,1,0,52,0,0,13,0,0,1
0,0,0,1,1,0,19,0,0,7.65,0,0,1
1,1,0,0,0,1,38,0,0,227.525,1,0,0
1,0,1,0,0,1,27,0,0,10.5,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,15.5,0,1,0
0,0,0,1,1,0,33,0,0,7.775,0,0,1
1,0,1,0,0,1,6,0,1,33,0,0,1
0,0,0,1,1,0,17,1,0,7.0542,0,0,1
0,0,1,0,1,0,34,0,0,13,0,0,1
0,0,1,0,1,0,50,0,0,13,0,0,1
1,1,0,0,1,0,27,1,0,53.1,0,0,1
0,0,0,1,1,0,20,0,0,8.6625,0,0,1
1,0,1,0,0,1,30,3,0,21,0,0,1
1,0,0,1,0,1,29.69911764705882,0,0,7.7375,0,1,0
0,0,1,0,1,0,25,1,0,26,0,0,1
0,0,0,1,0,1,25,1,0,7.925,0,0,1
1,1,0,0,0,1,29,0,0,211.3375,0,0,1
0,0,0,1,1,0,11,0,0,18.7875,1,0,0
0,0,1,0,1,0,29.69911764705882,0,0,0,0,0,1
0,0,1,0,1,0,23,0,0,13,0,0,1
0,0,1,0,1,0,23,0,0,13,0,0,1
0,0,0,1,1,0,28.5,0,0,16.1,0,0,1
0,0,0,1,0,1,48,1,3,34.375,0,0,1
1,1,0,0,1,0,35,0,0,512.3292,1,0,0
0,0,0,1,1,0,29.69911764705882,0,0,7.8958,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.8958,0,0,1
1,1,0,0,1,0,29.69911764705882,0,0,30,0,0,1
0,1,0,0,1,0,36,1,0,78.85,0,0,1
1,1,0,0,0,1,21,2,2,262.375,1,0,0
0,0,0,1,1,0,24,1,0,16.1,0,0,1
1,0,0,1,1,0,31,0,0,7.925,0,0,1
0,1,0,0,1,0,70,1,1,71,0,0,1
0,0,0,1,1,0,16,1,1,20.25,0,0,1
1,0,1,0,0,1,30,0,0,13,0,0,1
0,1,0,0,1,0,19,1,0,53.1,0,0,1
0,0,0,1,1,0,31,0,0,7.75,0,1,0
1,0,1,0,0,1,4,1,1,23,0,0,1
1,0,0,1,1,0,6,0,1,12.475,0,0,1
0,0,0,1,1,0,33,0,0,9.5,0,0,1
0,0,0,1,1,0,23,0,0,7.8958,0,0,1
1,0,1,0,0,1,48,1,2,65,0,0,1
1,0,1,0,1,0,0.67,1,1,14.5,0,0,1
0,0,0,1,1,0,28,0,0,7.7958,0,0,1
0,0,1,0,1,0,18,0,0,11.5,0,0,1
0,0,0,1,1,0,34,0,0,8.05,0,0,1
1,1,0,0,0,1,33,0,0,86.5,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,14.5,0,0,1
0,0,0,1,1,0,41,0,0,7.125,0,0,1
1,0,0,1,1,0,20,0,0,7.2292,1,0,0
1,1,0,0,0,1,36,1,2,120,0,0,1
0,0,0,1,1,0,16,0,0,7.775,0,0,1
1,1,0,0,0,1,51,1,0,77.9583,0,0,1
0,1,0,0,1,0,29.69911764705882,0,0,39.6,1,0,0
0,0,0,1,0,1,30.5,0,0,7.75,0,1,0
0,0,0,1,1,0,29.69911764705882,1,0,24.15,0,1,0
0,0,0,1,1,0,32,0,0,8.3625,0,0,1
0,0,0,1,1,0,24,0,0,9.5,0,0,1
0,0,0,1,1,0,48,0,0,7.8542,0,0,1
0,0,1,0,0,1,57,0,0,10.5,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.225,1,0,0
1,0,1,0,0,1,54,1,3,23,0,0,1
0,0,0,1,1,0,18,0,0,7.75,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.75,0,1,0
1,0,0,1,0,1,5,0,0,12.475,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.7375,0,1,0
1,1,0,0,0,1,43,0,1,211.3375,0,0,1
1,0,0,1,0,1,13,0,0,7.2292,1,0,0
1,1,0,0,0,1,17,1,0,57,0,0,1
0,1,0,0,1,0,29,0,0,30,0,0,1
0,0,0,1,1,0,29.69911764705882,1,2,23.45,0,0,1
0,0,0,1,1,0,25,0,0,7.05,0,0,1
0,0,0,1,1,0,25,0,0,7.25,0,0,1
1,0,0,1,0,1,18,0,0,7.4958,0,0,1
0,0,0,1,1,0,8,4,1,29.125,0,1,0
1,0,0,1,1,0,1,1,2,20.575,0,0,1
0,1,0,0,1,0,46,0,0,79.2,1,0,0
0,0,0,1,1,0,29.69911764705882,0,0,7.75,0,1,0
0,0,1,0,1,0,16,0,0,26,0,0,1
0,0,0,1,0,1,29.69911764705882,8,2,69.55,0,0,1
0,1,0,0,1,0,29.69911764705882,0,0,30.6958,1,0,0
0,0,0,1,1,0,25,0,0,7.8958,0,0,1
0,0,1,0,1,0,39,0,0,13,0,0,1
1,1,0,0,0,1,49,0,0,25.9292,0,0,1
1,0,0,1,0,1,31,0,0,8.6833,0,0,1
0,0,0,1,1,0,30,0,0,7.2292,1,0,0
0,0,0,1,0,1,30,1,1,24.15,0,0,1
0,0,1,0,1,0,34,0,0,13,0,0,1
1,0,1,0,0,1,31,1,1,26.25,0,0,1
1,1,0,0,1,0,11,1,2,120,0,0,1
1,0,0,1,1,0,0.42,0,1,8.5167,1,0,0
1,0,0,1,1,0,27,0,0,6.975,0,0,1
0,0,0,1,1,0,31,0,0,7.775,0,0,1
0,1,0,0,1,0,39,0,0,0,0,0,1
0,0,0,1,0,1,18,0,0,7.775,0,0,1
0,0,1,0,1,0,39,0,0,13,0,0,1
1,1,0,0,0,1,33,1,0,53.1,0,0,1
0,0,0,1,1,0,26,0,0,7.8875,0,0,1
0,0,0,1,1,0,39,0,0,24.15,0,0,1
0,0,1,0,1,0,35,0,0,10.5,0,0,1
0,0,0,1,0,1,6,4,2,31.275,0,0,1
0,0,0,1,1,0,30.5,0,0,8.05,0,0,1
0,1,0,0,1,0,29.69911764705882,0,0,0,0,0,1
0,0,0,1,0,1,23,0,0,7.925,0,0,1
0,0,1,0,1,0,31,1,1,37.0042,1,0,0
0,0,0,1,1,0,43,0,0,6.45,0,0,1
0,0,0,1,1,0,10,3,2,27.9,0,0,1
1,1,0,0,0,1,52,1,1,93.5,0,0,1
1,0,0,1,1,0,27,0,0,8.6625,0,0,1
0,1,0,0,1,0,38,0,0,0,0,0,1
1,0,0,1,0,1,27,0,1,12.475,0,0,1
0,0,0,1,1,0,2,4,1,39.6875,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,6.95,0,1,0
0,0,0,1,1,0,29.69911764705882,0,0,56.4958,0,0,1
1,0,1,0,1,0,1,0,2,37.0042,1,0,0
1,0,0,1,1,0,29.69911764705882,0,0,7.75,0,1,0
1,1,0,0,0,1,62,0,0,80,0,0,1
1,0,0,1,0,1,15,1,0,14.4542,1,0,0
1,0,1,0,1,0,0.83,1,1,18.75,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.2292,1,0,0
0,0,0,1,1,0,23,0,0,7.8542,0,0,1
0,0,0,1,1,0,18,0,0,8.3,0,0,1
1,1,0,0,0,1,39,1,1,83.1583,1,0,0
0,0,0,1,1,0,21,0,0,8.6625,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,8.05,0,0,1
1,0,0,1,1,0,32,0,0,56.4958,0,0,1
1,1,0,0,1,0,29.69911764705882,0,0,29.7,1,0,0
0,0,0,1,1,0,20,0,0,7.925,0,0,1
0,0,1,0,1,0,16,0,0,10.5,0,0,1
1,1,0,0,0,1,30,0,0,31,1,0,0
0,0,0,1,1,0,34.5,0,0,6.4375,1,0,0
0,0,0,1,1,0,17,0,0,8.6625,0,0,1
0,0,0,1,1,0,42,0,0,7.55,0,0,1
0,0,0,1,1,0,29.69911764705882,8,2,69.55,0,0,1
0,0,0,1,1,0,35,0,0,7.8958,1,0,0
0,0,1,0,1,0,28,0,1,33,0,0,1
1,1,0,0,0,1,29.69911764705882,1,0,89.1042,1,0,0
0,0,0,1,1,0,4,4,2,31.275,0,0,1
0,0,0,1,1,0,74,0,0,7.775,0,0,1
0,0,0,1,0,1,9,1,1,15.2458,1,0,0
1,1,0,0,0,1,16,0,1,39.4,0,0,1
0,0,1,0,0,1,44,1,0,26,0,0,1
1,0,0,1,0,1,18,0,1,9.35,0,0,1
1,1,0,0,0,1,45,1,1,164.8667,0,0,1
1,1,0,0,1,0,51,0,0,26.55,0,0,1
1,0,0,1,0,1,24,0,3,19.2583,1,0,0
0,0,0,1,1,0,29.69911764705882,0,0,7.2292,1,0,0
0,0,0,1,1,0,41,2,0,14.1083,0,0,1
0,0,1,0,1,0,21,1,0,11.5,0,0,1
1,1,0,0,0,1,48,0,0,25.9292,0,0,1
0,0,0,1,0,1,29.69911764705882,8,2,69.55,0,0,1
0,0,1,0,1,0,24,0,0,13,0,0,1
1,0,1,0,0,1,42,0,0,13,0,0,1
1,0,1,0,0,1,27,1,0,13.8583,1,0,0
0,1,0,0,1,0,31,0,0,50.4958,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,9.5,0,0,1
1,0,0,1,1,0,4,1,1,11.1333,0,0,1
0,0,0,1,1,0,26,0,0,7.8958,0,0,1
1,1,0,0,0,1,47,1,1,52.5542,0,0,1
0,1,0,0,1,0,33,0,0,5,0,0,1
0,0,0,1,1,0,47,0,0,9,0,0,1
1,0,1,0,0,1,28,1,0,24,1,0,0
1,0,0,1,0,1,15,0,0,7.225,1,0,0
0,0,0,1,1,0,20,0,0,9.8458,0,0,1
0,0,0,1,1,0,19,0,0,7.8958,0,0,1
0,0,0,1,1,0,29.69911764705882,0,0,7.8958,0,0,1
1,1,0,0,0,1,56,0,1,83.1583,1,0,0
1,0,1,0,0,1,25,0,1,26,0,0,1
0,0,0,1,1,0,33,0,0,7.8958,0,0,1
0,0,0,1,0,1,22,0,0,10.5167,0,0,1
0,0,1,0,1,0,28,0,0,10.5,0,0,1
0,0,0,1,1,0,25,0,0,7.05,0,0,1
0,0,0,1,0,1,39,0,5,29.125,0,1,0
0,0,1,0,1,0,27,0,0,13,0,0,1
1,1,0,0,0,1,19,0,0,30,0,0,1
0,0,0,1,0,1,29.69911764705882,1,2,23.45,0,0,1
1,1,0,0,1,0,26,0,0,30,1,0,0
0,0,0,1,1,0,32,0,0,7.75,0,1,0
/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/bin/java -Didea.launcher.port=7532 "-Didea.launcher.bin.path=/Applications/IntelliJ IDEA CE.app/Contents/bin" -Dfile.encoding=UTF-8 -classpath "/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/charsets.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/deploy.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/ext/cldrdata.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/ext/dnsns.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/ext/jfxrt.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/ext/localedata.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/ext/nashorn.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/ext/sunec.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/ext/sunjce_provider.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/ext/sunpkcs11.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/ext/zipfs.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/javaws.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/jce.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/jfr.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/jfxswt.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/jsse.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/management-agent.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/plugin.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/resources.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/jre/lib/rt.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/lib/ant-javafx
.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/lib/dt.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/lib/javafx-mx.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/lib/jconsole.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/lib/sa-jdi.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0_25.jdk/Contents/Home/lib/tools.jar:/Users/danlin/git/dl4j-lab/dl4j/target/classes:/Users/danlin/.m2/repository/org/nd4j/nd4j-native-platform/0.5.0/nd4j-native-platform-0.5.0.jar:/Users/danlin/.m2/repository/org/nd4j/nd4j-native/0.5.0/nd4j-native-0.5.0.jar:/Users/danlin/.m2/repository/org/nd4j/nd4j-native-api/0.5.0/nd4j-native-api-0.5.0.jar:/Users/danlin/.m2/repository/org/nd4j/nd4j-native/0.5.0/nd4j-native-0.5.0-macosx-x86_64.jar:/Users/danlin/.m2/repository/org/nd4j/nd4j-native/0.5.0/nd4j-native-0.5.0-linux-x86_64.jar:/Users/danlin/.m2/repository/org/nd4j/nd4j-native/0.5.0/nd4j-native-0.5.0-windows-x86_64.jar:/Users/danlin/.m2/repository/org/nd4j/nd4j-native/0.5.0/nd4j-native-0.5.0-linux-ppc64le.jar:/Users/danlin/.m2/repository/org/deeplearning4j/deeplearning4j-core/0.5.0/deeplearning4j-core-0.5.0.jar:/Users/danlin/.m2/repository/org/slf4j/slf4j-api/1.7.12/slf4j-api-1.7.12.jar:/Users/danlin/.m2/repository/ch/qos/logback/logback-classic/1.1.2/logback-classic-1.1.2.jar:/Users/danlin/.m2/repository/ch/qos/logback/logback-core/1.1.2/logback-core-1.1.2.jar:/Users/danlin/.m2/repository/org/apache/commons/commons-math3/3.4.1/commons-math3-3.4.1.jar:/Users/danlin/.m2/repository/commons-io/commons-io/2.4/commons-io-2.4.jar:/Users/danlin/.m2/repository/org/apache/commons/commons-compress/1.8/commons-compress-1.8.jar:/Users/danlin/.m2/repository/org/tukaani/xz/1.5/xz-1.5.jar:/Users/danlin/.m2/repository/org/nd4j/nd4j-api/0.5.0/nd4j-api-0.5.0.jar:/Users/danlin/.m2/repository/org/nd4j/nd4j-buffer/0.5.0/nd4j-buffer-0.5.0.jar:/Users/danlin/.m2/repository/org/nd4j/nd4j-common/0.5.0/nd4j-common-0.5.0.jar:/Users/danlin/.m2/r
epository/org/reflections/reflections/0.9.10/reflections-0.9.10.jar:/Users/danlin/.m2/repository/com/google/code/findbugs/annotations/2.0.1/annotations-2.0.1.jar:/Users/danlin/.m2/repository/org/nd4j/nd4j-context/0.5.0/nd4j-context-0.5.0.jar:/Users/danlin/.m2/repository/org/apache/commons/commons-lang3/3.3.1/commons-lang3-3.3.1.jar:/Users/danlin/.m2/repository/com/fasterxml/jackson/core/jackson-core/2.5.1/jackson-core-2.5.1.jar:/Users/danlin/.m2/repository/com/fasterxml/jackson/core/jackson-databind/2.5.1/jackson-databind-2.5.1.jar:/Users/danlin/.m2/repository/org/json/json/20131018/json-20131018.jar:/Users/danlin/.m2/repository/com/fasterxml/jackson/core/jackson-annotations/2.5.1/jackson-annotations-2.5.1.jar:/Users/danlin/.m2/repository/org/projectlombok/lombok/1.16.4/lombok-1.16.4.jar:/Users/danlin/.m2/repository/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.5.1/jackson-dataformat-yaml-2.5.1.jar:/Users/danlin/.m2/repository/org/yaml/snakeyaml/1.12/snakeyaml-1.12.jar:/Users/danlin/.m2/repository/org/datavec/datavec-nd4j-common/0.5.0/datavec-nd4j-common-0.5.0.jar:/Users/danlin/.m2/repository/org/datavec/datavec-api/0.5.0/datavec-api-0.5.0.jar:/Users/danlin/.m2/repository/com/fasterxml/jackson/datatype/jackson-datatype-joda/2.4.4/jackson-datatype-joda-2.4.4.jar:/Users/danlin/.m2/repository/joda-time/joda-time/2.9.2/joda-time-2.9.2.jar:/Users/danlin/.m2/repository/org/freemarker/freemarker/2.3.23/freemarker-2.3.23.jar:/Users/danlin/.m2/repository/org/datavec/datavec-data-image/0.5.0/datavec-data-image-0.5.0.jar:/Users/danlin/.m2/repository/com/github/jai-imageio/jai-imageio-core/1.3.0/jai-imageio-core-1.3.0.jar:/Users/danlin/.m2/repository/com/twelvemonkeys/imageio/imageio-jpeg/3.1.1/imageio-jpeg-3.1.1.jar:/Users/danlin/.m2/repository/com/twelvemonkeys/imageio/imageio-core/3.1.1/imageio-core-3.1.1.jar:/Users/danlin/.m2/repository/com/twelvemonkeys/imageio/imageio-metadata/3.1.1/imageio-metadata-3.1.1.jar:/Users/danlin/.m2/repository/com/twelvemonkeys/com
mon/common-lang/3.1.1/common-lang-3.1.1.jar:/Users/danlin/.m2/repository/com/twelvemonkeys/common/common-io/3.1.1/common-io-3.1.1.jar:/Users/danlin/.m2/repository/com/twelvemonkeys/common/common-image/3.1.1/common-image-3.1.1.jar:/Users/danlin/.m2/repository/com/twelvemonkeys/imageio/imageio-tiff/3.1.1/imageio-tiff-3.1.1.jar:/Users/danlin/.m2/repository/com/twelvemonkeys/imageio/imageio-psd/3.1.1/imageio-psd-3.1.1.jar:/Users/danlin/.m2/repository/com/twelvemonkeys/imageio/imageio-bmp/3.1.1/imageio-bmp-3.1.1.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp/1.2.3/javacpp-1.2.3.jar:/Users/danlin/.m2/repository/org/bytedeco/javacv/1.2/javacv-1.2.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp-presets/opencv/3.1.0-1.2/opencv-3.1.0-1.2.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp-presets/opencv/3.1.0-1.2/opencv-3.1.0-1.2-linux-x86_64.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp-presets/opencv/3.1.0-1.2/opencv-3.1.0-1.2-macosx-x86_64.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp-presets/opencv/3.1.0-1.2/opencv-3.1.0-1.2-windows-x86_64.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp-presets/opencv/3.1.0-1.2/opencv-3.1.0-1.2-linux-ppc64le.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp-presets/ffmpeg/3.0.2-1.2/ffmpeg-3.0.2-1.2.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp-presets/ffmpeg/3.0.2-1.2/ffmpeg-3.0.2-1.2-linux-x86_64.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp-presets/ffmpeg/3.0.2-1.2/ffmpeg-3.0.2-1.2-macosx-x86_64.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp-presets/ffmpeg/3.0.2-1.2/ffmpeg-3.0.2-1.2-windows-x86_64.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp-presets/ffmpeg/3.0.2-1.2/ffmpeg-3.0.2-1.2-linux-ppc64le.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp-presets/leptonica/1.73-1.2/leptonica-1.73-1.2.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp-presets/leptonica/1.73-1.2/leptonica-1.73-1.2-linux-x86_64.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp-preset
s/leptonica/1.73-1.2/leptonica-1.73-1.2-macosx-x86_64.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp-presets/leptonica/1.73-1.2/leptonica-1.73-1.2-windows-x86_64.jar:/Users/danlin/.m2/repository/org/bytedeco/javacpp-presets/leptonica/1.73-1.2/leptonica-1.73-1.2-linux-ppc64le.jar:/Users/danlin/.m2/repository/org/deeplearning4j/deeplearning4j-nlp/0.5.0/deeplearning4j-nlp-0.5.0.jar:/Users/danlin/.m2/repository/org/apache/directory/studio/org.apache.commons.codec/1.8/org.apache.commons.codec-1.8.jar:/Users/danlin/.m2/repository/commons-codec/commons-codec/1.8/commons-codec-1.8.jar:/Users/danlin/.m2/repository/it/unimi/dsi/dsiutils/2.2.2/dsiutils-2.2.2.jar:/Users/danlin/.m2/repository/it/unimi/dsi/fastutil/6.5.15/fastutil-6.5.15.jar:/Users/danlin/.m2/repository/com/martiansoftware/jsap/2.1/jsap-2.1.jar:/Users/danlin/.m2/repository/commons-configuration/commons-configuration/1.8/commons-configuration-1.8.jar:/Users/danlin/.m2/repository/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/Users/danlin/.m2/repository/commons-logging/commons-logging/1.1.1/commons-logging-1.1.1.jar:/Users/danlin/.m2/repository/commons-collections/commons-collections/20040616/commons-collections-20040616.jar:/Users/danlin/.m2/repository/org/cleartk/cleartk-snowball/2.0.0/cleartk-snowball-2.0.0.jar:/Users/danlin/.m2/repository/org/apache/lucene/lucene-snowball/3.0.3/lucene-snowball-3.0.3.jar:/Users/danlin/.m2/repository/org/cleartk/cleartk-util/2.0.0/cleartk-util-2.0.0.jar:/Users/danlin/.m2/repository/org/apache/uima/uimaj-core/2.5.0/uimaj-core-2.5.0.jar:/Users/danlin/.m2/repository/org/apache/uima/uimafit-core/2.0.0/uimafit-core-2.0.0.jar:/Users/danlin/.m2/repository/commons-logging/commons-logging-api/1.1/commons-logging-api-1.1.jar:/Users/danlin/.m2/repository/org/springframework/spring-core/3.1.2.RELEASE/spring-core-3.1.2.RELEASE.jar:/Users/danlin/.m2/repository/org/springframework/spring-asm/3.1.2.RELEASE/spring-asm-3.1.2.RELEASE.jar:/Users/danlin/.m2/repository/org/springframework/
spring-context/3.1.2.RELEASE/spring-context-3.1.2.RELEASE.jar:/Users/danlin/.m2/repository/org/springframework/spring-aop/3.1.2.RELEASE/spring-aop-3.1.2.RELEASE.jar:/Users/danlin/.m2/repository/aopalliance/aopalliance/1.0/aopalliance-1.0.jar:/Users/danlin/.m2/repository/org/springframework/spring-expression/3.1.2.RELEASE/spring-expression-3.1.2.RELEASE.jar:/Users/danlin/.m2/repository/org/springframework/spring-beans/3.1.2.RELEASE/spring-beans-3.1.2.RELEASE.jar:/Users/danlin/.m2/repository/org/cleartk/cleartk-type-system/2.0.0/cleartk-type-system-2.0.0.jar:/Users/danlin/.m2/repository/org/cleartk/cleartk-opennlp-tools/2.0.0/cleartk-opennlp-tools-2.0.0.jar:/Users/danlin/.m2/repository/org/apache/opennlp/opennlp-maxent/3.0.3/opennlp-maxent-3.0.3.jar:/Users/danlin/.m2/repository/org/apache/opennlp/opennlp-tools/1.5.3/opennlp-tools-1.5.3.jar:/Users/danlin/.m2/repository/net/sf/jwordnet/jwnl/1.3.3/jwnl-1.3.3.jar:/Users/danlin/.m2/repository/org/apache/opennlp/opennlp-uima/1.5.3/opennlp-uima-1.5.3.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-assets/0.8.0/dropwizard-assets-0.8.0.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-core/0.8.0/dropwizard-core-0.8.0.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-util/0.8.0/dropwizard-util-0.8.0.jar:/Users/danlin/.m2/repository/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-jackson/0.8.0/dropwizard-jackson-0.8.0.jar:/Users/danlin/.m2/repository/com/fasterxml/jackson/datatype/jackson-datatype-jdk7/2.5.1/jackson-datatype-jdk7-2.5.1.jar:/Users/danlin/.m2/repository/com/fasterxml/jackson/datatype/jackson-datatype-guava/2.5.1/jackson-datatype-guava-2.5.1.jar:/Users/danlin/.m2/repository/com/fasterxml/jackson/module/jackson-module-afterburner/2.5.1/jackson-module-afterburner-2.5.1.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-validation/0.8.0/dropwizard-validation-0.8.0.jar:/Users/danlin/.m2/repository/org/hibernate/hibernate-valid
ator/5.1.3.Final/hibernate-validator-5.1.3.Final.jar:/Users/danlin/.m2/repository/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar:/Users/danlin/.m2/repository/org/jboss/logging/jboss-logging/3.1.3.GA/jboss-logging-3.1.3.GA.jar:/Users/danlin/.m2/repository/com/fasterxml/classmate/1.0.0/classmate-1.0.0.jar:/Users/danlin/.m2/repository/org/glassfish/javax.el/3.0.0/javax.el-3.0.0.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-configuration/0.8.0/dropwizard-configuration-0.8.0.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-logging/0.8.0/dropwizard-logging-0.8.0.jar:/Users/danlin/.m2/repository/io/dropwizard/metrics/metrics-logback/3.1.0/metrics-logback-3.1.0.jar:/Users/danlin/.m2/repository/org/slf4j/jul-to-slf4j/1.7.10/jul-to-slf4j-1.7.10.jar:/Users/danlin/.m2/repository/org/slf4j/log4j-over-slf4j/1.7.10/log4j-over-slf4j-1.7.10.jar:/Users/danlin/.m2/repository/org/slf4j/jcl-over-slf4j/1.7.10/jcl-over-slf4j-1.7.10.jar:/Users/danlin/.m2/repository/org/eclipse/jetty/jetty-util/9.2.9.v20150224/jetty-util-9.2.9.v20150224.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-metrics/0.8.0/dropwizard-metrics-0.8.0.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-jersey/0.8.0/dropwizard-jersey-0.8.0.jar:/Users/danlin/.m2/repository/org/glassfish/jersey/core/jersey-server/2.16/jersey-server-2.16.jar:/Users/danlin/.m2/repository/org/glassfish/jersey/media/jersey-media-jaxb/2.16/jersey-media-jaxb-2.16.jar:/Users/danlin/.m2/repository/org/glassfish/jersey/ext/jersey-metainf-services/2.16/jersey-metainf-services-2.16.jar:/Users/danlin/.m2/repository/io/dropwizard/metrics/metrics-jersey2/3.1.0/metrics-jersey2-3.1.0.jar:/Users/danlin/.m2/repository/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.5.1/jackson-jaxrs-json-provider-2.5.1.jar:/Users/danlin/.m2/repository/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.5.1/jackson-jaxrs-base-2.5.1.jar:/Users/danlin/.m2/repository/com/fasterxml/jackson/module/jackson-mod
ule-jaxb-annotations/2.5.1/jackson-module-jaxb-annotations-2.5.1.jar:/Users/danlin/.m2/repository/org/glassfish/jersey/containers/jersey-container-servlet/2.16/jersey-container-servlet-2.16.jar:/Users/danlin/.m2/repository/org/glassfish/jersey/containers/jersey-container-servlet-core/2.16/jersey-container-servlet-core-2.16.jar:/Users/danlin/.m2/repository/org/eclipse/jetty/jetty-server/9.2.9.v20150224/jetty-server-9.2.9.v20150224.jar:/Users/danlin/.m2/repository/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/Users/danlin/.m2/repository/org/eclipse/jetty/jetty-io/9.2.9.v20150224/jetty-io-9.2.9.v20150224.jar:/Users/danlin/.m2/repository/org/eclipse/jetty/jetty-webapp/9.2.9.v20150224/jetty-webapp-9.2.9.v20150224.jar:/Users/danlin/.m2/repository/org/eclipse/jetty/jetty-xml/9.2.9.v20150224/jetty-xml-9.2.9.v20150224.jar:/Users/danlin/.m2/repository/org/eclipse/jetty/jetty-continuation/9.2.9.v20150224/jetty-continuation-9.2.9.v20150224.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-jetty/0.8.0/dropwizard-jetty-0.8.0.jar:/Users/danlin/.m2/repository/io/dropwizard/metrics/metrics-jetty9/3.1.0/metrics-jetty9-3.1.0.jar:/Users/danlin/.m2/repository/org/eclipse/jetty/jetty-servlet/9.2.9.v20150224/jetty-servlet-9.2.9.v20150224.jar:/Users/danlin/.m2/repository/org/eclipse/jetty/jetty-security/9.2.9.v20150224/jetty-security-9.2.9.v20150224.jar:/Users/danlin/.m2/repository/org/eclipse/jetty/jetty-servlets/9.2.9.v20150224/jetty-servlets-9.2.9.v20150224.jar:/Users/danlin/.m2/repository/org/eclipse/jetty/jetty-http/9.2.9.v20150224/jetty-http-9.2.9.v20150224.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-lifecycle/0.8.0/dropwizard-lifecycle-0.8.0.jar:/Users/danlin/.m2/repository/io/dropwizard/metrics/metrics-core/3.1.0/metrics-core-3.1.0.jar:/Users/danlin/.m2/repository/io/dropwizard/metrics/metrics-jvm/3.1.0/metrics-jvm-3.1.0.jar:/Users/danlin/.m2/repository/io/dropwizard/metrics/metrics-servlets/3.1.0/metrics-servlets-3.1.0.jar:/Users/danlin/.
m2/repository/io/dropwizard/metrics/metrics-json/3.1.0/metrics-json-3.1.0.jar:/Users/danlin/.m2/repository/io/dropwizard/metrics/metrics-healthchecks/3.1.0/metrics-healthchecks-3.1.0.jar:/Users/danlin/.m2/repository/net/sourceforge/argparse4j/argparse4j/0.4.4/argparse4j-0.4.4.jar:/Users/danlin/.m2/repository/org/eclipse/jetty/toolchain/setuid/jetty-setuid-java/1.0.2/jetty-setuid-java-1.0.2.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-servlets/0.8.0/dropwizard-servlets-0.8.0.jar:/Users/danlin/.m2/repository/io/dropwizard/metrics/metrics-annotation/3.1.0/metrics-annotation-3.1.0.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-views-mustache/0.8.0/dropwizard-views-mustache-0.8.0.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-views/0.8.0/dropwizard-views-0.8.0.jar:/Users/danlin/.m2/repository/com/github/spullara/mustache/java/compiler/0.8.17/compiler-0.8.17.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-views-freemarker/0.8.0/dropwizard-views-freemarker-0.8.0.jar:/Users/danlin/.m2/repository/org/nd4j/nd4j-jackson/0.5.0/nd4j-jackson-0.5.0.jar:/Users/danlin/.m2/repository/org/deeplearning4j/deeplearning4j-ui/0.5.0/deeplearning4j-ui-0.5.0.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-client/0.8.0/dropwizard-client-0.8.0.jar:/Users/danlin/.m2/repository/org/glassfish/jersey/core/jersey-client/2.16/jersey-client-2.16.jar:/Users/danlin/.m2/repository/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/Users/danlin/.m2/repository/org/glassfish/hk2/hk2-api/2.4.0-b09/hk2-api-2.4.0-b09.jar:/Users/danlin/.m2/repository/org/glassfish/hk2/hk2-utils/2.4.0-b09/hk2-utils-2.4.0-b09.jar:/Users/danlin/.m2/repository/org/glassfish/hk2/external/aopalliance-repackaged/2.4.0-b09/aopalliance-repackaged-2.4.0-b09.jar:/Users/danlin/.m2/repository/org/glassfish/hk2/external/javax.inject/2.4.0-b09/javax.inject-2.4.0-b09.jar:/Users/danlin/.m2/repository/org/glassfish/hk2/hk2-locator/2.4.0-b09/hk2-locator-2.4.0-b09.jar:/Users/danlin/.
m2/repository/org/javassist/javassist/3.18.1-GA/javassist-3.18.1-GA.jar:/Users/danlin/.m2/repository/io/dropwizard/metrics/metrics-httpclient/3.1.0/metrics-httpclient-3.1.0.jar:/Users/danlin/.m2/repository/org/apache/httpcomponents/httpclient/4.3.5/httpclient-4.3.5.jar:/Users/danlin/.m2/repository/org/apache/httpcomponents/httpcore/4.3.2/httpcore-4.3.2.jar:/Users/danlin/.m2/repository/org/glassfish/jersey/connectors/jersey-apache-connector/2.16/jersey-apache-connector-2.16.jar:/Users/danlin/.m2/repository/io/dropwizard/dropwizard-forms/0.8.0/dropwizard-forms-0.8.0.jar:/Users/danlin/.m2/repository/org/glassfish/jersey/media/jersey-media-multipart/2.16/jersey-media-multipart-2.16.jar:/Users/danlin/.m2/repository/org/glassfish/jersey/core/jersey-common/2.16/jersey-common-2.16.jar:/Users/danlin/.m2/repository/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar:/Users/danlin/.m2/repository/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.16/jersey-guava-2.16.jar:/Users/danlin/.m2/repository/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar:/Users/danlin/.m2/repository/org/jvnet/mimepull/mimepull/1.9.3/mimepull-1.9.3.jar:/Users/danlin/.m2/repository/com/google/guava/guava/19.0/guava-19.0.jar:/Users/danlin/.m2/repository/org/datavec/datavec-data-codec/0.4.0/datavec-data-codec-0.4.0.jar:/Users/danlin/.m2/repository/org/jcodec/jcodec/0.1.5/jcodec-0.1.5.jar:/Users/danlin/.m2/repository/jfree/jfreechart/1.0.13/jfreechart-1.0.13.jar:/Users/danlin/.m2/repository/jfree/jcommon/1.0.16/jcommon-1.0.16.jar:/Users/danlin/.m2/repository/org/deeplearning4j/arbiter-deeplearning4j/0.5.0/arbiter-deeplearning4j-0.5.0.jar:/Users/danlin/.m2/repository/org/deeplearning4j/arbiter-core/0.5.0/arbiter-core-0.5.0.jar:/Users/danlin/.m2/repository/args4j/args4j/2.33/args4j-2.33.jar:/Users/danlin/.m2/repository/org/deeplearning4j/deeplearning4j-ui-components/0.5.0/deeplearning4j-ui-components-0.5.0.jar:/Applications/IntelliJ IDEA 
CE.app/Contents/lib/idea_rt.jar" com.intellij.rt.execution.application.AppMain kaggle.TitanicOptimization
16/08/07 21:50:29 INFO o.r.Reflections: Reflections took 239 ms to scan 11 urls, producing 111 keys and 368 values
16/08/07 21:50:30 INFO o.d.a.s.l.m.LocalMultiLayerNetworkSaver: LocalMultiLayerNetworkSaver saving networks to local directory: arbiterExample/
INFO [2016-08-07 19:50:33,851] org.eclipse.jetty.util.log: Logging initialized @6033ms
INFO [2016-08-07 19:50:33,913] io.dropwizard.assets.AssetsBundle: Registering AssetBundle with name: assets for path /assets/*
INFO [2016-08-07 19:50:33,989] io.dropwizard.server.ServerFactory: Starting arbiter-ui
WARN [2016-08-07 19:50:34,079] org.eclipse.jetty.server.ServerConnector: Acceptors should be <= availableProcessors: ServerConnector@53ac845a{HTTP/1.1}{0.0.0.0:0}
INFO [2016-08-07 19:50:34,117] org.eclipse.jetty.setuid.SetUIDListener: Opened application@53ac845a{HTTP/1.1}{0.0.0.0:8080}
INFO [2016-08-07 19:50:34,117] org.eclipse.jetty.setuid.SetUIDListener: Opened admin@3cee53dc{HTTP/1.1}{0.0.0.0:8081}
INFO [2016-08-07 19:50:34,122] org.eclipse.jetty.server.Server: jetty-9.2.9.v20150224
INFO [2016-08-07 19:50:35,879] io.dropwizard.jersey.DropwizardResourceConfig: The following paths were found for the configured resources:
GET /results (org.deeplearning4j.arbiter.optimize.ui.resources.SummaryResultsResource)
POST /results/update (org.deeplearning4j.arbiter.optimize.ui.resources.SummaryResultsResource)
GET /config (org.deeplearning4j.arbiter.optimize.ui.resources.ConfigResource)
POST /config/update (org.deeplearning4j.arbiter.optimize.ui.resources.ConfigResource)
GET /modelResults/lastUpdate (org.deeplearning4j.arbiter.optimize.ui.resources.CandidateResultsResource)
GET /modelResults/{id} (org.deeplearning4j.arbiter.optimize.ui.resources.CandidateResultsResource)
POST /modelResults/update/{id} (org.deeplearning4j.arbiter.optimize.ui.resources.CandidateResultsResource)
GET /summary (org.deeplearning4j.arbiter.optimize.ui.resources.SummaryStatusResource)
POST /summary/update (org.deeplearning4j.arbiter.optimize.ui.resources.SummaryStatusResource)
GET /lastUpdate (org.deeplearning4j.arbiter.optimize.ui.resources.LastUpdateResource)
POST /lastUpdate/update (org.deeplearning4j.arbiter.optimize.ui.resources.LastUpdateResource)
GET /arbiter (org.deeplearning4j.arbiter.optimize.ui.ArbiterUIResource)
INFO [2016-08-07 19:50:35,886] org.eclipse.jetty.server.handler.ContextHandler: Started i.d.j.MutableServletContextHandler@10db6131{/,null,AVAILABLE}
INFO [2016-08-07 19:50:35,892] io.dropwizard.setup.AdminEnvironment: tasks =
POST /tasks/log-level (io.dropwizard.servlets.tasks.LogConfigurationTask)
POST /tasks/gc (io.dropwizard.servlets.tasks.GarbageCollectionTask)
WARN [2016-08-07 19:50:35,893] io.dropwizard.setup.AdminEnvironment:
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! THIS APPLICATION HAS NO HEALTHCHECKS. THIS MEANS YOU WILL NEVER KNOW !
! IF IT DIES IN PRODUCTION, WHICH MEANS YOU WILL NEVER KNOW IF YOU'RE !
! LETTING YOUR USERS DOWN. YOU SHOULD ADD A HEALTHCHECK FOR EACH OF YOUR !
! APPLICATION'S DEPENDENCIES WHICH FULLY (BUT LIGHTLY) TESTS IT. !
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
INFO [2016-08-07 19:50:35,912] org.eclipse.jetty.server.handler.ContextHandler: Started i.d.j.MutableServletContextHandler@2f0bfe17{/,null,AVAILABLE}
INFO [2016-08-07 19:50:35,943] org.eclipse.jetty.server.ServerConnector: Started application@53ac845a{HTTP/1.1}{0.0.0.0:8080}
INFO [2016-08-07 19:50:35,946] org.eclipse.jetty.server.ServerConnector: Started admin@3cee53dc{HTTP/1.1}{0.0.0.0:8081}
INFO [2016-08-07 19:50:35,947] org.eclipse.jetty.server.Server: Started @8129ms
INFO [2016-08-07 19:50:36,924] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: BaseOptimizationRunner: execution started
INFO [2016-08-07 19:50:40,497] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:50:40,985] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:50:41,355] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:50:41,788] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:50:42,057] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:50:42,166] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 0, score = 0.6826923076923077
INFO [2016-08-07 19:50:42,167] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: New best score: 0.6826923076923077 (first completed model)
INFO [2016-08-07 19:50:42,498] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:50:42,725] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:50:42,952] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:50:43,161] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:50:43,315] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:50:43,364] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 1, score = 0.6121794871794872
INFO [2016-08-07 19:50:43,561] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:50:43,902] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:50:44,066] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:50:44,795] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:50:45,342] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:50:45,511] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 2, score = 0.6666666666666666
INFO [2016-08-07 19:50:46,218] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:50:48,108] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:50:48,450] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:50:48,674] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:50:48,976] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:50:49,090] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 3, score = 0.6730769230769231
INFO [2016-08-07 19:50:49,330] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:50:49,542] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:50:49,710] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:50:49,866] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:50:50,487] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:50:50,538] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 4, score = 0.6121794871794872
INFO [2016-08-07 19:50:50,758] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:50:50,904] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:50:51,046] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:50:51,186] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:50:51,327] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:50:51,363] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 5, score = 0.6121794871794872
INFO [2016-08-07 19:50:51,539] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:50:51,679] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:50:51,821] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:50:51,963] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:50:52,094] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:50:52,119] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 6, score = 0.6121794871794872
INFO [2016-08-07 19:50:52,352] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:50:52,494] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:50:52,637] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:50:52,784] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:50:52,929] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:50:52,962] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 7, score = 0.6121794871794872
INFO [2016-08-07 19:50:53,160] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:50:53,304] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:50:53,469] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:50:53,612] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:50:53,762] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:50:53,793] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 8, score = 0.6121794871794872
INFO [2016-08-07 19:50:53,974] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:50:54,117] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:50:54,297] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:50:54,444] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:50:54,600] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:50:54,628] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 9, score = 0.6121794871794872
INFO [2016-08-07 19:50:54,834] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:50:54,988] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:50:55,144] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:50:55,295] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:50:55,448] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:50:55,478] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 10, score = 0.6121794871794872
INFO [2016-08-07 19:50:55,662] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:50:55,811] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:50:55,961] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:50:56,794] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:50:57,104] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:50:57,141] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 11, score = 0.592948717948718
INFO [2016-08-07 19:50:57,358] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:50:57,493] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:50:57,632] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:50:57,783] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:50:57,976] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:50:58,009] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 12, score = 0.6121794871794872
INFO [2016-08-07 19:50:58,218] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:50:58,374] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:50:58,532] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:50:58,732] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:50:58,875] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:50:58,902] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 13, score = 0.6858974358974359
INFO [2016-08-07 19:50:58,902] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: New best score: 0.6858974358974359 (prev=0.6826923076923077)
INFO [2016-08-07 19:50:59,061] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:50:59,205] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:50:59,352] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:50:59,499] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:50:59,657] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:50:59,683] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 14, score = 0.6121794871794872
INFO [2016-08-07 19:50:59,876] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:51:00,090] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:51:00,307] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:51:00,446] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:51:00,585] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:51:00,610] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 15, score = 0.6121794871794872
INFO [2016-08-07 19:51:00,789] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:51:00,941] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:51:01,150] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:51:01,314] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:51:01,464] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:51:01,491] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 16, score = 0.6121794871794872
INFO [2016-08-07 19:51:01,740] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:51:01,916] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:51:02,056] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:51:02,216] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:51:02,409] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:51:02,439] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 17, score = 0.6121794871794872
INFO [2016-08-07 19:51:02,694] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:51:02,886] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:51:03,031] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:51:03,175] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:51:03,529] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:51:03,556] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 18, score = 0.6121794871794872
INFO [2016-08-07 19:51:03,728] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:51:03,869] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:51:04,009] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:51:04,196] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:51:04,373] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:51:04,397] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 19, score = 0.6121794871794872
INFO [2016-08-07 19:51:04,618] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:51:04,798] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:51:05,212] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:51:05,517] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:51:05,697] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:51:05,726] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 20, score = 0.6121794871794872
INFO [2016-08-07 19:51:05,987] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:51:06,190] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:51:06,566] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:51:06,937] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:51:07,248] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:51:07,283] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 21, score = 0.6121794871794872
INFO [2016-08-07 19:51:07,436] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:51:07,583] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:51:07,804] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:51:08,042] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:51:08,214] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:51:08,292] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 22, score = 0.6923076923076923
INFO [2016-08-07 19:51:08,292] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: New best score: 0.6923076923076923 (prev=0.6858974358974359)
INFO [2016-08-07 19:51:08,482] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:51:08,836] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:51:09,248] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:51:09,591] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:51:09,799] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:51:09,825] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 23, score = 0.6666666666666666
INFO [2016-08-07 19:51:10,225] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:51:10,594] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:51:10,780] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:51:11,063] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:51:11,332] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:51:11,372] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 24, score = 0.6121794871794872
INFO [2016-08-07 19:51:11,901] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:51:12,571] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:51:13,045] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:51:13,527] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:51:13,720] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:51:13,752] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 25, score = 0.6923076923076923
INFO [2016-08-07 19:51:13,938] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:51:14,113] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:51:14,335] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:51:14,560] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:51:14,717] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:51:14,752] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 26, score = 0.6923076923076923
INFO [2016-08-07 19:51:15,042] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:51:15,257] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:51:15,410] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:51:15,574] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:51:15,725] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:51:15,755] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 27, score = 0.6121794871794872
INFO [2016-08-07 19:51:16,239] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:51:16,470] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:51:16,728] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:51:16,918] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:51:17,113] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:51:17,146] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 28, score = 0.657051282051282
INFO [2016-08-07 19:51:17,438] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 1, number of batches completed 116
INFO [2016-08-07 19:51:17,658] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 2, number of batches completed 116
INFO [2016-08-07 19:51:17,808] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 3, number of batches completed 116
INFO [2016-08-07 19:51:17,957] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 4, number of batches completed 116
INFO [2016-08-07 19:51:18,099] org.deeplearning4j.datasets.iterator.MultipleEpochsIterator: Epoch 5, number of batches completed 116
INFO [2016-08-07 19:51:18,122] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Completed task 29, score = 0.6121794871794872
INFO [2016-08-07 19:51:18,130] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: BaseOptimizationRunner global termination condition hit: MaxCandidatesCondition(30)
INFO [2016-08-07 19:51:18,130] org.deeplearning4j.arbiter.optimize.runner.BaseOptimizationRunner: Optimization runner: execution complete
Best score: 0.6923076923076923
Index of model with best score: 22
Number of configurations evaluated: 30
Configuration of best model:
{
"backprop" : true,
"backpropType" : "Standard",
"confs" : [ {
"extraArgs" : [ 0 ],
"l1ByParam" : {
"b" : 0.0,
"W" : 0.0
},
"l2ByParam" : {
"b" : 0.0,
"W" : 1.0E-4
},
"layer" : {
"dense" : {
"activationFunction" : "relu",
"adamMeanDecay" : "NaN",
"adamVarDecay" : "NaN",
"biasInit" : 0.0,
"biasL1" : 0.0,
"biasL2" : 0.0,
"biasLearningRate" : 0.021107968650120837,
"dist" : null,
"dropOut" : 0.0,
"gradientNormalization" : "None",
"gradientNormalizationThreshold" : 1.0,
"l1" : 0.0,
"l2" : 1.0E-4,
"layerName" : null,
"learningRate" : 0.021107968650120837,
"learningRateSchedule" : null,
"momentum" : "NaN",
"momentumSchedule" : null,
"nin" : 12,
"nout" : 8,
"rho" : "NaN",
"rmsDecay" : "NaN",
"updater" : "SGD",
"weightInit" : "XAVIER"
}
},
"leakyreluAlpha" : 0.0,
"learningRateByParam" : {
"b" : 0.021107968650120837,
"W" : 0.021107968650120837
},
"learningRatePolicy" : "None",
"lrPolicyDecayRate" : "NaN",
"lrPolicyPower" : "NaN",
"lrPolicySteps" : "NaN",
"maxNumLineSearchIterations" : 5,
"miniBatch" : true,
"minimize" : true,
"numIterations" : 1,
"optimizationAlgo" : "STOCHASTIC_GRADIENT_DESCENT",
"seed" : 1470599467289,
"stepFunction" : null,
"useDropConnect" : false,
"useRegularization" : true,
"variables" : [ "W", "b" ]
}, {
"extraArgs" : [ 0 ],
"l1ByParam" : {
"b" : 0.0,
"W" : 0.0
},
"l2ByParam" : {
"b" : 0.0,
"W" : 1.0E-4
},
"layer" : {
"dense" : {
"activationFunction" : "relu",
"adamMeanDecay" : "NaN",
"adamVarDecay" : "NaN",
"biasInit" : 0.0,
"biasL1" : 0.0,
"biasL2" : 0.0,
"biasLearningRate" : 0.021107968650120837,
"dist" : null,
"dropOut" : 0.0,
"gradientNormalization" : "None",
"gradientNormalizationThreshold" : 1.0,
"l1" : 0.0,
"l2" : 1.0E-4,
"layerName" : null,
"learningRate" : 0.021107968650120837,
"learningRateSchedule" : null,
"momentum" : "NaN",
"momentumSchedule" : null,
"nin" : 8,
"nout" : 4,
"rho" : "NaN",
"rmsDecay" : "NaN",
"updater" : "SGD",
"weightInit" : "XAVIER"
}
},
"leakyreluAlpha" : 0.0,
"learningRateByParam" : {
"b" : 0.021107968650120837,
"W" : 0.021107968650120837
},
"learningRatePolicy" : "None",
"lrPolicyDecayRate" : "NaN",
"lrPolicyPower" : "NaN",
"lrPolicySteps" : "NaN",
"maxNumLineSearchIterations" : 5,
"miniBatch" : true,
"minimize" : true,
"numIterations" : 1,
"optimizationAlgo" : "STOCHASTIC_GRADIENT_DESCENT",
"seed" : 1470599467289,
"stepFunction" : null,
"useDropConnect" : false,
"useRegularization" : true,
"variables" : [ "W", "b" ]
}, {
"extraArgs" : [ ],
"l1ByParam" : {
"b" : 0.0,
"W" : 0.0
},
"l2ByParam" : {
"b" : 0.0,
"W" : 1.0E-4
},
"layer" : {
"output" : {
"activationFunction" : "softmax",
"adamMeanDecay" : "NaN",
"adamVarDecay" : "NaN",
"biasInit" : 0.0,
"biasL1" : 0.0,
"biasL2" : 0.0,
"biasLearningRate" : 0.021107968650120837,
"customLossFunction" : null,
"dist" : null,
"dropOut" : 0.0,
"gradientNormalization" : "None",
"gradientNormalizationThreshold" : 1.0,
"l1" : 0.0,
"l2" : 1.0E-4,
"layerName" : null,
"learningRate" : 0.021107968650120837,
"learningRateSchedule" : null,
"lossFunction" : "NEGATIVELOGLIKELIHOOD",
"momentum" : "NaN",
"momentumSchedule" : null,
"nin" : 4,
"nout" : 2,
"rho" : "NaN",
"rmsDecay" : "NaN",
"updater" : "SGD",
"weightInit" : "XAVIER"
}
},
"leakyreluAlpha" : 0.0,
"learningRateByParam" : {
"b" : 0.021107968650120837,
"W" : 0.021107968650120837
},
"learningRatePolicy" : "None",
"lrPolicyDecayRate" : "NaN",
"lrPolicyPower" : "NaN",
"lrPolicySteps" : "NaN",
"maxNumLineSearchIterations" : 5,
"miniBatch" : true,
"minimize" : true,
"numIterations" : 1,
"optimizationAlgo" : "STOCHASTIC_GRADIENT_DESCENT",
"seed" : 1470599467289,
"stepFunction" : null,
"useDropConnect" : false,
"useRegularization" : true,
"variables" : [ "W", "b" ]
} ],
"inputPreProcessors" : { },
"pretrain" : false,
"redistributeParams" : false,
"tbpttBackLength" : 20,
"tbpttFwdLength" : 20
}
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>dl4j-lab</artifactId>
<groupId>com.danlin</groupId>
<version>1.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dl4j</artifactId>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.nd4j</groupId>
<artifactId>nd4j-native-platform</artifactId>
<version>${nd4j.version}</version>
</dependency>
<dependency>
<groupId>org.nd4j</groupId>
<artifactId>nd4j-cuda-7.5-platform</artifactId>
<version>${nd4j.version}</version>
</dependency>
<!--<dependency>-->
<!--<groupId>com.fasterxml.jackson.core</groupId>-->
<!--<artifactId>jackson-databind</artifactId>-->
<!--<version>2.4.4</version>-->
<!--</dependency>-->
</dependencies>
</dependencyManagement>
<dependencies>
<!-- ND4J backend. You need one in every DL4J project. Normally define artifactId as either "nd4j-native-platform" or "nd4j-cuda-7.5-platform" -->
<dependency>
<groupId>org.nd4j</groupId>
<artifactId>${nd4j.backend}</artifactId>
</dependency>
<!-- Core DL4J functionality -->
<dependency>
<groupId>org.deeplearning4j</groupId>
<artifactId>deeplearning4j-core</artifactId>
<version>${dl4j.version}</version>
</dependency>
<dependency>
<groupId>org.deeplearning4j</groupId>
<artifactId>deeplearning4j-nlp</artifactId>
<version>${dl4j.version}</version>
</dependency>
<!-- deeplearning4j-ui is used for HistogramIterationListener + visualization: see http://deeplearning4j.org/visualization -->
<dependency>
<groupId>org.deeplearning4j</groupId>
<artifactId>deeplearning4j-ui</artifactId>
<version>${dl4j.version}</version>
</dependency>
<!-- Force guava versions for using UI/HistogramIterationListener -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<!-- datavec-data-codec: used only in video example for loading video data -->
<dependency>
<artifactId>datavec-data-codec</artifactId>
<groupId>org.datavec</groupId>
<version>${datavec.version}</version>
</dependency>
        <!-- Used in the feedforward/classification/MLP* and feedforward/regression/RegressionMathFunctions examples -->
<dependency>
<groupId>jfree</groupId>
<artifactId>jfreechart</artifactId>
<version>${jfreechart.version}</version>
</dependency>
<!-- Arbiter: used for hyperparameter optimization examples -->
<dependency>
<groupId>org.deeplearning4j</groupId>
<artifactId>arbiter-deeplearning4j</artifactId>
<version>${arbiter.version}</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>${exec-maven-plugin.version}</version>
<executions>
<execution>
<goals>
<goal>exec</goal>
</goals>
</execution>
</executions>
<configuration>
<executable>java</executable>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>${maven-shade-plugin.version}</version>
<configuration>
<shadedArtifactAttached>true</shadedArtifactAttached>
<shadedClassifierName>${shadedClassifier}</shadedClassifierName>
<createDependencyReducedPom>true</createDependencyReducedPom>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>org/datanucleus/**</exclude>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<transformers>
<transformer
implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>reference.conf</resource>
</transformer>
<transformer
implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
<transformer
implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
</transformer>
</transformers>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.5.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
</build>
</project>
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.danlin</groupId>
<artifactId>dl4j-lab</artifactId>
<packaging>pom</packaging>
<version>1.0-SNAPSHOT</version>
<modules>
<module>datavec</module>
<module>dl4j</module>
</modules>
<properties>
<nd4j.backend>nd4j-native-platform</nd4j.backend>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<shadedClassifier>bin</shadedClassifier>
<java.version>1.8</java.version>
<nd4j.version>0.5.0</nd4j.version>
<dl4j.version>0.5.0</dl4j.version>
<datavec.version>0.5.0</datavec.version>
<arbiter.version>0.5.0</arbiter.version>
<guava.version>19.0</guava.version>
<jfreechart.version>1.0.13</jfreechart.version>
<maven-shade-plugin.version>2.4.3</maven-shade-plugin.version>
<exec-maven-plugin.version>1.4.0</exec-maven-plugin.version>
<scala.binary.version>2.10</scala.binary.version>
</properties>
</project>