DL4J MLPMnistSingleLayerExample with ABCL and JSS
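;;;; A Common Lisp port of Deeplearning4j's MLPMnistSingleLayerExample,
;;;; running on ABCL with the JSS contrib for Java interop.  The Maven
;;;; artifacts declared below are fetched by the abcl-asdf contrib, so a
;;;; working Maven setup is assumed to be available.  Loading this file in an
;;;; ABCL REPL defines the :dl4j system, trains the network, and prints the
;;;; evaluation.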
(in-package :asdf)
(defsystem :dl4j
  :defsystem-depends-on (:abcl-asdf)
  :components ((:mvn "org.deeplearning4j/deeplearning4j-core" :version "0.8.0")
               (:mvn "org.nd4j/nd4j-native" :version "0.8.0")))
(asdf:load-system :dl4j)
(in-package :jss)
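;; MNIST data iterators, as in the original Java example: batch size 128,
;; a boolean train/test flag, and RNG seed 123.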
(defparameter *mnist-train* (new 'MnistDataSetIterator 128 +true+ 123))
(defparameter *mnist-test* (new 'MnistDataSetIterator 128 +false+ 123))
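;; The network is the same single-hidden-layer MLP as the Java example:
;; 784 inputs (28x28 pixels) -> 1000 ReLU units -> 10 softmax outputs,
;; trained with SGD plus Nesterov momentum and L2 regularization.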
(format t "Build model....~%")
(defparameter *conf*
  (let* ((conf (new 'NeuralNetConfiguration$Builder))
         (denselayer (new 'DenseLayer$Builder))
         (output (new 'OutputLayer$Builder)))
    ;; make dense layer
    (#"nIn" denselayer (* 28 28))
    (#"nOut" denselayer 1000)
    (#"activation" denselayer (#"valueOf" 'org.nd4j.linalg.activations.Activation "RELU"))
    (#"weightInit" denselayer (#"valueOf" 'org.deeplearning4j.nn.weights.WeightInit "XAVIER"))
    ;; make output layer
    (#"nIn" output 1000)
    (#"nOut" output 10)
    (#"activation" output (#"valueOf" 'org.nd4j.linalg.activations.Activation "SOFTMAX"))
    (#"weightInit" output (#"valueOf" 'org.deeplearning4j.nn.weights.WeightInit "XAVIER"))
    ;; DL4J documentation is wrong
    (#"lossFunction" output
     (#"valueOf" 'LossFunctions$LossFunction "NEGATIVELOGLIKELIHOOD"))
    ;; make conf
    (#"seed" conf 123)
    (#"optimizationAlgo" conf (#"valueOf" 'org.deeplearning4j.nn.api.OptimizationAlgorithm
                               "STOCHASTIC_GRADIENT_DESCENT"))
    (#"iterations" conf 1)
    (#"learningRate" conf 0.006)
    (#"updater" conf (#"valueOf" 'org.deeplearning4j.nn.conf.Updater "NESTEROVS"))
    (#"momentum" conf 0.9)
    (#"regularization" conf +true+)
    (#"l2" conf 1e-4)
    (let ((listbuilder (#"list" conf)))
      (#"layer" listbuilder 0 (#"build" denselayer))
      (#"layer" listbuilder 1 (#"build" output))
      (#"pretrain" listbuilder +false+)
      (#"build" listbuilder))))
(defparameter *model* (new 'MultiLayerNetwork *conf*))
(#"init" *model*)
;; not sure why this doesn't work
;; (#"setListeners" *model* (new 'ScoreIterationListener 1))
(format t "Train model....~%")
(dotimes (epoch 15)
  (#"fit" *model* *mnist-train*)
  (format t "End of training epoch ~a, current model score: ~a~%"
          (1+ epoch)
          (#"score" *model*)))
(format t "Evaluate model....~%")
(defparameter *eval* (new 'Evaluation 10))
(loop while (#"hasNext" *mnist-test*)
      do (let* ((next (#"next" *mnist-test*))
                (output (#"output" *model* (#"getFeatureMatrix" next))))
           (#"eval" *eval* (#"getLabels" next) output)))
(format t "~a~%****************Example finished********************" (#"stats" *eval*))