@no-defun-allowed
Created March 17, 2019 07:24
A deeper neural network using Petalisp.
(ql:quickload :petalisp.examples)
(use-package '(:petalisp :petalisp.examples.linear-algebra))
;; We use this internal function in our hacked-up TRANSPOSE and MATMUL below.
(import 'petalisp.examples.linear-algebra::coerce-to-matrix)
(declaim (optimize (speed 3) (safety 1) (debug 0)))
;; These modified function definitions produce the ranges [0, n - 1]
;; instead of [1, n], which makes handling their results alongside
;; already-computed arrays much easier.
(defun transpose (x)
  (reshape
   (coerce-to-matrix x)
   (τ (m n) ((1- n) (1- m)))))
(defun matmul (A B)
  (β #'+
     (α #'*
        (reshape (coerce-to-matrix A) (τ (m n) (n (1- m) 0)))
        (reshape (coerce-to-matrix B) (τ (n k) (n 0 (1- k)))))))
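;; Not part of the original gist: a quick REPL sketch of how TRANSPOSE and
;; MATMUL above are meant to be used. The exact printed representation
;; depends on Petalisp's lazy-array machinery, so treat the results as
;; illustrative rather than verified output.
;; (compute (transpose #2a((1.0 2.0 3.0) (4.0 5.0 6.0)))) ; => a 3x2 matrix
;; (compute (matmul #2a((1.0 2.0) (3.0 4.0))
;;                  #2a((1.0) (1.0))))                    ; => a 2x1 matrix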
;; The sigmoid function and its derivative; D-NONLIN takes the sigmoid's
;; *output* y and returns y * (1 - y).
(defun nonlin (x)
  (the single-float (/ (1+ (exp (- x))))))
(defun d-nonlin (y)
  (the single-float (* y (- 1 y))))
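;; Not part of the original gist: two spot checks of NONLIN and D-NONLIN,
;; using only standard Common Lisp arithmetic.
;; (nonlin 0.0)   ; => 0.5, since 1 / (1 + e^0) = 1/2
;; (d-nonlin 0.5) ; => 0.25, since 0.5 * (1 - 0.5) = 0.25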
;; FORWARD pushes INPUT through LAYER and applies the sigmoid; DERIV
;; evaluates the sigmoid derivative elementwise and forces it with COMPUTE.
(defun forward (layer input)
  (α #'nonlin (matmul input layer)))
(defun deriv (input)
  (compute (α #'d-nonlin input)))
;; This is a floaty equivalent of f(x, y, z) := x XOR y.
(defvar *x* #2a((0.0 0.0 1.0)
                (0.0 1.0 1.0)
                (1.0 0.0 1.0)
                (1.0 1.0 1.0)
                (1.0 0.0 0.0)))
(defvar *y* #2a((0.0) (1.0) (1.0) (0.0) (1.0)))
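;; Not part of the original gist: the dataset row by row, for reference.
;; (0, 0, 1) -> 0    (0, 1, 1) -> 1    (1, 0, 1) -> 1
;; (1, 1, 1) -> 0    (1, 0, 0) -> 1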
(defun random-array (size)
  "Create an array of single-floats with dimensions SIZE. Values are
initialized in the half-open interval [-1.0, 1.0)."
  (let* ((array (make-array size :element-type 'single-float))
         (ref (make-array (reduce #'* size) :displaced-to array
                          :element-type 'single-float)))
    (dotimes (n (reduce #'* size) array)
      (setf (aref ref n)
            (1- (random 2.0))))))
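;; Not part of the original gist: RANDOM-ARRAY in action. The values vary
;; with the random state; only the shape and element type are deterministic.
;; (random-array '(2 3)) ; => a 2x3 array of single-floats in [-1.0, 1.0)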
(defvar *syn0* (random-array '(3 4))
  "Our first layer in the neural network, taking 3 inputs and
emitting 4 hidden values.")
(defvar *syn1* (random-array '(4 1))
  "Our second layer in the neural network, taking 4 hidden values
and emitting 1 output.")
</defvar*>
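;; Not part of the original gist: the shapes as data flows through the
;; network, given the 5x3 input *X* above.
;;   *x* (5x3) . *syn0* (3x4) -> l1 (5x4)
;;   l1  (5x4) . *syn1* (4x1) -> l2 (5x1), compared against *y* (5x1)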
(defun train (&optional (n 60000))
  "Train the network for N epochs.
On my laptop, we are able to perform just under 1,000 epochs a second.
For reference, we can get about 26,500 epochs a second using numpy."
  (dotimes (j n)
    (let* ((l0 *x*)
           (l1 (forward *syn0* l0))
           (l2 (forward *syn1* l1))
           ;; Backpropagation: error and delta for the output layer,
           ;; then for the hidden layer.
           (l2-error (α #'- *y* l2))
           (l2-delta (α #'* l2-error (deriv l2)))
           (l1-error (matmul l2-delta (transpose *syn1*)))
           (l1-delta (α #'* l1-error (deriv l1)))
           ;; Weight updates (implicit learning rate of 1).
           (new-syn1 (α #'+ *syn1* (matmul (transpose l1) l2-delta)))
           (new-syn0 (α #'+ *syn0* (matmul (transpose l0) l1-delta))))
      (when (zerop (mod j 1000))
        (format t "Epoch ~d, loss ~d~%"
                j
                (/ (compute (β #'+ (β #'+ (α #'abs l2-error))))
                   (array-dimension *x* 0))))
      (setf *syn1* (compute new-syn1))
      (setf *syn0* (compute new-syn0)))))
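;; Not part of the original gist: a hedged sketch of running the network.
;; TRAIN mutates *SYN0* and *SYN1* in place, so predictions afterwards
;; should sit close to *Y*; the exact numbers depend on the random
;; initial weights.
;; (train)
;; (compute (forward *syn1* (forward *syn0* *x*)))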