Skip to content

Instantly share code, notes, and snippets.

@no-defun-allowed
Created April 19, 2019 02:21
Show Gist options
  • Save no-defun-allowed/bfff11299fe1bbeecb02fdbb5959d7e8 to your computer and use it in GitHub Desktop.
Save no-defun-allowed/bfff11299fe1bbeecb02fdbb5959d7e8 to your computer and use it in GitHub Desktop.
A half-baked neural network library using Petalisp.
(ql:quickload :petalisp.examples)
(use-package '(:petalisp :petalisp.examples.linear-algebra))
;; we use this internal function in our hack over transpose and matmul.
(import 'petalisp.examples.linear-algebra::coerce-to-matrix)
(declaim (optimize (speed 3) (safety 1) (debug 3)))
;; I use these modified function definitions to produce the ranges
;; [0, n - 1] instead of [1, n], which makes handling them alongside
;; already-computed arrays much easier.
(defun transpose (x)
"Return the transpose of X (coerced to a Petalisp matrix),
with zero-based result ranges."
(let ((matrix (coerce-to-matrix x)))
(reshape matrix (τ (m n) ((1- n) (1- m))))))
(defun matmul (a b)
"Matrix product of A and B via Petalisp: broadcast both operands over a
shared contraction axis N, multiply elementwise, then reduce over N.
Result ranges are zero-based."
(let ((lhs (reshape (coerce-to-matrix a) (τ (m n) (n (1- m) 0))))
(rhs (reshape (coerce-to-matrix b) (τ (n k) (n 0 (1- k))))))
(β #'+ (α #'* lhs rhs))))
(declaim (inline nonlin d-nonlin))
;; The sigmoid activation function and its derivative.
(defun nonlin (x)
"Logistic sigmoid of X.  X is clamped to [-60, 60] first so that EXP
cannot overflow a single-float."
(let ((clamped (cond ((> x 60.0) 60.0)
((< x -60.0) -60.0)
(t x))))
(/ 1 (+ 1 (exp (- clamped))))))
(defun d-nonlin (y)
"Derivative of the sigmoid, expressed in terms of the sigmoid's
output Y rather than its input: y * (1 - y)."
(let ((complement (- 1 y)))
(* y complement)))
#|
;; the rectified linear-unit function and its derivative.
(defun nonlin (x)
(if (> x 0.0) x 0.0))
(defun d-nonlin (y)
(if (> y 0.0) 1.0 0.0))
|#
#| the hyperbolic tangent and its derivative.
(defun nonlin (x)
(tanh x))
(defun d-nonlin (y)
(- 1 (* y y)))
|#
(defun forward (layer input)
"Push INPUT through one dense layer: multiply by the weight matrix
LAYER, then apply NONLIN elementwise."
(let ((activation (matmul input layer)))
(α #'nonlin activation)))
(defun deriv (input)
"Elementwise activation derivative of INPUT (an array of activations)."
(α #'d-nonlin input))
(defun random-array (size &optional (magnitude 1.0))
"Create an array of single-floats of dimensions SIZE, which may be a
dimension list or a single non-negative integer.  Values are uniformly
distributed in (-MAGNITUDE, MAGNITUDE]."
;; Compute the element count once; the original called (reduce #'* size)
;; in the loop bound, which errors when SIZE is a plain integer.
(let* ((total (if (listp size)
(reduce #'* size)
size))
(array (make-array size :element-type 'single-float))
;; Flat displaced view so multi-dimensional arrays fill in one loop.
(ref (make-array total
:displaced-to array :element-type 'single-float)))
(dotimes (n total array)
(setf (aref ref n)
;; Subtract from MAGNITUDE (not a hard-coded 1.0) so the
;; range really is (-MAGNITUDE, MAGNITUDE] as documented.
(- magnitude (random (* 2.0 magnitude)))))))
(defun forward-propogate (layers input)
"Feed INPUT through LAYERS in order.  Returns two values: the final
activation, and the list of every activation (final first, INPUT last)."
(let ((history (list input)))
(dolist (layer layers)
(push (forward layer (first history)) history))
(values (first history) history)))
(defun back-propogate (layers inputs outputs &key (rate 1.0) (last-changes '()) (last-change-memory 0.25))
"Run one backpropagation step over LAYERS for the batch INPUTS -> OUTPUTS.
RATE scales every weight delta.  LAST-CHANGES (pass the CHANGES value
returned by the previous call) and LAST-CHANGE-MEMORY implement a
momentum term.  Returns (VALUES NEW-LAYERS CHANGES), both ordered the
same way as LAYERS.  The results are lazy Petalisp arrays; callers are
expected to COMPUTE them (as TRAIN does)."
(assert (not (null layers)))
;; OUTPUTS* is the network's prediction; HISTORY holds every layer's
;; activation, most recent first, with the raw input last.
(multiple-value-bind (outputs* history)
(forward-propogate layers inputs)
;; We walk the layers back-to-front, so reverse LAYERS and the momentum
;; terms to match that order.
(let ((layers (reverse layers))
(new-layers '())
(changes '())
(last-changes (reverse last-changes)))
;; Output layer: error is (target - prediction); delta scales the error
;; by the learning rate and the activation derivative.
(let* ((first-error (α #'- outputs outputs*))
(first-delta (α #'* rate first-error (deriv (pop history))))
;; Weight change: preceding activation (transposed) times delta.
(first-change (matmul (transpose (first history)) first-delta)))
;; Momentum: blend in a fraction of the previous step's change.
(unless (null last-changes)
(setf first-change (α #'+ first-change (α #'* (pop last-changes) last-change-memory))))
(push (α #'+ first-change (first layers))
new-layers)
(push first-change changes)
;; Hidden layers, walking from the output side toward the input.
(let ((error first-error)
(delta first-delta)
(last-layer (first layers)))
(dolist (layer (rest layers))
(assert (not (null history)))
(let ((point (pop history))
(last-change (pop last-changes)))
;; Propagate the error backwards through the layer we just
;; processed, then form this layer's delta from it.
(setf error (matmul delta (transpose last-layer))
delta (α #'* rate error (deriv point)))
(let ((layer-change (matmul (transpose (first history)) delta)))
;; Momentum term for this layer, if a previous change exists.
(unless (null last-change)
(setf layer-change (α #'+ layer-change (α #'* last-change last-change-memory))))
(push (α #'+ layer-change layer) new-layers)
(push layer-change changes))
(setf last-layer layer)))))
;; Pushing while iterating back-to-front leaves both result lists in
;; the same front-to-back order as LAYERS.
(values new-layers changes))))
(defun train (layers inputs outputs epochs &key (rate 0.25) (last-rate 0.05) (status-every 500) last)
"Train LAYERS on the batch INPUTS -> OUTPUTS for EPOCHS iterations of
backpropagation.  RATE is the learning rate, LAST-RATE the momentum
factor, and a mean-squared-error status line is printed to *DEBUG-IO*
every STATUS-EVERY epochs.  LAST may seed the momentum state from a
previous call.  Returns (VALUES LAYERS LAST) for resuming training."
(declare (fixnum epochs status-every))
(let ((last last))
(dotimes (n epochs)
(when (zerop (mod n status-every))
(format *debug-io* "calculating loss...")
(format *debug-io* "time: ~10,2f error: ~11,6e epochs: ~6d/~6d~%"
(float (/ (get-internal-real-time) internal-time-units-per-second))
;; Mean squared error over the batch.  The inner application
;; was written as (a #'- ...), a typo for the Petalisp α used
;; everywhere else in this file; `a` is unbound and would
;; signal an error the first time a status line was printed.
(/ (compute (β #'+ (β #'+ (α #'expt (α #'- (forward-propogate layers inputs)
outputs)
2))))
(array-dimension inputs 0)
(array-dimension outputs 1))
n
epochs))
;; One backprop step; COMPUTE forces the lazy results so the graphs
;; do not grow without bound across epochs.
(multiple-value-bind (layers* last*)
(back-propogate layers inputs outputs
:rate rate
:last-changes last
:last-change-memory last-rate)
(setf layers (mapcar #'compute layers*)
last (mapcar #'compute last*))))
(values layers last)))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment