Created
July 27, 2015 00:02
-
-
Save pqnelson/10e832edffa22fbe1879 to your computer and use it in GitHub Desktop.
A simple linear-threshold neural network, with an XOR neural net factory.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
(ns user.neural-network | |
(:import [java.lang.reflect Array])) | |
;; So far, this handles linear threshold nodes, specifically examining
;; Minsky and Papert's XOR dilemma. The make-xor-network is a factory function
;; which produces such a network.
;;
;; It's fairly optimized, using deftypes and whatnot. One could add an
;; activation function for further generality, or add a logistic function
;; and add some training routines to further improve things.
(defn- sum
  "Totals the numbers in coll; with two args, v seeds the total."
  ([coll] (apply + coll))
  ([v coll] (apply + v coll)))
(defn- unit-step
  "Heaviside step used as the activation: 0 when x is negative, else 1."
  [x]
  (cond
    (neg? x) 0
    :else    1))
;; Contract for a mutable network layer of M nodes over N inputs.
(definterface ILayer
  (getWeights [])                ; the M x (N+1) weight matrix
  (setWeight [node-idx idx v])   ; write weight `idx` of node `node-idx`
  (getOutputs [])                ; double[M] of the latest activations
  (setOutput [idx v])            ; overwrite one activation slot
  (getBias [])                   ; scalar bias input shared by all nodes
  (setBias [b])                  ; mutate the shared bias scalar
  (netInput [node-idx inputs])   ; weighted sum of `inputs` for one node
  (propagate [inputs]))          ; forward pass: refresh all M outputs
;; A fully-connected layer of M linear-threshold nodes over N inputs.
;; Each weight row has N+1 entries: columns 0..N-1 multiply the inputs,
;; and column N is that node's bias weight (multiplied by `bias` during
;; propagation -- see the `propagate` implementation below).
(deftype Layer [outputs ; an array of M doubles
                weights ; an array of M arrays of N+1 doubles
                ^:volatile-mutable bias  ; scalar bias input, mutable via setBias
                output-fn]               ; activation applied to each net input
  ILayer
  (getWeights [_] weights)
  ;; Reflective java.lang.reflect.Array/set: writes v into row node-idx
  ;; at column idx (widening v to double as needed).
  (setWeight [this node-idx idx v] (. Array (set (aget weights node-idx)
                                                 idx v)))
  (getOutputs [_] outputs)
  (setOutput [this idx v] (. Array (set outputs idx v)))
  (getBias [_] bias)
  (setBias [this b] (set! bias b))
  ;; dot-product of weights and inputs; iterates only k < N, so the
  ;; bias column (index N) is deliberately excluded here.
  (netInput [_ node-idx inputs] (sum
                                  (for [k (range (alength inputs))]
                                    (* (aget inputs k)
                                       (aget weights node-idx k)))))
  ;; set outputs based on array of N inputs: for each node k,
  ;; output[k] = output-fn(bias * weights[k][N] + netInput(k, inputs)).
  ;; Rejects inputs whose length does not match the weight-row width.
  (propagate [this inputs]
    (if (not= (alength (aget weights 0))
              (inc (alength inputs)))
      (throw (ArrayIndexOutOfBoundsException.))
      (doseq [k (range (alength outputs))]
        (.setOutput
          this
          k
          (output-fn
            (+ (* bias
                  ;; weights[k][N] is the bias weight for node k
                  (aget weights k (alength inputs)))
               (.netInput this k inputs))))))))
(defn make-layer
  "Constructs a Layer of `number-output-nodes` nodes, each wired to
  `number-input-nodes` inputs plus one extra bias-weight slot. All
  outputs and weights start zeroed; the bias scalar starts at 0.0."
  [number-output-nodes number-input-nodes output-fn]
  (let [activations (make-array Double/TYPE number-output-nodes)
        weight-rows (make-array Double/TYPE
                                number-output-nodes
                                (inc number-input-nodes))]
    (Layer. activations weight-rows 0.0 output-fn)))
(defn make-xor-network
  "Factory for a 2-input XOR network of linear-threshold layers.
  Returns a function (fn [x1 x2] ...) of two 0/1 inputs yielding 0 or 1.
  Layer l1 fires when x1 + x2 >= 0.4 (an OR-like gate), l2 fires when
  x1 + x2 >= 1.2 (an AND-like gate), and l3 combines them so the result
  is 1 exactly when l1 fires but l2 does not -- i.e. XOR."
  []
  (let [l1 (doto (make-layer 1 2 unit-step)
             (.setBias 0.4)
             (.setWeight 0 0 1)
             (.setWeight 0 1 1)
             (.setWeight 0 2 -1))
        l2 (doto (make-layer 1 2 unit-step)
             (.setBias 1.2)
             (.setWeight 0 0 1)
             (.setWeight 0 1 1)
             (.setWeight 0 2 -1))
        l3 (doto (make-layer 1 2 unit-step)
             (.setBias 0.5)
             (.setWeight 0 0 0.6)
             (.setWeight 0 1 -0.2)
             (.setWeight 0 2 -1))]
    (fn [x1 x2]
      ;; Both hidden layers see the same raw input; build the array once
      ;; instead of twice per call.
      (let [input (into-array Double/TYPE [x1 x2])]
        (.propagate l1 input)
        (.propagate l2 input)
        (.propagate l3 (into-array Double/TYPE
                                   [(aget (.getOutputs l1) 0)
                                    (aget (.getOutputs l2) 0)]))
        (aget (.getOutputs l3) 0)))))
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment