@matthieu
Created June 28, 2010 01:17

package spike

import util.Random

class Neuron(nm: String, ns: List[Neuron], rnd: Random) {
  // Scaled tanh activation f(x) = a * tanh(b * x); a = 1.7159 and
  // b = 2/3 are the constants recommended in LeCun's "Efficient BackProp".
  val (a, b, rate) = (1.7159, 2.0 / 3.0, 0.1)
  val dendrites = connect(ns)
  val name = nm
  // need to remember output and gather error for training
  var (out, error, bias) = (0.0, 0.0, rnd.nextDouble() * 2.0 - 1.0)

  // Weighted sum of the lower layer's outputs plus this neuron's bias.
  def input = {
    error = 0.0 // error reset on new input
    dendrites.map(_.input).sum + bias
  }

  def output = {
    out = a * tanh(b * input)
    out
  }

  def expectation(expected: Double) = updateError(expected - out)

  // Accumulate the error signal and push it down to the lower layer,
  // scaled by each connection's weight (derivatives are applied in adjust).
  def updateError(delta: Double): Unit = {
    error += delta
    dendrites.foreach(_.updateError(delta))
  }

  def adjust: Unit = {
    val adjustment = error * deriv(out) * rate
    dendrites.foreach(_.adjust(adjustment))
    bias += adjustment
  }

  override def toString = name + "[" + dendrites.mkString(",") + "]\n "

  // Derivative of the output function, written in terms of the output:
  // for f(x) = a * tanh(b * x), tanh(b * x) = out / a, hence
  // f'(x) = a * b * (1 - tanh(b * x)^2) = (b / a) * (a^2 - out^2).
  private def deriv(out: Double) = b / a * (a * a - out * out)

  // Each connection starts with a small random weight, symmetric around
  // zero and scaled by 1/sqrt(fan-in) to keep early activations in the
  // near-linear range of tanh.
  private def connect(ns: List[Neuron]): List[Dendrite] =
    ns.map(n => new Dendrite(n, (rnd.nextDouble() * 2 - 1) * math.pow(ns.size, -0.5)))

  // Hyperbolic tangent: tanh(x) = (e^(2x) - 1) / (e^(2x) + 1)
  private def tanh(x: Double) = {
    val exp = math.exp(2 * x)
    (exp - 1) / (exp + 1)
  }
}
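
// A minimal sanity check of the derivative identity used in Neuron.deriv.
// This object is an illustrative sketch, not part of the original gist:
// it computes f'(x) once from the input x and once from the output f(x)
// via (b / a) * (a^2 - f(x)^2); the two values should agree.
object DerivCheck extends App {
  val (a, b) = (1.7159, 2.0 / 3.0)
  def f(x: Double) = a * math.tanh(b * x)
  def fPrimeFromInput(x: Double) = a * b * (1 - math.pow(math.tanh(b * x), 2))
  def fPrimeFromOutput(out: Double) = b / a * (a * a - out * out)
  val x = 0.3
  println(fPrimeFromInput(x))     // derivative computed from the input
  println(fPrimeFromOutput(f(x))) // same value, recovered from the output
}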
// Dendrites are the neuron's input connections, just like in your brain.
class Dendrite(n: Neuron, w: Double) {
  // the input neuron
  val neuron = n
  // weight of the signal
  var weight = w

  def input = weight * neuron.out

  // Chain rule step: the error seen by the lower neuron is the upstream
  // error scaled by this connection's weight.
  def updateError(delta: Double): Unit = {
    neuron.updateError(delta * weight)
  }

  // Delta rule: the weight change is proportional to the input signal.
  def adjust(adjustment: Double): Unit = {
    weight += adjustment * neuron.out
  }

  override def toString = "--[" + weight + "]-->" + neuron.name
}
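
// A small wiring sketch (the object and neuron names here are illustrative,
// not part of the original gist): two input neurons feed one output neuron
// through the Dendrites created by Neuron.connect. Clamping the inputs'
// `out` fields by hand is exactly what Net.output does for its input layer.
object WiringDemo extends App {
  val rnd = new Random(1)
  val inputs = List(new Neuron("inA", Nil, rnd), new Neuron("inB", Nil, rnd))
  val outNeuron = new Neuron("out", inputs, rnd)
  inputs.foreach(n => n.out = 1.0) // clamp both inputs to 1.0
  println(outNeuron)               // the neuron and its weighted connections
  println(outNeuron.output)        // a * tanh(b * (w1 + w2 + bias))
}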
class Net(layout: List[Int], rnd: Random) {
  val layers = build(layout, rnd)

  // Clamp the input layer's outputs to the given values, then let each
  // higher layer pull from the one below; returns the last layer's outputs.
  def output(ins: List[Double]) = {
    layers.head.zip(ins).foreach { case (n, in) => n.out = in }
    layers.tail.foldLeft(ins) { (_, l) => l.map(_.output) }
  }

  def train(ins: List[Double], outs: List[Double]) = {
    output(ins)
    layers.last.zip(outs).foreach { case (n, expected) => n.expectation(expected) }
    layers.foreach(_.foreach(_.adjust))
  }

  override def toString = layers.mkString("\n")

  // Build the layers bottom-up, each fully connected to the layer below;
  // the empty seed layer is dropped again by the final reverse.tail.
  private def build(layout: List[Int], rnd: Random) =
    layout.zip(1 to layout.size).foldLeft(List(List.empty[Neuron])) {
      case (z, (n, l)) => buildLayer("L" + l, n, z.head, rnd) :: z
    }.reverse.tail

  private def buildLayer(name: String, n: Int, lower: List[Neuron], rnd: Random) =
    (0 until n).map(i => new Neuron(name + "N" + i, lower, rnd)).toList
}
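
// A quick look at what build produces (again an illustrative sketch, not
// part of the original gist): for List(2, 3, 1) the net has an input layer
// L1 with two dendrite-less neurons, a hidden layer L2 whose three neurons
// each connect to both L1 neurons, and a single output neuron in L3.
object TopologyCheck extends App {
  val net = new Net(List(2, 3, 1), new Random(42))
  println(net) // every neuron prints its incoming connections and weights
}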
object XOR extends App {
  val net = new Net(List(2, 3, 2, 1), new Random)
  println(net)
  // Train on the four XOR cases, encoded as -1/1 rather than 0/1 so the
  // targets sit in the symmetric range of the scaled tanh activation.
  for (i <- 1 to 150) {
    net.train(List(1, 1), List(-1))
    net.train(List(-1, -1), List(-1))
    net.train(List(1, -1), List(1))
    net.train(List(-1, 1), List(1))
    if (i % 33 == 0) println(net)
  }
  println("Training done.")
  println("** Output for (1,1) " + net.output(List(1, 1)))
  println("** Output for (1,-1) " + net.output(List(1, -1)))
  println("** Output for (-1,1) " + net.output(List(-1, 1)))
  println("** Output for (-1,-1) " + net.output(List(-1, -1)))
}
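
To try it, compile the file and run `scala spike.XOR` (the sketch objects above can be run the same way). After the 150 epochs of four training cases each, the single output neuron should report values near -1 for (1,1) and (-1,-1) and near +1 for the mixed inputs; with random weight initialization an occasional run may converge slowly, so re-running can help.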