Skip to content

Instantly share code, notes, and snippets.

@devmachiine
Last active November 26, 2020 17:05
Show Gist options
  • Save devmachiine/0aa6db5dcb80e7f1492e8068b969fcd1 to your computer and use it in GitHub Desktop.
Neural network in F#
//------------------------------- Define Network -------------------------------

/// Activation function applied to a layer's outputs.
type Activation =
    | Linear
    | Binary
    | Tanh
    | ReLU

/// A single neuron: one weight per input plus a bias term.
type Neuron = { Weights: float list; Bias: float }

/// A fully-connected layer with the activation applied to every neuron's sum.
type Layer =
    { Neurons: Neuron list
      Output: Activation }

/// A feed-forward network: an ordered list of layers.
type Network = { Layers: Layer list }
/// Shared RNG used for weight initialisation and mutation.
let rnd = System.Random()

/// Uniform random value in [-1.0, 1.0). Ignores its argument so it can
/// be passed directly to List.init.
let randomWeight _ = rnd.NextDouble() * 2.0 - 1.0
/// Creates a neuron with one random weight per input and a random bias.
let buildNeuron inputs =
    { Neuron.Weights = [ for _ in 1 .. inputs -> randomWeight () ]
      Bias = randomWeight () }
/// Creates a fully-connected layer of [neurons] neurons, each taking
/// [inputs] inputs, with the given output activation.
let buildLayer inputs neurons activation =
    // The List.init index is irrelevant here — every neuron is built the
    // same way — so bind it as `_` rather than an unused `i`.
    { Layer.Neurons = List.init neurons (fun _ -> buildNeuron inputs)
      Output = activation }
/// Builds a network from the input count and a list of
/// (neuronCount, activation) specs, one per layer. Each layer's neuron
/// count becomes the next layer's input count.
let buildNetwork inputs layerSpecs =
    let addLayer (layers, layerInputs) (neurons, activation) =
        (buildLayer layerInputs neurons activation :: layers, neurons)
    let reversedLayers, _ = List.fold addLayer ([], inputs) layerSpecs
    { Network.Layers = List.rev reversedLayers }
//------------------------------- Compute Result -------------------------------

/// Maps an Activation case to its float -> float function.
let transform =
    function
    | Linear -> id
    | Binary -> (fun x -> if x > 0.0 then 1.0 else 0.0)
    | Tanh -> tanh
    | ReLU -> max 0.0
/// Applies [activation] to the neuron's weighted input sum plus its bias.
/// Requires inputs to have the same length as neuron.Weights (raises
/// otherwise, matching the original pairwise zip behaviour).
let neuronOutput neuron inputs activation =
    let weightedSum =
        List.fold2 (fun acc w input -> acc + w * input) 0.0 neuron.Weights inputs
    activation (weightedSum + neuron.Bias)
/// Feeds [inputs] through every neuron of the layer, yielding one
/// activated output per neuron.
let layerOutput layer inputs =
    let activation = transform layer.Output
    [ for neuron in layer.Neurons -> neuronOutput neuron inputs activation ]
/// Feeds [inputs] forward through the network, layer by layer.
let networkOutput network inputs =
    network.Layers
    |> List.fold (fun data layer -> layerOutput layer data) inputs
//------------------------------- Compute Errors -------------------------------

/// One supervised example: the expected outputs for the given inputs.
type TrainingCase =
    { Inputs: float list
      Outputs: float list }
/// Squared error for a single (expected, computed) output pair.
let outputError (expected, computed): float =
    pown (expected - computed) 2
/// Total squared error across one case's output vector.
let caseError (expected, computed) =
    List.map2 (fun e c -> outputError (e, c)) expected computed
    |> List.sum
/// Sum of per-case errors over the whole training set.
let networkError network trainingCases =
    trainingCases
    |> List.sumBy (fun case ->
        caseError (case.Outputs, networkOutput network case.Inputs))
//------------------------------- Train Network -------------------------------

/// Stopping criteria for the random search: a wall-clock budget in
/// seconds and a target error below which training halts.
type SearchConstraints = { MaxSeconds: float; MinError: float }
/// Trains a network by random-mutation hill climbing.
/// Infers input/output widths from the training set, appends a final
/// Linear layer when the last spec does not match the output width,
/// then repeatedly mutates the current best network, keeping mutants
/// that lower the total error. Stops when the error drops to or below
/// MinError, or when the MaxSeconds budget is exhausted.
let mutateSearch (trainingSet: TrainingCase list) layerSpecs changeProbability searchConstraints =
    let inputs =
        trainingSet
        |> List.map (fun case -> case.Inputs.Length)
        |> List.max
    let outputs =
        trainingSet
        |> List.map (fun case -> case.Outputs.Length)
        |> List.max
    // Ensure the network ends in a layer of the right width. Using
    // List.tryLast fixes a crash the original had on an empty spec
    // list (List.last raises on []); [] now simply gets the output layer.
    let adjustedLayerSpecs =
        match List.tryLast layerSpecs with
        | Some (neurons, _activation) when neurons = outputs -> layerSpecs
        | _ -> layerSpecs @ [ (outputs, Linear) ]
    let initialNetwork = buildNetwork inputs adjustedLayerSpecs
    // With probability changeProbability, rescale a value by a random
    // factor in (-2.0, 2.0); otherwise leave it unchanged.
    let maybeMutate x =
        if rnd.NextDouble() < changeProbability then randomWeight () * 2.0 * x else x
    let mutateNeuron n =
        { Weights = n.Weights |> List.map maybeMutate
          Bias = maybeMutate n.Bias }
    let mutateLayer l =
        { Neurons = l.Neurons |> List.map mutateNeuron
          Output = l.Output }
    let mutateNetwork n =
        { Layers = n.Layers |> List.map mutateLayer }
    let stopwatch = System.Diagnostics.Stopwatch.StartNew()
    let rec findNext bestNetwork oldError i =
        match stopwatch.Elapsed.TotalSeconds with
        // Check the time budget only every 10th iteration to keep the
        // hot loop cheap.
        | timeout when i % 10 = 0
                       && timeout > searchConstraints.MaxSeconds ->
            printfn "Timeout @ %A" timeout
            printfn "iteration: %A" i
            bestNetwork
        | elapsed ->
            let candidate = mutateNetwork bestNetwork
            let newError = networkError candidate trainingSet
            if newError < oldError then
                if i % 10 = 0 then printfn "e %f i: %i" newError i
                if newError > searchConstraints.MinError then
                    findNext candidate newError (i + 1)
                else
                    printfn "Stopped @ %A" elapsed
                    printfn "iteration: %A" i
                    candidate
            else
                findNext bestNetwork oldError (i + 1)
    findNext initialNetwork (networkError initialNetwork trainingSet) 0
//----------------------------------- Test -----------------------------------

/// Trains a network on [trainingData] with a fixed mutation rate and
/// time budget, then prints each case's prediction next to its target,
/// followed by the final network error.
let runTraining trainingData layerSpecs =
    let trainedNetwork =
        mutateSearch trainingData layerSpecs 0.07 { MaxSeconds = 5.0; MinError = 0.01 }
    let formatFloats = List.map (sprintf "%.2f")
    for case in trainingData do
        printfn
            "feedForward %A gives %A vs %A"
            case.Inputs
            (formatFloats (networkOutput trainedNetwork case.Inputs))
            (formatFloats case.Outputs)
    printfn "Network error: %A" (networkError trainedNetwork trainingData)
// ---------- Test 1/3 -> XOR gate (not linearly separable) ------
let xorGate =
    [ { Inputs = [ 1.0; 1.0 ]; Outputs = [ 0.0 ] }
      { Inputs = [ 0.0; 1.0 ]; Outputs = [ 1.0 ] }
      { Inputs = [ 1.0; 0.0 ]; Outputs = [ 1.0 ] }
      { Inputs = [ 0.0; 0.0 ]; Outputs = [ 0.0 ] } ]

runTraining xorGate [ (9, ReLU); (5, ReLU) ]
// ---------- Test 2/3 -> non-linear function --------------------
// tanh samples over a fine grid, plus extreme inputs to pin the
// asymptotes at -1 and 1.
let tanData =
    [ -1.75 .. 0.37 .. 1.75 ]
    @ [ -999999.0; 999999.0 ]
    @ [ -1000.0; 1000.0 ]
    @ [ 0.0 ]
    |> List.map (fun d -> { Inputs = [ d ]; Outputs = [ tanh d ] })

/// Builds x identical layer specs of y ReLU neurons each.
let xyReLU x y = List.replicate x (y, ReLU)

runTraining tanData (xyReLU 7 5)
// ---------- Test 3/3 -> multiple-output ------------------------
/// Two-output target: even inputs map to [9; 0], odd inputs to [4; 7].
let oodle i =
    match i % 2 with
    | 0 -> [ 9; 0 ]
    | _ -> [ 4; 7 ]
// Training cases 1..4 with float-converted oodle targets.
let oodleData =
    [ for i in 1 .. 4 ->
        { Inputs = [ float i ]
          Outputs = oodle i |> List.map float } ]

runTraining oodleData ([ (17, ReLU); (9, ReLU) ] @ (xyReLU 5 5))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment