@Krutoy242
Forked from cassiozen/NeuralNetwork.lua
Last active June 5, 2019 10:25
Lua Neural Network
--********************************************************
-- By Krutoy242
--
-- Based on
-- https://gist.github.com/cassiozen/de0dff87eb7ed599b5d0
--********************************************************
-- Localize globals for performance
local exp = math.exp
local ceil = math.ceil

local NeuralNetwork = {}

-- The transfer (activation) function, in this case a sigmoid
local transfer = function(x) return 1 / (1 + exp(-x)) end
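-- Note: the sigmoid's derivative can be written in terms of its own output,
-- f'(x) = f(x) * (1 - f(x)); backwardPropagate relies on this when it
-- multiplies by result * (1 - result) below.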
function NeuralNetwork.create(inputs, outputs, hiddenlayers, neurons)
  inputs = inputs or 1
  outputs = outputs or 1
  hiddenlayers = hiddenlayers or ceil(inputs/2)
  neurons = neurons or ceil(inputs*(2/3)+outputs)
  -- Order goes network[layer][neuron][weight]
  local network = setmetatable({},{__index = NeuralNetwork})
  network[1] = {} -- Input layer
  for i = 1,inputs do
    network[1][i] = {}
  end
  for i = 2,hiddenlayers+2 do -- Plus 2 represents the output layer (also need to skip the input layer)
    network[i] = {}
    local neuronsInLayer = neurons
    if i == hiddenlayers+2 then
      neuronsInLayer = outputs
    end
    for j = 1,neuronsInLayer do
      network[i][j] = {bias = math.random()*2-1}
      local numNeuronInputs = #(network[i-1])
      for k = 1,numNeuronInputs do
        network[i][j][k] = math.random()*2-1 -- Random weight between -1 and 1
      end
    end
  end
  return network
end
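-- For example, after NeuralNetwork.create(2,1,1,4): network[2][3][1] is the
-- weight from input neuron 1 into hidden neuron 3, and network[2][3].bias is
-- that hidden neuron's bias.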
function NeuralNetwork:forwardPropagate(...)
  local arg = {...}
  if #(arg) ~= #(self[1]) and type(arg[1]) ~= "table" then
    error("Neural Network received "..#(arg).." input[s] (expected "..#(self[1]).." input[s])",2)
  elseif type(arg[1]) == "table" and #(arg[1]) ~= #(self[1]) then
    error("Neural Network received "..#(arg[1]).." input[s] (expected "..#(self[1]).." input[s])",2)
  end
  local isTableInput = type(arg[1]) == "table"
  local outputs = {}
  local layersCount = #self
  local layer, neuron, result -- Forward declaration for performance
  for i = 1, layersCount do
    layer = self[i]
    for j = 1,#layer do
      neuron = layer[j]
      if i == 1 then
        -- Input-layer neurons just hold their input value
        if isTableInput then
          neuron.result = arg[1][j]
        else
          neuron.result = arg[j]
        end
      else
        -- Weighted sum of the previous layer's results, plus bias
        result = neuron.bias
        for k = 1,#neuron do
          result = result + (neuron[k] * self[i-1][k].result)
        end
        result = transfer(result)
        if i == layersCount then
          outputs[#outputs+1] = result
        end
        neuron.result = result
      end
    end
  end
  return outputs
end
function NeuralNetwork:backwardPropagate(desiredOutputs, learningRate)
  if #(desiredOutputs) ~= #(self[#self]) then
    error("Neural Network received "..#(desiredOutputs).." desired output[s] (expected "..#(self[#self]).." desired output[s])",2)
  end
  learningRate = learningRate or .5
  local layersCount = #self
  local layer, neuron, delta, result, nextLayer, nextNeuron
  for i = layersCount,2,-1 do -- Iterate backwards (nothing to calculate for the input layer)
    layer = self[i]
    for j = 1,#layer do
      neuron = layer[j]
      result = neuron.result
      if i == layersCount then -- Special calculation for the output layer
        neuron.delta = (desiredOutputs[j] - result) * result * (1 - result)
      else
        -- Sum the next layer's deltas, weighted by this neuron's outgoing weights
        nextLayer = self[i+1]
        delta = 0
        for k = 1,#nextLayer do
          nextNeuron = nextLayer[k]
          delta = delta + nextNeuron[j]*nextNeuron.delta
        end
        neuron.delta = result * (1 - result) * delta
      end
    end
  end
  -- Gradient-descent update for each bias and incoming weight
  for i = 2,layersCount do
    layer = self[i]
    for j = 1,#layer do
      neuron = layer[j]
      delta = neuron.delta
      neuron.bias = neuron.bias + delta * learningRate
      for k = 1,#neuron do
        neuron[k] = neuron[k] + delta * learningRate * self[i-1][k].result
      end
    end
  end
end
function NeuralNetwork:save(fileName)
  local f = io.open(fileName, "w")
  if not f then return false end
  --[[
  File specs:
    |INFO| - should be FF BP NN
    |I|    - number of inputs
    |O|    - number of outputs
    |HL|   - number of hidden layers
    |NHL|  - number of neurons per hidden layer
    |LR|   - learning rate
    |BW|   - bias and weight values
  ]]--
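  -- For illustration (values are made up), a 2-input, 1-output network with
  -- one hidden layer of 4 neurons serializes to a single line shaped like:
  --   |INFO|FF BP NN|I|2|O|1|HL|1|NHL|4|LR|0.5|BW|0.12{0.3,-0.7,}...|END|
  -- where each neuron is written as bias{weight1,weight2,...,}.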
  local data = "|INFO|FF BP NN|I|"..
    tostring(#(self[1]))..
    "|O|"..tostring(#(self[#self]))..
    "|HL|"..tostring(#self-2)..
    "|NHL|"..tostring(#(self[2]))..
    "|LR|"..tostring(self.learningRate).."|BW|"
  for i = 2,#self do -- Nothing to save for the input layer
    for j = 1,#(self[i]) do
      local neuronData = tostring(self[i][j].bias).."{"
      for k = 1,#(self[i][j]) do
        neuronData = neuronData..tostring(self[i][j][k])
        neuronData = neuronData..","
      end
      data = data..neuronData.."}"
    end
  end
  data = data.."|END|"
  f:write(data)
  f:close()
  return true
end
function NeuralNetwork.load(fileName)
  local f = io.open(fileName, "r")
  if not f then return nil end
  local data = f:read()
  f:close()
  local dataPos = string.find(data,"|")+1
  local currentChunk = string.sub(data, dataPos, string.find(data,"|",dataPos)-1)
  dataPos = string.find(data,"|",dataPos)+1
  local _inputs, _outputs, _hiddenLayers, neurons, learningrate
  local biasWeights = {}
  local errorExit = false
  -- Walk the |TAG|value| pairs until the |END| marker
  while currentChunk ~= "END" and not errorExit do
    if currentChunk == "INFO" then
      currentChunk = string.sub(data, dataPos, string.find(data,"|",dataPos)-1)
      dataPos = string.find(data,"|",dataPos)+1
      if currentChunk ~= "FF BP NN" then
        errorExit = true
      end
    elseif currentChunk == "I" then
      currentChunk = string.sub(data, dataPos, string.find(data,"|",dataPos)-1)
      dataPos = string.find(data,"|",dataPos)+1
      _inputs = tonumber(currentChunk)
    elseif currentChunk == "O" then
      currentChunk = string.sub(data, dataPos, string.find(data,"|",dataPos)-1)
      dataPos = string.find(data,"|",dataPos)+1
      _outputs = tonumber(currentChunk)
    elseif currentChunk == "HL" then
      currentChunk = string.sub(data, dataPos, string.find(data,"|",dataPos)-1)
      dataPos = string.find(data,"|",dataPos)+1
      _hiddenLayers = tonumber(currentChunk)
    elseif currentChunk == "NHL" then
      currentChunk = string.sub(data, dataPos, string.find(data,"|",dataPos)-1)
      dataPos = string.find(data,"|",dataPos)+1
      neurons = tonumber(currentChunk)
    elseif currentChunk == "LR" then
      currentChunk = string.sub(data, dataPos, string.find(data,"|",dataPos)-1)
      dataPos = string.find(data,"|",dataPos)+1
      learningrate = tonumber(currentChunk)
    elseif currentChunk == "BW" then
      currentChunk = string.sub(data, dataPos, string.find(data,"|",dataPos)-1)
      dataPos = string.find(data,"|",dataPos)+1
      local subPos = 1
      local subChunk
      for i = 1,_hiddenLayers+1 do
        biasWeights[i] = {}
        local neuronsInLayer = neurons
        if i == _hiddenLayers+1 then
          neuronsInLayer = _outputs
        end
        for j = 1,neuronsInLayer do
          -- Parse "bias{weight1,weight2,...,}" for each neuron
          biasWeights[i][j] = {}
          biasWeights[i][j].bias = tonumber(string.sub(currentChunk,subPos,string.find(currentChunk,"{",subPos)-1))
          subPos = string.find(currentChunk,"{",subPos)+1
          subChunk = string.sub(currentChunk, subPos, string.find(currentChunk,",",subPos)-1)
          local maxPos = string.find(currentChunk,"}",subPos)
          while subPos < maxPos do
            table.insert(biasWeights[i][j],tonumber(subChunk))
            subPos = string.find(currentChunk,",",subPos)+1
            if string.find(currentChunk,",",subPos) ~= nil then
              subChunk = string.sub(currentChunk, subPos, string.find(currentChunk,",",subPos)-1)
            end
          end
          subPos = maxPos+1
        end
      end
    end
    currentChunk = string.sub(data, dataPos, string.find(data,"|",dataPos)-1)
    dataPos = string.find(data,"|",dataPos)+1
  end
  if errorExit then
    error("Failed to load Neural Network: "..currentChunk,2)
  end
  local network = setmetatable({
    learningRate = learningrate
  },{__index = NeuralNetwork})
  network[1] = {} -- Input layer
  for i = 1,_inputs do
    network[1][i] = {}
  end
  for i = 2,_hiddenLayers+2 do -- Plus 2 represents the output layer (also need to skip the input layer)
    network[i] = {}
    local neuronsInLayer = neurons
    if i == _hiddenLayers+2 then
      neuronsInLayer = _outputs
    end
    for j = 1,neuronsInLayer do
      network[i][j] = {bias = biasWeights[i-1][j].bias}
      local numNeuronInputs = #(network[i-1])
      for k = 1,numNeuronInputs do
        network[i][j][k] = biasWeights[i-1][j][k]
      end
    end
  end
  return network
end
return NeuralNetwork
Krutoy242 commented Jun 5, 2019

NeuralNetwork.create(inputs, outputs, hiddenlayers, neurons)

inputs - the number of input neurons
outputs - the number of output neurons
hiddenlayers - the number of hidden layers (Recommended: 1)
neurons - the number of neurons in each hidden layer (not the input/output layers) (Recommended: 4)
return - a neural network object
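
For example, the network used for XOR in the usage section below:

local network = NeuralNetwork.create(2, 1, 1, 4) -- 2 inputs, 1 output, 1 hidden layer of 4 neurons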

NeuralNetwork:forwardPropagate(...)

arguments - the input values, either one number per input neuron or a single table of input values; feeds them forward through the network
return - a table containing the results, one value per output neuron
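
Both calling styles are accepted, so these two calls are equivalent:

local out = network:forwardPropagate(0, 1)   -- one argument per input neuron
local out = network:forwardPropagate({0, 1}) -- a single table of inputs
print(out[1])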

NeuralNetwork:backwardPropagate(output, learningRate)

output - a table of the desired outputs for this training example
learningRate - the rate at which the neural network learns, between 0 and 1 (defaults to 0.5); smaller values take longer to train, larger values train faster but may fail to learn the sequence correctly

Call forwardPropagate first: backwardPropagate adjusts the weights using the neuron results stored by the most recent forward pass.
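
A single training step therefore pairs the two calls, e.g. for one XOR example:

network:forwardPropagate({1, 0})    -- stores each neuron's result
network:backwardPropagate({1}, 0.5) -- nudges weights toward the desired output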

NeuralNetwork:save(fileName)

fileName - the file to which the learned biases and weights are written
return - true on success, false if the file could not be opened

NeuralNetwork.load(fileName)

fileName - a file with previously saved data
return - a Neural Network object, or nil if the file could not be opened

Usage:

  • XOR:
math.randomseed(os.time())
local NeuralNetwork = require("NeuralNetwork")

local network = NeuralNetwork.create(2,1,1,4)

local xor = { -- input pairs interleaved with their desired outputs
  {0,0},{0},
  {1,0},{1},
  {0,1},{1},
  {1,1},{0}
}

print("Training the neural network...")
for _=1, 5000 do
  for i=1, 8,2 do
    network:forwardPropagate(xor[i])
    network:backwardPropagate(xor[i+1], 0.5)
  end
end

local function printNNresult()
  for i=1, 8,2 do
    print(xor[i][1].." "..xor[i][2].." | "..
      (network:forwardPropagate(xor[i]))[1])
  end
end

print("Results:")
printNNresult()

print("Saving Neural Network...")
network:save("neuralNet.txt")

print("Loading Neural Network...")
network = NeuralNetwork.load("neuralNet.txt")

print("Loaded results:")
printNNresult()
