Lua Neural Network
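-- A feed-forward neural network with backpropagation training.
-- Written for Lua 5.0/5.1: it relies on table.getn and the implicit vararg
-- table `arg`, both of which were removed in later versions of Lua.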
ACTIVATION_RESPONSE = 1
NeuralNetwork = {
  transfer = function( x) return 1 / (1 + math.exp(-x / ACTIVATION_RESPONSE)) end -- the transfer function (in this case a sigmoid); ACTIVATION_RESPONSE controls its steepness
}
function NeuralNetwork.create( _numInputs, _numOutputs, _numHiddenLayers, _neuronsPerLayer, _learningRate)
  _numInputs = _numInputs or 1
  _numOutputs = _numOutputs or 1
  _numHiddenLayers = _numHiddenLayers or math.ceil(_numInputs/2)
  _neuronsPerLayer = _neuronsPerLayer or math.ceil(_numInputs*.66666+_numOutputs)
  _learningRate = _learningRate or .5
  -- order goes network[layer][neuron][weight]
  local network = setmetatable({
    learningRate = _learningRate
  },{ __index = NeuralNetwork});
  network[1] = {} -- input layer
  for i = 1,_numInputs do
    network[1][i] = {}
  end
  for i = 2,_numHiddenLayers+2 do -- plus 2 represents the output layer (also need to skip the input layer)
    network[i] = {}
    local neuronsInLayer = _neuronsPerLayer
    if i == _numHiddenLayers+2 then
      neuronsInLayer = _numOutputs
    end
    for j = 1,neuronsInLayer do
      network[i][j] = {bias = math.random()*2-1}
      local numNeuronInputs = table.getn(network[i-1])
      for k = 1,numNeuronInputs do
        network[i][j][k] = math.random()*2-1 -- random initial weight between -1 and 1
      end
    end
  end
  return network
end
function NeuralNetwork:forewardPropagate(...)
  if table.getn(arg) ~= table.getn(self[1]) and type(arg[1]) ~= "table" then
    error("Neural Network received "..table.getn(arg).." input[s] (expected "..table.getn(self[1]).." input[s])",2)
  elseif type(arg[1]) == "table" and table.getn(arg[1]) ~= table.getn(self[1]) then
    error("Neural Network received "..table.getn(arg[1]).." input[s] (expected "..table.getn(self[1]).." input[s])",2)
  end
  local outputs = {}
  for i = 1,table.getn(self) do
    for j = 1,table.getn(self[i]) do
      if i == 1 then
        if type(arg[1]) == "table" then
          self[i][j].result = arg[1][j]
        else
          self[i][j].result = arg[j]
        end
      else
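        -- hidden/output neuron: start from the bias, accumulate the weighted
        -- results of the previous layer, then squash with the transfer function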
        self[i][j].result = self[i][j].bias
        for k = 1,table.getn(self[i][j]) do
          self[i][j].result = self[i][j].result + (self[i][j][k]*self[i-1][k].result)
        end
        self[i][j].result = NeuralNetwork.transfer(self[i][j].result)
        if i == table.getn(self) then
          table.insert(outputs,self[i][j].result)
        end
      end
    end
  end
  return outputs
end
function NeuralNetwork:backwardPropagate(inputs,desiredOutputs)
  if table.getn(inputs) ~= table.getn(self[1]) then
    error("Neural Network received "..table.getn(inputs).." input[s] (expected "..table.getn(self[1]).." input[s])",2)
  elseif table.getn(desiredOutputs) ~= table.getn(self[table.getn(self)]) then
    error("Neural Network received "..table.getn(desiredOutputs).." desired output[s] (expected "..table.getn(self[table.getn(self)]).." desired output[s])",2)
  end
  self:forewardPropagate(inputs) -- update the internal inputs and outputs
  for i = table.getn(self),2,-1 do -- iterate backwards (nothing to calculate for the input layer)
    for j = 1,table.getn(self[i]) do
      if i == table.getn(self) then -- special calculations for the output layer
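        -- output-layer delta: (target - actual) * f'(net), where the sigmoid's
        -- derivative is f'(x) = f(x) * (1 - f(x))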
        self[i][j].delta = (desiredOutputs[j] - self[i][j].result) * self[i][j].result * (1 - self[i][j].result)
      else
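        -- hidden-layer delta: f'(result) times the error signal propagated back
        -- through the next layer's weights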
        local weightDelta = 0
        for k = 1,table.getn(self[i+1]) do
          weightDelta = weightDelta + self[i+1][k][j]*self[i+1][k].delta
        end
        self[i][j].delta = self[i][j].result * (1 - self[i][j].result) * weightDelta
      end
    end
  end
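  -- second pass: apply the updates; each weight moves by
  -- learningRate * delta * input, and each bias by learningRate * delta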
  for i = 2,table.getn(self) do
    for j = 1,table.getn(self[i]) do
      self[i][j].bias = self[i][j].bias + self[i][j].delta * self.learningRate -- the bias acts as a weight on a constant input of 1
      for k = 1,table.getn(self[i][j]) do
        self[i][j][k] = self[i][j][k] + self[i][j].delta * self.learningRate * self[i-1][k].result
      end
    end
  end
end
function NeuralNetwork:save()
  --[[
  File specs:
    |INFO| - should be FF BP NN
    |I|    - number of inputs
    |O|    - number of outputs
    |HL|   - number of hidden layers
    |NHL|  - number of neurons per hidden layer
    |LR|   - learning rate
    |BW|   - bias and weight values
  ]]--
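  -- For example, a freshly created 2-input/1-output network with one hidden
  -- layer of 4 neurons might serialize to something like (weight values are
  -- illustrative, not from an actual run):
  --   |INFO|FF BP NN|I|2|O|1|HL|1|NHL|4|LR|0.3|BW|0.12{0.5,-0.3,}...|END|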
  local data = "|INFO|FF BP NN|I|"..tostring(table.getn(self[1])).."|O|"..tostring(table.getn(self[table.getn(self)])).."|HL|"..tostring(table.getn(self)-2).."|NHL|"..tostring(table.getn(self[2])).."|LR|"..tostring(self.learningRate).."|BW|"
  for i = 2,table.getn(self) do -- nothing to save for the input layer
    for j = 1,table.getn(self[i]) do
      local neuronData = tostring(self[i][j].bias).."{"
      for k = 1,table.getn(self[i][j]) do
        neuronData = neuronData..tostring(self[i][j][k])
        neuronData = neuronData..","
      end
      data = data..neuronData.."}"
    end
  end
  data = data.."|END|"
  return data
end
function NeuralNetwork.load( data)
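  -- Walks the "|"-delimited chunks produced by save(), reading each header tag
  -- and its value, then rebuilds the layer/neuron/weight tables from |BW|.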
  local dataPos = string.find(data,"|")+1
  local currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
  dataPos = string.find(data,"|",dataPos)+1
  local _inputs, _outputs, _hiddenLayers, _neuronsPerLayer, _learningRate
  local biasWeights = {}
  local errorExit = false
  while currentChunk ~= "END" and not errorExit do
    if currentChunk == "INFO" then
      currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
      dataPos = string.find(data,"|",dataPos)+1
      if currentChunk ~= "FF BP NN" then
        errorExit = true
      end
    elseif currentChunk == "I" then
      currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
      dataPos = string.find(data,"|",dataPos)+1
      _inputs = tonumber(currentChunk)
    elseif currentChunk == "O" then
      currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
      dataPos = string.find(data,"|",dataPos)+1
      _outputs = tonumber(currentChunk)
    elseif currentChunk == "HL" then
      currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
      dataPos = string.find(data,"|",dataPos)+1
      _hiddenLayers = tonumber(currentChunk)
    elseif currentChunk == "NHL" then
      currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
      dataPos = string.find(data,"|",dataPos)+1
      _neuronsPerLayer = tonumber(currentChunk)
    elseif currentChunk == "LR" then
      currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
      dataPos = string.find(data,"|",dataPos)+1
      _learningRate = tonumber(currentChunk)
    elseif currentChunk == "BW" then
      currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
      dataPos = string.find(data,"|",dataPos)+1
      local subPos = 1
      local subChunk
      for i = 1,_hiddenLayers+1 do
        biasWeights[i] = {}
        local neuronsInLayer = _neuronsPerLayer
        if i == _hiddenLayers+1 then
          neuronsInLayer = _outputs
        end
        for j = 1,neuronsInLayer do
          biasWeights[i][j] = {}
          biasWeights[i][j].bias = tonumber(string.sub(currentChunk,subPos,string.find(currentChunk,"{",subPos)-1))
          subPos = string.find(currentChunk,"{",subPos)+1
          subChunk = string.sub( currentChunk, subPos, string.find(currentChunk,",",subPos)-1)
          local maxPos = string.find(currentChunk,"}",subPos)
          while subPos < maxPos do
            table.insert(biasWeights[i][j],tonumber(subChunk))
            subPos = string.find(currentChunk,",",subPos)+1
            if string.find(currentChunk,",",subPos) ~= nil then
              subChunk = string.sub( currentChunk, subPos, string.find(currentChunk,",",subPos)-1)
            end
          end
          subPos = maxPos+1
        end
      end
    end
    currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
    dataPos = string.find(data,"|",dataPos)+1
  end
  if errorExit then
    error("Failed to load Neural Network: "..currentChunk,2)
  end
  local network = setmetatable({
    learningRate = _learningRate
  },{ __index = NeuralNetwork});
  network[1] = {} -- input layer
  for i = 1,_inputs do
    network[1][i] = {}
  end
  for i = 2,_hiddenLayers+2 do -- plus 2 represents the output layer (also need to skip the input layer)
    network[i] = {}
    local neuronsInLayer = _neuronsPerLayer
    if i == _hiddenLayers+2 then
      neuronsInLayer = _outputs
    end
    for j = 1,neuronsInLayer do
      network[i][j] = {bias = biasWeights[i-1][j].bias}
      local numNeuronInputs = table.getn(network[i-1])
      for k = 1,numNeuronInputs do
        network[i][j][k] = biasWeights[i-1][j][k]
      end
    end
  end
  return network
end
@cassiozen commented Jul 17, 2014

Author: "Soulkiller" (http://www.forums.evilmana.com/psp-lua-codebase/lua-neural-networks/)

NeuralNetwork.create( numInputs, numOutputs, numHiddenLayers, neuronsPerLayer, learningRate)
Return value - a neural network object.
Parameters:
numInputs - the number of input neurons
numOutputs - the number of output neurons
numHiddenLayers - the number of hidden layers (Recommended: 1)
neuronsPerLayer - the number of neurons in the hidden layers (not the input/output layers) (Recommended: 4)
learningRate - the rate at which the neural network learns (a value between 0 and 1: the smaller the value, the longer it takes to train; the higher the value, the faster it trains, but it might not learn the sequence correctly) (Recommended: 0.3)

NeuralNetwork:forewardPropagate(...)
Return value - a table containing the results
The number of arguments must equal the number of inputs of the NN object (a single table of inputs may also be passed).
Feeds the given impulses into the network and returns the learned results.

NeuralNetwork:backwardPropagate( inputs, desiredOutputs)
inputs - a table that has the inputs
desiredOutputs - a table that has the desired outputs for this training set

NeuralNetwork:save()
Return value - a string that can be used to load the NN again in its current state (it remembers its training). This string can then be written to a file.

NeuralNetwork.load( data)
data - the unique string returned by NeuralNetwork:save()
Return value - a Neural Network object

@cassiozen commented Jul 17, 2014

Example: XOR

math.randomseed(os.time())
dofile("luaneural.lua")      

network = NeuralNetwork.create(2,1,1,4,0.3)

print("Training the neural network:")
attempts = 10000 -- number of times to do backpropagation
for i = 1,attempts do
  network:backwardPropagate({0,0},{0})
  network:backwardPropagate({1,0},{1})
  network:backwardPropagate({0,1},{1})
  network:backwardPropagate({1,1},{0})
end

print("Results:")
print("0 0 | "..network:forewardPropagate(0,0)[1])
print("1 0 | "..network:forewardPropagate(1,0)[1])
print("0 1 | "..network:forewardPropagate(0,1)[1])
print("1 1 | "..network:forewardPropagate(1,1)[1])
@cassiozen commented Jul 17, 2014

If needed, you can save the trained neural network to a file to use it later

print("Saving Neural Network...")
NN = io.open("neuralNet.txt","w")
NN:write(network:save())
NN:flush()
NN:close()

print("Loading Neural Network...")
NN = io.open("neuralNet.txt","r")
network = NeuralNetwork.load(NN:read())

print("Results:")
print("0 0 | "..network:forewardPropagate(0,0)[1])
print("1 0 | "..network:forewardPropagate(1,0)[1])
print("0 1 | "..network:forewardPropagate(0,1)[1])
print("1 1 | "..network:forewardPropagate(1,1)[1])
@Chemist-pp commented Jan 8, 2019

Out of curiosity: I noticed that in your training loop, specifically line 168, you switch which variables you use to index the nodes and the weights. Was this intentional?

I think in your case this works out, since you take care of the output layer separately and restrict the hidden layers to all have the same node count, so you loop orthogonally to what I had expected initially. Anyway, thanks for sharing! I learned a bit from it.
