-- Gist by @InDieTasten, created March 8, 2015, 22:35
-- Recursively deep-copies a table; non-table values are returned unchanged.
table.duplicate = function(t)
    if(type(t) ~= "table") then return t end
    local t2 = {}
    for k, v in pairs(t) do
        if(type(v) == "table") then
            t2[k] = table.duplicate(v)
        else
            t2[k] = v
        end
    end
    return t2
end
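-- Usage sketch (added for illustration; the tables here are made up):
--   local src  = { weights = { 0.5, -0.2 }, bias = 1 }
--   local copy = table.duplicate(src)
--   copy.weights[1] = 99
--   print(src.weights[1]) --> still 0.5, the copy is fully independent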
-- Neural Networking API
local net = {} -- declared but currently unused; the functions below are global
-- An input node is just an empty slot that runNet later fills with a value.
function newInput()
    return {}
end
-- Creates a hidden neuron as a list of one weight per input node,
-- initialised by func (defaults to 0 for every weight).
function newHidden(inputs, func)
    func = func or function(x) return 0 end
    local hidden = {}
    for i = 1, inputs, 1 do
        table.insert(hidden, func(i))
    end
    return hidden
end
-- Creates an output neuron; structurally identical to a hidden neuron.
function newOutput(inputs, func)
    func = func or function(x) return 0 end
    local output = {}
    for i = 1, inputs, 1 do
        table.insert(output, func(i))
    end
    return output
end
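-- Illustration (an assumption about intended use): a neuron is just a plain
-- list with one weight per input node, all 0 unless an init function is given.
--   newHidden(3)                             --> { 0, 0, 0 }
--   newHidden(3, function(i) return i end)   --> { 1, 2, 3 }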
-- Builds a layered net. Each argument is the node count of one layer, in
-- order: inputs, any number of hidden layers, outputs.
function newNet(...)
    local specs = {...}
    if(#specs < 2) then
        error("You need to specify at least how many inputs and outputs you want", 2)
    end
    for k, v in ipairs(specs) do
        if(type(v) ~= "number") then
            error("Argument #"..k..": needs to be a number", 2)
        end
        if(v <= 0) then
            error("Argument #"..k..": a layer cannot have zero or fewer nodes", 2)
        end
    end
    -- create net
    local net = {}
    -- add input layer
    net[1] = {}
    for x = 1, specs[1], 1 do
        -- add individual input fields
        table.insert(net[1], newInput())
    end
    -- add hidden layers
    for i = 2, #specs-1, 1 do
        -- create layer i with one weight per node of the previous layer
        net[i] = {}
        for x = 1, specs[i], 1 do
            -- add individual neurons
            table.insert(net[i], newHidden(#net[i-1]))
        end
    end
    -- add output layer
    net[#specs] = {}
    for x = 1, specs[#specs], 1 do
        -- add individual neurons
        table.insert(net[#specs], newOutput(#net[#specs-1]))
    end
    return net
end
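-- Usage sketch (layer sizes are an arbitrary example): 2 inputs, one hidden
-- layer of 3 neurons, 1 output neuron. All weights start at 0.
--   local myNet = newNet(2, 3, 1)
--   print(#myNet)        --> 3 layers
--   print(#myNet[2])     --> 3 hidden neurons
--   print(#myNet[2][1])  --> 2 weights (one per input)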
-- Feeds the given inputs through the net and returns the output layer.
-- A neuron fires (outputs 1) when its weighted input sum reaches the
-- threshold of 1, otherwise it outputs 0.
function runNet(net, inputs)
    if(type(net) ~= "table") then
        error("Arg #1: net table expected, got "..type(net), 2)
    end
    if(type(inputs) ~= "table") then
        error("Arg #2: input table expected, got "..type(inputs), 2)
    end
    if(#net[1] ~= #inputs) then
        error("The input sizes of the net and the input table do not match", 2)
    end
    -- WORK: check consistency of the inner layers as well
    -- Evaluate on a deep copy so the net's weights are not overwritten
    local eval = table.duplicate(net)
    -- Insert the inputs into the first layer
    for i = 1, #inputs, 1 do
        eval[1][i] = inputs[i]
    end
    -- Evaluate the hidden and output layers
    for x = 2, #eval, 1 do
        -- Replace every neuron (a weight list) with its activation
        for i = 1, #eval[x], 1 do
            local nsum = 0
            for inp, weight in ipairs(eval[x][i]) do
                nsum = nsum + weight * eval[x-1][inp]
            end
            if(nsum >= 1) then -- threshold
                eval[x][i] = 1
            else
                eval[x][i] = 0
            end
        end
    end
    return eval[#eval]
end
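-- Usage sketch (weights are hand-picked, since newNet initialises them to 0):
-- a single output neuron with both weights set to 1 acts as an OR gate under
-- the fixed threshold of 1.
--   local orNet = newNet(2, 1)
--   orNet[2][1] = { 1, 1 }
--   print(runNet(orNet, { 0, 0 })[1]) --> 0
--   print(runNet(orNet, { 1, 0 })[1]) --> 1
--   print(runNet(orNet, { 1, 1 })[1]) --> 1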
-- Intended to measure how often the net fails on a test set (not implemented yet).
function failrate(net, testset)
    -- WORK
end
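-- One possible body for failrate (a sketch, assuming each testset entry looks
-- like { inputs = {...}, expected = {...} }): count the cases where any
-- output differs from the expectation and return the failing fraction.
--   local fails = 0
--   for _, case in ipairs(testset) do
--       local out = runNet(net, case.inputs)
--       for i = 1, #out, 1 do
--           if out[i] ~= case.expected[i] then
--               fails = fails + 1
--               break
--           end
--       end
--   end
--   return fails / #testset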
-- Intended training routine (not implemented yet).
function backpropNet(net, testset, rounds)
    -- randomize the testset order
    -- run the complete testset and calculate the overall error
    -- adjust the weights
    -- check the remaining time and loop
end
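-- Sketch of one way to fill in backpropNet (an assumption, not the author's
-- plan): the hard threshold in runNet has no usable gradient, so this is a
-- perceptron-style update that only works for nets without hidden layers
-- (inputs feeding the output layer directly). testset entries are assumed to
-- be { inputs = {...}, expected = {...} } and the learning rate is made up.
--   local rate = 0.1
--   for round = 1, rounds, 1 do
--       -- randomize the testset order (Fisher-Yates shuffle)
--       for i = #testset, 2, -1 do
--           local j = math.random(i)
--           testset[i], testset[j] = testset[j], testset[i]
--       end
--       -- nudge each output weight towards the expected result
--       for _, case in ipairs(testset) do
--           local out = runNet(net, case.inputs)
--           for i = 1, #out, 1 do
--               local err = case.expected[i] - out[i]
--               for inp = 1, #net[2][i], 1 do
--                   net[2][i][inp] = net[2][i][inp] + rate * err * case.inputs[inp]
--               end
--           end
--       end
--   end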