using System;
using System.Collections.Generic;
using System.Linq;

public class SimpleNeuralNetwork
{
    private NeuralLayerFactory _layerFactory;

    internal List<NeuralLayer> _layers;
    internal double _learningRate;
    internal double[][] _expectedResult;

    /// <summary>
    /// Constructor of the neural network.
    /// Note:
    /// Initially, an input layer with the defined number of inputs will be created.
    /// </summary>
    /// <param name="numberOfInputNeurons">
    /// Number of neurons in the input layer.
    /// </param>
    public SimpleNeuralNetwork(int numberOfInputNeurons)
    {
        _layers = new List<NeuralLayer>();
        _layerFactory = new NeuralLayerFactory();

        // Create the input layer that will collect inputs.
        CreateInputLayer(numberOfInputNeurons);

        _learningRate = 2.95;
    }
    /// <summary>
    /// Add a layer to the neural network.
    /// The new layer is automatically connected to the previous last layer and becomes the network's output layer.
    /// </summary>
    public void AddLayer(NeuralLayer newLayer)
    {
        if (_layers.Any())
        {
            var lastLayer = _layers.Last();
            newLayer.ConnectLayers(lastLayer);
        }

        _layers.Add(newLayer);
    }

    /// <summary>
    /// Push input values to the neural network.
    /// </summary>
    public void PushInputValues(double[] inputs)
    {
        var inputNeurons = _layers.First().Neurons;

        // Assign each input value to the input neuron at the same position.
        for (int i = 0; i < inputNeurons.Count; i++)
        {
            inputNeurons[i].PushValueOnInput(inputs[i]);
        }
    }
    /// <summary>
    /// Set expected values for the outputs.
    /// </summary>
    public void PushExpectedValues(double[][] expectedOutputs)
    {
        _expectedResult = expectedOutputs;
    }

    /// <summary>
    /// Calculate the output of the neural network.
    /// </summary>
    /// <returns>Output values of the neurons in the output layer.</returns>
    public List<double> GetOutput()
    {
        var returnValue = new List<double>();

        _layers.Last().Neurons.ForEach(neuron =>
        {
            returnValue.Add(neuron.CalculateOutput());
        });

        return returnValue;
    }
    /// <summary>
    /// Train the neural network.
    /// </summary>
    /// <param name="inputs">Input values.</param>
    /// <param name="numberOfEpochs">Number of epochs.</param>
    public void Train(double[][] inputs, int numberOfEpochs)
    {
        double totalError = 0;

        for (int i = 0; i < numberOfEpochs; i++)
        {
            for (int j = 0; j < inputs.GetLength(0); j++)
            {
                PushInputValues(inputs[j]);

                var outputs = new List<double>();

                // Get outputs.
                _layers.Last().Neurons.ForEach(x =>
                {
                    outputs.Add(x.CalculateOutput());
                });

                // Calculate error by summing errors on all output neurons.
                totalError = CalculateTotalError(outputs, j);

                // Backpropagate: first the output layer, then the hidden layers.
                HandleOutputLayer(j);
                HandleHiddenLayers();
            }
        }
    }
    /// <summary>
    /// Helper function that creates the input layer of the neural network.
    /// </summary>
    private void CreateInputLayer(int numberOfInputNeurons)
    {
        var inputLayer = _layerFactory.CreateNeuralLayer(numberOfInputNeurons, new RectifiedActivationFuncion(), new WeightedSumFunction());
        inputLayer.Neurons.ForEach(x => x.AddInputSynapse(0));
        this.AddLayer(inputLayer);
    }
    /// <summary>
    /// Helper function that calculates the total error of the neural network.
    /// </summary>
    private double CalculateTotalError(List<double> outputs, int row)
    {
        double totalError = 0;

        // Sum of squared errors over all output neurons. An index-based loop is used
        // instead of IndexOf, which returns the wrong position when two neurons
        // happen to produce the same output value.
        for (int i = 0; i < outputs.Count; i++)
        {
            totalError += Math.Pow(outputs[i] - _expectedResult[row][i], 2);
        }

        return totalError;
    }
    /// <summary>
    /// Helper function that runs the backpropagation algorithm on the output layer of the network.
    /// </summary>
    /// <param name="row">
    /// Input/Expected output row.
    /// </param>
    private void HandleOutputLayer(int row)
    {
        _layers.Last().Neurons.ForEach(neuron =>
        {
            neuron.Inputs.ForEach(connection =>
            {
                var output = neuron.CalculateOutput();
                var netInput = connection.GetOutput();

                var expectedOutput = _expectedResult[row][_layers.Last().Neurons.IndexOf(neuron)];

                // Node delta: the output error scaled by output * (1 - output),
                // i.e. the derivative of a sigmoid output.
                var nodeDelta = (expectedOutput - output) * output * (1 - output);
                var delta = -1 * netInput * nodeDelta;

                connection.UpdateWeight(_learningRate, delta);

                neuron.PreviousPartialDerivate = nodeDelta;
            });
        });
    }
    /// <summary>
    /// Helper function that runs the backpropagation algorithm on the hidden layers of the network.
    /// </summary>
    private void HandleHiddenLayers()
    {
        // Walk backwards from the last hidden layer towards the input layer
        // (index 0 is the input layer and is skipped).
        for (int k = _layers.Count - 2; k > 0; k--)
        {
            _layers[k].Neurons.ForEach(neuron =>
            {
                neuron.Inputs.ForEach(connection =>
                {
                    var output = neuron.CalculateOutput();
                    var netInput = connection.GetOutput();

                    // Sum the error propagated back from the next layer through
                    // every connection that originates at this neuron.
                    double sumPartial = 0;
                    _layers[k + 1].Neurons
                        .ForEach(outputNeuron =>
                        {
                            outputNeuron.Inputs.Where(i => i.IsFromNeuron(neuron.Id))
                                .ToList()
                                .ForEach(outConnection =>
                                {
                                    sumPartial += outConnection.PreviousWeight * outputNeuron.PreviousPartialDerivate;
                                });
                        });

                    var delta = -1 * netInput * sumPartial * output * (1 - output);
                    connection.UpdateWeight(_learningRate, delta);
                });
            });
        }
    }
}
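
For reference, the sketch below shows how this class is meant to be driven. It is only a minimal usage sketch: it assumes the supporting types referenced above (NeuralLayerFactory, NeuralLayer, RectifiedActivationFuncion, WeightedSumFunction) are available from the accompanying project, and the layer sizes, inputs, and expected outputs are made-up illustrative values.

// Minimal usage sketch. Assumes NeuralLayerFactory, NeuralLayer,
// RectifiedActivationFuncion and WeightedSumFunction from the accompanying project;
// all numeric values below are illustrative only.
var network = new SimpleNeuralNetwork(3);

var layerFactory = new NeuralLayerFactory();

// Hidden layer with 3 neurons and an output layer with a single neuron.
network.AddLayer(layerFactory.CreateNeuralLayer(3, new RectifiedActivationFuncion(), new WeightedSumFunction()));
network.AddLayer(layerFactory.CreateNeuralLayer(1, new RectifiedActivationFuncion(), new WeightedSumFunction()));

// One expected output row per training input row.
network.PushExpectedValues(new double[][]
{
    new double[] { 0 },
    new double[] { 1 },
    new double[] { 1 },
});

network.Train(new double[][]
{
    new double[] { 0, 0, 1 },
    new double[] { 1, 1, 0 },
    new double[] { 0, 1, 1 },
}, numberOfEpochs: 1000);

// Run the trained network on a new input vector and read the result.
network.PushInputValues(new double[] { 1, 0, 1 });
var output = network.GetOutput();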
@jasdefer commented on Apr 6, 2018

There are some typos in the summary part of some methods: Hellper instead of Helper. I cannot open a PR for a GIST, but this comment may help :)
