Skip to content

Instantly share code, notes, and snippets.

@Theoistic
Created September 1, 2017 14:24
Show Gist options
  • Save Theoistic/f651a1e42592408f6200a1992ec711dd to your computer and use it in GitHub Desktop.
Neural Network
public class Network
{
public double LearnRate { get; set; }
public double Momentum { get; set; }
public List<Neuron> InputLayer { get; set; }
public List<List<Neuron>> HiddenLayers { get; set; }
public List<Neuron> OutputLayer { get; set; }
private static readonly Random Random = new Random();
/// <summary>
/// Creates an empty, unconfigured network: no neurons in any layer and
/// zeroed hyper-parameters. Layers must be populated manually before use.
/// </summary>
public Network()
{
    InputLayer = new List<Neuron>();
    HiddenLayers = new List<List<Neuron>>();
    OutputLayer = new List<Neuron>();
    LearnRate = 0.0;
    Momentum = 0.0;
}
/// <summary>
/// Builds a fully-connected feed-forward network.
/// </summary>
/// <param name="inputSize">Number of input neurons.</param>
/// <param name="hiddenSizes">Neuron count per hidden layer; must contain at least one entry.</param>
/// <param name="outputSize">Number of output neurons.</param>
/// <param name="learnRate">Learning rate; defaults to 0.4 when null.</param>
/// <param name="momentum">Momentum factor; defaults to 0.9 when null.</param>
/// <exception cref="ArgumentException">When <paramref name="hiddenSizes"/> is null or empty.</exception>
public Network(int inputSize, int[] hiddenSizes, int outputSize, double? learnRate = null, double? momentum = null)
{
    // Fail fast with a clear message instead of the opaque
    // NullReferenceException / IndexOutOfRangeException that
    // hiddenSizes[0] below would otherwise produce.
    if (hiddenSizes == null || hiddenSizes.Length == 0)
        throw new ArgumentException("At least one hidden layer size is required.", nameof(hiddenSizes));

    LearnRate = learnRate ?? .4;
    Momentum = momentum ?? .9;
    InputLayer = new List<Neuron>();
    HiddenLayers = new List<List<Neuron>>();
    OutputLayer = new List<Neuron>();

    for (var i = 0; i < inputSize; i++)
        InputLayer.Add(new Neuron());

    // First hidden layer connects to the input layer...
    var firstHiddenLayer = new List<Neuron>();
    for (var i = 0; i < hiddenSizes[0]; i++)
        firstHiddenLayer.Add(new Neuron(InputLayer));
    HiddenLayers.Add(firstHiddenLayer);

    // ...each subsequent hidden layer connects to the previous one.
    for (var i = 1; i < hiddenSizes.Length; i++)
    {
        var hiddenLayer = new List<Neuron>();
        for (var j = 0; j < hiddenSizes[i]; j++)
            hiddenLayer.Add(new Neuron(HiddenLayers[i - 1]));
        HiddenLayers.Add(hiddenLayer);
    }

    // Output layer connects to the last hidden layer.
    for (var i = 0; i < outputSize; i++)
        OutputLayer.Add(new Neuron(HiddenLayers.Last()));
}
/// <summary>
/// Trains the network for a fixed number of epochs: each epoch runs one
/// forward/backward pass over every sample in <paramref name="dataSets"/>.
/// </summary>
public void Train(List<DataSet> dataSets, int numEpochs)
{
    for (var epoch = 0; epoch < numEpochs; epoch++)
    {
        foreach (var sample in dataSets)
        {
            ForwardPropagate(sample.Values);
            BackPropagate(sample.Targets);
        }
    }
}
/// <summary>
/// Trains until the mean per-sample error drops to
/// <paramref name="minimumError"/> or below (the epoch counter caps the
/// loop so the int can never overflow).
/// </summary>
public void Train(List<DataSet> dataSets, double minimumError)
{
    var error = 1.0;
    var epoch = 0;
    while (error > minimumError && epoch < int.MaxValue)
    {
        var sampleErrors = new List<double>();
        foreach (var sample in dataSets)
        {
            ForwardPropagate(sample.Values);
            BackPropagate(sample.Targets);
            // Error is measured on the forward-pass values, i.e. before
            // this sample's weight update takes effect.
            sampleErrors.Add(CalculateError(sample.Targets));
        }
        error = sampleErrors.Average();
        epoch++;
    }
}
/// <summary>
/// Runs a forward pass: copies <paramref name="inputs"/> onto the input
/// neurons (one value per neuron, in order), then recomputes every hidden
/// and output neuron's value layer by layer.
/// </summary>
private void ForwardPropagate(params double[] inputs)
{
    for (var i = 0; i < InputLayer.Count; i++)
        InputLayer[i].Value = inputs[i];
    foreach (var layer in HiddenLayers)
        foreach (var neuron in layer)
            neuron.CalculateValue();
    foreach (var neuron in OutputLayer)
        neuron.CalculateValue();
}
/// <summary>
/// Runs one backward pass: computes output-layer gradients from
/// <paramref name="targets"/>, propagates gradients back through the
/// hidden layers (last to first), then applies momentum weight updates.
/// </summary>
/// <param name="targets">Expected output, one entry per output neuron.</param>
private void BackPropagate(params double[] targets)
{
    var i = 0;
    OutputLayer.ForEach(a => a.CalculateGradient(targets[i++]));

    // Walk the hidden layers from last to first by index. The original
    // code called HiddenLayers.Reverse() before and after, which mutates
    // the shared list in place — if anything threw between the two
    // Reverse() calls the network's layer order was permanently
    // corrupted. Index iteration visits the layers in the same order
    // with no mutation.
    for (var layer = HiddenLayers.Count - 1; layer >= 0; layer--)
        HiddenLayers[layer].ForEach(n => n.CalculateGradient());
    for (var layer = HiddenLayers.Count - 1; layer >= 0; layer--)
        HiddenLayers[layer].ForEach(n => n.UpdateWeights(LearnRate, Momentum));

    OutputLayer.ForEach(a => a.UpdateWeights(LearnRate, Momentum));
}
/// <summary>
/// Runs a forward pass on <paramref name="inputs"/> and returns the
/// resulting output-layer values as a new array.
/// </summary>
public double[] Compute(params double[] inputs)
{
    ForwardPropagate(inputs);
    var outputs = new double[OutputLayer.Count];
    for (var i = 0; i < outputs.Length; i++)
        outputs[i] = OutputLayer[i].Value;
    return outputs;
}
/// <summary>
/// Total absolute error of the last forward pass: sum over output neurons
/// of |target - value|. Assumes one target per output neuron.
/// </summary>
private double CalculateError(params double[] targets)
{
    var total = 0.0;
    for (var i = 0; i < OutputLayer.Count; i++)
        total += Math.Abs(OutputLayer[i].CalculateError(targets[i]));
    return total;
}
/// <summary>
/// Uniformly distributed random value in (-1, 1); used to initialise
/// neuron biases and synapse weights.
/// </summary>
public static double GetRandom()
{
    return Random.NextDouble() * 2 - 1;
}
/// <summary>
/// A single neuron: holds its bias, the synapses feeding into and out of
/// it, the value from the last forward pass and the gradient from the
/// last backward pass.
/// </summary>
public class Neuron
{
public Guid Id { get; set; }
// Synapses whose OutputNeuron is this neuron (incoming connections).
public List<Synapse> InputSynapses { get; set; }
// Synapses whose InputNeuron is this neuron (outgoing connections).
public List<Synapse> OutputSynapses { get; set; }
public double Bias { get; set; }
// Previous bias update, kept for the momentum term in UpdateWeights.
public double BiasDelta { get; set; }
public double Gradient { get; set; }
public double Value { get; set; }
/// <summary>Creates an unconnected neuron with a random bias in (-1, 1).</summary>
public Neuron()
{
Id = Guid.NewGuid();
InputSynapses = new List<Synapse>();
OutputSynapses = new List<Synapse>();
Bias = Network.GetRandom();
}
/// <summary>
/// Creates a neuron fully connected to <paramref name="inputNeurons"/>:
/// one randomly-weighted synapse per input neuron, registered on both
/// endpoints.
/// </summary>
public Neuron(IEnumerable<Neuron> inputNeurons) : this()
{
foreach (var inputNeuron in inputNeurons)
{
var synapse = new Synapse(inputNeuron, this);
inputNeuron.OutputSynapses.Add(synapse);
InputSynapses.Add(synapse);
}
}
/// <summary>
/// Forward pass: Value = sigmoid(weighted sum of input values + bias).
/// Input neurons' values must already be up to date.
/// </summary>
public virtual double CalculateValue()
{
return Value = Sigmoid.Output(InputSynapses.Sum(a => a.Weight * a.InputNeuron.Value) + Bias);
}
/// <summary>Signed error (target - value) for an output neuron.</summary>
public double CalculateError(double target)
{
return target - Value;
}
/// <summary>
/// Backward pass. With a target (output neurons) the gradient is
/// error * sigmoid'(Value); without one (hidden neurons) it is the
/// weighted sum of downstream gradients * sigmoid'(Value), so callers
/// must compute downstream gradients first.
/// </summary>
public double CalculateGradient(double? target = null)
{
if (target == null)
return Gradient = OutputSynapses.Sum(a => a.OutputNeuron.Gradient * a.Weight) * Sigmoid.Derivative(Value);
return Gradient = CalculateError(target.Value) * Sigmoid.Derivative(Value);
}
/// <summary>
/// Gradient-descent-with-momentum update of the bias and of every
/// incoming synapse weight. Each previous delta is captured before
/// being overwritten so the momentum term uses the prior step's delta —
/// the statement order here is load-bearing.
/// </summary>
public void UpdateWeights(double learnRate, double momentum)
{
var prevDelta = BiasDelta;
BiasDelta = learnRate * Gradient;
Bias += BiasDelta + momentum * prevDelta;
foreach (var synapse in InputSynapses)
{
prevDelta = synapse.WeightDelta;
synapse.WeightDelta = learnRate * Gradient * synapse.InputNeuron.Value;
synapse.Weight += synapse.WeightDelta + momentum * prevDelta;
}
}
}
/// <summary>
/// A weighted, directed connection from one neuron to another, plus the
/// previous weight update kept for the momentum term.
/// </summary>
public class Synapse
{
    public Guid Id { get; set; }
    public Neuron InputNeuron { get; set; }
    public Neuron OutputNeuron { get; set; }
    public double Weight { get; set; }
    public double WeightDelta { get; set; }

    /// <summary>Parameterless constructor; leaves every member at its default.</summary>
    public Synapse() { }

    /// <summary>
    /// Connects <paramref name="inputNeuron"/> to
    /// <paramref name="outputNeuron"/> with a random weight in (-1, 1).
    /// </summary>
    public Synapse(Neuron inputNeuron, Neuron outputNeuron)
    {
        InputNeuron = inputNeuron;
        OutputNeuron = outputNeuron;
        Id = Guid.NewGuid();
        Weight = Network.GetRandom();
    }
}
/// <summary>
/// One training sample: an input vector and the expected output vector.
/// </summary>
public class DataSet
{
    public double[] Values { get; set; }
    public double[] Targets { get; set; }

    /// <summary>Wraps the given input and target arrays (no copy is made).</summary>
    public DataSet(double[] values, double[] targets)
    {
        Targets = targets;
        Values = values;
    }
}
/// <summary>
/// A list of training samples with a two-array Add overload so data can
/// be written with collection-initializer syntax.
/// </summary>
public class DataSetList : List<DataSet>
{
    /// <summary>Adds a sample built from the given input and target vectors.</summary>
    public void Add(double[] values, double[] targets)
    {
        Add(new DataSet(values, targets));
    }
}
/// <summary>
/// Logistic activation helpers.
/// </summary>
public static class Sigmoid
{
    /// <summary>
    /// Logistic function 1 / (1 + e^-x), clamped to exactly 0 or 1 when
    /// |x| > 45, where the true value is indistinguishable from the
    /// saturation limit in double precision.
    /// </summary>
    public static double Output(double x)
    {
        if (x < -45.0)
            return 0.0;
        if (x > 45.0)
            return 1.0;
        return 1.0 / (1.0 + Math.Exp(-x));
    }

    /// <summary>
    /// Derivative expressed in terms of the sigmoid's *output*:
    /// s'(z) = s(z) * (1 - s(z)); pass the already-activated value.
    /// </summary>
    public static double Derivative(double x)
    {
        return x * (1 - x);
    }
}
/// <summary>
/// Selects how training terminates: after a fixed number of epochs or
/// once a minimum-error threshold is reached. NOTE(review): not
/// referenced by the visible code — presumably used by callers choosing
/// between the two Train overloads; verify against call sites.
/// </summary>
public enum TrainingType
{
Epoch,
MinimumError
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment