@edtechd
Created Mar 23, 2016
Neural Network
static void Main()
{
    // Network shape implied by the weight arrays: 4 inputs -> 3 -> 3 -> 1 output
    double[] data = { 0.5, 0.5, 0.5, 0.5 };
    double[] data2 = { 0.3, 0.3, 0.5, 0.5 };
    double[] weights1 = { 0.1, 0.2, 0.3, 0.15,
                          0.05, 0.21, 0.4, 0.1,
                          0.43, 0.22, 0.20, 0.3 };
    double[] weights2 = { 0.1, 0.2, 0.3,
                          0.08, 0.09, 0.3,
                          0.04, 0.18, 0.1 };
    double[] weights3 = { 0.1, 0.2, 0.3 };

    Perceptron p = new Perceptron(3, 3);
    p.Initialize(weights1, weights2, weights3);

    p.TrainSeries(data, 0);
    p.TrainSeries(data2, 1);

    double r = p.Predict(data);
    // I get r = 1.00267 but it should be 0
}
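
// A plausible explanation for the r = 1.00267 above: each TrainSeries call runs up to
// 100000 updates on a single pattern, so the second call (data2 -> 1) largely overwrites
// whatever was learned for (data -> 0), and the two inputs are very similar. A common
// remedy is to interleave the patterns instead of training each one to convergence in turn.
// The helper below is only a hypothetical sketch of that idea, reusing the Perceptron API as-is.
static void TrainInterleaved(Perceptron p, double[][] inputs, double[] targets, int epochs = 10000)
{
    for (int e = 0; e < epochs; e++)
    {
        // one update per pattern per pass, so no single pattern dominates
        for (int k = 0; k < inputs.Length; k++)
        {
            p.Train(inputs[k], targets[k]);
        }
    }
}
// Example usage (hypothetical): TrainInterleaved(p, new[] { data, data2 }, new[] { 0.0, 1.0 });
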
public class Perceptron
{
    public int size1 = 0;       // number of neurons in hidden layer 1
    public int size2 = 0;       // number of neurons in hidden layer 2
    public double[] weights1;   // layer 1 weights, row-major: size1 x inputLength
    public double[] weights2;   // layer 2 weights, row-major: size2 x size1
    public double[] weights3;   // output weights, one per layer 2 neuron
    public double[] values1;    // activated outputs of layer 1
    public double[] values2;    // activated outputs of layer 2
    public double[] deltas1;    // back-propagated errors for layer 1
    public double[] deltas2;    // back-propagated errors for layer 2

    public Perceptron(int size_layer1, int size_layer2)
    {
        size1 = size_layer1;
        size2 = size_layer2;
        values1 = new double[size1];
        values2 = new double[size2];
        deltas1 = new double[size1];
        deltas2 = new double[size2];
    }

    public void Initialize(double[] weights_layer1, double[] weights_layer2, double[] weights_layer3)
    {
        weights1 = weights_layer1;
        weights2 = weights_layer2;
        weights3 = weights_layer3;
    }
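
    // One update on a single (input, target) pair. Every weight is nudged by
    // eta * error * v * (1 - v) * input, where v * (1 - v) is the derivative of
    // the sigmoid for an already-activated value v.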
    public void Train(double[] data, double expectedResult)
    {
        double result = Predict(data);
        // Note: the sigmoid is applied to both the target and the raw prediction here
        double delta = Perceptron.ActivationFunction(expectedResult) - Perceptron.ActivationFunction(result);
        double eta = 20;   // learning rate

        // Back-propagate the output error to layer 2
        for (int i = 0; i < size2; i++)
        {
            deltas2[i] = delta * weights3[i];
        }

        // Back-propagate the layer 2 errors to layer 1
        for (int i = 0; i < size1; i++)
        {
            deltas1[i] = 0;
            for (int j = 0; j < size2; j++)
            {
                deltas1[i] += deltas2[j] * weights2[j * size1 + i];
            }
        }

        // Correct layer 1 weights
        for (int i = 0; i < data.Length; i++)
        {
            for (int j = 0; j < size1; j++)
            {
                weights1[j * data.Length + i] += eta * deltas1[j] * values1[j] * (1 - values1[j]) * data[i];
            }
        }

        // Correct layer 2 weights
        for (int i = 0; i < size1; i++)
        {
            for (int j = 0; j < size2; j++)
            {
                weights2[j * size1 + i] += eta * deltas2[j] * values2[j] * (1 - values2[j]) * values1[i];
            }
        }

        // Correct output weights
        double resultA = Perceptron.ActivationFunction(result);
        for (int i = 0; i < size2; i++)
        {
            weights3[i] += eta * delta * resultA * (1 - resultA) * values2[i];
        }
    }
    // Repeatedly trains on one pattern until the error either drops below maxDelta
    // or stops shrinking; returns the last prediction for that pattern.
    public double TrainSeries(double[] data, double expectedResult, int iterations = 100000, double maxDelta = 0.0001)
    {
        double r = 0;
        double prevDelta = 100, newDelta = 100;
        for (int i = 0; i < iterations; i++)
        {
            this.Train(data, expectedResult);
            r = this.Predict(data);
            newDelta = Math.Abs(r - expectedResult);
            if (newDelta > prevDelta && i > 1)
            {
                Console.WriteLine("NN doesn't converge");
                break;
            }
            if (newDelta < maxDelta)
            {
                Console.WriteLine("Success");
                break;
            }
            prevDelta = newDelta;
            Console.WriteLine("Int. result " + r.ToString());
        }
        return r;
    }
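
    // Forward pass: two sigmoid-activated layers followed by a plain weighted sum
    // at the output (no activation is applied to the returned value).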
    public double Predict(double[] data)
    {
        // Layer 1 values
        for (int i = 0; i < size1; i++)
        {
            values1[i] = 0;
            for (int j = 0; j < data.Length; j++)
            {
                values1[i] += data[j] * weights1[i * data.Length + j];
            }
            values1[i] = Perceptron.ActivationFunction(values1[i]);
        }

        // Layer 2 values
        for (int i = 0; i < size2; i++)
        {
            values2[i] = 0;
            for (int j = 0; j < size1; j++)
            {
                values2[i] += values1[j] * weights2[i * size1 + j];
            }
            values2[i] = Perceptron.ActivationFunction(values2[i]);
        }

        // Output: weighted sum of the layer 2 values
        double result = 0;
        for (int i = 0; i < size2; i++)
        {
            result += values2[i] * weights3[i];
        }
        return result;
    }
    // Logistic sigmoid: 1 / (1 + e^-x)
    public static double ActivationFunction(double value)
    {
        return 1 / (1 + Math.Exp(-value));
    }
}