@adrianseeley
Created May 4, 2014 16:36
No really, there's only transfer, no activator? Must be drunk...
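What follows is a small WinForms experiment: a layered network of raw weighted sums (no activation function, as the description admits) fit to a generated curve by random single-weight hill climbing. Each repaint runs one mutation step and draws the target data (red), the current estimate (blue), and the best estimate so far (green).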
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace Sinus
{
    public partial class Sinus : Form
    {
        public static Random R = new Random();
        public const int WINDOW_SIZE = 2;
        public const int HIDDEN_LAYERS = 1;
        public const int HIDDEN_NEURONS = 10;
        public List<float> data = new List<float>();
        public List<float> esti = new List<float>();
        public List<float> best_esti = new List<float>();
        public float best_fitness = float.MaxValue;
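        // each gnn[layer][neuron] is a flat list of floats: one weight per neuron in
        // the previous layer, then a bias, then the neuron's most recent output
        // (input-layer neurons hold only the single output float)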
        public List<List<List<float>>> gnn = new List<List<List<float>>>();
        public Sinus()
        {
            InitializeComponent();
            // create 10,000 data points (a tan-based chirp, despite the class name Sinus)
            for (int d = 0; d < 10000; d++)
                data.Add((float)(Math.Tan((float)d * (1 / (1 + ((float)d / 300)))) + 1) / 2);
            //data.Add((float)Math.Tan((float)d / 20));
            //data.Add(d);
            // normalize the points into [0, 1]
            Normalize(ref data);
            // create input layer
            List<List<float>> input_layer = new List<List<float>>();
            // create an input neuron for each window slot
            for (int input = 0; input < WINDOW_SIZE; input++)
            {
                // create an input neuron for this input
                List<float> input_neuron = new List<float>();
                // add a float for output
                input_neuron.Add(0);
                // add input neuron to input layer
                input_layer.Add(input_neuron);
            }
            // add input layer to gnn
            gnn.Add(input_layer);
            // create hidden layers
            for (int layer = 0; layer < HIDDEN_LAYERS; layer++)
            {
                // create hidden layer
                List<List<float>> hidden_layer = new List<List<float>>();
                // create hidden neurons
                for (int neuron = 0; neuron < HIDDEN_NEURONS; neuron++)
                {
                    // create hidden neuron
                    List<float> hidden_neuron = new List<float>();
                    // add floats for all weights plus 2 for bias and output
                    for (int f = 0; f < gnn[gnn.Count - 1].Count + 2; f++)
                        hidden_neuron.Add(1);
                    // add hidden neuron to hidden layer
                    hidden_layer.Add(hidden_neuron);
                }
                // add hidden layer to gnn
                gnn.Add(hidden_layer);
            }
            // create output layer
            List<List<float>> output_layer = new List<List<float>>();
            // create the single output neuron
            List<float> output_neuron = new List<float>();
            // add floats for all weights plus 2 for bias and output
            for (int f = 0; f < gnn[gnn.Count - 1].Count + 2; f++)
                output_neuron.Add(1);
            // add output neuron to output layer
            output_layer.Add(output_neuron);
            // add output layer to gnn
            gnn.Add(output_layer);
            // GO GO GO: kick off the first paint, which starts the training loop
            Invalidate();
        }
        private void Normalize(ref List<float> Floats)
        {
            float max = Floats[0];
            float min = Floats[0];
            for (int f = 1; f < Floats.Count; f++)
            {
                if (Floats[f] > max) max = Floats[f];
                if (Floats[f] < min) min = Floats[f];
            }
            float rng = max - min;
            for (int f = 0; f < Floats.Count; f++)
                Floats[f] = (Floats[f] - min) / rng;
        }
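        // forward pass: each neuron's output is its bias plus the weighted sum of the
        // previous layer's outputs; note there is no activation function anywhere, so
        // the whole network collapses to a single affine map of the input window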
        private float Run(List<float> Inputs)
        {
            // provide inputs
            for (int i = 0; i < Inputs.Count; i++)
                gnn[0][i][0] = Inputs[i];
            // iterate layers after input layer
            for (int layer = 1; layer < gnn.Count; layer++)
            {
                // iterate neurons in layer
                for (int neuron = 0; neuron < gnn[layer].Count; neuron++)
                {
                    // reset neuron output to bias
                    gnn[layer][neuron][gnn[layer][neuron].Count - 1] = gnn[layer][neuron][gnn[layer][neuron].Count - 2];
                    // transfer inputs by weights
                    for (int transfer = 0; transfer < gnn[layer - 1].Count; transfer++)
                    {
                        // add weighted output of previous-layer neuron to output of this neuron
                        gnn[layer][neuron][gnn[layer][neuron].Count - 1] +=
                            gnn[layer - 1][transfer][gnn[layer - 1][transfer].Count - 1] * gnn[layer][neuron][transfer];
                    }
                }
            }
            // return the single output float
            return gnn[gnn.Count - 1][0][gnn[gnn.Count - 1][0].Count - 1];
        }
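        // one hill-climbing step: mutate a single random weight or bias, re-run the
        // network over every window in the data, and keep the mutation only if the
        // total absolute error improves; otherwise revert it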
        private void Fitness()
        {
            // choose a mutation layer (never the input layer)
            int mutation_layer = R.Next(1, gnn.Count);
            // choose a mutation neuron
            int mutation_neuron = R.Next(gnn[mutation_layer].Count);
            // choose a weight or bias (the last float is the output slot, so exclude it)
            int mutation_float = R.Next(gnn[mutation_layer][mutation_neuron].Count - 1);
            // store old value
            float old_value = gnn[mutation_layer][mutation_neuron][mutation_float];
            // mutate value
            gnn[mutation_layer][mutation_neuron][mutation_float] = (float)R.NextDouble();
            // make new esti list
            esti = new List<float>();
            // pad the start of the estimate list with 0.5 for the points that have no full window yet
            for (int i = 0; i < WINDOW_SIZE; i++)
                esti.Add(0.5f);
            // create a register for the total error
            float current_fitness = 0;
            // iterate through all runnable points
            for (int i = WINDOW_SIZE; i < data.Count; i++)
            {
                // run the current window through the NN
                float output = Run(data.GetRange(i - WINDOW_SIZE, WINDOW_SIZE));
                // add the output to the estimated points
                esti.Add(output);
                // calculate the error and add it to the total error
                current_fitness += Math.Abs(output - data[i]);
            }
            // if we have a better fitness
            if (current_fitness < best_fitness)
            {
                // store the new fitness value
                best_fitness = current_fitness;
                // store the best estimate
                best_esti = esti;
            }
            // otherwise we have a worse or equal fitness
            else
            {
                // revert the mutation to the old value
                gnn[mutation_layer][mutation_neuron][mutation_float] = old_value;
            }
        }
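        // draw a normalized series as a polyline, scaling x across the clip width
        // and y across the clip height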
        private void Line(Rectangle Clip, Graphics G, List<float> NormalizedData, Pen Pen)
        {
            for (int d = 1; d < NormalizedData.Count; d++)
                G.DrawLine(Pen,
                    ((float)(d - 1) / (float)NormalizedData.Count) * (float)Clip.Width,
                    NormalizedData[d - 1] * (float)Clip.Height,
                    ((float)d / (float)NormalizedData.Count) * (float)Clip.Width,
                    NormalizedData[d] * (float)Clip.Height);
        }
        private void Sinus_Paint(object sender, PaintEventArgs e)
        {
            // one training step per frame
            Fitness();
            e.Graphics.Clear(Color.Black);
            Line(e.ClipRectangle, e.Graphics, data, Pens.Red);
            Line(e.ClipRectangle, e.Graphics, esti, Pens.Blue);
            Line(e.ClipRectangle, e.Graphics, best_esti, Pens.Green);
            e.Graphics.DrawString("fit: " + best_fitness, SystemFonts.DefaultFont, Brushes.White, 20, 20);
            // request the next frame so training runs continuously
            Invalidate();
        }
    }
}
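As the description jokes, there is only the weighted transfer and no activator, so the stacked layers compose into one affine map and the network can only ever learn a linear combination of the two window values plus a bias. A minimal sketch of where a squashing function could go, using a hypothetical Transfer helper that is not part of the gist:

// a minimal sketch, not the gist's code: a logistic sigmoid that could be
// applied after each neuron's weighted sum to make the hidden layers useful
private static float Transfer(float x)
{
    // squashes the affine sum into (0, 1), matching the normalized data range
    return (float)(1.0 / (1.0 + Math.Exp(-x)));
}

// inside Run, after the transfer loop for each non-input neuron, one could then apply:
// gnn[layer][neuron][gnn[layer][neuron].Count - 1] =
//     Transfer(gnn[layer][neuron][gnn[layer][neuron].Count - 1]);

With a nonlinearity in place, the random mutation search would at least have a nonlinear function family to explore instead of refitting the same straight line.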