@RealTrisT
Last active May 22, 2018 22:58
#include <stdio.h>
#include <stdlib.h>

/* Precision: uncomment exactly one of the following defines to select the floating-point precision. */
/*#define PRECISION_SINGLE*/
#define PRECISION_DOUBLE

#if defined PRECISION_SINGLE
    #define PRECISION_S true
    #define PRECISION_D false
    typedef float fp;
#elif defined PRECISION_DOUBLE
    #define PRECISION_S false
    #define PRECISION_D true
    typedef double fp;
#endif
enum NeuronType{
    NType_Linear,
    NType_BinaryThreshold,
    NType_Rectifier,
    NType_Sigmoid,
    NType_StochasticBinary
};
class Neuron{
public:
    Neuron(){}
    virtual ~Neuron(){}
    //identity activation; input-layer neurons have no incoming connections, main() writes their outputValue directly
    virtual fp CalculateOutput(){return outputValue = inputValue;}
    fp inputValue;
    fp outputValue;
};
class NeuronHiddenLayer : public Neuron{
public:
    struct Connection{
        Neuron* PrevLayerNeuronIndex; //pointer to the source neuron in the previous layer (a pointer, despite the name)
        fp Weight;
    };
    const unsigned int connectionAmount;
    Connection* connections;
private:
    bool isConnectionListAllocated;
public:
    NeuronHiddenLayer(unsigned int ConAmount, Connection* Cons = 0) : connectionAmount(ConAmount), connections(Cons){
        isConnectionListAllocated = (connections == 0);
        if(isConnectionListAllocated){
            connections = (Connection*)malloc(sizeof(Connection) * connectionAmount);
        }
    }
    ~NeuronHiddenLayer(){
        if(isConnectionListAllocated){
            free((void*)connections);
        }
    }
    fp CalculateInput(){ //weighted sum of the previous layer's outputs
        inputValue = 0;
        for (unsigned int i = 0; i < connectionAmount; ++i){
            inputValue += connections[i].PrevLayerNeuronIndex->outputValue * connections[i].Weight;
        }
        return inputValue;
    }
};
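/*
A quick worked example of CalculateInput (illustrative numbers only, not part of the network defined below):
with two previous-layer neurons whose outputValues are 1 and 0, connected with weights 0.5 and 2,
the weighted sum is 1*0.5 + 0*2 = 0.5, which becomes this neuron's inputValue.
*/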
class Neuron_Linear : public NeuronHiddenLayer{
public:
    Neuron_Linear(unsigned int ConAmount, Connection* Cons, fp M = 1, fp B = 1) : NeuronHiddenLayer(ConAmount, Cons), m(M), b(B){}
    fp m; //slope
    fp b; //bias
    fp CalculateOutput(){return outputValue = inputValue*m + b;}
};
class Neuron_BinaryThreshold : public NeuronHiddenLayer{
public:
    Neuron_BinaryThreshold(unsigned int ConAmount, Connection* Cons, fp threshold = 1) : NeuronHiddenLayer(ConAmount, Cons), Threshold(threshold){}
    fp Threshold;
    fp CalculateOutput(){return outputValue = (fp)(inputValue >= Threshold);} //outputs 1 if the weighted input reaches the threshold, else 0
};
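/*
Illustrative sketch (values chosen here for explanation, not used by the network below): a binary
threshold neuron with two incoming weights of 1 acts as a logic gate on 0/1 inputs.
With Threshold = 1.5 it fires only when both inputs are 1 (AND); with Threshold = 0.5 it fires
when at least one input is 1 (OR). The layers defined below combine such gates into XOR.
*/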
Neuron input[2] = { //2 input neurons
    Neuron(), Neuron()
};
NeuronHiddenLayer::Connection inputToLayer1Neuron0[2] = { //2 connections between the input layer and the first neuron of the first hidden layer
    {&input[0], 1}, {&input[1], 1}
};
NeuronHiddenLayer::Connection inputToLayer1Neuron1[2] = { //2 connections between the input layer and the second neuron of the first hidden layer
    {&input[0], -1}, {&input[1], -1}
};
Neuron_BinaryThreshold layer1[2] = { //first hidden layer
    Neuron_BinaryThreshold(2, inputToLayer1Neuron0, 0.5),
    Neuron_BinaryThreshold(2, inputToLayer1Neuron1, -1.5)
};
NeuronHiddenLayer::Connection inputToLayer2[2] = { //connections between the first and second hidden layers
    {&layer1[0], 1},
    {&layer1[1], 1}
};
Neuron_BinaryThreshold layer2[1] = { //second hidden layer (single output neuron)
    Neuron_BinaryThreshold(2, inputToLayer2, 1.5)
};
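/*
Why these weights and thresholds compute XOR (worked truth table for binary inputs A, B):
layer1[0]: A*1  + B*1  >=  0.5  -> fires when A or B is 1          (OR)
layer1[1]: A*-1 + B*-1 >= -1.5  -> fires unless both A and B are 1 (NAND)
layer2[0]: OR*1 + NAND*1 >= 1.5 -> fires only when both fire       (AND)

A B | OR NAND | XOR
0 0 |  0   1  |  0
0 1 |  1   1  |  1
1 0 |  1   1  |  1
1 1 |  1   0  |  0
*/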
int main(int argc, char const *argv[]){
    printf("xor (input A and B, will output A^B):\n");
    while(1 != 2){
        //read the two inputs directly into the input neurons' output values
        if(scanf((PRECISION_D)?"%lf %lf":"%f %f", &input[0].outputValue, &input[1].outputValue) != 2){
            break; //stop on EOF or malformed input
        }
        //forward pass: first hidden layer, then the output neuron
        layer1[0].CalculateInput();
        layer1[0].CalculateOutput();
        layer1[1].CalculateInput();
        layer1[1].CalculateOutput();
        layer2[0].CalculateInput();
        layer2[0].CalculateOutput();
        printf("%d\n", (int)layer2[0].outputValue);
    }
    return 0;
}
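/*
Example session (expected behavior given the forward pass above; shown here as a sketch, your
console layout may differ):
xor (input A and B, will output A^B):
0 1
1
1 1
0
*/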