@niki9796dk
Last active May 20, 2018 10:26
import java.util.Random;

public class SimpleANN {
    double[] inputLayer;      // The values of the input layer.
    double[] hiddenLayer;     // The values of the hidden layer.
    double[] outputLayer;     // The values of the output layer.
    double[][] hiddenWeights; // The weights connecting into the hidden layer.
    double[][] outputWeights; // The weights connecting into the output layer.

    public SimpleANN(int inputSize, int hiddenSize, int outputSize) {
        // Initialize the layers as arrays of the correct sizes.
        this.inputLayer = new double[inputSize];
        this.hiddenLayer = new double[hiddenSize];
        this.outputLayer = new double[outputSize];

        // Initialize the weights.
        this.hiddenWeights = this.initializeRandomWeights(inputSize, hiddenSize);
        this.outputWeights = this.initializeRandomWeights(hiddenSize, outputSize);
    }

    public double[] evaluateInputs(double[] inputs) {
        // Copy the inputs into the input layer.
        System.arraycopy(inputs, 0, this.inputLayer, 0, this.inputLayer.length);

        // Feed forward from the input layer to the hidden layer, and then to the output layer.
        this.feedForward(inputLayer, hiddenLayer, hiddenWeights);
        this.feedForward(hiddenLayer, outputLayer, outputWeights);

        // Return the output layer.
        return this.outputLayer;
    }

    private void feedForward(double[] prevLayer, double[] nextLayer, double[][] weights) {
        // For every node in the next layer
        for (int nextNode = 0; nextNode < nextLayer.length; nextNode++) {
            nextLayer[nextNode] = 0; // Reset the value of the node before usage.

            // For every node in the previous layer
            for (int prevNode = 0; prevNode < prevLayer.length; prevNode++) {
                // Multiply the value of the previous node with its weight
                // and accumulate the result into the next node.
                double weightedValue = prevLayer[prevNode] * weights[nextNode][prevNode];
                nextLayer[nextNode] += weightedValue;
            }
        }

        // Once the values have been forwarded into the next layer,
        // we apply the activation function.
        for (int nextNode = 0; nextNode < nextLayer.length; nextNode++) {
            nextLayer[nextNode] = this.sigmoid(nextLayer[nextNode]);
        }
    }

    // The sigmoid activation function.
    private double sigmoid(double x) {
        return 1 / (1 + Math.exp(-x));
    }

    // Initializes the random weights for a layer - between -1 and 1.
    private double[][] initializeRandomWeights(int prevLayerSize, int nextLayerSize) {
        double[][] weights = new double[nextLayerSize][prevLayerSize];

        // A random number generator used to create the random weights.
        Random r = new Random();

        // For every node in the layer, we need a new list of weights
        // associated with that node.
        for (int thisNode = 0; thisNode < nextLayerSize; thisNode++) {
            // For every node in the previous layer, we need a weight
            // to connect the nodes.
            for (int prevNode = 0; prevNode < prevLayerSize; prevNode++) {
                // Creates a random weight between -1 and 1.
                weights[thisNode][prevNode] = (r.nextDouble() * 2) - 1;
            }
        }

        return weights;
    }
}
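
Below is a minimal usage sketch, not part of the original gist: it builds a 2-3-1 network and evaluates one input pair. The demo class and its name are assumptions for illustration; it is left package-private so it can sit in the same file as SimpleANN or in its own file next to it. Since the weights are random and the network is never trained, the printed value is arbitrary, but it always lies in (0, 1) because of the sigmoid.

class SimpleANNDemo {
    public static void main(String[] args) {
        // A network with 2 inputs, 3 hidden nodes and 1 output node.
        SimpleANN ann = new SimpleANN(2, 3, 1);

        // The input array must have at least as many elements as the input layer,
        // since evaluateInputs copies inputLayer.length values out of it.
        double[] output = ann.evaluateInputs(new double[]{0.5, -0.25});

        // With untrained random weights the result differs between runs.
        System.out.println("Output: " + output[0]);
    }
}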