@Raja0sama
Created March 15, 2022 12:22
I am stuck
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
public class App {
    // Number of neurons in the hidden layer (despite the name, INPUT_NEURONS sizes the
    // hidden layer; INPUTS_LENGTH is the number of input values per sample)
    private static final int INPUT_NEURONS = 4;
    private static final int INPUTS_LENGTH = 64;
    // Number of output neurons (one per digit class)
    private static final int OUTPUT_LENGTH = 10;
    // Global accuracy and success counters
    private static double accuracy = 0.0;
    private static int success = 0;
    /**
     * Neurons in the hidden layer.
     */
    private static double[] hiddenNeuron = new double[INPUT_NEURONS];
    /**
     * Neurons in the output layer.
     */
    private static double[] outputNeuron = new double[OUTPUT_LENGTH];
    /**
     * Weights between input and hidden neurons, as a 2D array.
     */
    private static double[][] w_b_inputAndHidden_N = new double[INPUT_NEURONS][INPUTS_LENGTH];
    /**
     * Weights between hidden and output neurons.
     */
    private static double[][] w_b_hiddenAndOutput_N = new double[OUTPUT_LENGTH][INPUT_NEURONS];
    /**
     * Input values read from the current CSV row.
     */
    private static double[] input = new double[INPUTS_LENGTH];
    public static void main(String[] args) throws Exception {
        System.out.println("Hello, World!");
        // Initialize the weights
        initializeWeights();
        // Reading the training data set from file.csv
        String fileName = "file.csv";
        String line = "";
        String cvsSplitBy = ",";
        int i = 0;
        int count = 0;
        // Repeat the learning algorithm for a fixed number of epochs
        while (i < 50) {
            // Open the file reader
            BufferedReader br = new BufferedReader(new FileReader(fileName));
            // Loop over each line in the file
            while ((line = br.readLine()) != null) {
                // Split the line by comma
                String[] data = line.split(cvsSplitBy);
                // Read the first INPUTS_LENGTH values into the input array
                for (int j = 0; j < INPUTS_LENGTH; j++) {
                    input[j] = Double.parseDouble(data[j]);
                }
                // Target class label (last column)
                double target = Double.parseDouble(data[INPUTS_LENGTH]);
                // One-hot target vector of length OUTPUT_LENGTH
                double[] hiddenOutput = new double[OUTPUT_LENGTH];
                for (int j = 0; j < OUTPUT_LENGTH; j++) {
                    hiddenOutput[j] = 0;
                }
                // Set the index corresponding to the target class to 1
                hiddenOutput[(int) target] = 1;
                // Forward pass
                feedForward(input, hiddenOutput);
                // If the prediction is wrong, train; otherwise count a success
                if (testError(hiddenOutput)) {
                    train(input, hiddenOutput, target);
                } else {
                    success++;
                }
                count++;
            }
            br.close();
            i++;
            // Accuracy in percent = successes / total number of samples
            // (use 100.0 so the division is not truncated to an integer)
            accuracy = (success * 100.0) / count;
            System.out.println("acc : " + accuracy);
            success = 0;
            count = 0;
        }
        // Call the testing method on the held-out file
        Testing(count);
    }
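    // Editor note: as read above, each row of file.csv (and of file2.csv in Testing)
    // is assumed to hold 64 comma-separated feature values followed by a digit label
    // 0-9 in the last column; the label indexes the 10-element one-hot target vector.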
    /**
     * Train method: one step of weight updates for the current sample.
     */
    public static void train(double[] input, double[] hiddenOutput, double target) {
        // Error at the output layer: (target - actual) for each output neuron
        double[] outputError = new double[OUTPUT_LENGTH];
        for (int i = 0; i < OUTPUT_LENGTH; i++) {
            outputError[i] = hiddenOutput[i] - outputNeuron[i];
        }
        double[] errorHidden = new double[INPUT_NEURONS];
        // For each neuron in the hidden layer (j)
        for (int j = 0; j < INPUT_NEURONS; j++) {
            // Back-propagate the output error through the hidden-to-output weights
            double hiddenError = 0;
            for (int i = 0; i < OUTPUT_LENGTH; i++) {
                hiddenError += w_b_hiddenAndOutput_N[i][j] * outputError[i];
            }
            // Gradient for the hidden neuron (sigmoid derivative)
            errorHidden[j] = hiddenError * hiddenNeuron[j] * (1 - hiddenNeuron[j]);
            // Update the input-to-hidden weights; since outputError is (target - actual),
            // the correction is added rather than subtracted
            for (int i = 0; i < INPUTS_LENGTH; i++) {
                w_b_inputAndHidden_N[j][i] += 0.001 * errorHidden[j] * input[i];
            }
            // Nudge the j-th diagonal hidden-to-output weight
            // (there is no separate bias array in this model)
            w_b_hiddenAndOutput_N[j][j] += 0.001 * errorHidden[j];
        }
        // Two nested loops to adjust the weights of the hidden layer (kept from the
        // original, currently disabled):
        // for (int i = 0; i < INPUTS_LENGTH; i++) {
        //     for (int j = 0; j < INPUT_NEURONS; j++) {
        //         w_b_inputAndHidden_N[j][i] -= 0.1 * errorHidden[j] * input[i];
        //     }
        // }
        // Two nested loops to adjust the weights of the output layer (disabled):
        // for (int i = 0; i < OUTPUT_LENGTH; i++) {
        //     for (int j = 0; j < INPUT_NEURONS; j++) {
        //         w_b_hiddenAndOutput_N[i][j] -= 0.1 * outputError[i] * hiddenNeuron[j];
        //     }
        // }
    }
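    /**
     * Editor sketch (assumption, not called anywhere): the disabled block at the end
     * of train() suggests the hidden-to-output weights were also meant to get a full
     * delta-rule update. Assuming outputError[i] holds (target - actual) as computed
     * in train(), a minimal version of that update could look like this.
     */
    private static void updateOutputWeightsSketch(double[] outputError) {
        for (int i = 0; i < OUTPUT_LENGTH; i++) {
            for (int j = 0; j < INPUT_NEURONS; j++) {
                // Move each weight in the direction that shrinks (target - actual)
                w_b_hiddenAndOutput_N[i][j] += 0.001 * outputError[i] * hiddenNeuron[j];
            }
        }
    }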
    /**
     * Testing method: evaluates the network on file2.csv.
     *
     * @throws IOException
     * @throws NumberFormatException
     */
    public static void Testing(int count) throws NumberFormatException, IOException {
        // Open the file reader and read each row of file2.csv
        String fileName = "file2.csv";
        String line = "";
        String cvsSplitBy = ",";
        BufferedReader br = new BufferedReader(new FileReader(fileName));
        // Loop over each line in the file
        while ((line = br.readLine()) != null) {
            // Split the line by comma
            String[] data = line.split(cvsSplitBy);
            // Read the first INPUTS_LENGTH values into the input array
            for (int j = 0; j < INPUTS_LENGTH; j++) {
                input[j] = Double.parseDouble(data[j]);
            }
            // Target class label (last column)
            double target = Double.parseDouble(data[INPUTS_LENGTH]);
            // One-hot target vector of length OUTPUT_LENGTH
            double[] hiddenOutput = new double[OUTPUT_LENGTH];
            for (int j = 0; j < OUTPUT_LENGTH; j++) {
                hiddenOutput[j] = 0;
            }
            // Set the index corresponding to the target class to 1
            hiddenOutput[(int) target] = 1;
            // Forward pass
            feedForward(input, hiddenOutput);
            // If the prediction is wrong, keep training; otherwise count a success
            if (testError(hiddenOutput)) {
                train(input, hiddenOutput, target);
            } else {
                success++;
            }
            count++;
        }
        br.close();
        // Accuracy in percent
        accuracy = (success * 100.0) / count;
        System.out.println("Accuracy: " + accuracy);
        success = 0;
    }
    /**
     * Test error method: returns true if the network's output disagrees with the
     * one-hot target vector (i.e. the sample still needs training).
     *
     * @param target the one-hot target vector
     * @return true if any output neuron differs from the target
     */
    private static boolean testError(double[] target) {
        for (int i = 0; i < OUTPUT_LENGTH; i++) {
            if (target[i] != outputNeuron[i]) {
                return true;
            }
        }
        return false;
    }
    /**
     * Feedforward function
     *
     * @param input        the input values
     * @param hiddenOutput the one-hot target vector (not used here)
     */
    public static void feedForward(double[] input, double[] hiddenOutput) {
        // Compute the output of the hidden neurons. For each neuron in the hidden layer (i)
        for (int i = 0; i < INPUT_NEURONS; i++) {
            // Weighted sum of the inputs
            double hiddenOutputValue = 0;
            for (int j = 0; j < INPUTS_LENGTH; j++) {
                hiddenOutputValue += input[j] * w_b_inputAndHidden_N[i][j];
            }
            // Apply the sigmoid activation
            hiddenNeuron[i] = sigmoid(hiddenOutputValue);
        }
        // Compute the output of the output neurons. For each neuron in the output layer (i)
        for (int i = 0; i < OUTPUT_LENGTH; i++) {
            // Weighted sum of the hidden activations
            double value = 0;
            for (int j = 0; j < INPUT_NEURONS; j++) {
                value += w_b_hiddenAndOutput_N[i][j] * hiddenNeuron[j];
            }
            // Hard threshold: outputNeuron[i] = 1 if value > 0, else 0
            if (value > 0) {
                outputNeuron[i] = 1;
            } else {
                outputNeuron[i] = 0;
            }
        }
    }
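    /**
     * Editor sketch (assumption, not called anywhere): the sigmoid() helper below is
     * never used by feedForward(), which thresholds the output layer to a hard 0 or 1.
     * If smooth output activations were wanted instead, the output loop could store
     * sigmoid values like this.
     */
    private static void feedForwardSigmoidOutputSketch() {
        for (int i = 0; i < OUTPUT_LENGTH; i++) {
            double value = 0;
            for (int j = 0; j < INPUT_NEURONS; j++) {
                value += w_b_hiddenAndOutput_N[i][j] * hiddenNeuron[j];
            }
            // Squash the weighted sum into (0, 1) instead of thresholding it
            outputNeuron[i] = sigmoid(value);
        }
    }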
    /**
     * Sigmoid function
     *
     * @param x the weighted sum
     * @return 1 / (1 + e^(-x))
     */
    public static double sigmoid(double x) {
        return 1 / (1 + Math.exp(-x));
    }
    /**
     * Initialize the weights with random values.
     */
    private static void initializeWeights() {
        // Initialize the weights between input and hidden neurons
        for (int i = 0; i < INPUT_NEURONS; i++) {
            for (int j = 0; j < INPUTS_LENGTH; j++) {
                w_b_inputAndHidden_N[i][j] = Random();
            }
        }
        // Initialize the weights between hidden and output neurons
        for (int i = 0; i < OUTPUT_LENGTH; i++) {
            for (int j = 0; j < INPUT_NEURONS; j++) {
                w_b_hiddenAndOutput_N[i][j] = Random();
            }
        }
    }
    /**
     * Random number function.
     *
     * @return a uniform random value in [min, max)
     */
    public static double Random() {
        double max = 10;
        double min = -10;
        // Math.random() is in [0, 1), so scale and shift it into [min, max);
        // the original formula produced values well outside the intended range
        return Math.random() * (max - min) + min;
    }
}