Skip to content

Instantly share code, notes, and snippets.

@strolling-coder
Created March 31, 2020 17:33
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save strolling-coder/ae0469b67164627f79d22a5fdc04a0da to your computer and use it in GitHub Desktop.
Save strolling-coder/ae0469b67164627f79d22a5fdc04a0da to your computer and use it in GitHub Desktop.
Shows the data of an INDArray whose Shannon entropy, as computed by ND4J, comes out negative (which is impossible for a true Shannon entropy). Deeplearning4j version 1.0.0-beta5.
package wld.nd4j.tests;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
/**
 * Minimal reproduction case: a fixed 6x6 matrix for which ND4J
 * (1.0.0-beta5) reports a negative Shannon entropy, while Octave
 * computes 5.11436944588675 for the same data.
 */
public class TestINDArraryEntropy {

    /** Entry point: builds the matrix and prints both entropy values. */
    public static void main(String[] args) {
        new TestINDArraryEntropy().execute();
    }

    /**
     * Prints the Shannon and log entropy of the fixed test matrix.
     * The Shannon value printed here should match Octave's
     * 5.11436944588675 but comes out negative instead.
     */
    public void execute() {
        System.out.println("Test INDArray entropy function");
        final INDArray matrix = buildMatrix();
        System.out.println("Shannon Entropy " + matrix.shannonEntropyNumber());
        System.out.println("Log Entropy " + matrix.logEntropyNumber());
    }

    /** Builds the fixed 6x6 input that triggers the negative entropy. */
    private INDArray buildMatrix() {
        final float[][] values = {
            {1.0811f, 1.1187f, 0.9877f, 0.9078f, 0.8864f, 0.8063f},
            {1.1656f, 1.2147f, 1.1130f, 1.0366f, 1.0178f, 0.9228f},
            {1.0634f, 1.1340f, 1.0338f, 0.9852f, 0.9977f, 0.9189f},
            {0.9837f, 1.0720f, 1.0057f, 0.9877f, 1.0316f, 0.9632f},
            {0.9441f, 1.0532f, 1.0149f, 1.0310f, 1.0857f, 1.0469f},
            {0.8091f, 0.9085f, 0.9013f, 0.9428f, 1.0292f, 1.0120f}
        };
        return Nd4j.create(values);
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment