Created
April 10, 2020 03:54
-
-
Save JT5D/fae2c8262980d0ae959a1b826fba5909 to your computer and use it in GitHub Desktop.
Calculate the entropy of a string by determining the frequency of each character, then summing each character's probability multiplied by the log base 2 of that same probability, taking the negative of the sum.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/**
 * Shannon entropy of a string, in bits per symbol.
 *
 * H = -Σ p(c) * log2(p(c)), where p(c) is the relative frequency of each
 * distinct character c in the string.
 *
 * Counts Unicode code points (via spread), so astral characters such as
 * emoji are treated as single symbols. The original divided code-point
 * counts by `str.length` (UTF-16 code units), which inflated the entropy
 * of any string containing surrogate pairs — e.g. entropy('😀😀') came out
 * as 0.5 instead of 0.
 *
 * @param {string} str - Input string; may be empty.
 * @returns {number} Entropy in bits per symbol (0 for the empty string).
 */
function entropy(str) {
  // Split into code points so the denominator matches the frequency counts.
  const chars = [...str];
  const len = chars.length;

  // Build a frequency map: character -> occurrence count.
  const frequencies = {};
  for (const c of chars) {
    frequencies[c] = (frequencies[c] ?? 0) + 1;
  }

  // Accumulate -p * log2(p) over all distinct characters.
  // Empty string: reduce over [] with seed 0 yields 0.
  return Object.values(frequencies)
    .reduce((sum, f) => sum - (f / len) * Math.log2(f / len), 0);
}
// Demo: print the entropy of each sample (expected value in the comment).
[
  '1223334444',       // 1.8464393446710154
  '0',                // 0
  '01',               // 1
  '0123',             // 2
  '01234567',         // 3
  '0123456789abcdef', // 4
].forEach((sample) => console.log(entropy(sample)));
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment