Shannon Entropy example from a post on Functionally Imperative found here: https://functionallyimperative.com/t/entropy
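For reference, the score printed by the program below is the Shannon entropy of the byte-frequency distribution, normalized to the range [0, 1] by dividing by the maximum of 8 bits per byte:

    H(b) = -\frac{1}{8} \sum_{i=0}^{255} p_i \log_2 p_i, \qquad p_i = \frac{\text{count of byte value } i \text{ in } b}{\mathrm{len}(b)}

Terms with p_i = 0 are skipped, which matches the `if freq[i] != 0` guard in the code.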
// Shannon Entropy example from a post on Functionally Imperative
// https://functionallyimperative.com/t/entropy
package main

import (
	"crypto/rand"
	"fmt"
	"math"
)

func main() {
	cases := []struct {
		input       []byte
		description string
	}{
		{[]byte("     "), "five spaces '     '"},
		{[]byte{0, 0, 0, 0, 0}, "five zeros [ 0, 0, 0, 0, 0 ]"},
		{[]byte("hello, world"), "the string 'hello, world'"},
		{randBytes(16), "  16 random bytes"},
		{randBytes(32), "  32 random bytes"},
		{randBytes(64), "  64 random bytes"},
		{randBytes(128), " 128 random bytes"},
		{randBytes(256), " 256 random bytes"},
		{randBytes(512), " 512 random bytes"},
		{randBytes(1024), "1024 random bytes"},
		{randBytes(1_000_000), "One million random bytes"},
	}

	for _, c := range cases {
		fmt.Printf("entropy: %.10f for: %v\n", entropy(c.input), c.description)
	}
}

// randBytes returns n cryptographically random bytes.
func randBytes(n int) []byte {
	r := make([]byte, n)
	if _, err := rand.Read(r); err != nil {
		panic(err)
	}
	return r
}

// entropy returns the Shannon entropy of b in bits per byte,
// normalized to [0, 1] by dividing by the maximum of 8 bits.
func entropy(b []byte) float64 {
	var score float64
	var freq [256]float64
	for _, v := range b {
		freq[v]++
	}
	n := float64(len(b))
	for i := 0; i < 256; i++ {
		if freq[i] != 0 {
			p := freq[i] / n
			score -= p * math.Log2(p)
		}
	}
	return score / 8
}

// entropy: 0.0000000000 for: five spaces '     '
// entropy: 0.0000000000 for: five zeros [ 0, 0, 0, 0, 0 ]
// entropy: 0.3777569011 for: the string 'hello, world'
// entropy: 0.5000000000 for:   16 random bytes
// entropy: 0.6171875000 for:   32 random bytes
// entropy: 0.7211818603 for:   64 random bytes
// entropy: 0.8232230957 for:  128 random bytes
// entropy: 0.9021275742 for:  256 random bytes
// entropy: 0.9485006656 for:  512 random bytes
// entropy: 0.9782479345 for: 1024 random bytes
// entropy: 0.9999804355 for: One million random bytes
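As a quick sanity check of the 'hello, world' line: the 12 bytes contain 'l' three times, 'o' twice, and seven other byte values once each, so

    H = \frac{1}{8}\left( \frac{3}{12}\log_2\frac{12}{3} + \frac{2}{12}\log_2\frac{12}{2} + 7 \cdot \frac{1}{12}\log_2 12 \right) = \frac{0.5 + 0.4308 + 2.0912}{8} \approx 0.3778,

which matches the 0.3777569011 printed above. The trend in the random cases also follows from the formula: a slice of n random bytes contains at most n distinct values, so for n < 256 the normalized score is capped at log2(n)/8 (exactly 0.5 for 16 distinct bytes), while for large n the observed distribution approaches uniform over all 256 values and the score approaches 1.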