Skip to content

Instantly share code, notes, and snippets.

@madvn
Last active July 1, 2019 21:21
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save madvn/ee5f892490de2f50b5bf716e7d9bab94 to your computer and use it in GitHub Desktop.
Infotheory benchmarks - entropy
###############################################################################
# Entropy baseline
#
# Madhavun Candadai
# Dec, 2018
#
# Entropy of a coin flip for different probabilities of HEADS ranging from 0 to
# 1 should give an inverted-U shaped curve
###############################################################################
import numpy as np
import infotheory
import matplotlib
matplotlib.use('TkAgg')  # select an interactive backend before importing pyplot
import matplotlib.pyplot as plt

# Sweep the probability of HEADS from 0 to 1 in steps of 0.01.
# Entropy of a Bernoulli variable should trace an inverted-U over this range.
p = np.arange(0, 1.01, 0.01)

entropies = []
for pi in p:
    # One 1-D variable, binned into 2 equal-width bins over [0, 1]
    # (outcomes are encoded as 0 and 1, so each lands in its own bin).
    it = infotheory.InfoTools(1, 0)
    it.set_equal_interval_binning([2], [0], [1])

    # Flip the coin 10000 times; HEADS (recorded as 0) occurs with probability pi.
    for _ in range(10000):
        if np.random.rand() < pi:
            it.add_data_point([0])
        else:
            it.add_data_point([1])

    # Estimate the entropy of the single variable (index 0) from the samples.
    entropies.append(it.entropy([0]))

# Plot entropy vs. probability of HEADS.
plt.figure(figsize=[3, 2])
plt.plot(p, entropies)
plt.xlabel('Probability of HEADS')
plt.ylabel('Entropy')
plt.tight_layout()
# plt.savefig('./entropy_baseline.png')
plt.show()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment