josharian/ve.py
Plots varentropy vs. entropy for a two-outcome (Bernoulli) distribution.
import numpy as np
import matplotlib.pyplot as plt

# Range of p0 values for a two-outcome (Bernoulli) distribution;
# p1 is the complementary probability.
p0 = np.linspace(0, 1, 500)
p1 = 1 - p0

# Clip probabilities away from exact 0 and 1 to avoid log(0)
epsilon = 1e-12
p0_safe = np.clip(p0, epsilon, 1 - epsilon)
p1_safe = np.clip(p1, epsilon, 1 - epsilon)

# Entropy H(p) = -Σ p_i log2(p_i)
entropy = -(p0_safe * np.log2(p0_safe) + p1_safe * np.log2(p1_safe))

# Varentropy V(p) = Σ p_i [-log2(p_i) - H(p)]^2, i.e. the variance of the
# surprisal -log2(p_i) under p
surprisal_p0 = -np.log2(p0_safe)
surprisal_p1 = -np.log2(p1_safe)
varentropy = p0_safe * (surprisal_p0 - entropy)**2 + p1_safe * (surprisal_p1 - entropy)**2

# Plot entropy and varentropy as functions of p0
plt.figure(figsize=(10, 6))
plt.plot(p0, entropy, label='Entropy', color='blue')
plt.plot(p0, varentropy, label='Varentropy', color='red')
plt.xlabel('Probability $p_0$')
plt.ylabel('Value')
plt.title('Entropy and Varentropy vs. Probability $p_0$ for a Bernoulli Distribution')
plt.legend()
plt.grid(True)
plt.show()
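
Not part of the original gist, but a useful cross-check: expanding the definition for two outcomes (since I_0 - H = p_1 (I_0 - I_1), and symmetrically for I_1, where I_i = -log2(p_i)) collapses the varentropy to the closed form V(p) = p(1-p) [log2((1-p)/p)]^2. A minimal self-contained sketch verifying the script's formula against that closed form:

import numpy as np

p = np.linspace(0.01, 0.99, 99)
H = -(p * np.log2(p) + (1 - p) * np.log2(1 - p))
V = p * (-np.log2(p) - H)**2 + (1 - p) * (-np.log2(1 - p) - H)**2

# Closed form from expanding the definition: V(p) = p(1-p) [log2((1-p)/p)]^2
V_closed = p * (1 - p) * np.log2((1 - p) / p)**2
assert np.allclose(V, V_closed)

# At p = 0.5, entropy peaks at 1 bit while varentropy is 0: both outcomes
# carry identical surprisal, so the surprisal has no variance. This is why
# the varentropy curve in the plot dips to 0 exactly where entropy peaks.
i = np.argmin(np.abs(p - 0.5))
assert np.isclose(H[i], 1.0) and np.isclose(V[i], 0.0)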