Metrics Manifesto support code for Bayesian analysis: a Python port of binomial.beta.mix from the R package LearnBayes.
import numpy as np
from scipy.stats import beta, binom


def binomial_beta_mixture(probs: np.ndarray, beta_0: np.ndarray, beta_1: np.ndarray, data: np.ndarray):
    '''
    Python port of binomial.beta.mix from the R package LearnBayes.

    probs:  prior probabilities of the two beta components
    beta_0: shape parameters (a, b) of the first beta component
    beta_1: shape parameters (a, b) of the second beta component
    data:   np.array([successes, failures])

    Returns the posterior probabilities of the mixture components.
    '''
    N = len(probs)
    s = data[0]  # successes
    f = data[1]  # failures
    betapar = np.array([beta_0, beta_1])
    # post.betapar = betapar + outer(rep(1, N), data)
    post_betapar = betapar + np.tile(data, (N, 1))
    # p = post.betapar[,1] / (post.betapar[,1] + post.betapar[,2])
    p = post_betapar[:, 0] / (post_betapar[:, 0] + post_betapar[:, 1])
    # dbinom(s, size=s+f, prob=p, log=TRUE)
    component_0 = binom(s + f, p).logpmf(s)
    # dbeta(p, betapar[,1], betapar[,2], log=TRUE)
    component_1 = beta(betapar[:, 0], betapar[:, 1]).logpdf(p)
    # dbeta(p, post.betapar[,1], post.betapar[,2], log=TRUE)
    component_2 = beta(post_betapar[:, 0], post_betapar[:, 1]).logpdf(p)
    # marginal (prior predictive) probability of the data under each component
    mixture_prob = np.exp(component_0 + component_1 - component_2)
    # post.probs = probs * m.prob / sum(probs * m.prob)
    return probs * mixture_prob / np.sum(probs * mixture_prob)
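
A minimal usage sketch (the prior weights, beta shapes, and observed counts below are illustrative values, not from the original gist): mix a pessimistic Beta(1, 9) component with an optimistic Beta(9, 1) component, each given prior weight 0.5, then update on 7 successes out of 10 trials.

prior_probs = np.array([0.5, 0.5])   # equal prior weight on each component
pessimistic = np.array([1.0, 9.0])   # Beta(1, 9): expects mostly failures
optimistic = np.array([9.0, 1.0])    # Beta(9, 1): expects mostly successes
observed = np.array([7, 3])          # 7 successes, 3 failures

posterior_probs = binomial_beta_mixture(prior_probs, pessimistic, optimistic, observed)
print(posterior_probs)               # posterior weight shifts toward the optimistic component

The returned vector sums to 1; as in the R original, the posterior beta parameters themselves are just the prior shapes plus the observed (successes, failures).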