Use PyMC3 like emcee
##########################################
# emcee example:
##########################################
import numpy as np
import emcee

# Number of samples for each chain.
N_samp = 1000

def log_prob(x):
    # Log-posterior: a power law p(M) ∝ M**(-2.35), truncated to [0.1, 100].
    if x < 0.1 or x > 100:
        return -np.inf
    return -2.35 * np.log(x)

ndim, nwalkers = 1, 4
# Start the walkers at random positions inside the support.
p0 = np.random.rand(nwalkers, ndim) + 0.2
sampler = emcee.EnsembleSampler(nwalkers, ndim, log_prob)
# Burn in for 1000 steps, then discard those samples and run the real chain.
state = sampler.run_mcmc(p0, 1000)
sampler.reset()
sampler.run_mcmc(state, N_samp)
mc_output = sampler.flatchain[:, 0]
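##########################################
# Optional sanity check (a minimal sketch): the samples above target
# p(M) ∝ M**(-2.35) on [0.1, 100], so the sample mean should roughly
# match the analytic mean of that truncated power law.
##########################################
a, b, alpha = 0.1, 100.0, -2.35
norm = (b**(alpha + 1) - a**(alpha + 1)) / (alpha + 1)
analytic_mean = (b**(alpha + 2) - a**(alpha + 2)) / (alpha + 2) / norm
print(np.mean(mc_output), analytic_mean)  # should be close, up to MC error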
##########################################
# PyMC3 example:
##########################################
import numpy as np
import pymc3 as pm
# Writing our likelihood with tt.log(), etc., lets Theano compute the
# derivatives that HMC needs.
import theano.tensor as tt

# Number of samples for each chain. The number of chains defaults to the
# number of cores.
N_samp = 1000

with pm.Model() as model:
    # Bound the sampled parameter between 0.1 and 100.
    baseM = pm.Uniform('baseM', lower=0.1, upper=100)
    # This is p(M): declare your log-likelihood here.
    L = pm.DensityDist(
        'like',
        lambda _M: -2.35 * tt.log(_M),
        observed={'_M': baseM},
    )
    start = model.test_point
    # Scale HMC using the Hessian of the log-posterior at the test point.
    h = pm.find_hessian(start)
    step = pm.HamiltonianMC(model.vars, h)
    trace = pm.sample(N_samp, tune=1000, init=None, step=step)

mc_output = trace.get_values(baseM)
# mc_output holds your posterior samples, just like sampler.flatchain in emcee.
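##########################################
# Optional cross-check (a minimal sketch; the *_output names below are
# introduced here for clarity): with both sample sets in hand, a
# two-sample KS test should usually not reject that the emcee and PyMC3
# draws come from the same posterior. The p-value is only approximate,
# since MCMC samples are autocorrelated.
##########################################
from scipy import stats
emcee_output = sampler.flatchain[:, 0]   # from the emcee section above
pymc3_output = trace.get_values(baseM)   # from the PyMC3 section above
print(stats.ks_2samp(emcee_output, pymc3_output))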