Skip to content

Instantly share code, notes, and snippets.

@anthonydouc
Created October 2, 2019 10:46
Show Gist options
  • Save anthonydouc/97a0295a244a49b26f10d12193bd8cde to your computer and use it in GitHub Desktop.
Save anthonydouc/97a0295a244a49b26f10d12193bd8cde to your computer and use it in GitHub Desktop.
Simple script using the pymc3 and statsmodels libraries
"""Bayesian linear regression with PyMC3, compared against OLS.

Generates noisy data from a known line y = 1 + 2*x, fits it both with
ordinary least squares (statsmodels) and with NUTS sampling (PyMC3),
then plots the sampled data, the true generating line, the OLS fit and
the Bayesian posterior-mean fit on one figure.
"""
from pymc3 import HalfCauchy, Model, Normal, sample
import numpy as np
import matplotlib.pyplot as plt
import statsmodels.api as sm

# --- Synthetic data: y = true_intercept + true_slope * x + noise --------
size = 200
true_intercept = 1
true_slope = 2

x = np.linspace(0, 1, size)
# y = a + b*x
true_regression_line = true_intercept + true_slope * x
# add noise
y = true_regression_line + np.random.normal(scale=.5, size=size)
data = dict(x=x, y=y)

# --- Frequentist baseline: ordinary least squares -----------------------
# Renamed from `model` so it is not shadowed by the PyMC3 model below.
ols_model = sm.OLS(y, sm.add_constant(x))
ols_params = ols_model.fit().params  # [intercept, slope]

# --- Plot the raw data and the true generating line ---------------------
fig = plt.figure(figsize=(7, 7))
ax = fig.add_subplot(111, xlabel='x', ylabel='y',
                     title='Generated data and underlying model')
ax.plot(x, y, 'x', label='sampled data')
ax.plot(x, true_regression_line, label='true regression line', lw=2.)

# --- Bayesian model -----------------------------------------------------
with Model() as model:  # model specifications in PyMC3 are wrapped in a with-statement
    # Define priors: HalfCauchy keeps sigma positive; wide Normals on the
    # intercept and slope are weakly informative.
    sigma = HalfCauchy('sigma', beta=10, testval=1.)
    intercept = Normal('Intercept', 0, sigma=20)
    x_coeff = Normal('x', 0, sigma=20)

    # Define likelihood: observed y scattered around the regression line.
    likelihood = Normal('y', mu=intercept + x_coeff * x,
                        sigma=sigma, observed=y)

    # Inference! Draw 3000 posterior samples using NUTS sampling.
    trace = sample(3000, cores=1)

#plot_posterior_predictive_glm(trace, samples=100,
#                              label='posterior predictive regression lines')

# Overlay the OLS fit and the Bayesian posterior-mean line.
ax.plot(x, ols_params[0] + ols_params[1] * x, label='OLS', lw=2.)
ax.plot(x,
        trace.get_values('Intercept').mean()
        + trace.get_values('x').mean() * x,
        label='Bayesian mean', lw=2.)

# Legend is built AFTER all lines are plotted so every labelled artist
# (data, true line, OLS, Bayesian mean) appears in it; calling it earlier
# silently dropped the last two entries.
ax.legend(loc=0)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment