Skip to content

Instantly share code, notes, and snippets.

@jdmoore7
Last active June 12, 2022 18:50
Show Gist options
  • Save jdmoore7/d91f2a8c467075fd1c26ed7e61832f33 to your computer and use it in GitHub Desktop.
Bayesian 1PL implementation in PyMC3
import pymc3 as pm
from theano import tensor as tt
import arviz as az
import numpy as np
# Binary correctness indicators: 1 = student answered correctly, 0 = incorrect.
# (The original called .flatten() on this already-1D literal; that was a no-op
# copy and has been dropped.)
scores = np.array([1, 1, 1, 0, 0, 0])

# (student_index, question_index) pairs describing who answered what.
# Entry i corresponds to scores[i].
student_question_map = [
    (0, 1), (0, 2), (0, 3),
    (1, 2), (1, 3),
    (3, 4),
]
with pm.Model() as model:
    # ---- Priors -----------------------------------------------------------
    # Latent difficulty, one per question, standard-normal prior.
    # NOTE(review): shape is hard-coded to 5 while the map below uses question
    # indices 1..4, so slot 0 is never observed — confirm intended item count.
    questions = pm.Normal("questions", mu=0, sigma=1, shape=(5,))
    # Latent ability, one per student, standard-normal prior.
    # NOTE(review): only students 0, 1 and 3 appear in the response map; the
    # unused slots will simply stay at their prior.
    students = pm.Normal("students", mu=0, sigma=1, shape=(5,))
    # Single discrimination (slope) shared by all items — 1PL/Rasch-style.
    slope = pm.Normal("slope", mu=0, sigma=1)

    # ---- Transformed parameter -------------------------------------------
    # Vectorized replacement for the original Python loop: the loop built a
    # list of scalar theano tensors and relied on implicit list->tensor
    # conversion inside the sigmoid. Gathering with integer index arrays
    # produces one tensor expression and a much smaller, faster graph,
    # with identical values in the same order as the map.
    s_idx = np.array([s for s, _ in student_question_map])
    q_idx = np.array([q for _, q in student_question_map])
    deltas = slope * (students[s_idx] - questions[q_idx])
    # P(correct) = sigmoid(slope * (ability - difficulty)).
    thetas = pm.Deterministic("theta", pm.math.sigmoid(deltas))

    # ---- Likelihood -------------------------------------------------------
    # Bernoulli response model over the observed 0/1 scores.
    kij = pm.Bernoulli("kij", p=thetas, observed=scores)

    # Draw posterior samples (4 chains, default tuning/draws).
    trace = pm.sample(chains=4)

# Inspect the posterior: one trace panel per latent variable.
az.plot_trace(trace, var_names=["questions", "students", "slope"], compact=False)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment