Skip to content

Instantly share code, notes, and snippets.

@slinderman
Last active January 23, 2016 23:31
Show Gist options
  • Save slinderman/413090470c00e44f688f to your computer and use it in GitHub Desktop.
# Simple demo showing weird marginal likelihood estimates
# for GP classification when using EP inference.
import numpy as np
np.random.seed(0)
import matplotlib.pyplot as plt
import GPy
from GPy.kern import RBF

# Model parameters
N = 100        # number of input locations
D = 1          # input dimensionality
Z = np.linspace(0, 10, N)[:, None]
ell_true = 1.0  # lengthscale used to generate the data
kernel = RBF(1, lengthscale=ell_true, variance=2.)

# Sample a latent function from the GP prior.
# The 1e-6 jitter on the diagonal keeps the covariance numerically PSD.
K = kernel.K(Z) + 1e-6 * np.eye(N)
psi = np.random.multivariate_normal(np.zeros(N), K)
p = 1. / (1 + np.exp(-psi))  # logistic squashing to Bernoulli probabilities
# NOTE: np.float was deprecated in NumPy 1.20 and removed in 1.24;
# the builtin float is the drop-in replacement.
X = (np.random.rand(N) < p).astype(float)[:, None]

# Compute marginal likelihood for various length scales
ells = [0.1, 0.5, 1.0, 2.0, 5.0]
lap_lls = []   # Laplace-approximation log marginal likelihoods
ep_lls = []    # EP log marginal likelihoods
# For each candidate lengthscale, fit a GP classifier with both
# Laplace and EP inference and record the log marginal likelihood.
for ell in ells:
    # Create kernel for inference (variance fixed to match the generator)
    print("ell: ", ell)
    test_kernel = RBF(1, lengthscale=ell, variance=2.)

    # Laplace approximation
    lik = GPy.likelihoods.Bernoulli()
    m_lap = GPy.core.GP(X=Z,
                        Y=X,
                        kernel=test_kernel,
                        inference_method=GPy.inference.latent_function_inference.laplace.Laplace(),
                        likelihood=lik)
    lap_lls.append(m_lap.log_likelihood())

    # Expectation propagation (fresh likelihood object to avoid shared state)
    lik = GPy.likelihoods.Bernoulli()
    m_ep = GPy.core.GP(X=Z,
                       Y=X,
                       kernel=test_kernel,
                       inference_method=GPy.inference.latent_function_inference.expectation_propagation.EP(),
                       likelihood=lik)
    ep_lls.append(m_ep.log_likelihood())
# Plot the two marginal-likelihood curves, one subplot per method.
plt.figure(figsize=(4, 6))
plt.subplot(211)
plt.plot(ells, lap_lls, color='g', lw=2, label="Laplace")
plt.legend(loc="upper right")
plt.ylabel("Marginal Likelihood")
# Raw string: "\e" is not a valid escape sequence, and matplotlib's
# mathtext needs the literal backslash to render the LaTeX \ell.
plt.xlabel(r"$\ell$")
plt.subplot(212)
plt.plot(ells, ep_lls, color='orange', lw=2, label="EP")
plt.legend(loc="lower right")
plt.ylabel("Marginal Likelihood")
plt.xlabel(r"$\ell$")
plt.tight_layout()
plt.show()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment