Skip to content

Instantly share code, notes, and snippets.

@AlexanderFabisch
Last active July 18, 2023 12:54
Show Gist options
  • Save AlexanderFabisch/7229207 to your computer and use it in GitHub Desktop.
Simple Gaussian Process Regression implementation without hyperparameter optimization.
import numpy
def sq_exp(x1, x2, w, sigma_f):
    """Squared exponential (RBF) covariance between two inputs.

    k(x1, x2) = sigma_f^2 * exp(-0.5 * sum((x1 - x2)^2 / w^2))

    Parameters
    ----------
    x1, x2 : array-like
        Input points (broadcastable against each other and against w).
    w : array-like
        Per-dimension length scale(s).
    sigma_f : float
        Signal standard deviation (kernel amplitude).

    Returns
    -------
    float
        Covariance value k(x1, x2).
    """
    # An Ornstein-Uhlenbeck kernel would instead use
    # sigma_f**2 * exp(-sum(|x1 - x2| / w)).
    diff = x1 - x2
    sq_dist = (diff ** 2 / w ** 2).sum()
    return sigma_f ** 2 * numpy.exp(-0.5 * sq_dist)
class GPR(object):
    """Gaussian Process Regression with fixed (non-optimized) hyperparameters.

    Uses the squared exponential kernel ``sq_exp`` and a Gaussian noise
    model: K = K_f + sigma_n^2 * I.

    Parameters
    ----------
    w : array-like
        Per-dimension length scale(s) of the kernel.
    sigma_f : float
        Signal standard deviation (kernel amplitude).
    sigma_n : float
        Observation noise standard deviation (added to the diagonal of K).
    """

    def __init__(self, w, sigma_f, sigma_n):
        self.w = w
        self.sigma_f = sigma_f
        self.sigma_n = sigma_n

    def fit(self, X, y):
        """Store the training data and precompute the noisy kernel matrix.

        Parameters
        ----------
        X : array, shape (N, D)
            Training inputs.
        y : array, shape (N,)
            Training targets.
        """
        self.N = X.shape[0]
        self.X = X
        self.y = y
        # Start from the noise term sigma_n^2 * I, then add the kernel.
        # K is symmetric, so only the lower triangle is computed and the
        # off-diagonal entries are mirrored to the upper triangle.
        self.K = self.sigma_n ** 2 * numpy.eye(self.N)
        for i in range(self.N):
            for j in range(i + 1):
                self.K[i, j] += sq_exp(X[i], X[j], self.w, self.sigma_f)
                if i != j:
                    self.K[j, i] += self.K[i, j]

    def predict(self, X):
        """Compute the posterior mean and variance at query points.

        Parameters
        ----------
        X : array, shape (M, D) or (M,)
            Query inputs.

        Returns
        -------
        y : array, shape (M,)
            Posterior mean at each query point.
        V : array, shape (M,)
            Posterior (predictive) variance at each query point.
        """
        M = X.shape[0]
        y = numpy.empty(M)
        V = numpy.empty(M)
        # BUG FIX: the original recomputed numpy.linalg.pinv(self.K) inside
        # the loop for every query point. The inverse depends only on the
        # training data, so it is computed exactly once here.
        K_inv = numpy.linalg.pinv(self.K)
        for j in range(M):
            # Cross-covariances between the query point and training inputs.
            k_star = numpy.empty(self.N)
            for i in range(self.N):
                k_star[i] = sq_exp(X[j], self.X[i], self.w, self.sigma_f)
            # Posterior mean: k*^T K^-1 y
            y[j] = k_star.T.dot(K_inv).dot(self.y)
            # Posterior variance: k(x*, x*) - k*^T K^-1 k*
            V[j] = (sq_exp(X[j], X[j], self.w, self.sigma_f)
                    - k_star.T.dot(K_inv).dot(k_star))
        return y, V
if __name__ == "__main__":
N = 10
X = numpy.linspace(0, 2*numpy.pi, N)[:, numpy.newaxis]
y = numpy.sin(X.ravel()) + numpy.random.randn(N)*0.1
gpr = GPR(numpy.array([0.5]), 1.0, 0.1)
gpr.fit(X, y)
X_test = numpy.linspace(-0.5, 2*numpy.pi+0.5, 1000)
y_test, V = gpr.predict(X_test)
import pylab
pylab.plot(X.ravel(), y, "o")
pylab.plot(X_test.ravel(), y_test, "b")
pylab.fill_between(X_test.ravel(), y_test-2*V, y_test+2*V, color="g", alpha=0.3)
pylab.show()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment