

@c-bata
Created July 19, 2019 05:45
import numpy as np
import matplotlib.pyplot as plt
from sklearn.gaussian_process import kernels as sk_kern
from sklearn.gaussian_process import GaussianProcessRegressor


def objective(x):
    # True function to be approximated by the GP regressor.
    return x + 20 * np.sin(x)


def plot_result(x_test, mean, std):
    # Plot the predictive mean with a 1-sigma band against the true function.
    plt.plot(x_test[:, 0], mean, color="C0", label="predict mean")
    plt.fill_between(x_test[:, 0], mean + std, mean - std,
                     color="C0", alpha=.3, label="1 sigma confidence")
    xx = np.linspace(-20, 20, 200)
    plt.plot(xx, objective(xx), "--", color="C0", label="true function")
    plt.title("function evaluation")
    plt.legend()
    plt.savefig("gpr_predict.png", dpi=150)


def main():
    # RBF + constant kernel; hyperparameters are tuned with L-BFGS-B.
    kernel = sk_kern.RBF(1.0, (1e-3, 1e3)) + sk_kern.ConstantKernel(1.0, (1e-3, 1e3))
    clf = GaussianProcessRegressor(
        kernel=kernel,
        alpha=1e-10,
        optimizer="fmin_l_bfgs_b",
        n_restarts_optimizer=20,
        normalize_y=True)

    # Noisy observations of the objective on [-20, 20].
    np.random.seed(0)
    x_train = np.random.uniform(-20, 20, 200)
    y_train = objective(x_train) + np.random.normal(loc=0, scale=.1, size=x_train.shape)
    clf.fit(x_train.reshape(-1, 1), y_train)

    np.random.uniform()  # draws and discards a single sample; has no effect on the result

    # Log marginal likelihood at theta = [0, 0] (unit kernel parameters in log space) ...
    print(clf.log_marginal_likelihood(theta=np.array([0, 0], dtype=np.float64)))
    # ... and at the hyperparameters learned during fit().
    print(clf.log_marginal_likelihood())  # this line might be changed?


if __name__ == '__main__':
    main()
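
For reference, plot_result is defined but never called in main(). A minimal sketch of how it could be wired up at the end of main(), assuming an evaluation grid x_test that is not part of the original script:

    # Hypothetical usage of plot_result (not in the original gist).
    x_test = np.linspace(-20, 20, 200).reshape(-1, 1)  # evaluation grid matching the training range
    mean, std = clf.predict(x_test, return_std=True)   # predictive mean and standard deviation
    plot_result(x_test, mean, std)                      # writes gpr_predict.png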
c-bata commented Jul 19, 2019

Before applying the patch

$ python examples/gpr_profile.py 
-5187014697.037492
-22772.548601943214

After applying the patch

$ python examples/gpr_profile.py 
-5187014697.037492
-22772.548601943214
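
The two printed numbers are the log marginal likelihood at theta = [0, 0] and at the fitted hyperparameters. They are identical before and after the patch, which suggests the change targets performance rather than the computed values. If so, a rough way to compare runtimes is to time the call directly; this is a sketch, not part of the original script, and assumes clf is the fitted regressor inside main():

import timeit

n = 100
elapsed = timeit.timeit(lambda: clf.log_marginal_likelihood(), number=n)
print(f"log_marginal_likelihood: {elapsed / n:.6f} s per call")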
