@arodland
Last active January 12, 2022 21:06
import os
import urllib.request, json
import pandas as pd
import numpy as np
import random
import george
from george.kernels import ExpSquaredKernel, ExpSine2Kernel, Matern32Kernel, ConstantKernel
from kernel import esfi_kernel, esfi_without_daily
import scipy.optimize as op
from multiprocessing import Pool
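
# Fit the hyperparameters of a Gaussian-process model of the solar flux index
# (SFI) by minimizing the summed negative log likelihood over several datasets
# pulled from a local history service, with the per-dataset likelihoods and
# gradients evaluated in parallel worker processes.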

# Optimizer bookkeeping: iteration count, objective/gradient evaluation counts,
# and the lowest total loss seen so far.
iter, fev, grev, best = 0, 0, 0, 1e10

# Populated in the main block before the worker pool is created, so that
# fork-started workers inherit it.
datasets = []
def get_data(url):
    """Fetch and decode a JSON document from the given URL."""
    with urllib.request.urlopen(url) as res:
        data = json.loads(res.read().decode())
    return data
def make_dataset(dummy):
    """Fetch a 6-hourly SFI series from the local history service and wrap it in
    a GP over its mean-subtracted values. `dummy` is ignored; it only lets the
    function be driven by Pool.imap."""
    data = get_data('http://localhost:%s/mixscale_essn.json?series=6h&var=sfi&points=6000' % (os.getenv('HISTORY_PORT')))
    tm = np.array([x[0] / 86400. for x in data])    # timestamps: seconds -> days
    sfi = np.array([x[1] for x in data])
    mean_sfi = np.mean(sfi)
    sfi -= mean_sfi                                 # fit the zero-mean residual
    ds = {}
    ds['tm'] = tm
    ds['sfi'] = sfi
    ds['mean_sfi'] = mean_sfi
    ds['gp'] = george.GP(esfi_kernel, white_noise=-2.98, fit_white_noise=True)
    ds['gp'].compute(ds['tm'])
    return ds
def loss(x):
    """Negative log likelihood of one dataset at parameter vector p (runs in a worker)."""
    (i, p) = x
    ds = datasets[i]
    ds['gp'].set_parameter_vector(p)
    ll = ds['gp'].log_likelihood(ds['sfi'], quiet=True)
    # Penalize parameter vectors that make the likelihood non-finite.
    return -ll if np.isfinite(ll) else 1e25
def nll(p):
    """Total negative log likelihood over all datasets, evaluated in parallel."""
    global fev
    global best
    fev = fev + 1
    losses = list(pool.imap_unordered(loss, [(i, p) for i in range(len(datasets))]))
    lsum = sum(losses)
    if lsum < best:
        best = lsum
    return lsum
def grad(x):
    """Gradient of the negative log likelihood of one dataset (runs in a worker)."""
    (i, p) = x
    ds = datasets[i]
    ds['gp'].set_parameter_vector(p)
    return -ds['gp'].grad_log_likelihood(ds['sfi'], quiet=True)
def grad_nll(p):
    """Sum of the per-dataset gradients, evaluated in parallel."""
    global grev
    grev = grev + 1
    gs = 0
    for g in pool.imap_unordered(grad, [(i, p) for i in range(len(datasets))]):
        gs += g
    return gs
def cb(p):
    """L-BFGS-B callback: report progress after each iteration."""
    global iter
    iter = iter + 1
    print("# iter=", iter, "fev=", fev, "grev=", grev, "best=", best, "p=", repr(p))
if __name__ == '__main__':
    # Build the datasets in a temporary pool, then create the pool that nll()
    # and grad_nll() reuse for every objective/gradient evaluation.
    with Pool(processes=4) as p:
        datasets = [d for d in p.imap(make_dataset, [1, 2, 3, 4, 5, 6, 7, 8])]
    pool = Pool(processes=8)

    p0 = datasets[0]['gp'].get_parameter_vector()
    print("# Init: ", p0)
    bounds = datasets[0]['gp'].get_parameter_bounds()
    print("# Bounds: ", bounds)

    # Minimize the summed negative log likelihood with bounded L-BFGS-B.
    opt_result = op.minimize(nll, p0, jac=grad_nll, method='L-BFGS-B',
                             callback=cb, options={'maxiter': 100}, bounds=bounds)
    print("# RESULT ", repr(opt_result.x))

    # Rebuild a GP with the fitted white-noise level held fixed (the first entry
    # of the result) and the remaining entries applied as kernel parameters.
    gp = george.GP(esfi_kernel, white_noise=opt_result.x[0])
    gp.set_parameter_vector(opt_result.x[1:])
    print(esfi_kernel)
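
# `esfi_kernel` (and `esfi_without_daily`) are imported from the local `kernel`
# module, which is not included in this gist. Purely as an illustrative sketch,
# and not the author's actual definition (the structure, length scales, and the
# ~27-day rotation period below are assumptions), a composite george kernel for
# an SFI series could be assembled along these lines:
def _example_esfi_kernel():
    # Hypothetical: a slow solar-cycle term plus a quasi-periodic term for the
    # roughly 27-day solar rotation, in the day-based time units used above.
    return (
        ConstantKernel(np.log(100.0)) * Matern32Kernel(200.0 ** 2)
        + ConstantKernel(np.log(25.0)) * ExpSquaredKernel(60.0 ** 2)
        * ExpSine2Kernel(gamma=1.0, log_period=np.log(27.0))
    )

# Once a GP has been compute()d on a training grid, george can produce forecasts,
# e.g. mu, var = gp.predict(sfi, t_future, return_var=True); add mean_sfi back to
# mu to recover absolute flux.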