@fabianp
Created April 23, 2012 09:04
isotonic regression
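The gist solves the weighted isotonic regression problem by repeatedly merging adjacent blocks whose weighted averages violate the ordering constraint, in the spirit of the pool-adjacent-violators algorithm. It contains two files: the solver itself (saved as isotonic_regression.py, as the import in the test script implies) and a small test script.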
import numpy as np


def isotonic_regression(w, y, x_min=None, x_max=None):
    """
    Solve the isotonic regression model:

        min sum w_i (y_i - x_i) ** 2

        subject to x_min = x_1 <= x_2 <= ... <= x_n = x_max

    where each w_i is strictly positive and each y_i is an arbitrary
    real number.

    Parameters
    ----------
    w : array-like of positive floating-point values (weights)
    y : array-like of floating-point values (observations)
    x_min : float, optional
        If given, the first fitted value is constrained to x_min.
    x_max : float, optional
        If given, the last fitted value is constrained to x_max.

    Returns
    -------
    x : list of floating-point values
    """
    y = np.asarray(y, dtype=float)
    w = np.asarray(w, dtype=float)

    if x_min is not None or x_max is not None:
        # Enforce the boundary constraints by pinning the end points
        # with a huge weight, so the fit cannot move them.
        y = np.copy(y)
        w = np.copy(w)
        if x_min is not None:
            y[0] = x_min
            w[0] = 1e32
        if x_max is not None:
            y[-1] = x_max
            w[-1] = 1e32

    # J holds the current blocks of pooled indices; each block is fitted
    # to its weighted average.
    J = [[i] for i in range(len(y))]
    cur = 0

    while cur < len(J) - 1:
        av0, av1, av2 = 0, 0, np.inf

        # Scan forward until two adjacent blocks violate the ordering.
        while av0 <= av1 and cur < len(J) - 1:
            idx0 = J[cur]
            idx1 = J[cur + 1]
            av0 = np.dot(w[idx0], y[idx0]) / np.sum(w[idx0])
            av1 = np.dot(w[idx1], y[idx1]) / np.sum(w[idx1])
            cur += 1 if av0 <= av1 else 0

        if cur == len(J) - 1:
            break

        # Pool the two violating blocks into one.
        a = J.pop(cur)
        b = J.pop(cur)
        J.insert(cur, a + b)

        # Propagate the merge backwards while the previous block's
        # average is not smaller than the new block's average.
        while av2 > av0 and cur > 0:
            idx0 = J[cur]
            idx2 = J[cur - 1]
            av0 = np.dot(w[idx0], y[idx0]) / np.sum(w[idx0])
            av2 = np.dot(w[idx2], y[idx2]) / np.sum(w[idx2])
            if av2 >= av0:
                a = J.pop(cur - 1)
                b = J.pop(cur - 1)
                J.insert(cur - 1, a + b)
                cur -= 1

    # Each pooled block gets its weighted average as the fitted value.
    sol = []
    for idx in J:
        sol += [np.dot(w[idx], y[idx]) / np.sum(w[idx])] * len(idx)
    return sol
if __name__ == '__main__':
    # Small demo: fit a noisy increasing sequence and plot the result.
    dat = np.arange(10).astype(float)
    dat += 2 * np.random.randn(10)  # add noise
    dat_hat = isotonic_regression(np.ones(dat.size), dat)

    import matplotlib.pyplot as plt
    plt.close('all')
    plt.plot(dat, 'ro')
    plt.plot(dat_hat, 'b')
    plt.show()
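To make the pooling concrete, here is a tiny hand-worked example (the input values are made up for illustration, and the snippet assumes the solver above is saved as isotonic_regression.py): the out-of-order pair 3, 2 is pooled into its average 2.5, and the remaining values are left untouched.

    import numpy as np
    from isotonic_regression import isotonic_regression

    y = np.array([1., 3., 2., 4.])
    w = np.ones_like(y)
    print(isotonic_regression(w, y))  # -> [1.0, 2.5, 2.5, 4.0]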
# test_isotonic_regression.py -- tests for the solver above
# (a separate file; it imports the function from isotonic_regression.py)
import numpy as np

from isotonic_regression import isotonic_regression


def test_isotonic_regression():
    for i in range(10):
        tol = np.finfo(np.float32).eps
        n = 10 * (i + 1)
        y = np.arange(n).astype(float)
        y += 2 * np.random.randn(n)  # add noise
        w = np.random.uniform(low=1., high=2., size=y.size)
        x = isotonic_regression(w, y)

        # check that the monotonicity constraints are satisfied
        assert np.all(np.diff(x) >= 0)

        # check KKT conditions: the running sum of 2 * w_j * (y_j - x_j)
        # is the dual variable of the j-th ordering constraint and must
        # stay non-negative
        v_prev = 0
        for j in range(len(w)):
            v = 2 * w[j] * (y[j] - x[j]) + v_prev
            v_prev = v
            assert v >= -tol

        # check constrained isotonic regression
        x = isotonic_regression(w, y, x_min=0., x_max=2.)
        assert np.allclose(x[0], 0.)
        assert np.allclose(x[-1], 2.)
        # check that the monotonicity constraints are satisfied
        assert np.all(np.diff(x) >= 0)


if __name__ == '__main__':
    test_isotonic_regression()
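As an additional sanity check, the plain weighted fit can be compared against scikit-learn's isotonic_regression, which solves the same weighted least-squares problem. This is only a sketch and assumes scikit-learn is installed; the endpoint-constrained variant is not compared here because sklearn's y_min/y_max clip the fitted values rather than pin the end points.

    import numpy as np
    from sklearn.isotonic import isotonic_regression as sk_isotonic
    from isotonic_regression import isotonic_regression

    rng = np.random.RandomState(0)
    y = np.arange(30, dtype=float) + 2 * rng.randn(30)
    w = rng.uniform(low=1., high=2., size=y.size)

    ours = isotonic_regression(w, y)
    ref = sk_isotonic(y, sample_weight=w)
    print(np.allclose(ours, ref))  # expected: True, the solution is unique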