Generalized constrained projection onto simplex
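The script below compares three ways of projecting a vector theta onto the probability simplex under extra affine constraints (Ap <= b, or Ap == b): an exact cvxpy baseline, copt's composite primal-dual splitting, and a composite (homotopy-smoothed) Frank-Wolfe method following Yurtsever et al. (2018). A minimal usage sketch, not part of the original file, assuming the code below is saved under the hypothetical name constrained_simplex.py:

import numpy as np
from constrained_simplex import cx_affine, fw_affine  # hypothetical module name

theta = np.array([0.9, 0.1, -0.3])
A = np.array([[1.0, 0.0, 0.0]])   # constrain the first coordinate...
b = np.array([0.25])              # ...to carry at most 0.25 mass

obj, p_exact = cx_affine(theta, A, b, ineq=True)         # cvxpy reference solution
p_fw = fw_affine(theta, A, b, ineq=True, max_iter=1000)  # approximate Frank-Wolfe solution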
# Author: vlad niculae <vlad@vene.ro>
# License: 3-clause BSD

import numpy as np
import cvxpy as cx

from copt.constraint import SimplexConstraint
from copt.splitting import minimize_primal_dual


class BoxConstraint(object):
    def __init__(self, a_min=None, a_max=None):
        """constraint a_min <= x <= a_max"""
        self.a_min = a_min
        self.a_max = a_max

    def prox(self, x, step_size=1):
        return np.clip(x, self.a_min, self.a_max)


class EqConstraint(object):
    def __init__(self, b):
        """constraint x = b"""
        self.b = b

    def prox(self, x, step_size=1):
        return self.b


def fw_affine(theta, A, b, ineq=True, max_iter=100, beta_init=.01):
    """minimize 0.5 * ||p - theta||^2
        s.t. p in simplex
             Ap <= b   (or Ap == b if ineq=False)

    f(p) = .5 ||p||^2 - theta @ p
    g(y) = indicator(y ? b) where ? is <= if ineq else ==

    Using:
    "A Conditional Gradient Framework for Composite Convex
    Minimization with Applications to Semidefinite Programming",
    Alp Yurtsever, Olivier Fercoq, Francesco Locatello, Volkan Cevher.
    URL: https://arxiv.org/abs/1804.08544
    """
    p = np.zeros_like(theta)
    cst = BoxConstraint(a_max=b) if ineq else EqConstraint(b=b)

    for it in range(1, max_iter + 1):
        step = 2 / (it + 1)  # TODO better step size!
        beta = beta_init / np.sqrt(it + 1)

        Ap = A @ p
        Ap_proj = cst.prox(Ap)
        v = beta * (p - theta) + A.T @ (Ap - Ap_proj)

        # linear minimization oracle over the simplex: best vertex e_ix
        ix = np.argmin(v)

        # Frank-Wolfe update: convex combination of p and e_ix
        p *= (1 - step)
        p[ix] += step

    return p


def primal_dual(theta, A, b, ineq=True):
    x0 = np.zeros_like(theta)

    def f_grad(x):
        g = x - theta
        return 0.5 * np.dot(g, g), g

    # copt's primal-dual splitting minimizes f(x) + g(x) + h(L x);
    # here g is the simplex indicator and h the affine-constraint indicator.
    simplex = SimplexConstraint()
    cst = BoxConstraint(a_max=b) if ineq else EqConstraint(b=b)

    res = minimize_primal_dual(
        f_grad,
        x0,
        prox_1=simplex.prox,
        prox_2=cst.prox,
        L=A,
    )
    return res.x


def cx_affine(theta, A, b, ineq=True):
    dim = theta.shape[0]
    p = cx.Variable(dim)
    obj = cx.Minimize(cx.sum_squares(p - theta))
    constraints = [
        p >= 0,
        cx.sum(p) == 1,
    ]
    if ineq:
        constraints.append(A @ p <= b)
    else:
        constraints.append(A @ p == b)

    problem = cx.Problem(obj, constraints)
    problem.solve(verbose=False)
    return obj.value, p.value.round(12)


def main():
    dim = 5
    rng = np.random.RandomState(0)
    c = rng.randn(dim)

    A = np.zeros((3, dim))
    A[0, 0] = 1
    A[1, 2:4] = 1
    A[2, 1] = 1
    b = np.array([.1, .5, .1])

    print(f"A={A}")
    print(f"b={b}")

    for ineq in (False, True):
        print('ineq', ineq)

        print("\tcvxpy/osqp")
        obj, pstar = cx_affine(c, A, b, ineq=ineq)
        print(f"\t obj={obj:.4f} pstar={pstar}")
        print(f"\t Ap*={A @ pstar}")
        print()

        print("\tComposite primal dual")
        pstar = primal_dual(c, A, b, ineq=ineq)
        obj = np.sum((pstar - c) ** 2)
        print(f"\t obj={obj:.4f} pstar={pstar}")
        print(f"\t Ap*={A @ pstar}")
        print()

        print("\tComposite FW")
        pstar = fw_affine(c, A, b, ineq=ineq, max_iter=1000)
        obj = np.sum((pstar - c) ** 2)
        print(f"\t obj={obj:.4f} pstar={pstar}")
        print(f"\t Ap*={A @ pstar}")
        print()


if __name__ == '__main__':
    main()