@mayhewsw
Created May 7, 2016 15:09
Conjugate Gradient Method
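The script below applies the conjugate gradient method to the small symmetric positive definite system Q x = b, with Q = [[3, -1], [-1, 3]] and b = (1, 0), starting from x = (1, 1). Each iteration computes a step length alpha_k = (r_k . r_k) / (d_k . Q d_k), updates the iterate and the residual, forms the next search direction d_{k+1} = -r_{k+1} + beta_{k+1} d_k with beta_{k+1} = (r_{k+1} . r_{k+1}) / (r_k . r_k), and stops once the residual is numerically zero.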
from __future__ import division
import numpy as np

# Solve Q x = b for a symmetric positive definite Q with the conjugate gradient method.
Q = np.array([[3, -1], [-1, 3]])
b = np.array([1, 0])
x = np.array([1, 1], dtype=float)

# Initial residual and search direction.
r_k = np.dot(Q, x) - b
d_k = -r_k

i = 1
while True:
    print("iteration:", i)

    # Step length along the current search direction.
    alpha_k = np.dot(r_k, r_k) / np.dot(np.dot(d_k, Q), d_k)
    x = x + alpha_k * d_k

    # Updated residual and next Q-conjugate search direction.
    r_k1 = r_k + alpha_k * np.dot(Q, d_k)
    beta_k1 = np.dot(r_k1, r_k1) / np.dot(r_k, r_k)
    d_k1 = -r_k1 + beta_k1 * d_k

    r_k = r_k1
    d_k = d_k1

    # Stop once the residual is numerically zero.
    if np.isclose(r_k, np.zeros(r_k.shape)).all():
        print("Breaking!")
        print("final r_k:", r_k)
        print("final x:", x)
        break

    print("r_k:", r_k)
    print("x:", x)
    print()
    i += 1
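
As a quick sanity check, the conjugate gradient result can be compared against a direct solve. The snippet below is a minimal sketch that assumes the same Q and b as above; for this 2x2 system the exact solution is x = (3/8, 1/8).

import numpy as np

# Same system as in the script above.
Q = np.array([[3, -1], [-1, 3]], dtype=float)
b = np.array([1, 0], dtype=float)

# Direct solve for comparison; expected result: [0.375 0.125].
x_direct = np.linalg.solve(Q, b)
print("direct solve:", x_direct)

# SciPy's conjugate gradient solver should agree, if SciPy is available:
# from scipy.sparse.linalg import cg
# x_cg, info = cg(Q, b)

Since Q is 2x2 and symmetric positive definite, the loop above converges in at most two iterations in exact arithmetic.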