@junpenglao
Last active August 3, 2017 06:22
Random Correlation matrix using the algorithm in LKJ 2009 (vine method based on a C-vine)
"""
Random Correlation matrix (LKJ 2009) output checking
Created on Wed Aug 2 09:09:02 2017
@author: junpenglao
"""
import numpy as np
from scipy import stats
def is_pos_def(A):
if np.array_equal(A, A.T):
try:
np.linalg.cholesky(A)
return 1
except np.linalg.linalg.LinAlgError:
return 0
else:
return 0
n = 10
eta = 1.
size = 1000
P = lkj_random(n, eta, size)
k=0
for i, p in enumerate(P):
k+=is_pos_def(p)
print("{0} % of the output matrix is positive definite.".format(k/size*100))
import matplotlib.pylab as plt
# Off diagnoal element
C= P.transpose((1, 2, 0))[np.triu_indices(n, k=1)].T
fig, ax = plt.subplots()
ax.hist(C.flatten(), 100, normed=True)
beta = eta - 1 + n/2
C2 = 2 * stats.beta.rvs(size=C.shape, a=beta, b=beta)-1
ax.hist(C2.flatten(), 100, normed=True, histtype='step', label='Beta() distribution')
plt.legend(loc='upper right', frameon=False);
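A numeric complement to the visual check above is sketched below; it is an addition, not part of the original gist. It rescales the pooled off-diagonal correlations to [0, 1] and runs a Kolmogorov-Smirnov test against the theoretical Beta(beta, beta) marginal. Elements from the same matrix are not independent, so the p-value is a rough sanity check only.

# Rough KS check of the off-diagonal marginal against Beta(beta, beta);
# uses C and beta defined in the script above.
D, pval = stats.kstest((C.flatten() + 1) / 2, stats.beta(a=beta, b=beta).cdf)
print("KS statistic: {0:.3f}, p-value: {1:.3f}".format(D, pval))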
"""
Random Correlation matrix using the algorithm in LKJ 2009 (vine method based on a C-vine)
Created on Wed Aug 2 09:09:02 2017
@author: junpenglao
"""
import numpy as np
from scipy import stats
def lkj_random(n, eta, size=None):
beta0 = eta - 1 + n/2
shape = n * (n-1) // 2
triu_ind = np.triu_indices(n, 1)
beta = np.array([beta0 - k/2 for k in triu_ind[0]])
# partial correlations sampled from beta dist.
P = np.ones((n, n) + (size,))
P[triu_ind] = stats.beta.rvs(a=beta, b=beta, size=(size,) + (shape,)).T
# scale partial correlation matrix to [-1, 1]
P = (P-.5)*2
for k, i in zip(triu_ind[0], triu_ind[1]):
p = P[k, i]
for l in range(k-1, -1, -1): # convert partial correlation to raw correlation
p = p * np.sqrt((1 - P[l, i]**2) *
(1 - P[l, k]**2)) + P[l, i] * P[l, k]
P[k, i] = p
P[i, k] = p
return np.transpose(P, (2, 0 ,1))
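A minimal usage sketch for the C-vine implementation above; it is illustrative only (not part of the original gist) and assumes this `lkj_random` is the one in scope. It draws a small batch and checks the invariants any correlation matrix must satisfy.

# Usage sketch (illustrative, not part of the original gist):
samples = lkj_random(n=5, eta=2., size=200)
print(samples.shape)                                            # (200, 5, 5)
print(np.allclose(np.diagonal(samples, axis1=1, axis2=2), 1.))  # unit diagonal
print(np.allclose(samples, np.transpose(samples, (0, 2, 1))))   # symmetric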
"""
Random Correlation matrix (LKJ 2009)
ported R code from @rmcelreath, original see
https://github.com/rmcelreath/rethinking/blob/master/R/distributions.r#L165-L184
Created on Wed Aug 2 09:09:02 2017
@author: junpenglao
"""
import numpy as np
from scipy import stats
def lkj_random(n, eta, size=None):
size = size if isinstance(size, tuple) else (size,)
beta = eta - 1 + n/2
r12 = 2 * stats.beta.rvs(a=beta, b=beta, size=size) - 1
P = np.eye(n)[:,:,np.newaxis] * np.ones(size)
P = np.transpose(P, (2, 0 ,1))
P[:, 0, 1] = r12
P[:, 1, 1] = np.sqrt(1 - r12**2)
if n > 2:
for m in range(1, n-1):
beta -= 0.5
y = stats.beta.rvs(a=(m+1) / 2., b=beta, size=size)
z = stats.norm.rvs(loc=0, scale=1, size=(m+1, ) + size)
z = z/np.sqrt(np.einsum('ij,ij->j', z, z))
P[:, 0:m+1, m+1] = np.transpose(np.sqrt(y) * z)
P[:, m+1, m+1] = np.sqrt(1-y)
C = np.einsum('...ji,...jk->...ik', P, P)
return C
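This ported version shares the signature of the C-vine one, so the checking script at the top works with either definition. A minimal usage sketch, again illustrative and assuming this `lkj_random` is the one in scope:

# Usage sketch (illustrative, not part of the original gist):
Cs = lkj_random(n=4, eta=1., size=3)
print(Cs.shape)                             # (3, 4, 4)
print(np.round(Cs[0], 2))                   # one sampled correlation matrix
print(np.all(np.linalg.eigvalsh(Cs) > 0))   # positive definite (up to numerical error)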