Skip to content

Instantly share code, notes, and snippets.

@czarrar
Last active December 31, 2015 06:49
Show Gist options
  • Save czarrar/7950474 to your computer and use it in GitHub Desktop.
This script compares different approaches to calculating eigenvector centrality. It finds that when using normalized matrices, there are differences between the output of the methods, whereas when using non-normalized matrices, the outputs of the methods are the same. The last script is the fast eigenvector centrality code from bias.googlecode.com.
import numpy as np
from CPAC.cwas.subdist import norm_cols
from fast_ecm import fast_eigenvector_centrality  # defined in the companion gist file

print('Compare with non-normalized matrices')
m = np.random.random((200, 1000))   # (ntpts, nvoxs) simulated time series
cm = m.T.dot(m)                     # nvoxs x nvoxs similarity (Gram) matrix

# Approach 1: dense eigendecomposition via scipy.linalg.eigh.
# BUG FIX: eigh's `eigvals` selects eigenvalues by ASCENDING index, so the
# original (0, 0) returned the SMALLEST eigenpair — which is why this
# "didn't work". The dominant (largest) eigenpair lives at index n-1.
from scipy import linalg as LA
n_vox = cm.shape[0]
w01, v01 = LA.eigh(cm, eigvals=(n_vox - 1, n_vox - 1))
e01 = cm.dot(np.abs(v01)) / w01[0]

# Approach 2 (currently used): iterative solver for the largest-magnitude
# eigenpair.
from scipy.sparse import linalg as sLA
w02, v02 = sLA.eigsh(cm, k=1, which='LM', maxiter=1000)
e02 = cm.dot(np.abs(v02)) / w02[0]

# Approach 3: fast power iteration that never forms cm explicitly.
v03 = fast_eigenvector_centrality(m, verbose=False)

# How different are the second and third ones?
print('mean absolute diff: %s' % np.abs(e02 - v03).mean())
print('correlation: %s' % np.corrcoef(e02.T, v03.T)[0, 1])

print('Compare with normalized matrices')
n = norm_cols(m)      # column-normalize the time series
cn = n.T.dot(n)

# Approach 1 (dense), with the same largest-index fix as above.
w11, v11 = LA.eigh(cn, eigvals=(n_vox - 1, n_vox - 1))
e11 = cn.dot(np.abs(v11)) / w11[0]

# Approach 2 (iterative).
w12, v12 = sLA.eigsh(cn, k=1, which='LM', maxiter=1000)
e12 = cn.dot(np.abs(v12)) / w12[0]

# Approach 3 (fast power iteration on the normalized time series).
v13 = fast_eigenvector_centrality(n, verbose=False)

# How different are the second and third ones?
print('mean absolute diff: %s' % np.abs(e12 - v13).mean())
print('correlation: %s' % np.corrcoef(e12.T, v13.T)[0, 1])
import numpy as np
def fast_eigenvector_centrality(m, maxiter=99, verbose=True):
    """
    Compute eigenvector centrality directly from the time series `m`.

    Power iteration is applied to the (implicit) matrix
    ``m.T.dot(m) + 1`` — the voxel-wise similarity matrix shifted by a
    matrix of ones — without ever materializing the nvoxs x nvoxs
    matrix, which makes the method memory-efficient for large nvoxs.

    Parameters
    ----------
    m : ndarray, shape (ntpts, nvoxs)
        Time series, one column per voxel.
    maxiter : int, optional
        Maximum number of power iterations (default 99).
    verbose : bool, optional
        If True, print per-iteration convergence statistics.

    Returns
    -------
    ndarray, shape (nvoxs, 1)
        Eigenvector-centrality estimate with unit L2 norm.

    References
    ----------
    .. [1] Wink, A.M., de Munck, J.C., van der Werf, Y.D., van den Heuvel,
       O.A., Barkhof, F., 2012. Fast Eigenvector Centrality Mapping of
       Voxel-Wise Connectivity in Functional Magnetic Resonance Imaging:
       Implementation, Validation, and Interpretation. Brain Connectivity
       2, 265-274.

    Examples
    --------
    >>> # Simulate Data
    >>> import numpy as np
    >>> ntpts = 100; nvoxs = 1000
    >>> m = np.random.random((ntpts,nvoxs)) # note that don't need to generate connectivity matrix
    >>> # Execute
    >>> from CPAC.network_centrality.core import fast_eigenvector_centrality
    >>> eigenvector = fast_eigenvector_centrality(m)
    """
    from numpy import linalg as LA

    nvoxs = m.shape[1]

    # Initialize with the uniform unit-L2-norm vector.
    vprev = 0                                   # previous ECM estimate
    vcurr = np.ones((nvoxs, 1)) / np.sqrt(nvoxs)
    i = 0                                       # iteration counter
    dnorm = 1                                   # L2 norm of the step v_i - v_(i-1)
    cnorm = 0                                   # machine-precision stopping threshold

    # Efficient power iteration: multiply by (M + 1) implicitly, renormalize.
    # NOTE: the original used bitwise `&` on booleans here; `and` is the
    # correct short-circuit form (same result for scalar bools).
    while i < maxiter and dnorm > cnorm:
        vprev = vcurr
        prevsum = np.sum(vprev)                 # ones-matrix times v == sum(v) per entry
        vcurr_2 = m.T.dot(m.dot(vprev))         # M*v in two skinny products
        vcurr_3 = vcurr_2 + prevsum             # same effect as (M + 1)*v
        vcurr = vcurr_3 / LA.norm(vcurr_3, 2)   # renormalize to unit L2 norm
        i += 1
        dnorm = LA.norm(vcurr - vprev, 2)
        cnorm = LA.norm(vcurr, 2) * np.spacing(1)
        if verbose:
            print("iteration %02d, || v_i - v_(i-1) || / || v_i * epsilon || = %0.16f / %0.16f" % (i, dnorm, cnorm))
    if i >= maxiter and dnorm > cnorm:
        print("Error: algorithm did not converge")  # TODO: convert to an exception?

    # vcurr now holds the ECM estimate.
    return vcurr
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment