
# fabianp/gist:934363

Created April 21, 2011 12:16
locally linear embedding - swiss roll example
```python
# -*- coding: utf-8 -*-
"""
===================================
Swiss Roll reduction with LLE
===================================

An illustration of Swiss Roll reduction with locally linear embedding.
"""
import numpy as np
import pylab as pl

from scipy.sparse import linalg, eye
from pyamg import smoothed_aggregation_solver
from scikits.learn import neighbors


################################################################################
# locally linear embedding function

def locally_linear_embedding(X, n_neighbors, out_dim, tol=1e-6, max_iter=200):
    # barycenter weights: each sample is reconstructed from its k nearest neighbors
    W = neighbors.kneighbors_graph(
        X, n_neighbors=n_neighbors, mode='barycenter')

    # M = (I - W)' (I - W)
    A = eye(*W.shape, format=W.format) - W
    A = (A.T).dot(A).tocsr()

    # initial approximation to the eigenvectors
    X = np.random.rand(W.shape[0], out_dim)

    # algebraic multigrid preconditioner (pyamg)
    ml = smoothed_aggregation_solver(A, symmetry='symmetric')
    prec = ml.aspreconditioner()

    # compute the smallest eigenvalues/eigenvectors with preconditioned LOBPCG
    eigen_values, eigen_vectors = linalg.lobpcg(
        A, X, M=prec, largest=False, tol=tol, maxiter=max_iter)

    index = np.argsort(eigen_values)
    return eigen_vectors[:, index], np.sum(eigen_values)


################################################################################
# generate the swiss roll

n_samples, n_features = 2000, 3
n_turns, radius = 1.2, 1.0
rng = np.random.RandomState(0)
t = rng.uniform(low=0, high=1, size=n_samples)
data = np.zeros((n_samples, n_features))

# generate the 2D spiral data driven by a 1d parameter t
max_rot = n_turns * 2 * np.pi
data[:, 0] = radius * t * np.cos(t * max_rot)
data[:, 1] = radius * t * np.sin(t * max_rot)
data[:, 2] = rng.uniform(-1, 1.0, n_samples)
manifold = np.vstack((t * 2 - 1, data[:, 2])).T.copy()
colors = manifold[:, 0]

# rotate and plot original data
sp = pl.subplot(211)
U = np.dot(data, [[-.79, -.59, -.13],
                  [ .29, -.57,  .75],
                  [-.53,  .56,  .63]])
sp.scatter(U[:, 1], U[:, 2], c=colors)
sp.set_title("Original data")

print "Computing LLE embedding"
n_neighbors, out_dim = 12, 2
X_r, cost = locally_linear_embedding(data, n_neighbors, out_dim)

sp = pl.subplot(212)
sp.scatter(X_r[:, 0], X_r[:, 1], c=colors)
sp.set_title("LLE embedding")
pl.show()
```
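The `scikits.learn` package used above was later renamed to scikit-learn, which now ships locally linear embedding out of the box. The sketch below is a minimal, hedged equivalent on a current install, using `sklearn.manifold.LocallyLinearEmbedding` and the `make_swiss_roll` helper in place of the hand-rolled generator; it does not reproduce the gist's pyamg-preconditioned LOBPCG solver.

```python
# Minimal sketch, assuming a current scikit-learn install (sklearn, not
# scikits.learn); the built-in estimator replaces the custom solver above.
import matplotlib.pyplot as plt
from sklearn.datasets import make_swiss_roll
from sklearn.manifold import LocallyLinearEmbedding

# X has shape (2000, 3); t parameterizes position along the roll
X, t = make_swiss_roll(n_samples=2000, random_state=0)

lle = LocallyLinearEmbedding(n_neighbors=12, n_components=2)
X_r = lle.fit_transform(X)

plt.scatter(X_r[:, 0], X_r[:, 1], c=t)
plt.title("LLE embedding (scikit-learn)")
plt.show()
```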

### ogrisel commented Apr 21, 2011

Nice! Not as good as the Hessian LLE from MDP, but still a good start. Also, dig a hole in your swiss roll to make the topology more interesting :)
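A minimal sketch of that suggestion, reusing `t`, `data`, and `colors` from the script above; the rectangle bounds are arbitrary illustrative values:

```python
# Punch a hole in the rolled-up sheet by dropping samples that fall inside
# a rectangle of the (t, height) parameter plane, then re-run the embedding.
hole = (np.abs(t - 0.5) < 0.1) & (np.abs(data[:, 2]) < 0.3)
data_holed = data[~hole]
colors_holed = colors[~hole]

X_r, cost = locally_linear_embedding(data_holed, n_neighbors, out_dim)
```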

### fabianp commented Apr 22, 2011

I should have credited you for the swiss roll code :-)

BTW, the code is discussed in this blog post: http://fseoane.net/blog/2011/locally-linear-embedding-and-sparse-eigensolvers/