Skip to content

Instantly share code, notes, and snippets.

@jmbr
Last active April 9, 2021 15:54
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save jmbr/34409665be250b9adadbc4dda62ae99a to your computer and use it in GitHub Desktop.
The distances between points in a linear Euclidean manifold are the same before and after dimensionality reduction with principal component analysis.
import numpy as np
from sklearn.decomposition import PCA
from scipy.spatial.distance import pdist
def get_points_from_square(num_points: int, ambient_dim: int) -> np.ndarray:
    """Sample points from a regular grid on a square embedded in high dimension.

    The square [-1, 1]^2 is discretized into a sqrt(num_points) by
    sqrt(num_points) grid, placed in the first two coordinates of the ambient
    space; all remaining coordinates are zero. The result is a flat (linear)
    two-dimensional manifold endowed with the Euclidean metric.

    Args:
        num_points: Total number of grid points. Must be a perfect square.
        ambient_dim: Dimension of the ambient Euclidean space (>= 2).

    Returns:
        Array of shape (num_points, ambient_dim).

    Raises:
        ValueError: If num_points is not a perfect square (the original code
            would instead fail with an opaque broadcasting error).
    """
    side = int(np.sqrt(num_points))
    if side * side != num_points:
        raise ValueError('num_points must be a perfect square.')
    axis = np.linspace(-1.0, 1.0, side)
    x1, x2 = np.meshgrid(axis, axis)
    points = np.zeros((num_points, ambient_dim))
    points[:, :2] = np.stack((x1.ravel(), x2.ravel())).T
    return points
def make_random_rotation(dim: int) -> np.ndarray:
    """Create a random ``dim x dim`` rotation matrix (orthogonal, det = +1).

    A Gaussian matrix is orthogonalized via QR factorization. Two corrections
    are applied that the naive ``Q, _ = qr(randn(...))`` recipe misses:

    1. Q's columns are sign-corrected by the diagonal of R so the draw is
       Haar-uniform over the orthogonal group (without this, the distribution
       is biased by the sign convention of the QR implementation).
    2. If det(Q) = -1 (a reflection), one column is flipped so the result is
       a proper rotation, as the function name promises.

    Args:
        dim: Size of the (square) rotation matrix.

    Returns:
        Orthogonal array of shape (dim, dim) with determinant +1.
    """
    gaussian = np.random.randn(dim, dim)
    q, r = np.linalg.qr(gaussian)
    # Haar sign correction; guard against (measure-zero) zero diagonal entries.
    signs = np.sign(np.diag(r))
    signs[signs == 0] = 1.0
    q = q * signs
    # QR may yield a reflection; flip one column to land in SO(dim).
    if np.linalg.det(q) < 0:
        q[:, 0] = -q[:, 0]
    return q
# --- Demonstration -----------------------------------------------------------
# Distances between points on a linear (flat) manifold are preserved by PCA:
# pairwise Euclidean distances are invariant under rotation and centering, so
# projecting onto the principal components reproduces them exactly.

N: int = 100 ** 2  # Number of samples (must be a perfect square for the grid).
d: int = 200  # Dimension of the ambient space.
# Threshold for identifying relevant coordinates. NOTE: the deprecated alias
# `np.float` (removed in NumPy 1.24) is replaced by the builtin `float`.
threshold: float = 1e4 * np.finfo(float).eps

# Generate points on a square and scramble their coordinates with a random
# rotation so the planar structure is hidden in all 200 ambient coordinates.
Q = make_random_rotation(d)
X = get_points_from_square(N, d) @ Q

# Do dimensionality reduction using principal component analysis.
pca = PCA(n_components=10)  # Pretend we don't know it's a square just for fun.
Y = pca.fit_transform(X)
# Keep only the coordinates whose singular values are numerically nonzero;
# for a planar square, exactly two components should survive.
Y = Y[:, pca.singular_values_ > threshold]

assert np.allclose(pdist(X), pdist(Y)), 'Something went awry.'
print('Everything went well.')
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment