Skip to content

Instantly share code, notes, and snippets.

@giuseppebonaccorso
Last active December 4, 2017 15:58
Show Gist options
  • Save giuseppebonaccorso/2e015210f7e8f4a57996883be300aa5a to your computer and use it in GitHub Desktop.
PCA with Rubner-Tavan Networks
from sklearn.datasets import load_digits
import numpy as np

# Set random seed for reproducibility
np.random.seed(1000)

# Load the scikit-learn digits dataset (8x8 images, 64 features per sample).
# NOTE: this is NOT MNIST — pixel values range over [0, 16], so the correct
# normalization divisor is 16, not 255 (the original /255 left the inputs
# ~16x smaller than intended).
X, Y = load_digits(return_X_y=True)
X /= 16.0

# Hyperparameters
n_components = 16          # number of principal components to extract
learning_rate = 0.001
epochs = 20
stabilization_cycles = 5   # inner iterations letting the recurrent outputs settle

# Weight matrices:
#   W: feed-forward weights, shape (n_features, n_components)
#   V: lateral (anti-Hebbian) weights, shape (n_components, n_components).
# In a Rubner-Tavan network V must be STRICTLY LOWER triangular (each output
# is inhibited only by earlier ones).  The training loop enforces np.tril,
# so initialize consistently with tril — the original used np.triu, which
# contradicted the projection applied during training.
W = np.random.uniform(-0.01, 0.01, size=(X.shape[1], n_components))
V = np.tril(np.random.uniform(-0.01, 0.01, size=(n_components, n_components)))
np.fill_diagonal(V, 0.0)
# Training process (online Rubner-Tavan learning).
# For each sample: let the recurrent outputs settle, then apply a Hebbian
# update with an Oja-style decay to W, and an anti-Hebbian update to the
# lateral weights V (which decorrelates the outputs).
for e in range(epochs):
    for i in range(X.shape[0]):
        y_p = np.zeros((n_components, 1))
        # BUG FIX: the original referenced undefined 'Xs'; the data matrix is X.
        xi = np.expand_dims(X[i], 1)

        # Iterate y = W^T x + V y toward its fixed point.
        for _ in range(stabilization_cycles):
            y = np.dot(W.T, xi) + np.dot(V, y_p)
            y_p = y.copy()

        dW = np.zeros((X.shape[1], n_components))
        dV = np.zeros((n_components, n_components))

        for t in range(n_components):
            y2 = np.power(y[t], 2)
            # Hebbian term y_t * x plus Oja decay term y_t^2 * w_t.
            dW[:, t] = np.squeeze((y[t] * xi) + (y2 * np.expand_dims(W[:, t], 1)))
            # Anti-Hebbian update of the lateral weights.
            dV[t, :] = -np.squeeze((y[t] * y) + (y2 * np.expand_dims(V[t, :], 1)))

        W += (learning_rate * dW)
        V += (learning_rate * dV)

        # Re-project V to strictly lower triangular (hierarchical inhibition).
        V = np.tril(V)
        np.fill_diagonal(V, 0.0)

        # Keep each feed-forward weight vector at unit length.
        # NOTE(review): the scrape lost indentation; these per-sample updates
        # are placed inside the sample loop, the usual online formulation.
        W /= np.linalg.norm(W, axis=0).reshape((1, n_components))
# Compute all output components: project every sample through the trained
# network, letting the recurrent outputs settle exactly as during training.
# BUG FIXES vs. the original:
#   - 'Xs' was undefined (the data matrix is X)
#   - y_p must be a column vector to match np.dot(V, y_p)
#   - the recurrence must use V (not V.T), consistent with training
#   - the settled output y is stored once AFTER stabilization, instead of
#     assigning Y_comp[i] every cycle and copying a stale training variable
Y_comp = np.zeros((X.shape[0], n_components))
for i in range(X.shape[0]):
    y_p = np.zeros((n_components, 1))
    xi = np.expand_dims(X[i], 1)
    for _ in range(stabilization_cycles):
        y = np.dot(W.T, xi) + np.dot(V, y_p)
        y_p = y.copy()
    Y_comp[i] = np.squeeze(y)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment