Contains QR algorithm for finding PCA
# -*- coding: utf-8 -*-
"""QRAlgorithm.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1kOXabcj86XKDY8SeeFA-prZ4w16r3CmX
Import Libraries
"""
import numpy as np
from typing import Tuple
import pandas as pd
from sklearn.decomposition import PCA
import matplotlib.pyplot as plt
from sklearn import datasets
"""Householder algorithm converts a normal matrix to a Hessenberg matrix"""
def householder_vectorized(a):
    # Reflection vector, normalised so that v[0] == 1
    v = a / (a[0] + np.copysign(np.linalg.norm(a), a[0]))
    v[0] = 1
    tau = 2 / (v.T @ v)
    return v, tau
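"""Quick sanity check (added here as a sketch, not part of the original gist): building
H = I - tau * v @ v.T from the vector returned above and applying it to the same column
should leave only the first entry non-zero."""
a_check = np.array([[3.0], [1.0], [4.0], [1.0]])
v_check, tau_check = householder_vectorized(a_check)
H_check = np.identity(4) - tau_check * (v_check @ v_check.T)
print(H_check @ a_check)  # expected: first entry ~ -norm(a_check), the rest ~ 0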
"""QR decomposition find the Q and R components of the given matrix"""
def qr_decomposition(A: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
    m, n = A.shape
    R = A.copy()
    Q = np.identity(m)
    for j in range(0, n):
        # Apply a Householder transformation that zeroes column j below the diagonal.
        v, tau = householder_vectorized(R[j:, j, np.newaxis])
        H = np.identity(m)
        H[j:, j:] -= tau * (v @ v.T)
        R = H @ R
        Q = H @ Q
    return Q[:n].T, np.triu(R[:n])
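"""A minimal check (added sketch, not in the original notebook): on a random square
matrix, Q should be orthogonal, R upper-triangular, and Q @ R should reproduce A."""
A_check = np.random.rand(5, 5)
Q_check, R_check = qr_decomposition(A_check)
print(np.allclose(Q_check @ R_check, A_check))            # reconstruction
print(np.allclose(Q_check.T @ Q_check, np.identity(5)))   # orthogonality of Q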
"""Given Q and R matrices of the given matrix, we can converge them to get the Eigenvectors of the matrix"""
def findEigVec(Q, R):
    tol = 0.0001
    previous = np.empty(shape=Q.shape)
    U = np.empty(shape=Q.shape)
    U[:] = Q
    for i in range(500):
        previous[:] = Q
        X = R @ Q                      # next iterate of the QR algorithm
        Q, R = qr_decomposition(X)
        U = U @ Q                      # accumulate the orthogonal factors
        if np.allclose(X, np.triu(X), atol=tol):
            break                      # X is (numerically) upper triangular: converged
    return U
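"""A small check (added sketch, assumed setup): each column u of the accumulated U should
be an approximate eigenvector of the symmetric input, so S @ u minus its Rayleigh-quotient
multiple of u should be close to zero."""
S_check = np.cov(np.random.rand(4, 20))    # small random symmetric matrix
Q0, R0 = qr_decomposition(S_check)
U_check = findEigVec(Q0, R0)
for k in range(S_check.shape[0]):
    u = U_check[:, k]
    lam = u @ S_check @ u                  # Rayleigh quotient as the eigenvalue estimate
    print(np.linalg.norm(S_check @ u - lam * u))   # should be close to 0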
"""Load iris dataset"""
iris = datasets.load_iris()
A = iris.data
y = iris.target
"""Normalize and find the covariance of the dataset"""
col_sums = A.sum(axis=0)
X = A/ col_sums[np.newaxis, :]
X = np.cov(X)
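"""Shape check (added): with the default rowvar=True, np.cov above treats each row
(sample) as a variable, so the matrix passed to the QR pipeline is 150 x 150 rather
than the 4 x 4 feature covariance."""
print(X.shape)   # expected: (150, 150)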
"""Find the eigen vectors of the dataset using above algorithm"""
Q, R = qr_decomposition(X)
np.set_printoptions(linewidth=9999, precision=20, suppress=True)
U=findEigVec(Q,R)
print(U)
"""Find the eigen vectors of the dataset using inbuilt numpy function"""
x=np.linalg.eig(X)
print(x[1])
"""Calculates the PCA components"""
def calcPCAvec(A):
Q, R = qr_decomposition(A)
U=findEigVec(Q,R)
U=U.T
U=U[:2][:]
U=U.T
P=A@U
return P
P=calcPCAvec(X)
dfqr = pd.DataFrame(data=P, columns=['PC1', 'PC2'])
print(dfqr)
"""Calculates PCA components using inbuilt Scikit learn function"""
pca = PCA(n_components=2)
principalComponents = pca.fit_transform(A)
dfskl = pd.DataFrame(data = principalComponents
, columns = ['PC1','PC2'])
print(dfskl)
"""Plots PCA components calculated above"""
x=P[: , 0]
y=P[: , 1]
plt.ylim(-0.00001,0.00001)
plt.xlim(-0.0001,0.0001)
count=0
for i in range(3):
plt.scatter(x[count:count+50], y[count:count+50])
count+=50
plt.show()
"""Plots PCA components found using inbuilt Scikit learn function"""
x=principalComponents[: , 0]
y=principalComponents[: , 1]
count=0
for i in range(3):
plt.scatter(x[count:count+50], y[count:count+50])
count+=50
plt.show()