Skip to content

Instantly share code, notes, and snippets.

View Alescontrela's full-sized avatar

Alejandro Escontrela Alescontrela

View GitHub Profile
@Alescontrela
Alescontrela / mpo.ipynb
Created April 23, 2024 21:17
MPO Notebook
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
@Alescontrela
Alescontrela / rotation_manifold_christmas_tree.py
Created December 26, 2019 05:38
Using SO(3) rotation manifolds to build a Christmas tree.
"""Quick script to demonstrate SO(3) rotation manifolds via axis-angle.
Merry Christmas :^)
@Author: Alejandro Escontrela
"""
import numpy as np
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.patches import FancyArrowPatch
# NOTE(review): this excerpt is truncated — __init__ ends abruptly after the
# idxs default; the split-finding logic described in the docstring is not
# visible here. Indentation restored (the paste had it stripped).
class DecisionTree(object):
    def __init__(self, x, y, idxs = None, min_leaf = 5):
        """
        Create a decision tree by computing what feature from the observation x to
        perform the current split on. Best feature is computed as that which results
        in the minimum standard deviation across the input examples. Split value
        is the value of the best feature at which to perform the split.
        """
        # ids of data samples to use for the creation of the current decision tree
        # (defaults to all samples when no subset is given)
        if idxs is None: idxs = np.arange(len(y))
def adamGD(batch, num_classes, lr, dim, n_c, beta1, beta2, params, cost):
    '''
    Update the parameters through Adam gradient descent.

    NOTE(review): truncated excerpt — only the batch unpacking is visible;
    the Adam update itself (which would use lr, beta1, beta2, cost and
    num_classes) is missing from this view.
    '''
    # params is the flat list of conv filters, dense weights and biases
    [f1, f2, w3, w4, b1, b2, b3, b4] = params
    X = batch[:,0:-1] # get batch inputs
    # reshape flat rows into (batch, channels, dim, dim) images
    X = X.reshape(len(batch), n_c, dim, dim)
    Y = batch[:,-1] # get batch labels
def conv(image, label, params, conv_s, pool_f, pool_s):
    # NOTE(review): truncated excerpt — only the first convolution + ReLU of
    # the forward pass survives; pooling, the dense layers and the backward
    # pass (which would use label, pool_f, pool_s) are not visible here.
    # `convolution` is defined elsewhere in the original gist.
    [f1, f2, w3, w4, b1, b2, b3, b4] = params
    ################################################
    ############## Forward Operation ###############
    ################################################
    conv1 = convolution(image, f1, b1, conv_s) # convolution operation
    conv1[conv1<=0] = 0 # pass through ReLU non-linearity
def convolutionBackward(dconv_prev, conv_in, filt, s):
    '''
    Backpropagation through a convolutional layer.

    NOTE(review): truncated excerpt — only the gradient buffers are
    initialized here; the loops that accumulate dout/dfilt/dbias (using
    dconv_prev and stride s) are not visible in this view.
    '''
    # n_f filters, each of shape (n_c, f, f)
    (n_f, n_c, f, _) = filt.shape
    # input assumed square: (channels, orig_dim, orig_dim) — TODO confirm
    (_, orig_dim, _) = conv_in.shape
    ## initialize derivatives
    dout = np.zeros(conv_in.shape)   # gradient w.r.t. the layer input
    dfilt = np.zeros(filt.shape)     # gradient w.r.t. the filters
    dbias = np.zeros((n_f,1))        # gradient w.r.t. the biases
def initializeFilter(size, scale = 1.0):
    '''
    Initialize a filter from a zero-mean normal distribution whose standard
    deviation is inversely proportional to the square root of the number of
    units: stddev = scale / sqrt(prod(size)).

    Parameters
    ----------
    size : tuple of int
        Shape of the filter array to create.
    scale : float, optional
        Multiplier on the standard deviation (default 1.0).

    Returns
    -------
    numpy.ndarray
        Array of shape `size` sampled from N(0, stddev^2).
    '''
    # Fan-in style scaling: more units -> smaller initial weights.
    stddev = scale / np.sqrt(np.prod(size))
    return np.random.normal(loc=0, scale=stddev, size=size)
# NOTE(review): truncated excerpt — only the signature and the opening of the
# docstring survive; the body of initializeWeight is missing from this view.
def initializeWeight(size):
    '''
def extract_data(filename, num_images, IMAGE_WIDTH):
    '''
    Extract images by reading the file bytestream. Reshape the read values into a 3D matrix of dimensions [m, h, w], where m
    is the number of training examples.

    NOTE(review): truncated excerpt — the reshape/return described above is
    not visible; the view ends after decoding the raw bytes. Relies on a
    module-level `import gzip` not shown here. Format matches the gzipped
    IDX layout (16-byte header, then uint8 pixels) — TODO confirm.
    '''
    print('Extracting', filename)
    with gzip.open(filename) as bytestream:
        bytestream.read(16)
        buf = bytestream.read(IMAGE_WIDTH * IMAGE_WIDTH * num_images)
        data = np.frombuffer(buf, dtype=np.uint8).astype(np.float32)
# NOTE(review): orphaned fragment — these dense-layer forward-pass lines
# reference names (w3, fc, b3, w4, b4) bound elsewhere; the enclosing
# function is not visible in this excerpt.
z = w3.dot(fc) + b3 # first dense layer
z[z<=0] = 0 # pass through ReLU non-linearity
out = w4.dot(z) + b4 # second dense layer
def categoricalCrossEntropy(probs, label):
    '''
    Compute the categorical cross-entropy loss between a vector of predicted
    probabilities and a (typically one-hot) target label vector.

    Element-wise, each target entry is multiplied by the log of the matching
    prediction; the negated sum over the vector is the loss.
    '''
    log_likelihood = label * np.log(probs)
    return -np.sum(log_likelihood)