# Gist by @dblalock, created September 14, 2021 20:50
import numpy as np

np.random.seed(1234)


class FullyConnectedLayer(object):
    def __init__(self, num_inputs, num_outputs):
        pass

    def forward(self, X):
        pass

    def backward(self, dLdY):
        # hint1: the gradient(s) are all single matrix products
        # hint2: the dimensions of the resulting matrix have to match the
        #   dimensions of whatever variable they're the gradient of
        pass


batch_sz = 2
d_in = 3
d_out = 4
lr = .01

X = np.random.randn(batch_sz, d_in)
layer = FullyConnectedLayer(d_in, d_out)
Y_star = np.random.randn(batch_sz, d_out)

for it in range(5):
    Y = layer.forward(X)
    diffs = (Y - Y_star)
    loss = (diffs * diffs).mean()
    dLdY = diffs
    something = layer.backward(dLdY)
    # probably need to do something with something
    print(loss)
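
# ----------------------------------------------------------------------------
# One possible way to fill in the blanks above -- a sketch, not necessarily the
# intended solution. Assumptions not in the original: the class name
# FullyConnectedLayerSolved, the inclusion of a bias term b, the 0.1 weight
# init scale, and backward() returning the gradients so the caller applies a
# plain SGD step with lr. Per the hints, every gradient is a single matrix
# product whose shape matches the variable it differentiates.
# ----------------------------------------------------------------------------

class FullyConnectedLayerSolved(object):
    def __init__(self, num_inputs, num_outputs):
        # small random weights; the init scale is an arbitrary choice
        self.W = np.random.randn(num_inputs, num_outputs) * 0.1
        self.b = np.zeros(num_outputs)

    def forward(self, X):
        self.X = X                      # cache input for the backward pass
        return X @ self.W + self.b      # (batch_sz, d_out)

    def backward(self, dLdY):
        dLdW = self.X.T @ dLdY          # (d_in, d_out),    same shape as W
        dLdb = dLdY.sum(axis=0)         # (d_out,),         same shape as b
        dLdX = dLdY @ self.W.T          # (batch_sz, d_in), same shape as X
        return dLdW, dLdb, dLdX


# Demo of the completed loop: the "something" from backward() is the weight
# gradient, applied with SGD. Using dLdY = diffs drops the constant factor
# 2 / diffs.size from the exact MSE derivative, matching the skeleton above;
# it only rescales the effective learning rate.
solved = FullyConnectedLayerSolved(d_in, d_out)
for it in range(5):
    Y = solved.forward(X)
    diffs = Y - Y_star
    loss = (diffs * diffs).mean()
    dLdW, dLdb, _ = solved.backward(diffs)
    solved.W -= lr * dLdW
    solved.b -= lr * dLdb
    print(loss)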