Skip to content

Instantly share code, notes, and snippets.

@andresn
Created April 3, 2017 23:18
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
Save andresn/f5d504558f3f9850a9f6cb86c01f4e3c to your computer and use it in GitHub Desktop.
nn.py
"""
Test your network here!
No need to change this code, but feel free to tweak it
to test your network!
Make your changes to backward method of the Sigmoid class in miniflow.py
"""
import numpy as np
from miniflow import *
X, W, b = Input(), Input(), Input()
y = Input()
f = Linear(X, W, b)
a = Sigmoid(f)
cost = MSE(y, a)
X_ = np.array([[1., 2.], [3., 4.]])
n_rows = X_.shape[0]
n_features = X_.shape[1]
n_hidden = 1
# W_ = np.array([[2.], [3.]])
W_ = np.random.randn(n_features, n_hidden)
# b_ = np.array([5.])
b_ = np.zeros(1)
y_ = np.array([12, 20])
'''
X_ = np.array([[-1., -2.], [-1, -2]])
W_ = np.array([[2.], [3.]])
b_ = np.array([-3.])
y_ = np.array([1, 2])
'''
feed_dict = {
X: X_,
y: y_,
W: W_,
b: b_,
}
trainables = [W, b]
graph = topological_sort(feed_dict)
# forward_and_backward(graph)
# return the gradients for each Input
# gradients = [t.gradients[t] for t in [X, y, W, b]]
epochs = 10
for i in range(epochs):
loss = 0
for j in X.value:
# Step 2
forward_and_backward(graph)
# Step 3
sgd_update(trainables, .1)
loss += graph[-1].value
print("Epoch: {}, Loss: {:.3f}".format(i+1, loss/n_rows))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment