Skip to content

Instantly share code, notes, and snippets.

@ferryzhou
Created September 3, 2017 05:37
Show Gist options
  • Save ferryzhou/3752b87e0836d586455b62b55a7f4c42 to your computer and use it in GitHub Desktop.
Save ferryzhou/3752b87e0836d586455b62b55a7f4c42 to your computer and use it in GitHub Desktop.
import numpy as np
# logistic regression.
# yi = sigmoid(dot(a * xi))
def sigmoid(x):
    """Elementwise logistic function 1 / (1 + exp(-x)).

    Computed as exp(-log(1 + exp(-x))) via np.logaddexp so that large
    negative inputs do not overflow in exp(-x) (the naive form emits a
    RuntimeWarning and goes through inf before settling on 0.0).
    Accepts scalars or ndarrays; returns the same shape.
    """
    # exp(-logaddexp(0, -x)) == 1 / (1 + exp(-x)), but stable for all x.
    return np.exp(-np.logaddexp(0, -x))
# Build a synthetic data set with a known ground-truth weight vector,
# then push the noisy logits through the logistic model.
N = 1000  # number of samples
M = 2     # number of features per sample
A = np.random.random((N, M))         # design matrix, entries in [0, 1)
x = np.random.random(M)              # ground-truth weights to recover
noise = np.random.random(N) * 0.001  # tiny perturbation of the logits
y = A.dot(x) + noise                 # noisy logits (all non-negative here)
c = sigmoid(y)                       # target probabilities
# Recover the weights with one pass of stochastic gradient descent on the
# cross-entropy loss plus an L2 penalty.
# see http://peterroelants.github.io/posts/neural_network_implementation_part02/
l2_reg = 0.001   # strength of the L2 regularization on w
r = 0.1          # learning rate
e = 0            # prediction error of the most recent sample
w = np.zeros(M)  # weight estimate, updated one sample at a time
for i, a in enumerate(A):
    # forward pass: predicted probability for the current sample
    pred = sigmoid(a.dot(w))
    e = pred - c[i]
    print(e)
    # backward pass: gradient of cross-entropy + L2 penalty w.r.t. w
    grad = e * a + l2_reg * w
    w = w - r * grad
print(x)  # ground truth ...
print(w)  # ... versus the learned estimate
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment