@hongthaiphi
Created October 17, 2016 08:43
import numpy as np

# sigmoid function (returns its derivative when deriv=True)
def nonlin(x, deriv=False):
    if deriv:
        # assumes x already holds sigmoid outputs, so the slope is x * (1 - x)
        return x * (1 - x)
    return 1 / (1 + np.exp(-x))

# input dataset
X = np.array([[0, 0, 1],
              [0, 1, 1],
              [1, 0, 1],
              [1, 1, 1]])

# output dataset
y = np.array([[0, 0, 1, 1]]).T

# seed random numbers to make the calculation
# deterministic (just a good practice)
np.random.seed(1)

# initialize weights randomly with mean 0
syn0 = 2 * np.random.random((3, 1)) - 1

for iter in range(10000):
    # forward propagation
    l0 = X
    l1 = nonlin(np.dot(l0, syn0))

    # how much did we miss?
    l1_error = y - l1

    # multiply how much we missed by the
    # slope of the sigmoid at the values in l1
    l1_delta = l1_error * nonlin(l1, True)

    # update weights
    syn0 += np.dot(l0.T, l1_delta)

print("Output After Training:")
print(l1)
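
A minimal usage sketch, assuming it is appended after the training loop above: the trained weights in syn0 can score a new input by running the same forward pass. The input [1, 0, 0] and the variable names new_example and prediction are made up for illustration and are not part of the original gist.

# hypothetical example input; reuses nonlin and syn0 from the script above
new_example = np.array([1, 0, 0])
prediction = nonlin(np.dot(new_example, syn0))
print("Prediction for [1, 0, 0]:", prediction)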