@VaughnGH
Last active April 26, 2016 18:05
{
"cells": [
{
"cell_type": "code",
"execution_count": 36,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[[ 0.5 ]\n",
" [ 0.44822537]\n",
" [ 0.99993704]\n",
" [ 0.9999225 ]]\n"
]
}
],
"source": [
"#Two-Layer, from https://iamtrask.github.io/2015/07/12/basic-python-network/\n",
"import numpy as np\n",
"\n",
"# sigmoid function\n",
"def nonlin(x,deriv=False):\n",
" if(deriv==True):\n",
" return x*(1-x)\n",
" return 1/(1+np.exp(-x))\n",
" \n",
"# input dataset\n",
"X = np.array([ [0,0,1],\n",
" [0,1,1],\n",
" [1,0,1],\n",
" [1,1,1] ])\n",
" \n",
"# output dataset \n",
"y = np.array([[0,0,1,1]]).T\n",
"\n",
"# seed random numbers to make calculation\n",
"# deterministic (just a good practice)\n",
"np.random.seed(1)\n",
"\n",
"# initialize weights randomly with mean 0\n",
"syn0 = 2*np.random.random((3,1)) - 1\n",
"\n",
"for iter in xrange(10000):\n",
"\n",
" # forward propagation\n",
" l0 = X\n",
" l1 = nonlin(np.dot(l0,syn0))\n",
"\n",
" # how much did we miss?\n",
" l1_error = y - l1\n",
"\n",
" # multiply how much we missed by the \n",
" # slope of the sigmoid at the values in l1\n",
" l1_delta = l1_error * nonlin(l1,True)\n",
"\n",
" # update weights\n",
" syn0 += np.dot(l0.T,l1_delta)\n",
"\n",
"#print \"Output After Training:\"\n",
"#print l1\n",
"#print syn0\n",
"\n",
"###Using the syn0 Layer###\n",
"syn0 = np.array([[ 9.67299303], #obtained through above function\n",
" [-0.2078435 ],\n",
" [-4.62963669]])\n",
"X1 = np.array([ [0,0,0],\n",
" [0,1,0],\n",
" [1,0,0],\n",
" [1,1,0] ])\n",
"\n",
"print nonlin(np.dot(X1, syn0))\n"
]
},
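{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# A small sanity check on the nonlin() helper above: with deriv=True it returns\n",
"# x*(1-x), which equals the sigmoid derivative only when x is already a sigmoid\n",
"# output (x = sigmoid(z)). It also explains the first prediction above: the row\n",
"# [0,0,0] gives a dot product of 0, and sigmoid(0) = 0.5.\n",
"import numpy as np\n",
"\n",
"def sigmoid(z):\n",
"    return 1/(1+np.exp(-z))\n",
"\n",
"z = 0.7                            # an arbitrary pre-activation value\n",
"a = sigmoid(z)                     # the corresponding activation\n",
"print a*(1-a)                      # what nonlin(a, deriv=True) computes\n",
"print sigmoid(z)*(1-sigmoid(z))    # the analytic derivative -- identical\n",
"print sigmoid(0)                   # 0.5, matching the [0,0,0] row above\n"
]
},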
{
"cell_type": "code",
"execution_count": 55,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[[50, -100], [0, -170], [60, -260], [20, -180], [150, -270], [-50, -220], [100, -270], [40, -100], [70, -180], [50, -80], [-30, 0], [-20, 0], [-20, -70], [40, -120], [-50, 30], [10, -10], [1, -1], [0, -90], [-70, -30], [120, -70]]\n"
]
}
],
"source": [
"x = [[190, 240, 140], [310, 310, 140], [340, 400, 140], [310, 330, 150], [280, 430, 160], [430, 380, 160], [400, 500, 230], [260, 300, 200], [290, 360, 180], [180, 230, 150], [410, 380, 380], [200, 180, 180], [300, 280, 210], [340, 380, 260], [230, 180, 210], [150, 160, 150], [50, 51, 50], [250, 250, 160], [280, 210, 180], [310, 430, 360]]\n",
"y = []\n",
"for dset in x:\n",
" y.append([dset[1]-dset[0], dset[2]-dset[1]])\n",
"print y"
]
},
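{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# The loop above turns each triple [a, b, c] into the feature pair [b-a, c-b].\n",
"# The same transformation as a single vectorized numpy call (a sketch, reusing\n",
"# the x list from the previous cell):\n",
"import numpy as np\n",
"\n",
"x_arr = np.array(x)              # (20, 3) array of the raw triples\n",
"deltas = np.diff(x_arr, axis=1)  # column-wise differences: [[b-a, c-b], ...]\n",
"print deltas.tolist()            # should match the list printed above\n"
]
},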
{
"cell_type": "code",
"execution_count": 70,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[[ 7.85014714]\n",
" [ 12.85403101]]\n",
"[[ 1.00000000e+000]\n",
" [ 1.00000000e+000]\n",
" [ 1.00000000e+000]\n",
" [ 1.00000000e+000]\n",
" [ 1.00000000e+000]\n",
" [ 1.00000000e+000]\n",
" [ 1.00000000e+000]\n",
" [ 1.00000000e+000]\n",
" [ 1.00000000e+000]\n",
" [ 1.00000000e+000]\n",
" [ -5.27029514e-103]\n",
" [ -6.52464106e-069]\n",
" [ 0.00000000e+000]\n",
" [ 0.00000000e+000]\n",
" [ -1.02052044e-003]\n",
" [ -1.85536985e-022]\n",
" [ -6.66711373e-003]\n",
" [ 0.00000000e+000]\n",
" [ 0.00000000e+000]\n",
" [ -1.00000000e+000]]\n",
"[[ 0.00000000e+000]\n",
" [ 0.00000000e+000]\n",
" [ 0.00000000e+000]\n",
" [ 0.00000000e+000]\n",
" [ 0.00000000e+000]\n",
" [ 0.00000000e+000]\n",
" [ 0.00000000e+000]\n",
" [ 0.00000000e+000]\n",
" [ 0.00000000e+000]\n",
" [ 7.39605361e-277]\n",
" [ 5.26905162e-103]\n",
" [ 6.52361469e-069]\n",
" [ 0.00000000e+000]\n",
" [ 0.00000000e+000]\n",
" [ 1.02051531e-003]\n",
" [ 1.85527567e-022]\n",
" [ 6.66708011e-003]\n",
" [ 0.00000000e+000]\n",
" [ 0.00000000e+000]\n",
" [ 1.00000000e+000]]\n"
]
}
],
"source": [
"#Two-Layer, from https://iamtrask.github.io/2015/07/12/basic-python-network/\n",
"import numpy as np\n",
"\n",
"# sigmoid function\n",
"def nonlin(x,deriv=False):\n",
" if(deriv==True):\n",
" return x*(1-x)\n",
" return 1/(1+np.exp(-x))\n",
" \n",
"# input dataset\n",
"#X = np.array([[190, 240, 140], [310, 310, 140], [340, 400, 140], [310, 330, 150], [280, 430, 160], [430, 380, 160], [400, 500, 230], [260, 300, 200], [290, 360, 180], [180, 230, 150], [410, 380, 380], [200, 180, 180], [300, 280, 210], [340, 380, 260], [230, 180, 210], [150, 160, 150], [50, 51, 50], [250, 250, 160], [280, 210, 180], [310, 430, 360]])\n",
"X = np.array([[50, -100], [0, -170], [60, -260], [20, -180], [150, -270], [-50, -220], [100, -270], [40, -100], [70, -180], [50, -80], [-30, 0], [-20, 0], [-20, -70], [40, -120], [-50, 30], [10, -10], [1, -1], [0, -90], [-70, -30], [120, -70]])\n",
"# output dataset \n",
"y = np.array( [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]] ).T\n",
"\n",
"# seed random numbers to make calculation\n",
"# deterministic (just a good practice)\n",
"np.random.seed(1)\n",
"\n",
"# initialize weights randomly with mean 0\n",
"syn0 = 2*np.random.random((2,1)) - 1\n",
"\n",
"for iter in xrange(100000):\n",
"\n",
" # forward propagation\n",
" l0 = X\n",
" l1 = nonlin(np.dot(l0,syn0))\n",
"\n",
" # how much did we miss?\n",
" l1_error = y - l1\n",
"\n",
" # multiply how much we missed by the \n",
" # slope of the sigmoid at the values in l1\n",
" l1_delta = l1_error * nonlin(l1,True)\n",
"\n",
" # update weights\n",
" syn0 += np.dot(l0.T,l1_delta)\n",
"\n",
"print syn0\n",
"###Using the syn0 Layer###\n",
"'''print l1\n",
"X1 = np.array([ [1,1,0],\n",
" [1,0,0],\n",
" [0,1,0] ])'''\n",
"print l1_error\n",
"print nonlin(np.dot(X, syn0))"
]
},
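{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# With syn0 trained above, a new delta pair can be scored the same way the\n",
"# training rows were. The sample values below are arbitrary, for illustration only.\n",
"import numpy as np\n",
"\n",
"def sigmoid(z):\n",
"    return 1/(1+np.exp(-z))\n",
"\n",
"new_sample = np.array([[30, -150]])      # hypothetical [b-a, c-b] reading\n",
"print sigmoid(np.dot(new_sample, syn0))  # near 1 -> the class labeled 1, near 0 -> the class labeled 0\n"
]
},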
{
"cell_type": "code",
"execution_count": 26,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Error amount: 0.496410031903\n",
"Error amount: 0.00858452565325\n",
"Error amount: 0.00578945986251\n",
"Error amount: 0.00462917677677\n",
"Error amount: 0.00395876528027\n",
"Error amount: 0.00351012256786\n"
]
}
],
"source": [
"#Simple Three Layer\n",
"import numpy as np\n",
"\n",
"def nonlin(x,deriv=False):\n",
" if(deriv==True):\n",
" return x*(1-x)\n",
"\n",
" return 1/(1+np.exp(-x))\n",
" \n",
"X = np.array([[0,0,1],\n",
" [0,1,1],\n",
" [1,0,1],\n",
" [1,1,1]])\n",
" \n",
"y = np.array([[0],\n",
" [1],\n",
" [1],\n",
" [0]])\n",
"\n",
"np.random.seed(1)\n",
"\n",
"# randomly initialize our weights with mean 0\n",
"syn0 = 2*np.random.random((3,4)) - 1\n",
"syn1 = 2*np.random.random((4,1)) - 1\n",
"\n",
"for j in xrange(60000):\n",
"\n",
" # Feed forward through layers 0, 1, and 2\n",
" l0 = X\n",
" l1 = nonlin(np.dot(l0,syn0))\n",
" l2 = nonlin(np.dot(l1,syn1))\n",
"\n",
" # how much did we miss the target value?\n",
" l2_error = y - l2\n",
" \n",
" if (j% 10000) == 0:\n",
" print \"Error amount: \" + str(np.mean(np.abs(l2_error)))\n",
" \n",
" # in what direction is the target value?\n",
" # were we really sure? if so, don't change too much.\n",
" l2_delta = l2_error*nonlin(l2,deriv=True)\n",
"\n",
" # how much did each l1 value contribute to the l2 error (according to the weights)?\n",
" l1_error = l2_delta.dot(syn1.T)\n",
" \n",
" # in what direction is the target l1?\n",
" # were we really sure? if so, don't change too much.\n",
" l1_delta = l1_error * nonlin(l1,deriv=True)\n",
"\n",
" syn1 += l1.T.dot(l2_delta)\n",
" syn0 += l0.T.dot(l1_delta)\n"
]
},
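{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# A quick forward pass through the trained three-layer network above. The targets\n",
"# y = [0,1,1,0] are XOR of the first two input columns, which a single weight\n",
"# layer cannot fit but the hidden layer can.\n",
"hidden = nonlin(np.dot(X, syn0))     # layer-1 activations, shape (4, 4)\n",
"pred = nonlin(np.dot(hidden, syn1))  # layer-2 output, shape (4, 1)\n",
"print pred                           # should be close to [[0], [1], [1], [0]]\n"
]
},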
{
"cell_type": "code",
"execution_count": 27,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"S0: [[-4.86505962 3.69399351 -4.25857914 -7.06863763]\n",
" [ 7.36534059 3.69443724 -4.2599785 4.36406914]\n",
" [ 1.5925455 0.27029396 0.31505931 -1.43785397]]\n",
"S1: [[-10.63725336]\n",
" [ 5.49803795]\n",
" [ -4.09239374]\n",
" [ 10.97561749]]\n"
]
}
],
"source": [
"#Two layer, w/ gradient descent\n",
"import numpy as np\n",
"X = np.array([ [0,0,1],[0,1,1],[1,0,1],[1,1,1] ])\n",
"y = np.array([[0,1,1,0]]).T\n",
"alpha,hidden_dim = (0.5,4)\n",
"synapse_0 = 2*np.random.random((3,hidden_dim)) - 1\n",
"synapse_1 = 2*np.random.random((hidden_dim,1)) - 1\n",
"for j in xrange(60000):\n",
" layer_1 = 1/(1+np.exp(-(np.dot(X,synapse_0))))\n",
" layer_2 = 1/(1+np.exp(-(np.dot(layer_1,synapse_1))))\n",
" layer_2_delta = (layer_2 - y)*(layer_2*(1-layer_2))\n",
" layer_1_delta = layer_2_delta.dot(synapse_1.T) * (layer_1 * (1-layer_1))\n",
" synapse_1 -= (alpha * layer_1.T.dot(layer_2_delta))\n",
" synapse_0 -= (alpha * X.T.dot(layer_1_delta))\n",
"print \"S0:\", synapse_0\n",
"print \"S1:\", synapse_1"
]
},
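{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# The cell above writes the error as (layer_2 - y) and subtracts alpha times the\n",
"# gradient, while the earlier cells use (y - l1) and add the update; both are the\n",
"# same descent step, here with an explicit learning rate alpha. A forward pass\n",
"# with the trained weights:\n",
"layer_1 = 1/(1+np.exp(-np.dot(X, synapse_0)))\n",
"layer_2 = 1/(1+np.exp(-np.dot(layer_1, synapse_1)))\n",
"print layer_2   # should again be close to [[0], [1], [1], [0]]\n"
]
},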
{
"cell_type": "code",
"execution_count": 41,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[[190, 240, 140], [310, 310, 140], [340, 400, 140], [310, 330, 150], [280, 430, 160], [430, 380, 160], [400, 500, 230], [260, 300, 200], [290, 360, 180], [180, 230, 150], [410, 380, 380], [200, 180, 180], [300, 280, 210], [340, 380, 260], [230, 180, 210], [150, 160, 150], [50, 51, 50], [250, 250, 160], [280, 210, 180], [310, 430, 360]] [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]\n",
"syn0:\n",
"[[ 0.54264129]\n",
" [-0.9584961 ]\n",
" [ 0.26729647]]\n",
"Input\n",
"[[190 240 140]\n",
" [310 310 140]\n",
" [340 400 140]\n",
" [310 330 150]\n",
" [280 430 160]\n",
" [430 380 160]\n",
" [400 500 230]\n",
" [260 300 200]\n",
" [290 360 180]\n",
" [180 230 150]\n",
" [410 380 380]\n",
" [200 180 180]\n",
" [300 280 210]\n",
" [340 380 260]\n",
" [230 180 210]\n",
" [150 160 150]\n",
" [ 50 51 50]\n",
" [250 250 160]\n",
" [280 210 180]\n",
" [310 430 360]]\n",
"syn0\n",
"[[-10.57448632]\n",
" [ -9.29801035]\n",
" [ -6.89324866]]\n",
"Output Error:\n",
"[[ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]\n",
" [ 0.]]\n"
]
}
],
"source": [
"def setup(corrInit=\"\", incorrInit=\"\"):\n",
" def getArr(string):\n",
" array = []\n",
" for row in string.split('\\n'):\n",
" array.append([int(x) for x in row.split(',')])\n",
" return array\n",
" correct = '''190, 240, 140\n",
"310, 310, 140\n",
"340, 400, 140\n",
"310, 330, 150\n",
"280, 430, 160\n",
"430, 380, 160\n",
"400, 500, 230\n",
"260, 300, 200\n",
"290, 360, 180\n",
"180, 230, 150''' if not corrInit else corrInit\n",
" incorrect = '''410, 380, 380\n",
"200, 180, 180\n",
"300, 280, 210\n",
"340, 380, 260\n",
"230, 180, 210\n",
"150, 160, 150\n",
"50, 51, 50\n",
"250, 250, 160\n",
"280, 210, 180\n",
"310, 430, 360''' if not incorrInit else incorrInit\n",
" corr = []\n",
" incorr = []\n",
" y = []\n",
" corr, incorr = getArr(correct), getArr(incorrect)\n",
" x = corr + incorr\n",
" y.append([1 for a in xrange(len(corr))])\n",
" y[0] += [0 for a in xrange(len(incorr))]\n",
" return (x,y)\n",
" \n",
"x, y = setup()\n",
"print x, y\n",
"#-------premade-------#\n",
"import numpy as np\n",
"\n",
"# sigmoid function\n",
"def nonlin(x,deriv=False):\n",
" if(deriv==True):\n",
" return x*(1-x)\n",
" return 1/(1+np.exp(-x))\n",
" \n",
"# input dataset\n",
"X = np.array(x)\n",
"##print X \n",
"# output dataset \n",
"y = np.array(y).T\n",
"##print y\n",
"# seed random numbers to make calculation\n",
"# deterministic (just a good practice)\n",
"np.random.seed(10)\n",
"\n",
"# initialize weights randomly with mean 0\n",
"syn0 = 2*np.random.random((3,1)) - 1\n",
"print \"syn0:\\n\", syn0\n",
"\n",
"for iter in xrange(10000):\n",
"\n",
" # forward propagation\n",
" l0 = X\n",
" l1 = nonlin(np.dot(l0,syn0))\n",
"\n",
" # how much did we miss?\n",
" l1_error = y - l1\n",
"\n",
" # multiply how much we missed by the \n",
" # slope of the sigmoid at the values in l1\n",
" l1_delta = l1_error * nonlin(l1,True)\n",
"\n",
" # update weights\n",
" syn0 += np.dot(l0.T,l1_delta)\n",
"\n",
"print \"Input\"\n",
"print l0\n",
"print \"syn0\"\n",
"print syn0\n",
"print \"Output Error:\"\n",
"print l1"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.11"
}
},
"nbformat": 4,
"nbformat_minor": 0
}