Skip to content

Instantly share code, notes, and snippets.

@Erlemar
Last active July 28, 2017 17:05
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save Erlemar/6a5cfcca423ef3b5f6e890c6bef6d5ed to your computer and use it in GitHub Desktop.
A very simple linear regression as neural net in numpy
Display the source blob
Display the rendered blob
Raw
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"import numpy as np"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"x = np.array([[2,2,2],[1,2,0],[1,1,2], [2,0,0], [0,1,0], [0,0,2], [0, 1, 1]])\n",
"y = np.array([[6,3,4,2,1,2,2]]).T\n",
"w = np.random.random((3,1))"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Step 0. Cost 14.7024377997175.\n",
"Step 100. Cost 1.5856568764184218.\n",
"Step 200. Cost 0.9208310250047236.\n",
"Step 300. Cost 0.8871342349696927.\n",
"Step 400. Cost 0.8854263085160973.\n",
"Step 500. Cost 0.8853397420280746.\n",
"Step 600. Cost 0.8853353543936845.\n",
"Step 700. Cost 0.8853351320058304.\n",
"Step 800. Cost 0.8853351207340723.\n",
"Step 900. Cost 0.8853351201627594.\n"
]
}
],
"source": [
"learning_rate = 0.0001\n",
"#In this case there is a single sum, so the whole weight vector is updated by one scalar value.\n",
"for i in range(1000):\n",
" grad = np.sum((2 * x * (np.dot(x, w) - y)))\n",
" dw = learning_rate * grad\n",
" w = w - dw\n",
" if i % 100 == 0:\n",
" print('Step {0}. Cost {1}.'.format(i, ((y - np.dot(x, w))**2).sum()))"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([ 3.33694878])"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"np.dot(np.array([2, 2, 0]), w)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"x = np.array([[2,2,2],[1,2,0],[1,1,2], [2,0,0], [0,1,0], [0,0,2], [0, 1, 1]])\n",
"y = np.array([[6,3,4,2,1,2,2]]).T\n",
"w = np.random.random((3,1))"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Step 0. Cost 31.008017394822314.\n",
"Step 1000. Cost 0.052528643029055876.\n",
"Step 2000. Cost 0.011766593689446825.\n",
"Step 3000. Cost 0.002860812273152058.\n",
"Step 4000. Cost 0.0007112780307814341.\n",
"Step 5000. Cost 0.00017843723979905354.\n",
"Step 6000. Cost 4.492221587863265e-05.\n",
"Step 7000. Cost 1.1324841864479193e-05.\n",
"Step 8000. Cost 2.8564986614772563e-06.\n",
"Step 9000. Cost 7.206515856273887e-07.\n"
]
}
],
"source": [
"#In this case grad has shape (3,) (summed over axis=0), so after reshaping to (3,1) each value in w is updated by a different value.\n",
"for i in range(10000):\n",
" grad = np.sum((2 * x * (np.dot(x, w) - y)), axis=0)\n",
" dw = learning_rate * grad\n",
" w = w - dw.reshape(3,1)\n",
" if i % 1000 == 0:\n",
" print('Step {0}. Cost {1}.'.format(i, ((y - np.dot(x, w))**2).sum()))"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([ 4.00002056])"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"np.dot(np.array([2, 2, 0]), w)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python [Root]",
"language": "python",
"name": "Python [Root]"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.1"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment