MNIST Model using TensorFlow
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# A handwritten digit classifier using TensorFlow"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### We first import all the required dependencies for our project"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"code_folding": []
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/home/ashar/anaconda3/lib/python3.6/site-packages/h5py/__init__.py:34: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
" from ._conv import register_converters as _register_converters\n"
]
}
],
"source": [
"# For Ploting the real image and labels just to show we are using a image to train\n", | |
"import matplotlib.pyplot as plt\n", | |
"import numpy as np\n", | |
"# The dataset we are using is called mnist\n", | |
"from tensorflow.examples.tutorials import mnist\n", | |
"import tensorflow as tf # import the tensorflow module" | |
] | |
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Let's download the image dataset"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Extracting ./train-images-idx3-ubyte.gz\n",
"Extracting ./train-labels-idx1-ubyte.gz\n",
"Extracting ./t10k-images-idx3-ubyte.gz\n",
"Extracting ./t10k-labels-idx1-ubyte.gz\n"
]
}
],
"source": [
"data = mnist.input_data.read_data_sets(train_dir='./',\n",
"                                       one_hot=True)"
]
},
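{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### As a quick sanity check, let's look at what the loader returned (a minimal sketch, assuming the standard tutorial DataSet splits)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: inspect the dataset object returned by read_data_sets\n",
"# (train/test splits with flattened images and one-hot labels)\n",
"print('Training examples :', data.train.num_examples)  # 55000 for this loader\n",
"print('Test examples     :', data.test.num_examples)   # 10000 for this loader\n",
"print('Image array shape :', data.train.images.shape)  # (55000, 784)\n",
"print('Label array shape :', data.train.labels.shape)  # (55000, 10)"
]
},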
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Using TensorFlow 1.5.0\n"
]
}
],
"source": [
"# Confirm which TensorFlow version we are using\n",
"print('Using TensorFlow ', tf.__version__)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### We create all the placeholders for the computational graph"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"x = tf.placeholder(dtype=tf.float32, # The data type this tensor should hold\n", | |
" shape=[None, 784],\n", | |
" name='input') # This is input (it can be first dimension is not known second is 28x28 = 784)\n", | |
"\n", | |
"y = tf.placeholder(dtype=tf.float32,\n", | |
" shape=[None, 10],\n", | |
" name='output') # This is output (a digit can only be from 0-9 = total of 10)" | |
] | |
}, | |
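{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### The one_hot=True flag above is why 'y' has 10 columns. A minimal illustration of what a one-hot label looks like:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: a digit d is encoded as a length-10 vector with a 1 at index d\n",
"example_label = data.train.labels[0]\n",
"print('One-hot label :', example_label)\n",
"print('Decoded digit :', np.argmax(example_label))"
]
},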
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### We now create all the variables (weights and biases)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"weight_input = tf.Variable(initial_value=tf.truncated_normal(shape=[784, 16]),\n",
"                           name='input_weights')\n",
"\n",
"biases_hidden = tf.Variable(initial_value=tf.zeros([16]),\n",
"                            name='biases_hidden')\n",
"\n",
"weights_hidden = tf.Variable(initial_value=tf.truncated_normal([16, 10]),\n",
"                             name='weight_hidden')\n",
"\n",
"biases_output = tf.Variable(initial_value=tf.zeros([10]),\n",
"                            name='biases_output')"
]
},
{
"cell_type": "markdown",
"metadata": {
"collapsed": true
},
"source": [
"### We now connect and build the first layer of neurons"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"first_layer_output = tf.nn.leaky_relu(\n", | |
" features=tf.matmul(x, weight_input) + biases_hidden)\n", | |
"# We are using an activation function leaky_relu. It is good as it prevents dead neuron problem" | |
] | |
}, | |
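{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### A small numeric sketch of why leaky ReLU helps: negative inputs keep a small slope instead of being zeroed out"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: compare ReLU and leaky ReLU on a few sample values\n",
"# (tf.nn.leaky_relu defaults to alpha=0.2 in TF 1.5)\n",
"sample = tf.constant([-3.0, -0.5, 0.0, 2.0])\n",
"with tf.Session() as demo_sess:\n",
"    print('relu       :', demo_sess.run(tf.nn.relu(sample)))        # [0. 0. 0. 2.]\n",
"    print('leaky_relu :', demo_sess.run(tf.nn.leaky_relu(sample)))  # [-0.6 -0.1 0. 2.]\n",
"# ReLU zeroes every negative input (zero gradient, so a neuron can 'die');\n",
"# leaky ReLU keeps alpha * x for x < 0, so gradients still flow"
]
},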
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Time to build the second (and last) layer of neurons"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"final_layer_logits = tf.matmul(\n",
"    first_layer_output, weights_hidden) + biases_output\n",
"\n",
"# The last layer is not passed through leaky_relu(); we keep the raw outputs, also called logits"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Using the logits, we now compute the loss by comparing them with the true values of 'y'"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"loss = tf.nn.softmax_cross_entropy_with_logits_v2(labels=y, logits=final_layer_logits)\n",
"# We now have our loss as a function of the logits:\n",
"# loss = f(logits)\n",
"\n",
"loss = tf.reduce_mean(loss)\n",
"# reduce the per-example losses to a single scalar so the optimizer can minimize it"
]
},
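{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### To make the loss concrete, here is softmax cross-entropy computed by hand (a plain-numpy sketch for a single made-up 3-class example)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: softmax cross-entropy by hand for one 3-class example\n",
"logits_demo = np.array([2.0, 1.0, 0.1])\n",
"label_demo = np.array([1.0, 0.0, 0.0])  # one-hot: the true class is 0\n",
"\n",
"probs = np.exp(logits_demo) / np.sum(np.exp(logits_demo))  # softmax\n",
"cross_entropy = -np.sum(label_demo * np.log(probs))\n",
"print('softmax probabilities :', probs)          # ~[0.659 0.242 0.099]\n",
"print('cross-entropy loss    :', cross_entropy)  # ~0.417"
]
},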
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### We now create the optimizer that will reduce the loss"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"trainer = tf.train.AdamOptimizer(learning_rate=0.01).minimize(loss)\n",
"# We use the Adam optimizer (adaptive moment estimation); it is very effective at minimizing loss"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### We now define how to calculate the accuracy of the model during training"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"# We convert our logits to probabilities using the softmax function\n",
"prediction = tf.nn.softmax(logits=final_layer_logits)\n",
"\n",
"# We now have the predicted and true values\n",
"actual_label = tf.argmax(y, 1)\n",
"predicted_label = tf.argmax(prediction, 1)\n",
"\n",
"temp = tf.equal(actual_label, predicted_label)\n",
"# 'temp' holds booleans like [True, False, True, ...]; True means a correct prediction.\n",
"# We cast it to float32 and take the mean of the tensor: that is our accuracy.\n",
"\n",
"temp = tf.cast(temp, dtype=tf.float32)\n",
"\n",
"accuracy = tf.reduce_mean(temp)"
]
},
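{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### The same argmax / equal / mean pipeline can be checked with plain numpy on a made-up toy batch:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: the accuracy computation on a toy batch of 4 predictions\n",
"true_toy = np.array([3, 7, 1, 7])  # actual digits\n",
"pred_toy = np.array([3, 2, 1, 7])  # model guesses\n",
"correct = (true_toy == pred_toy)   # [True, False, True, True]\n",
"print('accuracy :', correct.astype(np.float32).mean())  # 0.75"
]
},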
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Checking some random images from our dataset"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"img_1 = data.train.images[56]    # checking the image at index 56\n",
"label_1 = data.train.labels[56]  # label corresponding to index 56\n",
"\n",
"img_2 = data.train.images[100]\n",
"label_2 = data.train.labels[100]\n",
"\n",
"img_3 = data.train.images[526]   # checking the image at index 526\n",
"label_3 = data.train.labels[526] # label corresponding to index 526\n",
"\n",
"img_4 = data.train.images[1000]\n",
"label_4 = data.train.labels[1000]\n"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"# We now have 4 images and labels from our dataset.\n",
"# Each image is a 784-dimensional vector (28x28 pixels).\n",
"# Pixel values range from 0 to 1; the images are grayscale, so there is a single color channel.\n",
"# We use numpy to reshape those flat vectors into 2-D arrays for plotting, and multiply\n",
"# by 255 to recover 8-bit grayscale intensity values\n",
"\n",
"img_1 = np.reshape(img_1, (28, 28)) * 255\n",
"img_2 = np.reshape(img_2, (28, 28)) * 255\n",
"img_3 = np.reshape(img_3, (28, 28)) * 255\n",
"img_4 = np.reshape(img_4, (28, 28)) * 255"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAUcAAAEYCAYAAADPkTRJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJzt3Xm8FOWZL/Dfj8NyBAQOmyAgICKg\n0biA6Gi85kMwikwwJjoCKhkXxiQa9LoRvUZHzAQnLhkXdPCq4FwQHXAGjCsyIopKOCIhILLIIiiy\nCOJB9nOe+0eXVV3N23TTp5fq7t/38zmf81TV21Uvh36erre6FpoZREQkrEGhOyAiEkUqjiIiDiqO\nIiIOKo4iIg4qjiIiDiqOIiIOZV0cSU4geW8O1/8qyRG5Wr9ItigXDhSp4khyDcmNJJvFzbua5OwC\nditjZna+mU3M5jpJdiNpJHfE/dyZzW1I4SkXUiM5PCEPdnq5cWo21h+p4uhpCGBUoTtxqEhW5HmT\nrcysufczJs/blvxQLhyEmU2Ky4HmAH4FYBWABdlYfxSL4x8B3EyyVeKCuL2mhnHzZpO82ot/QXIu\nyYdIfk1yFcm/8+avI7nJsWvfluRMkjUk3ybZNW7dvb1lW0kuI3lJ3LIJJB8n+QrJbwH80NHf+L4d\n461/O8ktJJ+v919KSp1y4dCMAPCsZemyvygWx2oAswHcnOHr+wNYBKANgMkApgDoB+AYAJcBeJRk\n87j2wwGMAdAWwEIAkwDAG87M9NbRHsBQAONIHh/32mEAfg/gcADvpujXGABvAKgC0BnAI8kaklxE\ncliK9a0luZ7kMyTbpmgrxUm5kF4uwCvkZwN4NlXbdEWxOALA7wBcT7JdBq9dbWbPmFktgOcBdAFw\nj5ntMbM3AOxF7M3xnZfNbI6Z7QFwB4AzSHYBMBjAGm9d+81sAYBpAH4e99rpZjbXzOrMbHeKfu0D\n0BXAkWa228ySvoHM7EQzm5xk8RbE3uBdAZyK2JtxUoptS/FSLiTPhXhXAHjHzFan0TYtkSyOZrYY\nwJ8BjM7g5Rvj4l3e+hLnxX9arovb7g4AWwEcidh/Xn9vSPI1ya8R+2Tt4HptGm4FQAB/IbmE5JWH\n8Fqfme0ws2rvTboRwHUAziXZIpP1SbQpF9J2BYCsfuHTMHWTgrkLsQOrD8TN+9b73RTAN14c/x+U\niS7fBd4QozWALxD7z37bzAYe5LVpH9swsy8BXONt5ywAb5KcY2YrM+r1gX1gPdcj0aVcOAiSZyJW\nxKdm8vpkIrnnCADeH+p5AL+Jm7cZwOcALiNZ4X3i9KjnpgaRPItkY8SOhcwzs3WIfVofS/Jyko28\nn34k+2SyEZIXk+zsTW5D7M1Um8F6+pPsRbIByTYAHgYw28y2Z9IviT7lQkojAEwzs5p6rOMAkS2O\nnnsANEuYdw2AWwB8BeB4AO/VcxuTEftk3orYMbzhAOD9oc8FcClin55fArgPQJMMt9MPwDySOwDM\nADAq2fERb6gxPMl6jgbwGoAaAIsB7EHsALmUNuWCe3klgEuQ5SE1AFA3uxUROVDU9xxFRApCxVFE\nxEHFUUTEoV7FkeR53qVEK0lmch6WSMlQPpSWjL+QYezi8uUABgJYD2A+gKFm9nGy1zRmE6s84As3\nyYcabNtiZplcZSFpONR8UC4UTrq5UJ+TwE8DsNLMVgEAySkAhgBIWhwr0Qz9OaAem5RMvWlT1xa6\nDyXukPJBuVA46eZCfYbVnRC+ZGi9Ny+E5EiS1SSr92FPPTYnEmkp80G5UFzqUxxdl6sdMEY3s/Fm\n1tfM+jbK+JxRkchLmQ/KheJSn+K4HnHXYiJ266Ev6tcdkaKlfCgx9SmO8wH0JNnduxbzUsQuBRIp\nR8qHEpPxFzJmtp/kdQBeB1AB4GkzW5K1nokUEeVD6anXLcvM7BUAr2SpLyJFTflQWnSFjIiIg4qj\niIiDiqOIiIOKo4iIg4qjiIiDiqOIiIOKo4iIg4qjiIiDiqOIiEO9rpCR1L6+4gw/3tY7/df917AH\n/fj4xoel9Zpez/wyNN3tjvfT36CIhGjPUUTEQcVRRMSh7IfVW0aeEZoe/9t/8+Mr/zrCj8edOCnU\nrpL701p/14Zz/biqQXrD45jgZqi1VufHn+wL30H6mtE3+vHRLy8OLauDiGRKe44iIg4qjiIiDiqO\nIiIOZXnMcfMvg+OM/zn6j6Fl3Ro29eMF/cLHGcPS/dPV/0/8t737/Pja/3NjaFnLKR/4sY4xiktF\nr2NC07u6Vx3yOrb2buTHrT/Zl7TdusvDx+I/OPtRPz7zveBUs701jUPt+oxe7ce1W7465P7lgvYc\nRUQcVBxFRBzKZljdsMMRfnzRL9/y4/hhdK7VJTzW+5Yv+ydt+94j/fy45cpdQfzuB67mUobYMEjf\ndbecFlp21fDX/PisZs+Flp3WpBFyZZ/Vhqa3xR3rWfaDZ5O+7ujGV/pxzys0rBYRiSwVRxERBxVH\nERGHsjnm+NXTh/vxb9t8nLTdh3uDYyYr9nbIaFv//OIlftxiZTCfCefatH4m+V1zqqA76sjBxR9n\nXHL9uKTtttWGT725bM1AZ7t5a7qFppvPDY7HVw7e6Mc9WoaPCc5d1sOPO78ULinNZwa59slDvfx4\n9aD/G2rXsuVOZ58KSXuOIiIOKYsjyadJbiK5OG5ea5IzSa7wfh/6WaUiRUj5UD7SGVZPAPAogPjv\n4UcDmGVmY0mO9qZvy373MrdnUL/Q9FPHPRw3Fdzx5rVd4VN5Hr4sGBLjg0UZbbu7hsSlbAIikg9H\nvbLNj7+/+1ehZYd/FhwearF0W2hZ7cfLnes7GguTb+yxINycsOhYfJj0ZawKPieGnvqXpO0avRi9\nz5OUe45mNgfA1oTZQwBM9OKJAC7Mcr9EIkn5UD4yPeZ4hJltAADvd/tkDUmOJFlNsnof9iRrJlLM\n0soH5UJxyfm31WY2HsB4AGjB1paiedbEXygPAL0bNXG2u+sP/xiabvOBhsSSG9nOhbq/LvXjDn9N\n3q42+aLca9PKD//liODKtA37d4Satfx0F6Im0z3HjSQ7AoD3e1P2uiRSdJQPJSjT4jgDwHfPEBgB\nYHp2uiNSlJQPJSidU3meA/A+gF4k15O8CsBYAANJrgAw0JsWKXnKh/KR8pijmQ1NsmhAlvtSb2vG\nBDex/egf/5SwNPin3vplXz9u80zy0wtEEhVTPhRCg8MPD01ve8S9/zXkzltC01XvRu9Yv66QERFx\nUHEUEXEoqRtPWEUQN2Hyf1pt3GdCRVWL0LINl/b2430Dtvvxgye+kHY/ZtUc78f/83Aw1D/YjSZE\nSsH2C44PTb///Sec7drN+iw0nd5T4PNLe44iIg4qjiIiDiU1rMbR36bV7OZ2s/14+jt9Qsuubflm\nvbsx4LAFfrxjTDCUPuXE8GNVj7lRz4O
R0rK7FZMuO2ZS8GjWHl9E/ywR7TmKiDioOIqIOKg4iog4\nlNQxx+VnB/cfrT3IPU86VgQ3uL225dpcdgnNGdwNaMHFD4WWDXskuNhi/6o1Oe2HSK40qKz04/P+\naW5o2cI9wa3Zej242o/31xX0XkFp0Z6jiIiDiqOIiENJDaszsXBv+Nz8zbXBhfO/qf4HP242p3na\n6xz2q9f9+H9XrfDj+CE2ACwdFTz6teeoNWmvXyRKVt9+sh+/esTjoWVn/HWYH7fY8Gne+pQN2nMU\nEXFQcRQRcVBxFBFxKKljjtvrgof07LO60LLHtgbPsX5+6jl+3HVG+Jm+8Q8t6o7Mnls9f1i3YCLu\nmKNISWhQEZo8d1C1H9cm5F3zseG7XhUT7TmKiDioOIqIOJTUsPqSzmekbgSgC97z47qDtEtXgxN7\nh6aHtJvpbLfHwqcNHflO3h7jLZI1n953Wmj69SOD03cuWvnj0LIGb3+Ulz7lgvYcRUQcVBxFRBxK\nalidTzw5eFbGZVNeCy27tPlm52uuXfej0HSzqfOy3zGRHKvs8U3SZR990i00fSzcuVAMtOcoIuKg\n4igi4pCyOJLsQvItkktJLiE5ypvfmuRMkiu831W5765I4SgXyks6xxz3A7jJzBaQPBzAhyRnAvgF\ngFlmNpbkaACjAdyWu66mxr7fC+KPV4WW1e3cmd46mgR3ztly+Sl+fN3N00Ltejf+0I/7NUn+UKF3\ndgd/4i0/aZK0nRSFosmFbGOjxn587wnTQ8t21u314y4vJ8+FYpNyz9HMNpjZAi+uAbAUQCcAQwBM\n9JpNBHBhrjopEgXKhfJySMccSXYDcDKAeQCOMLMNQOxNA6B9kteMJFlNsnof9riaiBQd5ULpS/tU\nHpLNAUwDcIOZfUOmt/tsZuMBjAeAFmyd00tCrp8y1Y/HnTMgtCx+WF33v4Kbc+6pahRqt/6i4CqW\nFQMeO8jWkv/7P9sfbGvUo7f6cYfN77maS5EphlzItm//PsiZC5uFnzl9xdrgFLXDpkf/edTpSmvP\nkWQjxN4Mk8zsRW/2RpIdveUdAWzKTRdFokO5UD7S+baaAJ4CsNTMHoxbNAPACC8eAWB64mtFSoly\nobykM6w+E8DlAP5GcqE373YAYwG8QPIqAJ8BuDg3XRSJDOVCGUlZHM3sXSQ/wDYgyfyCuKDpbj9+\navLe0LLa/Z38+I/dn/DjExqHjzmm69P9wY11Ry4bHlpmjwTH4zu8pOOMpaKYciHbfnTXO0mXVb8c\nnEIXf8erYqcrZEREHFQcRUQcSuquPKd9FBzqef+k5w/SMr2h9Oe1wSk5Fzx8a2hZ1YrglJ8DT19Y\nk9b6RaKsQdOmfty64bak7Y56vcaPi+r8pBS05ygi4qDiKCLiUFLD6ub/1tKPzxj1D6Fl8cPsl3YG\nj4u87fnLk66v49xg6Hzkq6XzLZxIOvae3sePr69K/v7fenxzP66an9Mu5ZX2HEVEHFQcRUQcVBxF\nRBxK6phjozeq/bjqjfCyQTgFLt3wfi67JFK0Vv/CfWJO9z9fE5o+9tnSuRNPPO05iog4qDiKiDiU\n1LBaRLKnyapKP661Oj/u+t8JDetq89Sj/NKeo4iIg4qjiIiDiqOIiIOOOYqI01F3B5cMDro7OBWu\nCUroGsGD0J6jiIiDiqOIiAPN8nd7SpKbAawF0BbAlrxt2C0KfQDy14+uZtYuD9uRNEQsF4Dy6kda\nuZDX4uhvlKw2s75533DE+hClfkhhROX/X/04kIbVIiIOKo4iIg6FKo7jC7TdeFHoAxCdfkhhROX/\nX/1IUJBjjiIiUadhtYiIg4qjiIhDXosjyfNILiO5kuToPG73aZKbSC6Om9ea5EySK7zfVXnoRxeS\nb5FcSnIJyVGF6osUXjnnQzHkQt6KI8kKAI8BOB/AcQCGkjwuT5ufAOC8hHmjAcwys54AZnnTubYf\nwE1m1gfA6QB+7f0NCtEXKSDlQ/RzIZ97jqcBWGlmq8xsL4ApAIbkY8NmNgfA1oTZQwBM9OKJAC7M\nQz82mNkCL64BsBRAp0L0RQqurPOhGHIhn8WxE4B1cdPrvXmFcoSZbQBi/1EA2udz4yS7ATgZwLxC\n90UKQvngiWou5LM40jGvLM8jItkcwDQAN5jZN4XujxSE8gHRzoV8Fsf1ALrETXcG8EUet59oI8mO\nAOD93pSPjZJshNibYZKZvVjIvkhBlX0+RD0X8lkc5wPoSbI7ycYALgUwI4/bTzQDwAgvHgFgeq43\nSJIAngKw1MweLGRfpODKOh+KIhfMLG8/AAYBWA7gUwB35HG7zwHYAGAfYp/YVwFog9i3YSu8363z\n0I+zEBs6LQKw0PsZVIi+6KfwP+WcD8WQC7p8UETEQVfIiIg4qDiKiDioOIqIOKg4iog4qDiKiDio\nOIqIOKg4iog4qDiKiDioOIqIOKg4iog4qDiKiDioOIqIOJR1cSQ5geS9OVz/qyRHpG4pUljKhQNF\nqjiSXENyI8lmcfOuJjm7gN3KmJmdb2YTU7dMH8luJI3kjrifO7O5DSk85UJ6SDYlOY7kFpLbSc7J\n1rojVRw9DQGMKnQnDpX3NLl8amVmzb2fMXnetuSHciG18QBaA+jj/b4xWyuOYnH8I4CbSbZKXBC3\n19Qwbt5skld78S9IziX5EMmvSa4i+Xfe/HXes3oTd+3bes/HrSH5Nsmucevu7S3b6j1f+JK4ZRNI\nPk7yFZLfAviho7/xfTvGW/9271Pu+Xr/paTUKRcOgmQvAD8BMNLMNptZrZl9mMm6XKJYHKsBzAZw\nc4av74/Y3YXbAJiM2CMv+wE4BsBlAB5l7KE+3xkOYAyAtojdjXgSAHjDmZneOtoDGApgHMnj4147\nDMDvARwO4N0U/RoD4A0AVYg9L+SRZA1JLiI5LMX61pJcT/IZkm1TtJXipFw4eC70B7AWwD97RfZv\nJH+WYttpi2JxBIDfAbieZLsMXrvazJ4xs1oAzyP2EKN7zGyPmb0BYC9ib47vvGxmc8xsD4A7AJxB\nsguAwQDWeOvab7Fn7E4D8PO41043s7lmVmdmu1P0ax+ArgCONLPdZpb0DWRmJ5rZ5CSLtyD2Bu8K\n4FTE3oyTUmxbipdyIXkudAbwPQDbARwJ4DoAE0n2SbH9tESyOJrZYgB/BjA6g5dvjIt3eetLnBf/\naek/O9jMdiD2sPMjEfvP6+8NSb4m+TVin6wdXK9Nw62IPY7zLySXkLzyEF7rM7MdZlbtvUk3IvaG\nOJdki0zWJ9GmXDioXYgV2nvNbK+ZvQ3gLQDnZri+kIapmxTMXQAWAHggbt633u+mAL57xm38f1Am\n/MdjekOM1og9InMdgLfNbOBBXpv2A3jM7EsA13jbOQvAmyTnmNnKjHp9YB9cz0GW0qBccFt0iO0P\nSST3HAHA+0M9D+A3cfM2A/gcwGUkK7xPnB713NQgkmcx9njMMQDmmdk6xD6tjyV5OclG3k+/THfZ
\nSV5MsrM3uQ2xN1NtBuvpT7IXyQYk2wB4GMBsM9ueSb8k+pQLSc0B8BmA35JsSPJMAOcAeD2TfiWK\nbHH03AOgWcK8awDcAuArAMcDeK+e25iM2CfzVsSO4Q0HADOrQWz3/FLEPj2/BHAfgCYZbqcfgHkk\ndyD2bN5RZrba1dAbagxPsp6jAbwGoAbAYgB7EDtALqVNuZDAzPYBGILYI123A3gSwBVm9kmG/Qpv\nW49mFRE5UNT3HEVECkLFUUTEoV7FkeR53tnyK0lmcqqBSMlQPpSWjI85Mnb95HIAAwGsBzAfwFAz\n+zh73RMpDsqH0lOf8xxPA7DSzFYBAMkpiH1zlPTN0JhNrPKAL9wkH2qwbYuZZXKVhaTnkPJBuVA4\n6eZCfYpjJ4TPil+P2LWOSVWiGfpzQD02KZl606auLXQfStwh5YNyoXDSzYX6FEfXFRkHjNFJjgQw\nEgAq0bQemxOJtJT5oFwoLvX5QmY94i43Quwi8C8SG5nZeDPra2Z9G2V8zqhI5KXMB+VCcalPcZwP\noCfJ7t7lRpcidra7SDlSPpSYjIfVZraf5HWIXcdYAeBpM1uStZ6JFBHlQ+mp1115zOwVAK9kqS8i\nRU35UFp0hYyIiIOKo4iIg4qjiIhDlO8ELiJFZs8F/ULTp94TPAzwgY4L/HjWrvDTW/+1xwm57VgG\ntOcoIuKg4igi4qDiKCLioGOOCb666gw/nnjng37cvWH4GEnTBo2DZdNHhpb1uX2FH9du25btLopE\nSu0PT/Hj+GOMADC2w3w/3lkXPENr1PgbQu061fvxN9mnPUcREQcVRxERBw2rG4SHy22Hf+bHP3nx\nRj/u9YdPQ+0+v6ynH1975czQsifGnuPHx/7TfIiUksTTdf706KN+fELjRqFlL+1s6ce/e+IKP+70\nQPSG0Ym05ygi4qDiKCLiUPbD6k3Xhu9kX90rGCL8eMzVfly7eXOoXYeHgum3J3cPLevdrsaP67LS\nS5H8Y6PgjIyKDu39+L64YTQQHkpP+ObI0LIn773QjztOiv5QOp72HEVEHFQcRUQcVBxFRBzK/phj\nh3e3Jl22ZlDwEKQebyVfR+3GTeEZidMiReibi4IrX955cJwf1yWUjf0Irnx55s4hoWUtp36Qo97l\nnvYcRUQcVBxFRBzKflj9xYDWoelP9u3x42PHb/TjWoiUtopWLUPTvW5M7+GJJz8+yo+7TC2u03UO\nRnuOIiIOKo4iIg4qjiIiDmV/zHFXOwtNz93Vw49rV6zKd3dECmblbceFpqd3ib9MkH508cpBoXbd\nnljmx6V0bD7lniPJp0luIrk4bl5rkjNJrvB+V+W2myLRoHwoH+kMqycAOC9h3mgAs8ysJ4BZ3rRI\nOZgA5UNZSDmsNrM5JLslzB4C4BwvnghgNoDbstgvkUgqtXz46prgmUkLLn8oYWlwt53l+3b78Z7B\nO0Ot6mpqUIoy/ULmCDPbAADe7/Yp2ouUMuVDCcr5FzIkRwIYCQCVaJrrzYlElnKhuGRaHDeS7Ghm\nG0h2BJD0TgtmNh7AeABowdaWrJ1IEUsrH6KYC1u/F3TjMDYOLYsfSl97ffAo1cqav+S+YxGQ6bB6\nBoARXjwCwPTsdEekKCkfSlA6p/I8B+B9AL1Irid5FYCxAAaSXAFgoDctUvKUD+UjnW+rhyZZNCDL\nfRGJPOVD+Sj7K2REys3OnwYPlZt14f1+XIfDQu3e/LaPH1e+VB7HGePp2moREQcVRxERBw2rE5xa\nucaPXzz5h35sH6V340+RqKv4ZXAT584Ng6F0XcJT1iet7efHLbEyo22xYVyJqahI3rAuOKXI9u3N\naFvZpj1HEREHFUcREYeyH1Z3fXV3aPqkEcGf5O5pz/rx5K2nh9rNe7ivH7d69v0c9U6k/tikSWi6\nR4stftwg7j6NZy4cFmpXdcEK5/oqqsJ3ZPv2rJ5+/MUPwkPniwYGuXFv+3nO7QLASztb+PG/X/yT\n0LK6hR87+5Fr2nMUEXFQcRQRcVBxFBFxKPtjjo2Wfx6aHrp6oB83bhA8EaNtkx2hdi/9Priy4MfN\nbwktaz+udJ7dK8Vvx+CTQtNPdBnnx/En79RObxtq1/DofX687N5Wfvwvff8r1O6nzd7048RjiXVI\n7+ZDFzTd7sd33R1+TYcL01pF1mnPUUTEQcVRRMSh7IfVtRvD9yXdfpa73bauXULTv5n89358/03/\nHlp2/7sX+3Hdok/q2UOR+tk69Nu02tVWhofEWx8LTstZeuJTSV933edB0nz45ElJ2zX86WY/nvv9\nF5K2u6n3zND0JHRO2jaXtOcoIuKg4igi4qDiKCLiUPbHHNO1f+260PRXt5/sxyf8v29Cy9bcGTzv\n96iLIVJQzQ/bE5qOP91m5q7grjyd/nNVqB0vovM192/tFWr32dnBqTdtdie/lHZ1j+AZ2fh+eFn8\n+u9+/eehZT3xQdJ15pL2HEVEHFQcRUQcNKzOUIO3P/Lji5ZcHlp2dtfgxqBr8tUhkSSu6/FWaDr+\nqpUnPj/Hj/dv+DLUruKnwd13zhz0Kz9usj18U9zK3cmfL/PVVcFQ+vGLxydtN2tXcOegY6bsStou\nn7TnKCLioOIoIuKgYXUWVDwWvmAfd24oTEdEHMbdG/72d/h9wY0nJvd4yY/PGTEq1K5qYvDNc8tJ\n6X1jXHdW+AqZY65cFqy/MriRxS4LPyfmT+sG+3HFovDzasKD+PzRnqOIiEPK4kiyC8m3SC4luYTk\nKG9+a5IzSa7wflelWpdIMVMulJd09hz3A7jJzPoAOB3Ar0keB2A0gFlm1hPALG9apJQpF8pIymOO\nZrYBwAYvriG5FEAnAEMAnOM1mwhgNoDbctLLiNtygg7dloNizYWqqQtD09de/wM/fqLzO3780F2P\nhdrd/emVftzg3WAd+87tG2q3ZnBw956nL3gytOwHlfv9OP4UolP+48ZQu+6/jd5D6g7pmCPJbgBO\nBjAPwBHem+W7N037JK8ZSbKaZPU+7HE1ESk6yoXSl3ZxJNkcwDQAN5jZN6naf8fMxptZXzPr2whN\nUr9AJOKUC+UhrfEgyUaIvRkmmdmL3uyNJDua2QaSHQFsSr6G0lNxfHDx/QsjHwgtGzzrej8+FtV5\n65PkXjHmQt3u8LPZFz0aXLUy7c7FfvyzZttC7aZOedyPd9YFz1M6vEH4tJ7D2Djptp/cHtwk+r73\nzvfjPvcvC7WrRfSk8201ATwFYKmZPRi3aAaAEV48AsD07HdPJDqUC+UlnT3HMwFcDuBvJL87Kns7\ngLEAXiB5FYDPAOjmXFLqlAtlJJ1vq98FEp63GBiQ3e6IRJdyobyU5Tkoy8ed5se9eoefW73luaP8\nuM2TwekFe8/rF2o38F/n+PGvlw8NLet17SI/Tu+pvSL50+o/gvf1hHk/8uOxA8JfsjccvMWP5540\nxY8HLvlZqF2D+xMun43T5P3g2OKxNcHx9ygeY0ykywd
FRBxUHEVEHMpyWN10XfDPbtAnPPB9/+5H\n/XjPXcFdRB7bFn7274SpA/34qDHzQsusrhgGDSJA7fJP/bhdXAwACM7kwWCc6sdNDriFc+J0oFB3\n1MkG7TmKiDioOIqIOJTlsLrzH97z49o/hJfFDx8O5ii8l7qRiBQt7TmKiDioOIqIOKg4iog4qDiK\niDioOIqIOKg4iog4qDiKiDioOIqIOKg4iog4qDiKiDioOIqIOKg4iog4qDiKiDjQLH9POSG5GcBa\nAG0BbEnRPNei0Acgf/3oambt8rAdSUPEcgEor36klQt5LY7+RslqM+ub9w1HrA9R6ocURlT+/9WP\nA2lYLSLioOIoIuJQqOI4vkDbjReFPgDR6YcURlT+/9WPBAU55igiEnUaVouIOKg4iog45LU4kjyP\n5DKSK0mOzuN2nya5ieTiuHmtSc4kucL7XZWHfnQh+RbJpSSXkBxVqL5I4ZVzPhRDLuStOJKsAPAY\ngPMBHAdgKMnj8rT5CQDOS5g3GsAsM+sJYJY3nWv7AdxkZn0AnA7g197foBB9kQJSPkQ/F/K553ga\ngJVmtsrM9gKYAmBIPjZsZnMAbE2YPQTARC+eCODCPPRjg5kt8OIaAEsBdCpEX6TgyjofiiEX8lkc\nOwFYFze93ptXKEeY2QYg9h8FoH0+N06yG4CTAcwrdF+kIJQPnqjmQj6LIx3zyvI8IpLNAUwDcIOZ\nfVPo/khBKB8Q7VzIZ3FcD6B195thAAAAyElEQVRL3HRnAF/kcfuJNpLsCADe70352CjJRoi9GSaZ\n2YuF7IsUVNnnQ9RzIZ/FcT6AniS7k2wM4FIAM/K4/UQzAIzw4hEApud6gyQJ4CkAS83swUL2RQqu\nrPOhKHLBzPL2A2AQgOUAPgVwRx63+xyADQD2IfaJfRWANoh9G7bC+906D/04C7Gh0yIAC72fQYXo\ni34K/1PO+VAMuaDLB0VEHHSFjIiIg4qjiIiDiqOIiIOKo4iIg4qjiIiDiqOIiIOKo4iIw/8HYsb3\nZPbsoDQAAAAASUVORK5CYII=\n", | |
"text/plain": [ | |
"<matplotlib.figure.Figure at 0x7f688e7c0400>" | |
] | |
}, | |
"metadata": {}, | |
"output_type": "display_data" | |
} | |
], | |
"source": [ | |
"# We now plot each of the images using matplotlib\n", | |
"plt.subplot(2, 2, 1)\n", | |
"plt.imshow(img_1)\n", | |
"plt.title('Number is : '+str(np.argmax(label_1)))\n", | |
"\n", | |
"plt.subplot(2, 2, 2)\n", | |
"plt.imshow(img_2)\n", | |
"plt.title('Number is : '+str(np.argmax(label_2)))\n", | |
"\n", | |
"plt.subplot(2, 2, 3)\n", | |
"plt.imshow(img_3)\n", | |
"plt.title('Number is : '+str(np.argmax(label_3)))\n", | |
"\n", | |
"plt.subplot(2, 2, 4)\n", | |
"plt.imshow(img_4)\n", | |
"plt.title('Number is : '+str(np.argmax(label_4)))\n", | |
"\n", | |
"plt.tight_layout()\n", | |
"plt.show()" | |
] | |
},
{
"cell_type": "markdown",
"metadata": {
"collapsed": true
},
"source": [
"# Let's start training the network and check how accurately it performs"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"At Step 0 -> Accuracy is : 0.08 and Loss is 29.18\n",
"At Step 1000 -> Accuracy is : 0.92 and Loss is 0.33\n",
"At Step 2000 -> Accuracy is : 0.94 and Loss is 0.37\n",
"At Step 3000 -> Accuracy is : 0.92 and Loss is 0.46\n",
"At Step 4000 -> Accuracy is : 0.82 and Loss is 0.77\n",
"At Step 5000 -> Accuracy is : 0.92 and Loss is 0.21\n",
"At Step 6000 -> Accuracy is : 0.94 and Loss is 0.26\n",
"At Step 7000 -> Accuracy is : 0.92 and Loss is 0.15\n",
"At Step 8000 -> Accuracy is : 0.96 and Loss is 0.10\n",
"At Step 9000 -> Accuracy is : 0.90 and Loss is 0.27\n",
"\n",
"\n",
"Final Accuracy Percent is : 93.51999759674072\n"
]
}
],
"source": [ | |
"with tf.Session() as sess:\n", | |
" # Initilize the values to variable\n", | |
" sess.run(tf.global_variables_initializer())\n", | |
" for i in range(10000):\n", | |
" batch_x, batch_y = data.train.next_batch(batch_size=50)\n", | |
" if i % 1000 == 0:\n", | |
" acc, los = sess.run([accuracy, loss], feed_dict={\n", | |
" x: batch_x, y: batch_y})\n", | |
" print('At Epoch %d -> Accuracy is : %.2f and Loss is %.2f' %\n", | |
" (i, acc, los))\n", | |
" sess.run(trainer, feed_dict={x: batch_x, y: batch_y})\n", | |
" # In the End Test Really\n", | |
" print('\\n\\nFinal Accuracy Percent is : ', sess.run(accuracy, feed_dict={x:data.test.images, y: data.test.labels})*100)" | |
] | |
}, | |
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.3"
}
},
"nbformat": 4,
"nbformat_minor": 2
} |