Skip to content

Instantly share code, notes, and snippets.

@nikogamulin
Last active June 24, 2019 10:45
Show Gist options
  • Save nikogamulin/c0892e33ff823e1cb45caaaf5a949fd1 to your computer and use it in GitHub Desktop.
deeplearning-ai/mnist-vgg.ipynb
Display the source blob
Display the rendered blob
Raw
{
"cells": [
{
"metadata": {},
"cell_type": "markdown",
"source": "# TensorFlow in Practice Specialization\n\n## Course 1: Introduction to TensorFlow for AI, ML and DL\n\n### Modification of fashion-mnist example: Instead of defining a custom neural net architecture, a VGG16 backbone has been used. As VGG pretrained model expects RGB images of size at least 32 x 32 pixels, the inputs are reshaped"
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "import tensorflow as tf\nimport numpy as np\nfrom tensorflow import keras\n\nfrom keras.layers import Input, Lambda, Dense, Flatten\nfrom keras.models import Model\nfrom keras.applications.vgg16 import VGG16\nfrom keras.applications.vgg16 import preprocess_input\nfrom keras.preprocessing import image\nfrom keras.preprocessing.image import ImageDataGenerator\n\nfrom sklearn.metrics import confusion_matrix\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nfrom glob import glob\n\nimport cv2\n\nfrom itertools import chain",
"execution_count": 2,
"outputs": [
{
"output_type": "stream",
"text": "Using TensorFlow backend.\n",
"name": "stderr"
}
]
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "import matplotlib.pyplot as plt",
"execution_count": 3,
"outputs": []
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "print(tf.__version__)",
"execution_count": 4,
"outputs": [
{
"output_type": "stream",
"text": "1.13.1\n",
"name": "stdout"
}
]
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "dim = (32, 32)",
"execution_count": 5,
"outputs": []
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "def to_rgb(img):\n img = cv2.resize(img, dim, interpolation = cv2.INTER_AREA)\n img_rgb = np.asarray(np.dstack((img, img, img)), dtype=np.uint8)\n return img_rgb",
"execution_count": 6,
"outputs": []
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "mnist = tf.keras.datasets.fashion_mnist",
"execution_count": 7,
"outputs": []
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "(training_images, training_labels), (test_images, test_labels) = mnist.load_data()",
"execution_count": 8,
"outputs": []
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "training_images_rgb = []\nfor i in range(len(training_images)):\n rgb = to_rgb(training_images[i])\n training_images_rgb.append(rgb)",
"execution_count": 9,
"outputs": []
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "test_images_rgb = []\nfor i in range(len(test_images)):\n rgb = to_rgb(test_images[i])\n test_images_rgb.append(rgb)",
"execution_count": 10,
"outputs": []
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "plt.imshow(training_images_rgb[0])\n# print(training_images_rgb[0])\nprint(training_labels[0])",
"execution_count": 11,
"outputs": [
{
"output_type": "stream",
"text": "9\n",
"name": "stdout"
},
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAFM9JREFUeJzt3X+M1eWVx/H3KTCAzCgIKx1xEO3SUKSClhK2EuPStHGtqW3cmJps4x+mNJva2KT7h9Fk6+5frdm26T/tZlxt7catdfsj8kfj1tWmbJPGipUqytJCqynIzwIOKCMCZ/+4XzYDvefMne/cH8DzeSWEO8+5z/0+85175t77PfM8j7k7IlKed/V6ACLSG0p+kUIp+UUKpeQXKZSSX6RQSn6RQin5RQql5BcplJJfpFBTJ9PZzG4EvgFMAf7N3b88zv3154QiHebu1sr9rO6f95rZFOC3wEeAHcBzwO3u/krSR8kv0mGtJv9k3vavAra5++/d/RjwGHDLJB5PRLpoMsm/APjjmK93VG0icg6Y1Gf+VpjZOmBdp48jIhMzmeTfCQyN+fqyqu007j4MDIM+84ucTSbztv85YLGZXWFmfcCngPXtGZaIdFrtV353P25mdwH/RaPU97C7v9y2kYlIR9Uu9dU6mN72i3RcN0p9InIOU/KLFErJL1IoJb9IoZT8IoVS8osUSskvUiglv0ihlPwihVLyixRKyS9SqI7P55fzV19fXxi79NJLm7aPjIyEfQ4cOFBrHGbxn7JHsZMnT9Y6VidEY+z0vBu98osUSskvUiglv0ihlPwihVLyixRKyS9SKJX6CpGVw7KS0kUXXRTGrrrqqjA2NDTUtP21114L+2Sx0dHRMHbo0KEw1m51yorQ/rJdO8qDeuUXKZSSX6RQSn6RQin5RQql5BcplJJfpFCTKvWZ2avAYeAEcNzdV7ZjUFJfVAJ617vi3/MnTpwIY0uWLAljt956axibOrX5U2vevHlhn8HBwTCWzQbctm1bGNu9e3fT9rfffjvsU7ecVzdWR1TSm0iprx11/r929/1teBwR6SK97Rcp1GST34GfmtnzZrauHQMSke6Y7Nv+Ne6+08wuAZ4ys/919w1j71D9UtAvBpGzzKRe+d19Z/X/XuDHwKom9xl295W6GChydqmd/GY2y8wGTt0GPgpsbtfARKSzJvO2fz7w46qEMRX4D3d/si2jkrarW2paterP3sz9vw9+8INh7NixY03b+/v7wz6LFi0KY1k5csWKFWHsySebPyV37twZ9onGDvDOO++EsSlTpoSxWbNmhbHp06c3bX/rrbfCPlnps1W1k9/dfw8sn/QIRKQnVOoTKZSSX6RQSn6RQin5RQql5BcplBbwPM9Es7qOHz9e6/GuvvrqMLZw4cIwdvTo0abt2ezCqA/kpbmlS5eGsWj8GzduDPts3bo1jG3fvj2MzZ07d8LjABgYGGja/sILL4R9otiRI0fCPmfSK79IoZT8IoVS8osUSskvUiglv0ihrN3bCKUHM+vewQoVTeCZOXNm2Oe6664LYw888EAYu/DCC8PYyZMnJ9QO+fpzb7zxRhh7+eWXw1i0BVg2eSebBDVnzpwwFq1bCLBr164wFo3l/e9/f9jn0Ucfbdr+3HPPMTIy0tIsLr3yixRKyS9SKCW/SKGU/CKFUvKLFErJL1Iolfp6qN1bOGWyrbAefPDBMLZ8ebxSWzZJJ/reslJfNvkoWzsv23rrzTffnPA4su2/Xn/99QkfC+CSSy4JY1dddVXT9myMH/vYx5q2j46OcvLkSZX6RCSm5BcplJJfpFBKfpFCKflFCqXkFynUuGv4mdnDwM3AXndfVrVdDHwfWAS8Ctzm7gc7N8zzU5fLrGEs2i4K8jFma+5FW1dlW1pNmzat7bEZM2Y0bc/KaNkMyCuvvDKMZaXK2bNnT/h4v/zlL8M+2blvVSuv/N8Bbjyj7R
7gaXdfDDxdfS0i55Bxk9/dNwAHzmi+BXikuv0I8Ik2j0tEOqzuZ/757n5qdYLdNHbsFZFzyKTX7Xd3z/5s18zWAesmexwRaa+6r/x7zGwQoPp/b3RHdx9295XuvrLmsUSkA+om/3rgjur2HcAT7RmOiHRLK6W+7wE3APPMbAfwJeDLwONmdifwGnBbJwcpnVV3duGJEyfCWFT2yvpki2pmLrjggjCWlfQiUXkQoL+/f8KPB/kYs3PSSeMmv7vfHoQ+3OaxiEgX6S/8RAql5BcplJJfpFBKfpFCKflFCjXpv/CTzmj34p7ZzL2LLroojNXdWy/q99Zbb4V9Dh6MJ4Zm52NoaCiMRYt7ZjMBs5mHWVku6zc6OhrGosVJs30BowVBDxw4cxpOTK/8IoVS8osUSskvUiglv0ihlPwihVLyixRKpb4eyspXdUt90Yy0hQsXhn2yfeSymXZZGTDax6/u95WV0fr6+sJYVOrL9hnMyoDZnoHZGLMyZrT/X1ZWvPzyy5u2HzlyJOxzJr3yixRKyS9SKCW/SKGU/CKFUvKLFEpX+3somxiTXY3ORNtCXXvttWGfbALJ/v37w1g2xqgSkK2PN3fu3FrHyrbJiq7c11nbD2Dq1Dhlsu/t8OHDYeyVV15p2p59z1HVYSJbwOmVX6RQSn6RQin5RQql5BcplJJfpFBKfpFCtbJd18PAzcBed19Wtd0PfAbYV93tXnf/SacGeb7KJoLUWR8P4jXy/vCHP4R9oskvkE9yyUpR0fiz9QKzx8vW/svKaFFpLvu+Zs6cWWscb7zxRhibN29eGFu8eHHT9m9/+9thn02bNoWxVrXyyv8d4MYm7V939xXVPyW+yDlm3OR39w1A60uCisg5YTKf+e8ysxfN7GEzi/9ETETOSnWT/1vAe4AVwC7gq9EdzWydmW00s401jyUiHVAr+d19j7ufcPeTwIPAquS+w+6+0t1X1h2kiLRfreQ3s8ExX34S2Nye4YhIt7RS6vsecAMwz8x2AF8CbjCzFYADrwKf7eAYw3XfstJQFqu7jlw0e6zuDLFsjba6Vq5s/gZreHg47JOVr44ePRrGsnJZncfLZD/PbA2/bMZfnT51f2ZZqS86/1kJsx3GTX53v71J80MdGIuIdJH+wk+kUEp+kUIp+UUKpeQXKZSSX6RQZ80CnnVmiGVll7rlt8xEFkdsxXvf+94wtnr16jAWlfMAli1b1rQ9K7EdOnQojGULVtaZlVh3+6/sWFnJcfr06WEsks1yzJ5z2bnKnjsDAwNN29esWRP2eeaZZ8JYq/TKL1IoJb9IoZT8IoVS8osUSskvUiglv0ihzppSX1YKqVNiy8o//f39YSzbt+7d7373hPt84AMfCGNLliyZ8LEgXwQz+t6yEtsFF1wQxrIyYLRfHMSluaz0lpXzspmYWWz79u1N27NFOqMFNSEv52XnIzNr1qym7VHZtl30yi9SKCW/SKGU/CKFUvKLFErJL1Kos+Zq/4IFC8LYhz70oabtl112Wdgnu6KfxbKrwLNnz27ank1KysaYreuWXfnOJsBEV/VHR0fDPtkV/ezK9549e8JYNP5svb2s6hBdEYd8sk00aSZ7DmRr+GVVk2wc2c8zOidDQ0Nhn3bQK79IoZT8IoVS8osUSskvUiglv0ihlPwihWplu64h4LvAfBrbcw27+zfM7GLg+8AiGlt23ebuB1t4vKbtN910U9jn4x//eNP2hQsXhn1mzJgRxrKyS1bmicaeldGyyR51Y1lpsc4WWj//+c/DWFTehHyCUSSbhJOdx2xLsTfffDOMRWW0upOZsudHdu6zEnL0fMwmEbVDK6/8x4EvuvtSYDXwOTNbCtwDPO3ui4Gnq69F5BwxbvK7+y53/3V1+zCwBVgA3AI8Ut3tEeATnRqkiLTfhD7zm9ki4BrgWWC+u++qQrtpfCwQkXNEyx8qzKwf+CHwBXcfGfvZzd3dzJquuGFm64B1kx2oiLRXS6/8ZjaNRu
I/6u4/qpr3mNlgFR8E9jbr6+7D7r7S3eOdJkSk68ZNfmu8xD8EbHH3r40JrQfuqG7fATzR/uGJSKe08rb/OuDTwEtmtqlquxf4MvC4md0JvAbcNt4D9fX1hWvT3XrrrWG/RYsWNW3PSnaZuusFRrGsbJTNwMu2hcpmj2XlsqjEWXd9vPXr14extWvXhrGoTJWVyrLyZlYGzGbTzZ/f/FJUdj7qzs6rs+UcxN939j1HP+fsOXWmcZPf3X8BRM+OD7d8JBE5q+gv/EQKpeQXKZSSX6RQSn6RQin5RQrV1QU8BwYGuOGGG5rGBgcHw35Hjx5t2p6VZLJZfdmWUdnsq6gklj1eVgbcv39/rVhWzonOVVaiuvvuu8PY5z//+TC2bdu2MBaVdLNZglFZDvKFRLPZnZGslJrNpssWIK1b6ovKwdmxLr300qbtO3bsCPucSa/8IoVS8osUSskvUiglv0ihlPwihVLyixSqq6W+48eP86c//alpbPfu3WG/qGyUlU9GRkbCWFYizEpz0QKNF154YdgnWwwy2+sumw2YlSOjhS6z2XRZ7Jvf/GYY27p1axibM2dO0/a5c+eGfbIFMLMZbkeOHAlj0feWnd+6s/oy2XM1KiFnpb4rrriiafu+fftaHpNe+UUKpeQXKZSSX6RQSn6RQin5RQrV1av9x44dY+fOnU1j2fptE7mCecrAwEAYy66WHz58OIxFV9Kz8dW9Opxd6c1i0YSmbLJKNiElm2AUXXGGeOLMwYPxjm7ZtlvZ1f5s/NHxsudbdrU/65ed46wiFMWyKsaKFSuatm/evDnscya98osUSskvUiglv0ihlPwihVLyixRKyS9SqHFLfWY2BHyXxhbcDgy7+zfM7H7gM8CpOte97v6T7LFGR0fZsmVL09gzzzwT9rv55pubts+aNSvsk5XssrJRVsqJ1urLJqRkZbmsX1Y2ykQltqx8lU06yc7VgQMHwlh0vLrjyGT9oola2Rp+WYktKvdCvu1Z9n1Hk4yy50BUgs0maf3Z47dwn+PAF93912Y2ADxvZk9Vsa+7+7+0fDQROWu0slffLmBXdfuwmW0BFnR6YCLSWRP6zG9mi4BrgGerprvM7EUze9jMmk/gFpGzUsvJb2b9wA+BL7j7CPAt4D3AChrvDL4a9FtnZhvNbGPdz3Qi0n4tJb+ZTaOR+I+6+48A3H2Pu59w95PAg8CqZn3dfdjdV7r7yuyCiIh017jJb42MfQjY4u5fG9M+doudTwKtzygQkZ6z8d6Km9ka4H+Al4BTNYl7gdtpvOV34FXgs9XFweyxwoNlWy5df/31TdvXrl0b9sm2d8pmgWUlwmibrOzxsll9dUt92fGid1fZu64668uNN47o+87OR/Z4dUWPmZUps3JZtvZfdq6ymaTR8yBbI/G+++5r2r5v3z6OHTvW0lvsVq72/wJo9mBpTV9Ezm76Cz+RQin5RQql5BcplJJfpFBKfpFCjVvqa+vBzDwqh2TjmD9/ftP2rJz3vve9L4wtX748jF1zzTVhLCrJRLP9IC9fZeW8uiWx6Dxm5zdbODMrX2X9ouNlpbK6Jcc6spJuNrMzO/d79+4NY9nWbFFs06ZNYZ8NGzaEMXdv6WTplV+kUEp+kUIp+UUKpeQXKZSSX6RQSn6RQp0Tpb46Zs+eHcaWLVsWxlavXh3GogVDBwcHm7ZDPpsrK3tlC39mjh492rQ9W7By9+7dtY41MjISxqIFKzsxYy5Tp+SYlfoyWTnv9ddfn3C/uj8XlfpEJKXkFymUkl+kUEp+kUIp+UUKpeQXKVTXS31dO1gXDQ0NhbH+/v4wlpXfZsyYEcayn1lUfosWH4XGoo9y/lCpT0RSSn6RQin5RQql5BcplJJfpFCtbNc1A9gATKexw88P3P1LZnYF8BgwF3ge+LS7x5evOX+v9oucTdp5tf9tYK27L6exN9+NZrYa+ArwdXf/S+AgcGfdwYpI942b/N5wpPpyWvXPgbXAD6r2R4
BPdGSEItIRLX3mN7MpZrYJ2As8BWwHDrn7qcnZO4AFnRmiiHRCS8nv7ifcfQVwGbAKWNLqAcxsnZltNLONNccoIh0woav97n4I+BnwV8BsMzu168RlwM6gz7C7r3T3lZMaqYi01bjJb2Z/YWazq9szgY8AW2j8Evjb6m53AE90apAi0n6tlPqupnFBbwqNXxaPu/s/m9mVNEp9FwMvAH/n7vHsEVTqE+mGVkt9mtUncp7RrD4RSSn5RQql5BcplJJfpFBKfpFCTR3/Lm21H3ituj2v+rrXNI7TaRynO9fGcXmrD9jVUt9pBzbbeDb81Z/GoXGUOg697RcplJJfpFC9TP7hHh57LI3jdBrH6c7bcfTsM7+I9Jbe9osUqifJb2Y3mtlWM9tmZvf0YgzVOF41s5fMbFM3Fxsxs4fNbK+ZbR7TdrGZPWVmv6v+n9OjcdxvZjurc7LJzG7qwjiGzOxnZvaKmb1sZndX7V09J8k4unpOzGyGmf3KzH5TjeOfqvYrzOzZKm++b2Z9kzqQu3f1H42pwduBK4E+4DfA0m6PoxrLq8C8Hhz3euBaYPOYtgeAe6rb9wBf6dE47gf+ocvnYxC4tro9APwWWNrtc5KMo6vnBDCgv7o9DXgWWA08Dnyqav9X4O8nc5xevPKvAra5+++9sdT3Y8AtPRhHz7j7BuDAGc230Fg3Abq0IGowjq5z913u/uvq9mEai8UsoMvnJBlHV3lDxxfN7UXyLwD+OObrXi7+6cBPzex5M1vXozGcMt/dd1W3dwPzeziWu8zsxepjQcc/foxlZouAa2i82vXsnJwxDujyOenGormlX/Bb4+7XAn8DfM7Mru/1gKDxm5/GL6Ze+BbwHhp7NOwCvtqtA5tZP/BD4Avuftpe4908J03G0fVz4pNYNLdVvUj+ncDYDe3DxT87zd13Vv/vBX5M4yT3yh4zGwSo/t/bi0G4+57qiXcSeJAunRMzm0Yj4R519x9VzV0/J83G0atzUh17wovmtqoXyf8csLi6ctkHfApY3+1BmNksMxs4dRv4KLA579VR62kshAo9XBD1VLJVPkkXzomZGfAQsMXdvzYm1NVzEo2j2+eka4vmdusK5hlXM2+icSV1O3Bfj8ZwJY1Kw2+Al7s5DuB7NN4+vkPjs9udNPY8fBr4HfDfwMU9Gse/Ay8BL9JIvsEujGMNjbf0LwKbqn83dfucJOPo6jkBrqaxKO6LNH7R/OOY5+yvgG3AfwLTJ3Mc/YWfSKFKv+AnUiwlv0ihlPwihVLyixRKyS9SKCW/SKGU/CKFUvKLFOr/APJl1q8XgNhlAAAAAElFTkSuQmCC\n",
"text/plain": "<Figure size 432x288 with 1 Axes>"
},
"metadata": {
"needs_background": "light"
}
}
]
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "training_rgb_arr = np.stack([training_images_rgb],axis=4)\ntraining_rgb_arr_to_3d = np.squeeze(training_rgb_arr, axis=4)",
"execution_count": 12,
"outputs": []
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "test_rgb_arr = np.stack([test_images_rgb],axis=4)\ntest_rgb_arr_to_3d = np.squeeze(test_rgb_arr, axis=4)",
"execution_count": 13,
"outputs": []
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "training_rgb_arr_to_3d = training_rgb_arr_to_3d/255.0\ntest_rgb_arr_to_3d = test_rgb_arr_to_3d/255.0",
"execution_count": 14,
"outputs": []
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "# add preprocessing layer to the front of VGG\nvgg = VGG16(input_shape=tuple(list(dim) + [3]), weights='imagenet', include_top=False)\n\n# don't train existing weights\nfor layer in vgg.layers:\n layer.trainable = False\n\n# our layers - you can add more if you want\nx = Flatten()(vgg.output)\n# x = Dense(1000, activation='relu')(x)\nprediction = Dense(10, activation='softmax')(x)",
"execution_count": 15,
"outputs": [
{
"output_type": "stream",
"text": "WARNING:tensorflow:From /home/niko/workspace/advanced-computer-vision-course/venv_python3/lib/python3.5/site-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\nInstructions for updating:\nColocations handled automatically by placer.\n",
"name": "stdout"
}
]
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "# create a model object\nmodel = Model(inputs=vgg.input, outputs=prediction)\n\n# view the structure of the model\nmodel.summary()",
"execution_count": 16,
"outputs": [
{
"output_type": "stream",
"text": "_________________________________________________________________\nLayer (type) Output Shape Param # \n=================================================================\ninput_1 (InputLayer) (None, 32, 32, 3) 0 \n_________________________________________________________________\nblock1_conv1 (Conv2D) (None, 32, 32, 64) 1792 \n_________________________________________________________________\nblock1_conv2 (Conv2D) (None, 32, 32, 64) 36928 \n_________________________________________________________________\nblock1_pool (MaxPooling2D) (None, 16, 16, 64) 0 \n_________________________________________________________________\nblock2_conv1 (Conv2D) (None, 16, 16, 128) 73856 \n_________________________________________________________________\nblock2_conv2 (Conv2D) (None, 16, 16, 128) 147584 \n_________________________________________________________________\nblock2_pool (MaxPooling2D) (None, 8, 8, 128) 0 \n_________________________________________________________________\nblock3_conv1 (Conv2D) (None, 8, 8, 256) 295168 \n_________________________________________________________________\nblock3_conv2 (Conv2D) (None, 8, 8, 256) 590080 \n_________________________________________________________________\nblock3_conv3 (Conv2D) (None, 8, 8, 256) 590080 \n_________________________________________________________________\nblock3_pool (MaxPooling2D) (None, 4, 4, 256) 0 \n_________________________________________________________________\nblock4_conv1 (Conv2D) (None, 4, 4, 512) 1180160 \n_________________________________________________________________\nblock4_conv2 (Conv2D) (None, 4, 4, 512) 2359808 \n_________________________________________________________________\nblock4_conv3 (Conv2D) (None, 4, 4, 512) 2359808 \n_________________________________________________________________\nblock4_pool (MaxPooling2D) (None, 2, 2, 512) 0 \n_________________________________________________________________\nblock5_conv1 (Conv2D) (None, 2, 2, 512) 2359808 
\n_________________________________________________________________\nblock5_conv2 (Conv2D) (None, 2, 2, 512) 2359808 \n_________________________________________________________________\nblock5_conv3 (Conv2D) (None, 2, 2, 512) 2359808 \n_________________________________________________________________\nblock5_pool (MaxPooling2D) (None, 1, 1, 512) 0 \n_________________________________________________________________\nflatten_1 (Flatten) (None, 512) 0 \n_________________________________________________________________\ndense_1 (Dense) (None, 10) 5130 \n=================================================================\nTotal params: 14,719,818\nTrainable params: 5,130\nNon-trainable params: 14,714,688\n_________________________________________________________________\n",
"name": "stdout"
}
]
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "model.compile(optimizer = tf.train.AdamOptimizer(),\n loss = 'sparse_categorical_crossentropy',\n metrics=['accuracy'])",
"execution_count": 17,
"outputs": []
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "model.fit(training_rgb_arr_to_3d, training_labels, epochs=30)",
"execution_count": 21,
"outputs": [
{
"output_type": "stream",
"text": "Epoch 1/30\n60000/60000 [==============================] - 8s 139us/step - loss: 0.3474 - acc: 0.8736\nEpoch 2/30\n60000/60000 [==============================] - 8s 140us/step - loss: 0.3467 - acc: 0.8738\nEpoch 3/30\n60000/60000 [==============================] - 8s 141us/step - loss: 0.3460 - acc: 0.8733\nEpoch 4/30\n60000/60000 [==============================] - 8s 140us/step - loss: 0.3446 - acc: 0.8750\nEpoch 5/30\n60000/60000 [==============================] - 8s 140us/step - loss: 0.3438 - acc: 0.8750\nEpoch 6/30\n60000/60000 [==============================] - 8s 141us/step - loss: 0.3432 - acc: 0.8749\nEpoch 7/30\n60000/60000 [==============================] - 9s 142us/step - loss: 0.3428 - acc: 0.8756\nEpoch 8/30\n60000/60000 [==============================] - 9s 144us/step - loss: 0.3423 - acc: 0.8749\nEpoch 9/30\n60000/60000 [==============================] - 9s 144us/step - loss: 0.3410 - acc: 0.8760\nEpoch 10/30\n60000/60000 [==============================] - 9s 143us/step - loss: 0.3403 - acc: 0.8766\nEpoch 11/30\n60000/60000 [==============================] - 8s 142us/step - loss: 0.3400 - acc: 0.8760\nEpoch 12/30\n60000/60000 [==============================] - 9s 144us/step - loss: 0.3388 - acc: 0.8770\nEpoch 13/30\n60000/60000 [==============================] - 9s 153us/step - loss: 0.3387 - acc: 0.8771\nEpoch 14/30\n60000/60000 [==============================] - 9s 146us/step - loss: 0.3384 - acc: 0.8769\nEpoch 15/30\n60000/60000 [==============================] - 9s 146us/step - loss: 0.3380 - acc: 0.8772\nEpoch 16/30\n60000/60000 [==============================] - 10s 159us/step - loss: 0.3374 - acc: 0.8779\nEpoch 17/30\n60000/60000 [==============================] - 9s 154us/step - loss: 0.3369 - acc: 0.8783\nEpoch 18/30\n60000/60000 [==============================] - 10s 159us/step - loss: 0.3366 - acc: 0.8775\nEpoch 19/30\n60000/60000 [==============================] - 10s 164us/step - loss: 0.3358 - acc: 0.8782\nEpoch 
20/30\n60000/60000 [==============================] - 9s 150us/step - loss: 0.3358 - acc: 0.8775\nEpoch 21/30\n60000/60000 [==============================] - 9s 143us/step - loss: 0.3354 - acc: 0.8776\nEpoch 22/30\n60000/60000 [==============================] - 9s 149us/step - loss: 0.3352 - acc: 0.8775\nEpoch 23/30\n60000/60000 [==============================] - 9s 145us/step - loss: 0.3347 - acc: 0.8786\nEpoch 24/30\n60000/60000 [==============================] - 9s 149us/step - loss: 0.3339 - acc: 0.8780\nEpoch 25/30\n60000/60000 [==============================] - 9s 157us/step - loss: 0.3339 - acc: 0.8780\nEpoch 26/30\n60000/60000 [==============================] - 9s 151us/step - loss: 0.3336 - acc: 0.8783\nEpoch 27/30\n60000/60000 [==============================] - 9s 154us/step - loss: 0.3332 - acc: 0.8777\nEpoch 28/30\n60000/60000 [==============================] - 9s 157us/step - loss: 0.3328 - acc: 0.8788\nEpoch 29/30\n60000/60000 [==============================] - 10s 163us/step - loss: 0.3333 - acc: 0.8784\nEpoch 30/30\n60000/60000 [==============================] - 10s 164us/step - loss: 0.3323 - acc: 0.8787\n",
"name": "stdout"
},
{
"output_type": "execute_result",
"execution_count": 21,
"data": {
"text/plain": "<keras.callbacks.History at 0x7f370142f438>"
},
"metadata": {}
}
]
},
{
"metadata": {
"trusted": true
},
"cell_type": "code",
"source": "model.evaluate(test_rgb_arr_to_3d, test_labels)",
"execution_count": 22,
"outputs": [
{
"output_type": "stream",
"text": "10000/10000 [==============================] - 1s 143us/step\n",
"name": "stdout"
},
{
"output_type": "execute_result",
"execution_count": 22,
"data": {
"text/plain": "[0.3967690948963165, 0.8627]"
},
"metadata": {}
}
]
},
{
"metadata": {},
"cell_type": "markdown",
"source": "# Resources\nhttps://colab.research.google.com/github/lmoroney/dlaicourse/blob/master/Course+1+-+Part+4+-+Lesson+2+-+Notebook.ipynb"
},
{
"metadata": {
"trusted": false
},
"cell_type": "code",
"source": "",
"execution_count": null,
"outputs": []
}
],
"metadata": {
"kernelspec": {
"name": "python3",
"display_name": "Python 3",
"language": "python"
},
"gist": {
"id": "c0892e33ff823e1cb45caaaf5a949fd1",
"data": {
"description": "deeplearning-ai/mnist-vgg.ipynb",
"public": true
}
},
"_draft": {
"nbviewer_url": "https://gist.github.com/c0892e33ff823e1cb45caaaf5a949fd1"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment