@htmllifehack
Last active August 17, 2018 13:45
MNIST with Keras
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Using TensorFlow backend.\n"
]
}
],
"source": [
"import keras\n",
"from keras.models import Sequential\n",
"from keras.layers import Dense, Activation\n",
"from keras.datasets import mnist"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"(x_train, y_train), (x_test, y_test) = mnist.load_data()"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"(60000, 28, 28) (60000,) (10000, 28, 28) (10000,)\n"
]
}
],
"source": [
"print(x_train.shape, y_train.shape, x_test.shape, y_test.shape)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([5, 0, 4, 1, 9, 2, 1, 3, 1, 4, 3], dtype=uint8)"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"y_train[0:11]"
]
},
{
"cell_type": "code",
"execution_count": 27,
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 0 3 18 18 18 126 136 175 26 166 255 247 127 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 30 36 94 154 170 253 253 253 253 253 225 172 253 242 195 64 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 49 238 253 253 253 253 253 253 253 253 251 93 82 82 56 39 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 18 219 253 253 253 253 253 198 182 247 241 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 80 156 107 253 253 205 11 0 43 154 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 14 1 154 253 90 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 139 253 190 2 0 0 0 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 11 190 253 70 0 0 0 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 0 35 241 225 160 108 1 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 0 0 81 240 253 253 119 25 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 45 186 253 253 150 27 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 16 93 252 253 187 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 249 253 249 64 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 46 130 183 253 253 207 2 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 0 39 148 229 253 253 253 250 182 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 24 114 221 253 253 253 253 201 78 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 23 66 213 253 253 253 253 198 81 2 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 18 171 219 253 253 253 253 195 80 9 0 0 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 55 172 226 253 253 253 253 244 133 11 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 136 253 253 253 212 135 132 16 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n",
"[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]\n"
]
}
],
"source": [
"import numpy as np\n",
"np.set_printoptions(formatter={'int':'{:3d}'.format}, linewidth=130)\n",
"for i in x_train[0]:\n",
" print(i)\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 29,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": 40,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<matplotlib.image.AxesImage at 0x1b471d45da0>"
]
},
"execution_count": 40,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAA2oAAAEZCAYAAADmAtZNAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAGXJJREFUeJzt3X2wbXV5H/Dv0wsWe6Ej1qKEaKTE\najQdMd4SM2pro1hM46Cp8SXWYmq8TtUUnVpFGl86tQYTRWln1GIlkEo0SX1/RURndIylgiGKYIIi\nEZSXOE5G5CUK/PrHPUyucO9ev3v23mf/zjmfzwxzztnrOWs97Hv3c9b3rnX2r1prAQAAYBx/Z9UN\nAAAA8OMENQAAgMEIagAAAIMR1AAAAAYjqAEAAAxGUAMAABiMoAYAADAYQQ0AAGAwghoAAMBgBDUA\nAIDBHDTPN1fVCUnOSLIjyf9qrZ02Ud8kQ9ha7kjSWqtV93FXBzKfzCbYmu5Ivtta+4er7mNvzp2A\n3nOnaq2t6wBVtSPJXyQ5Psk1Sb6Y5Fmttcv29z07qtoh6zoaMKpbk9w+WFA70PlkNsHWdHNycWtt\n16r7uJNzJyDpP3ea5x9pjkvy9dbala21HyZ5T5IT59gfwKKYT8CIzCag2zxB7agkV+/19TVrjwGs\nmvkEjMhsArrN8ztq+7pcd7f7KKtqd5Ld+/sGgCWYnE9mE7ACzp2AbvMEtWuS3H+vr38yyXfuWtRa\nOzPJmcme+6znOB5Ar8n5ZDYBK+DcCeg2z62PX0zyoKo6uqrukeSZST60mLYA5mI+ASMym4Bu676i\n1lq7rapenOS87HmL2bNaa19dWGcA62Q+ASMym4ADse63518PbzELW8+Ib89/oMwm2JpGe3v+9TCf\nYOvZiLfnBwAAYAkENQAAgMEIagAAAIMR1AAAAAYjqAEAAAxGUAMAABiMoAYAADAYQQ0AAGAwghoA\nAMBgBDUAAIDBCGoAAACDEdQAAAAGI6gBAAAMRlADAAAYjKAGAAAwGEENAABgMIIaAADAYAQ1AACA\nwQhqAAAAgxHUAAAABiOoAQAADEZQAwAAGIygBgAAMBhBDQAAYDCCGgAAwGAOWnUDADCS13fUnPx/\nOor+9S3TNe+952TJ6U+bvf1VHa0AsPm4ogYAADAYQQ0AAGAwghoAAMBgBDUAAIDBCGoAAACDEdQA\nAAAGI6gBAAAMRlADAAAYTLXWNuxgO6raIRt2NNar58/owUvvYo8/eXpH0U901OyeLjnxobO3f/Az\nHcd5XM/r6arpkl87erJk57s7DrUBbk1ye2u16j7mYTZtH6/uqHlF+1RH1c/P28oB+PzMrTvrhA3q\nY/O5Obm4tbZr1X3Mw3xiM/tAR83x7V2TNS+tfzNZc2bHsUbRe+500DwHqaqrktyY5PYkt232YQhs\nHeYTMCKzCeg1V1Bb8y9aa99dwH4AFs18AkZkNgGT/I4aAADAYOYNai3JJ6vq4qrq+C0ggA1jPgEj\nMpuALvPe+vjo1tp3quqIJOdX1ddaa5/du2BtCO1Okk39bgPAZjNzPplNwIo4dwK6zHVFrbX2nbWP\nNyR5f5Lj9lFzZmttV2ttl2EDbJSp+WQ2Aavg3Anote6gVlU7q+qwOz9P8sQkly6qMYD1Mp+AEZlN\nwIGY59bH+yZ5f1XduZ8/aK19YiFdAczHfAJGZDYB3dYd1FprVyZ5+AJ72dZOnNh+r459vLXn+ug7\nO2omFn5Okhx3S0fRSM6YrPjgn54yu+DYnv/nizpq/uVkxYcHWcx6szKftq/TJ7a/oL2lYy+P7qi5\nraOmZx5c31Hzr2ZufWvHHk7tqPnrjhrms9ln0//uqPm5jpqHzdsIm8bxT+wo+vb0YtbblbfnBwAA\nGIygBgAAMBhBDQAAYDCCGgAAwGAENQAAgMEIagAAAIMR1AAAAAYjqAEAAAxm3Qte0++/dNS8rH1q\noqJnAdbt6tbJilYTi1knuXqy4p6TFVN/iknylY6aMztqYCv5iY6a13XUPKNNVT2vYy+LcvJkxXX1\n5cma+7UbZ24/qd0yuY+Tfnl6fu386GQJ29yvPL2j6Nc7ap40byeM4uCpgmd07OSoJ0+W7MiHe9rZ\nclxRAwAAGIygBgAAMBhBDQAAYDCCGgAAwGAENQAAgMEIagAAAIMR1AAAAAYjqAEAAAzGgtcb4H0d\nNS/L1Eqjm23B6wdPl9zwremaI77ZcazrJisO7dgLsDpXfLyj6ITZCz+P5/OTFff7u4dN7+a2iZqD\nphe8zsunSyZ/DMEf/qfpmk/87vL7YBi/OFXw7340vZOPTi6bnbd1dbP1uKIGAAAwGEENAABgMIIa\nAADAYAQ1AACAwQhqAAAAgxHUAAAABiOoAQAADMY6ahvgzzpqvlFvnrn9mHNnb0+SvKbjQFd0rLfT\n5fEztz6kptdIu7rjKC/N0ZM1r/u9jh0BK/P6nqITPtVRtIAfWe2e0zWP6tjPhT2z9J2TFa//m+m9\nPG1iiaF/3A6Z3smO6RKY1vF3jW3lfX+1gJ04j9svV9QAAAAGI6gBAAAMRlADAAAYjKAGAAAwGEEN\nAABgMIIaAADAYAQ1AACAwQhqAAAAg6nW2oYdbEdVz7Kc7MODOmqu7Kj5fs+igs+dXsj1lpq9aOx9\nOg7D1nBrkttbq1X3MQ+zaf1e3VHzitazmPXPz9vKHt86bObmh/3U9C7O7TjMsU+arvnFj0/XXNhx\nrCk3tZ7Ft782WXF6PWKy5lUdRxrJzcnFrbVdq+5jHhs1n17ZUfNb7fLpok/8zGTJzo7XD5vDTe0B\nExXfmNzHWXXwZM1vdvazWfSeO01eUauqs6rqhqq6dK/H7l1V51fVFWsfD5+zX4ADZj4BIzKbgEXo\nufXx7CQn3OWxU5Jc0Fp7UJIL1r4G2Ghnx3wCxnN2zCZgTpNBrbX22STfu8vDJyY5Z+3zc5I8ZcF9\nAUwyn4ARmU3AIqz3zUTu21q7NknWPh6xuJYA5mI+ASMym4ADctCyD1BVu5PsTpJN/W4DwJZiNgGj\nMp+AZP1X1K6vqiOTZO3jDfsrbK2d2Vrb1VrbZdgAG6BrPplNwAZz7gQckPUGtQ8lOWnt85OSfHAx\n7QDMzXwCRmQ2AQek5+35353kC0keXFXXVNXzkpyW5PiquiLJ8WtfA2wo8wkYkdkELIIFr7eZm3re\nDPi3OxZPbbMXvL5Xx7XaH3W0wvgseL21/cbE9jN61rJ+fM+CzD0XF945WXFdnT9z+zEdR9ls+ha8\nvm265ILZi4Unyc4ndBxqIBa87nfTEzuKzuv4yf2J6cWLLXi9OTymo+a8dvZExbMn9/GMjgWvP9LR\ny2aysAWvAQAA2FiCGgAAwGAENQAAgMEIagAAAIMR1AAAAAYjqAEAAAxGUAMAABiMoAYAADCYg1bd\nABvrIadN13zt9bMXs06S1OwFVj+c6X2
cMH0UYInu3VFzxpUTBUff2LGXiyYrPl/PnKw5peNI9+uo\nYT8e3/M34ntLb4MV2b2g/bx5Qfth5c77eE/V1ILWL5zcw9SPme3MFTUAAIDBCGoAAACDEdQAAAAG\nI6gBAAAMRlADAAAYjKAGAAAwGEENAABgMIIaAADAYCx4vc1c3VHzgo74/j/bJTO3P7ZNr6x90wc6\nlq/tKPn7fz5dc/t0CWw7r+sp6lrQerYP1GMna6aWTAU2h/M/ueoOtrYHddRc0FHzD3pWmT6656xx\nth/WOyZrLpv7KFuXK2oAAACDEdQAAAAGI6gBAAAMRlADAAAYjKAGAAAwGEENAABgMIIaAADAYKyj\nxt28q6PmlfULM7c/sH1ueidPuaWjZrrk+8ffc7LmyZ+a3s+np0tgSzmpPaCjauLHRJt+/VkjbdkO\n6ai5taPGKQHze/iqG9jLf+6ouUdHzcse01H0pI6ae01sf+EXOnbS83r/xnTJt57WsZ9rOmpm/0+d\n2rEH9s8VNQAAgMEIagAAAIMR1AAAAAYjqAEAAAxGUAMAABiMoAYAADAYQQ0AAGAwghoAAMBgrG7J\nujxsYvvL67GT+3jNjR0HOrRjUezzp2s+/NbpRXl/5UWzt5833QkM4/92VV3QUXPb7M2P7DoQS9Wz\nmPXEn2OSnHHD3J2wiV3fUzT99+iI9onJmpvyhp6DLcDHOmp6ToX/uqPmax01z569+XW/MLmHH75q\n7qMkST7XUXNd+8uOqp+eufVtHXtg/yavqFXVWVV1Q1Vdutdjr62qb1fVJWv//dJy2wS4O/MJGJHZ\nBCxCz62PZyc5YR+Pv7m1duzafz3/ZAGwaGfHfALGc3bMJmBOk0GttfbZJN/bgF4ADoj5BIzIbAIW\nYZ43E3lxVX157fL+4QvrCGB+5hMwIrMJ6LbeoPa2JMckOTbJtUnetL/CqtpdVRdV1UVtnQcDOABd\n88lsAjaYcyfggKwrqLXWrm+t3d5auyPJO5IcN6P2zNbartbarlpvlwCdeueT2QRsJOdOwIFaV1Cr\nqiP3+vKpSS7dXy3ARjKfgBGZTcCBmlw8oqreneRxSe5TVdckeU2Sx1XVsUlakquSvGCJPQLsk/kE\njMhsAhahWtu4u593VLVDNuxojO6hHTX/r6OmWs/K2R0LWt4ye1HsnX+v4zDb0K1Jbm9tU9+dsxVn\n0zc7ao7oeu384cytT67fmNzDpzuOsl3du6Pm6v8wUXDGLdM7+cHs+ZYkDztsejdXTZcM5ebk4tba\nrlX3MY+R5tNNT+goevvS21isY6dLzvrBdM1vzt/Jhrrpvh1F1/2oo+jUmVt31u929bPd9J47zfOu\njwAAACyBoAYAADAYQQ0AAGAwghoAAMBgBDUAAIDBCGoAAACDEdQAAAAGI6gBAAAMpmMVYFiOyzpq\nDu2ouSkdK1HmXtMl97x25ub35MjJXTyzoxPYXK6ZudVi1vvXtZj18zqKzphamPyMyV18fgsuZs3G\n2/mpjqKfXnobLMIHFrSfl1nQeplcUQMAABiMoAYAADAYQQ0AAGAwghoAAMBgBDUAAIDBCGoAAACD\nEdQAAAAGI6gBAAAMxoLXLMUrO2p+67kdRV3rKHYsZt3l12Zufc6CjgKbyhteu+oOhvTqjppX9CwO\n/PipxayTfHn2atU7H95xHIAlOOdNq+5ga3NFDQAAYDCCGgAAwGAENQAAgMEIagAAAIMR1AAAAAYj\nqAEAAAxGUAMAABiMoAYAADAYC15zN7s7at78lomCk9/esZeTOmoW5QfTJTd8ZubmHy2oE9gIO7qq\nOn4EvOK02dtPOaXrSJvJTcd1FF3Ys5r1o6dLPnrPyZKdv9xxKAC2HFfUAAAABiOoAQAADEZQAwAA\nGIygBgAAMBhBDQAAYDCCGgAAwGAENQAAgMFYR20L+ecdNR/rWh/olR1Fr+6o2SiPnKz4Wl22gL3A\n5nF7V9VtHTWz1zu86Xem11F7w8unj/Kljk7+R0fNEVdOFBw9sS5ckuTkjpo3TJd89gmTJWdZIw0Y\n1nRMOGniks8L71hQK9vU5BW1qrp/VX2mqi6vqq9W1clrj9+7qs6vqivWPh6+/HYB9jCbgFGZT8Ai\n9Nz6eFuS/9ha+5kkj0ryoqp6aJJTklzQWntQkgvWvgbYKGYTMCrzCZjbZFBrrV3bWvvS2uc3Jrk8\nyVFJTkxyzlrZOUmesqwmAe7KbAJGZT4Bi3BAbyZSVQ9M8ogkFya5b2vt2mTPQEpyxKKbA+hhNgGj\nMp+A9ep+M5GqOjTJe5O8pLX2/arq/b7dSXYnSd93APQzm4BRmU/APLquqFXVwdkzaM5trb1v7eHr\nq+rIte1HJrlhX9/bWjuztbartbbLsAEWyWwCRmU+AfPqedfHSvLOJJe31k7fa9OH8rfv23xSkg8u\nvj2AfTObgFGZT8Ai9Nz6+Ogkz0nylaq6ZO2xU5OcluSPqup5Sb6V5FeX0yLAPplNwKjMJ2Bu1Vrb\nsIPtqGqHbNjRNpfHTGx/acc+TmjP6qg6q6Nmozx4suKq+tZkzbEdR/pRRw3rc2uS21vb1HfnbMXZ\n9M2OmiPajUvvY49zO2ou7Kj57/M20umY6ZJf3+cdaz9m59nzd8J8bk4ubq3tWnUf89iK84nVu+kL\nHUWP6jh7OuTgmZt3/k1fP9tN77nTAb3rIwAAAMsnqAEAAAxGUAMAABiMoAYAADAYQQ0AAGAwghoA\nAMBgBDUAAIDBCGoAAACDOWjVDWx2D++o+ZOLOooeObWk9es7drKRjpq59Rv1vck9PLHjKNd1dgP8\nuBd11Pxx1wT7s3lbSXJSR82zF3CcJPno7M1veObkHnaesqBWALa6t05sf96GdLFluaIGAAAwGEEN\nAABgMIIaAADAYAQ1AACAwQhqAAAAgxHUAAAABiOoAQAADEZQAwAAGMy2XfD6LR01z//TjqJj39hR\n1LP07Eb5+nTJv/8nkyUPefvs7Vd3dgMsx8c6ap5cV07WfPjZh80ueNeNfQ0twqsnekny4v86e/vv\nLagVgK1v28aEYbiiBgAAMBhBDQAAYDCCGgAAwGAENQAAgMEIagAAAIMR1AAAAAYjqAEAAAxGUAMA\nABjMtl3J7vkv6Sg69pal9/G3/u3szSf/8fQubp0u+adnTtdcNl0CbAGf7qjZee5EwbnTi1ADMJjH\nddTcetuyu2CCK2oAAACDEdQAAAAGI6gBAAAMRlADAAAYjKAGAAAwGEENAABgMIIaAADAYAQ1AACA\nwVRrbXZB1f2T/H6S+yW5I8mZrbUzquq1SZ6f5K/WSk9trX1s1r52VLVD5m4ZGMmtSW5vrTb6uGYT\nMOXm5OLW2q6NPq75BMzSe+7UE9SOTHJka+1LVXVYkouTPCXJ05P8oLX2xt6mDBvYelYY1MwmYKYV\nBjXzCdiv3nOng6YKWmvXJrl27fMbq+ryJEfN3SHAHMwmYFTmE7AIB/Q7alX1wCSPSHLh2kMvrqov
\nV9VZVXX4gnsD6GI2AaMyn4D16g5qVXVokvcmeUlr7ftJ3pbkmCTHZs+/Gr1pP9+3u6ouqqqLZt9k\nCXDgzCZgVOYTMI/J31FLkqo6OMlHkpzXWjt9H9sfmOQjrbWfnbUf91nD1rOq31FLzCZgtlX9jlpi\nPgH713vuNHlFraoqyTuTXL73oFn7Rdk7PTXJpevoE2BdzCZgVOYTsAiTbyaS5NFJnpPkK1V1ydpj\npyZ5VlUdm6QluSrJC5bSIcC+mU3AqMwnYG5dtz4uisv3sPWs8tbHRTGbYGta5a2Pi2I+wdazsFsf\nAQAA2FiCGgAAwGAENQAAgMEIagAAAIMR1AAAAAYjqAEAAAxGUAMAABiMoAYAADAYQQ0AAGAwghoA\nAMBgBDUAAIDBCGoAAACDEdQAAAAGI6gBAAAMRlADAAAYjKAGAAAwGEENAABgMAdt5MHuSL57c/KX\nez10nyTf3cge5qTf5dLvci2r359awj431D5mU+LPd9n0u1z63WMrzid/tsul3+XS7x5ds6laa0s4\ndp+quqi1tmtlDRwg/S6Xfpdrs/W7apvt+dLvcul3uTZbv6u02Z4r/S6Xfpdr1f269REAAGAwghoA\nAMBgVh3Uzlzx8Q+UfpdLv8u12fpdtc32fOl3ufS7XJut31XabM+VfpdLv8u10n5X+jtqAAAA3N2q\nr6gBAABwFysLalV1QlX9eVV9vapOWVUfvarqqqr6SlVdUlUXrbqfu6qqs6rqhqq6dK/H7l1V51fV\nFWsfD19lj3vbT7+vrapvrz3Hl1TVL62yx71V1f2r6jNVdXlVfbWqTl57fMjneEa/wz7HozCbFsts\nWi6zaXsxnxbLfFoes2lBfa3i1seq2pHkL5Icn+SaJF9M8qzW2mUb3kynqroqya7W2pBrP1TVP0vy\ngyS/31r72bXHfifJ91prp60N9MNba69YZZ932k+/r03yg9baG1fZ275U1ZFJjmytfamqDktycZKn\nJHluBnyOZ/T79Az6HI/AbFo8s2m5zKbtw3xaPPNpecymxVjVFbXjkny9tXZla+2HSd6T5MQV9bIl\ntNY+m+R7d3n4xCTnrH1+Tvb8hRvCfvodVmvt2tbal9Y+vzHJ5UmOyqDP8Yx+mc1sWjCzabnMpm3F\nfFow82l5zKbFWFVQOyrJ1Xt9fU0GeDImtCSfrKqLq2r3qpvpdN/W2rXJnr+ASY5YcT89XlxVX167\nvD/E5fC7qqoHJnlEkguzCZ7ju/SbbILneIXMpo0x/OtmH4Z/3ZhNW575tDGGf+3sw9CvHbNp/VYV\n1Gofj43+9pOPbq39XJInJXnR2uVnFuttSY5JcmySa5O8abXt3F1VHZrkvUle0lr7/qr7mbKPfod/\njlfMbGJfhn/dmE3bgvnEvgz92jGb5rOqoHZNkvvv9fVPJvnOinrp0lr7ztrHG5K8P3tuQRjd9Wv3\n3N557+0NK+5nptba9a2121trdyR5RwZ7jqvq4Ox58Z7bWnvf2sPDPsf76nf053gAZtPGGPZ1sy+j\nv27Mpm3DfNoYw7529mXk147ZNL9VBbUvJnlQVR1dVfdI8swkH1pRL5OqaufaLxamqnYmeWKSS2d/\n1xA+lOSktc9PSvLBFfYy6c4X7pqnZqDnuKoqyTuTXN5aO32vTUM+x/vrd+TneBBm08YY8nWzPyO/\nbsymbcV82hhDvnb2Z9TXjtm0oL5WteB17Xl7y7ck2ZHkrNbaf1tJIx2q6h9lz78EJclBSf5gtH6r\n6t1JHpfkPkmuT/KaJB9I8kdJHpDkW0l+tbU2xC+h7qffx2XPpeWW5KokL7jzPuZVq6rHJPlckq8k\nuWPt4VOz5/7l4Z7jGf0+K4M+x6MwmxbLbFous2l7MZ8Wy3xaHrNpQX2tKqgBAACwbytb8BoAAIB9\nE9QAAAAGI6gBAAAMRlADAAAYjKAGAAAwGEENAABgMIIaAADAYAQ1AACAwfx/oVNfxHFQ7IYAAAAA\nSUVORK5CYII=\n",
"text/plain": [
"<Figure size 1080x1080 with 3 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# imshowは画像表示、cmapはカラーマップ\n",
"plt.figure(figsize=(15,15))\n",
"plt.subplot(1,3,1)\n",
"plt.imshow(x_train[0], cmap='hot')\n",
"plt.subplot(1,3,2)\n",
"plt.imshow(x_train[1], cmap='hot')\n",
"plt.subplot(1,3,3)\n",
"plt.imshow(x_train[2], cmap='hot')\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Accent, Accent_r, Blues, Blues_r, BrBG, BrBG_r, BuGn, BuGn_r, BuPu, BuPu_r, CMRmap, CMRmap_r, Dark2, Dark2_r, GnBu, GnBu_r, Greens, Greens_r, Greys, Greys_r, OrRd, OrRd_r, Oranges, Oranges_r, PRGn, PRGn_r, Paired, Paired_r, Pastel1, Pastel1_r, Pastel2, Pastel2_r, PiYG, PiYG_r, PuBu, PuBuGn, PuBuGn_r, PuBu_r, PuOr, PuOr_r, PuRd, PuRd_r, Purples, Purples_r, RdBu, RdBu_r, RdGy, RdGy_r, RdPu, RdPu_r, RdYlBu, RdYlBu_r, RdYlGn, RdYlGn_r, Reds, Reds_r, Set1, Set1_r, Set2, Set2_r, Set3, Set3_r, Spectral, Spectral_r, Wistia, Wistia_r, YlGn, YlGnBu, YlGnBu_r, YlGn_r, YlOrBr, YlOrBr_r, YlOrRd, YlOrRd_r, afmhot, afmhot_r, autumn, autumn_r, binary, binary_r, bone, bone_r, brg, brg_r, bwr, bwr_r, cividis, cividis_r, cool, cool_r, coolwarm, coolwarm_r, copper, copper_r, cubehelix, cubehelix_r, flag, flag_r, gist_earth, gist_earth_r, gist_gray, gist_gray_r, gist_heat, gist_heat_r, gist_ncar, gist_ncar_r, gist_rainbow, gist_rainbow_r, gist_stern, gist_stern_r, gist_yarg, gist_yarg_r, gnuplot, gnuplot2, gnuplot2_r, gnuplot_r, gray, gray_r, hot, hot_r, hsv, hsv_r, inferno, inferno_r, jet, jet_r, magma, magma_r, nipy_spectral, nipy_spectral_r, ocean, ocean_r, pink, pink_r, plasma, plasma_r, prism, prism_r, rainbow, rainbow_r, seismic, seismic_r, spring, spring_r, summer, summer_r, tab10, tab10_r, tab20, tab20_r, tab20b, tab20b_r, tab20c, tab20c_r, terrain, terrain_r, viridis, viridis_r, winter, winter_r\n"
]
},
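{
"cell_type": "markdown",
"metadata": {},
"source": [
"The colormap names above can also be generated programmatically, so the list never goes out of date. A minimal sketch, not part of the original notebook, using `matplotlib.pyplot.colormaps()`:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch (not from the original gist): list every registered colormap name\n",
"import matplotlib.pyplot as plt\n",
"print(', '.join(sorted(plt.colormaps())))"
]
},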
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"x_train = x_train.reshape(60000, 784)\n",
"x_test = x_test.reshape(10000,784)"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[ 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.\n",
" 0.32941177 0.72549021 0.62352943 0.59215689 0.23529412 0.14117648\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0.87058824 0.99607843 0.99607843 0.99607843 0.99607843\n",
" 0.94509804 0.7764706 0.7764706 0.7764706 0.7764706 0.7764706\n",
" 0.7764706 0.7764706 0.7764706 0.66666669 0.20392157 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0.26274511 0.44705883 0.28235295\n",
" 0.44705883 0.63921571 0.89019608 0.99607843 0.88235295 0.99607843\n",
" 0.99607843 0.99607843 0.98039216 0.89803922 0.99607843 0.99607843\n",
" 0.54901963 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0.06666667 0.25882354\n",
" 0.05490196 0.26274511 0.26274511 0.26274511 0.23137255 0.08235294\n",
" 0.9254902 0.99607843 0.41568628 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.\n",
" 0.32549021 0.99215686 0.81960785 0.07058824 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.\n",
" 0.08627451 0.9137255 1. 0.32549021 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0.50588238 0.99607843 0.93333334 0.17254902 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.\n",
" 0.23137255 0.97647059 0.99607843 0.24313726 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0.52156866 0.99607843 0.73333335 0.01960784 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.\n",
" 0.03529412 0.80392158 0.97254902 0.22745098 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0.49411765 0.99607843 0.71372551 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.\n",
" 0.29411766 0.98431373 0.94117647 0.22352941 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.\n",
" 0.07450981 0.86666667 0.99607843 0.65098041 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.\n",
" 0.01176471 0.79607844 0.99607843 0.85882354 0.13725491 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0.14901961 0.99607843 0.99607843 0.3019608 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.\n",
" 0.12156863 0.87843138 0.99607843 0.4509804 0.00392157 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0.52156866 0.99607843 0.99607843 0.20392157 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.\n",
" 0.23921569 0.94901961 0.99607843 0.99607843 0.20392157 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0.47450981 0.99607843 0.99607843 0.85882354 0.15686275 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0.47450981 0.99607843 0.81176472 0.07058824 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. ]\n",
"[ 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.01176471\n",
" 0.07058824 0.07058824 0.07058824 0.49411765 0.53333336 0.68627453\n",
" 0.10196079 0.65098041 1. 0.96862745 0.49803922 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0.11764706 0.14117648 0.36862746\n",
" 0.60392159 0.66666669 0.99215686 0.99215686 0.99215686 0.99215686\n",
" 0.99215686 0.88235295 0.67450982 0.99215686 0.94901961 0.7647059\n",
" 0.25098041 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.19215687\n",
" 0.93333334 0.99215686 0.99215686 0.99215686 0.99215686 0.99215686\n",
" 0.99215686 0.99215686 0.99215686 0.98431373 0.36470589 0.32156864\n",
" 0.32156864 0.21960784 0.15294118 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0.07058824 0.85882354 0.99215686 0.99215686 0.99215686\n",
" 0.99215686 0.99215686 0.7764706 0.71372551 0.96862745 0.94509804\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0.3137255 0.61176473\n",
" 0.41960785 0.99215686 0.99215686 0.80392158 0.04313726 0.\n",
" 0.16862746 0.60392159 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0.05490196 0.00392157 0.60392159 0.99215686 0.35294119 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0.54509807 0.99215686 0.74509805 0.00784314\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0.04313726 0.74509805 0.99215686\n",
" 0.27450982 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.13725491\n",
" 0.94509804 0.88235295 0.627451 0.42352942 0.00392157 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0.31764707 0.94117647 0.99215686 0.99215686 0.46666667 0.09803922\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0.17647059 0.72941178 0.99215686 0.99215686\n",
" 0.58823532 0.10588235 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0.0627451 0.36470589\n",
" 0.98823529 0.99215686 0.73333335 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0.97647059 0.99215686 0.97647059 0.25098041 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0.18039216 0.50980395\n",
" 0.71764708 0.99215686 0.99215686 0.81176472 0.00784314 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0.15294118 0.58039218 0.89803922\n",
" 0.99215686 0.99215686 0.99215686 0.98039216 0.71372551 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0.09411765 0.44705883 0.86666667 0.99215686\n",
" 0.99215686 0.99215686 0.99215686 0.78823531 0.30588236 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0.09019608 0.25882354 0.83529413 0.99215686 0.99215686\n",
" 0.99215686 0.99215686 0.7764706 0.31764707 0.00784314 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0.07058824 0.67058825 0.85882354 0.99215686 0.99215686 0.99215686\n",
" 0.99215686 0.7647059 0.3137255 0.03529412 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.\n",
" 0.21568628 0.67450982 0.88627452 0.99215686 0.99215686 0.99215686\n",
" 0.99215686 0.95686275 0.52156866 0.04313726 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0.53333336 0.99215686 0.99215686 0.99215686 0.83137256\n",
" 0.52941179 0.51764709 0.0627451 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. ]\n"
]
}
],
"source": [
"#0-255のRGBを0-1の数値に変換\n",
"x_test = x_test.astype('float32')\n",
"x_train = x_train.astype('float32')\n",
"\n",
"x_test /= 255\n",
"x_train /= 255\n",
"\n",
"print(x_test[0])\n",
"print(x_train[0])"
]
},
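{
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick sanity check, not in the original notebook, that the scaling above really maps the pixels into [0, 1]:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: confirm the normalized pixel range and dtype\n",
"print(x_train.dtype, x_train.min(), x_train.max())   # expect float32 0.0 1.0\n",
"print(x_test.dtype, x_test.min(), x_test.max())"
]
},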
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0.329 0.725 0.624 0.592 0.235 0.141 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.871\n",
" 0.996 0.996 0.996 0.996 0.945 0.776 0.776 0.776 0.776 0.776\n",
" 0.776 0.776 0.776 0.667 0.204 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.263 0.447 0.282 0.447\n",
" 0.639 0.89 0.996 0.882 0.996 0.996 0.996 0.98 0.898 0.996\n",
" 0.996 0.549 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0.067 0.259\n",
" 0.055 0.263 0.263 0.263 0.231 0.082 0.925 0.996 0.416 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0.325 0.992 0.82 0.071 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.086 0.914 1. 0.325 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0.506 0.996 0.933 0.173 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.231 0.976 0.996 0.243 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0.522 0.996 0.733 0.02 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.035 0.804 0.973 0.227 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0.494 0.996 0.714 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.294 0.984 0.941 0.224 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.075\n",
" 0.867 0.996 0.651 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0.012 0.796 0.996 0.859 0.137 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.149\n",
" 0.996 0.996 0.302 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0.122 0.878 0.996 0.451 0.004 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.522\n",
" 0.996 0.996 0.204 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0.239 0.949 0.996 0.996 0.204 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.475\n",
" 0.996 0.996 0.859 0.157 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.475 0.996 0.812 0.071 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. ]\n",
"[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.012\n",
" 0.071 0.071 0.071 0.494 0.533 0.686 0.102 0.651 1. 0.969\n",
" 0.498 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0.118 0.141 0.369 0.604 0.667 0.992 0.992 0.992\n",
" 0.992 0.992 0.882 0.675 0.992 0.949 0.765 0.251 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0.192 0.933\n",
" 0.992 0.992 0.992 0.992 0.992 0.992 0.992 0.992 0.984 0.365\n",
" 0.322 0.322 0.22 0.153 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0.071 0.859 0.992 0.992 0.992\n",
" 0.992 0.992 0.776 0.714 0.969 0.945 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0.314 0.612 0.42 0.992 0.992 0.804 0.043 0. 0.169\n",
" 0.604 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.055\n",
" 0.004 0.604 0.992 0.353 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0.545 0.992 0.745\n",
" 0.008 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0.043 0.745 0.992 0.275 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.137\n",
" 0.945 0.882 0.627 0.424 0.004 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0.318 0.941 0.992 0.992\n",
" 0.467 0.098 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0.176 0.729 0.992 0.992 0.588 0.106 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.063\n",
" 0.365 0.988 0.992 0.733 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0.976 0.992 0.976\n",
" 0.251 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0.18 0.51 0.718 0.992 0.992 0.812 0.008 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0.153 0.58 0.898 0.992 0.992 0.992\n",
" 0.98 0.714 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.094\n",
" 0.447 0.867 0.992 0.992 0.992 0.992 0.788 0.306 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0.09 0.259 0.835 0.992 0.992 0.992 0.992\n",
" 0.776 0.318 0.008 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0.071 0.671\n",
" 0.859 0.992 0.992 0.992 0.992 0.765 0.314 0.035 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0.216 0.675 0.886 0.992 0.992 0.992 0.992 0.957\n",
" 0.522 0.043 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.533\n",
" 0.992 0.992 0.992 0.831 0.529 0.518 0.063 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n",
" 0. 0. 0. 0. 0. 0. 0. 0. ]\n"
]
}
],
"source": [
"import numpy as np\n",
"np.set_printoptions(precision=3, suppress=True)\n",
"\n",
"print(x_test[0])\n",
"print(x_train[0])"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[ 0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]\n",
"[ 0. 0. 0. 0. 0. 0. 0. 1. 0. 0.]\n"
]
}
],
"source": [
"# 正解ラベルをone hot 表現に変える\n",
"# one hot 表現とは[0,1,2,3,4,5]の配列を[0,0,1,0,0]のように変える表現のこと\n",
"\n",
"y_train = keras.utils.to_categorical(y_train, 10)\n",
"y_test = keras.utils.to_categorical(y_test, 10)\n",
"\n",
"print(y_train[0]) # 正解は5\n",
"print(y_test[0]) # 正解は7"
]
},
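{
"cell_type": "markdown",
"metadata": {},
"source": [
"For intuition, `keras.utils.to_categorical` is equivalent to indexing rows of a 10x10 identity matrix with the integer labels. A minimal sketch on a few made-up example labels (not part of the original notebook):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: manual one-hot encoding via identity-matrix indexing\n",
"import numpy as np\n",
"labels = np.array([5, 0, 4, 1, 9])   # example digit labels (hypothetical)\n",
"one_hot = np.eye(10)[labels]         # shape (5, 10), one 1 per row\n",
"print(one_hot)"
]
},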
{
"cell_type": "code",
"execution_count": 12,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"model = Sequential([\n",
" Dense(32, input_shape=(784,)),\n",
" Activation('relu'),\n",
" Dense(10),\n",
" Activation('softmax'),\n",
"])"
]
},
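{
"cell_type": "markdown",
"metadata": {},
"source": [
"The two Dense layers hold 784×32 + 32 = 25,120 and 32×10 + 10 = 330 trainable parameters (weights plus biases). A small sketch, not part of the original notebook, that checks the arithmetic against `model.summary()`:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: parameter count of the model defined above (weights + biases)\n",
"hidden_params = 784 * 32 + 32   # Dense(32): 25,120\n",
"output_params = 32 * 10 + 10    # Dense(10): 330\n",
"print(hidden_params, output_params, hidden_params + output_params)\n",
"model.summary()                 # Keras reports the same totals"
]
},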
{
"cell_type": "code",
"execution_count": 13,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"model.compile(optimizer='rmsprop',\n",
" loss='binary_crossentropy',\n",
" metrics=['accuracy'])"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/10\n",
"60000/60000 [==============================] - 6s 98us/step - loss: 0.0587 - acc: 0.9803\n",
"Epoch 2/10\n",
"60000/60000 [==============================] - 5s 83us/step - loss: 0.0344 - acc: 0.9887\n",
"Epoch 3/10\n",
"60000/60000 [==============================] - 5s 80us/step - loss: 0.0278 - acc: 0.9911\n",
"Epoch 4/10\n",
"60000/60000 [==============================] - 5s 86us/step - loss: 0.0237 - acc: 0.9925: 0s - loss: 0.023\n",
"Epoch 5/10\n",
"60000/60000 [==============================] - 5s 76us/step - loss: 0.0212 - acc: 0.9932\n",
"Epoch 6/10\n",
"60000/60000 [==============================] - 5s 80us/step - loss: 0.0194 - acc: 0.9939: 0s - loss: 0.0193 - ac\n",
"Epoch 7/10\n",
"60000/60000 [==============================] - ETA: 0s - loss: 0.0179 - acc: 0.994 - 5s 80us/step - loss: 0.0180 - acc: 0.9943\n",
"Epoch 8/10\n",
"60000/60000 [==============================] - 5s 78us/step - loss: 0.0169 - acc: 0.9948\n",
"Epoch 9/10\n",
"60000/60000 [==============================] - 5s 77us/step - loss: 0.0160 - acc: 0.9952\n",
"Epoch 10/10\n",
"60000/60000 [==============================] - 4s 73us/step - loss: 0.0154 - acc: 0.9951\n"
]
},
{
"data": {
"text/plain": [
"<keras.callbacks.History at 0x132c54c50>"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.fit(x_train, y_train, epochs=10, batch_size=32)"
]
},
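{
"cell_type": "markdown",
"metadata": {},
"source": [
"To watch for overfitting during training, `fit` can hold out part of the training data. A hedged sketch, not run in the original notebook, using the standard `validation_split` argument:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: retrain while holding out 10% of the training data for validation\n",
"history = model.fit(x_train, y_train, epochs=10, batch_size=32,\n",
"                    validation_split=0.1)\n",
"print(history.history.keys())   # per-epoch loss/accuracy for train and validation"
]
},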
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"10000/10000 [==============================] - 0s 27us/step\n",
"Loss : 0.0220060736984\n",
"Accuracy : 0.99341999855\n"
]
}
],
"source": [
"score = model.evaluate(x_test, y_test, verbose=1)\n",
"print('Loss : ', score[0])\n",
"print('Accuracy : ', score[1])"
]
},
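{
"cell_type": "markdown",
"metadata": {},
"source": [
"Beyond the aggregate score, the trained model can be queried for individual predictions. A minimal sketch, not part of the original notebook, comparing predicted and true digits for the first five test images:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: predict class probabilities and take the argmax to recover digits\n",
"import numpy as np\n",
"probs = model.predict(x_test[:5])       # shape (5, 10)\n",
"print(np.argmax(probs, axis=1))         # predicted digits\n",
"print(np.argmax(y_test[:5], axis=1))    # true digits (y_test is one-hot here)"
]
},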
{
"cell_type": "code",
"execution_count": 17,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"# モデルの保存\n",
"model.save('mnist_test.h5')"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"# モデルの読み込み\n",
"model = keras.models.load_model('mnist_test.h5')"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"10000/10000 [==============================] - 0s 38us/step\n"
]
},
{
"data": {
"text/plain": [
"[0.022006073698395631, 0.99341999855041507]"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model.evaluate(x_test, y_test, verbose=1)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.6"
}
},
"nbformat": 4,
"nbformat_minor": 2
}