Skip to content

Instantly share code, notes, and snippets.

@briandw
Last active February 28, 2017 05:41
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save briandw/fab85da5a61bd81bb876f3420884e2ae to your computer and use it in GitHub Desktop.
Display the source blob
Display the rendered blob
Raw
{
"cells": [
{
"cell_type": "code",
"execution_count": 89,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"%matplotlib inline\n",
"from matplotlib.pyplot import imshow\n",
"import numpy as np\n",
"from PIL import Image, ImageDraw\n",
"from keras import backend as K\n",
"import random as rnd\n",
"\n",
"def makeRandomLabel():\n",
"    \"\"\"Return a 25-element float array of random 0/1 bits (one per grid light).\"\"\"\n",
"    labelSize = 25\n",
"    bits = rnd.getrandbits(labelSize)\n",
"    bitArray = np.zeros(labelSize)\n",
"    for i in range(labelSize):\n",
"        # Peel the bits off one at a time, least-significant first.\n",
"        bitArray[i] = (bits & 0b1)\n",
"        bits = bits >> 1\n",
"    return bitArray\n",
"\n",
"def drawImage(bitArray, imageSize):\n",
"    \"\"\"Render bitArray as a 5x5 grid of 'lights': a dark disc background\n",
"    with one white dot per set bit. Returns a PIL RGB image of\n",
"    (imageSize, imageSize) pixels.\n",
"    \"\"\"\n",
"    img = Image.new(\"RGB\", (imageSize, imageSize), \"white\")\n",
"    draw = ImageDraw.Draw(img)\n",
"    size = imageSize/30\n",
"    # Random top-left corner so the pattern's position varies between samples.\n",
"    offset = (rnd.randrange(int(imageSize/6), int(imageSize/2)), rnd.randrange(int(imageSize/6), int(imageSize/2)))\n",
"    spacing = size + imageSize/37\n",
"    numLights = 5\n",
"\n",
"    # Dark disc drawn behind the whole grid of lights.\n",
"    box = (offset[0]-spacing*1.5, offset[1]-spacing*1.5, offset[0]+spacing*6, offset[1]+spacing*6)\n",
"    draw.ellipse(box, fill=0)\n",
"\n",
"    for y in range(numLights):\n",
"        for x in range(numLights):\n",
"            box = (offset[0] + x*spacing, offset[1] + y*spacing, offset[0] + x*spacing+size, offset[1] + y*spacing+size)\n",
"            value = bitArray[x + y * numLights]\n",
"            if value:\n",
"                draw.ellipse(box, fill=\"white\")\n",
"    del draw\n",
"\n",
"    return (img)\n",
"\n",
"def traningSet(imageSize, batchSize=32):\n",
"    \"\"\"Infinite generator of (images, labels) training batches.\n",
"\n",
"    images: (batchSize, 3, imageSize, imageSize) float64, channels-first.\n",
"    labels: (batchSize, 25) float32 bit vectors.\n",
"    NOTE: the name keeps the original 'traning' typo for caller compatibility.\n",
"    \"\"\"\n",
"    while(True):\n",
"        labels = np.ndarray(shape=(batchSize, 25), dtype='float32', order='F')\n",
"        images = np.ndarray(shape=(batchSize, 3, imageSize, imageSize), dtype='float64', order='F')\n",
"        for i in range(0, batchSize):\n",
"            label = makeRandomLabel()\n",
"            image = drawImage(label, imageSize)\n",
"            labels[i] = np.asarray(label, dtype='float32')\n",
"\n",
"            imageData = np.asarray(image, dtype='float64')\n",
"            # PIL yields (H, W, 3); move channels first with transpose.\n",
"            # reshape(3, H, W) would reinterpret the buffer and scramble pixels.\n",
"            images[i] = imageData.transpose(2, 0, 1)\n",
"\n",
"        yield (images, labels)\n"
]
},
{
"cell_type": "code",
"execution_count": 98,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"text/plain": [
"array([ 1., 1., 0., 1., 0., 0., 1., 0., 1., 1., 1., 0., 1.,\n",
" 1., 1., 1., 1., 0., 1., 0., 0., 1., 1., 1., 0.])"
]
},
"execution_count": 98,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"label = makeRandomLabel()\n",
"label"
]
},
{
"cell_type": "code",
"execution_count": 99,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAGaUlEQVR4nO3d227bRhRAUbno//+y\n+1AgMJrYlSiSey5rvSaO6NHZGsoU44/Pz88H0PmrPgDYnQghJkKIiRBiIoSYCCEmQoiJEGIihJgI\nISZCiIkQYiKEmAghJkKIiRBiIoSYCCEmQoiJEGIihJgIISZCiIkQYiKEmAghJkKIiRBiIoSYCCEm\nQoiJEGIihJgIISZCiIkQYiKEmAghJkKIiRBiIoSYCCEmQoiJEGIihJgIISZCiIkQYiKEmAghJkKI\niRBiIoSYCCEmQoiJEGIihJgIISZCiIkQYiKEmAghJkKIiRBiIoSYCCEmQoiJEGIihJgIISZCiIkQ\nYiKEmAghJkKIiRBiIoSYCCEmQoiJEGIihJgIISZCiIkQYiKEmAghJkKIiRBiIoSYCCEmQoiJEGIi\nhJgIISZCiIkQYn/XB8BNPj4+nv/Ln5+f1x0J//FhudfzUm/PMyoXEeEiLgrvO8bmRCKc283t/c78\nvE+Es8rz+8oUvUOEMxkqvO+YqFeJcA5T5PeVuXqeCEc3XX5fma5nuFg/tKkLfMx//PewEw5qsfE1\nZj8Q4XAWy+8rw/ZHTkfHsnCBj9W/u8PshKPYakBN3Vd2wiFsVeBjv+/3Z3bC2ObjaPwedsLW5gU+\nrMDj8RBhyPz9yzo4HQ0Yuz/adhTthBAT4d1sg9/ZdmVEeKtt5+xJe66PCO+z54S9asNV8oOZO2w4\nWO/bZzLthBAT4eVsg8fss24ivNY+k3SFTVZPhBfaZIYutcMaihBifjp6lWMv4f95Op78R4591TF3\nPtZ3D7oYEV7iwGj+8ET88K8d+6pj7nyslx59dk5HISbC8527Df7wp8e+6pg7H+uPFv4JjQhPtvCs\n5FZdWxFCTIRnWvWlehxLrrAIISbC07zzIv3z1373p8e+6pg7H+udI5mRCM+x3mSMbLHVdrH+HGeN\nhU/MHDuMqYnwBIu9MM9imdF1OgoxEb7LNlhZZuVFCDERQkyEEBPhW5Z5WzKpNdb/7/oAOMH41wkH\nubo4JtcJjxthksa/s/6GI5x9hp2OQkyEExv/zvr8fvwpiPCgEc5F+dfsz4UIISZCiIkQYiI8YpA3\nIePfWX/bEQ7yjBwjQoi5WH/EaK+7PjHz+0NMRIRHjBYhj5kjdDoKMRFCTIQQE+HLvCEc07zPiwgh\nJkKIubN+LKte8XNn/Q9cJ3zZRQO06j3yg3xfI3M6CjERDmHVe+TdWf8MEUJMhBATIcRECDERDmHV\ne+TH+U33IxMhxFysf9mAn2K587FG/sTMpMMswpc5iRrWpMPsdPRlkz7Ty5v3eREhxEQIMRFCTIQQ\nEyHE3Fk/lpGvwt3/WJtwnfCgqe92P+bOx3rV1GPsdBRiIhzC+Petu0f+OiKEmAgP8to/jtmfCxFC\nTIQQE+FxJ54FjX/f+rD3yM9+LvoQIeRcrH/XuZuAT8y8ZI3pFeG78g+L7GyN6XU6CjERQkyE71rj\njGhGy6y8CCEmwhMs85I8kZXWXIQQc4niNON8amSB/0v7pWOYnQjPdP9Ervrb53+w3sQ6HYWYCM90\n84v0hr99fr1t8CFCyInwZEu+VA9i1bUV4flWnZXWwqsqQoiJ8BL3vGzv89vnF94GHyK8ztpzc6fl\nV9LF+gvddgl77U/MLD+iIryW++7ftMN8Oh291g4zdJ1NVk+El9tkkk63z7qJEGLeE97H+8Mn7TaT\ndsL77DZbx2y4SiK81YYT9pI910eEd9tzzp6x7cqIEGJ+MJPxc5pfNh9CO2Fm88n7xTqIsGT+rMDD\n6eggNjw1NXi/2AmHsNtE7vb9/sxOOJblt0Tz9js74VjWntG1v7vD7ISDWmxLNGY/EOHQFkjRgP0v\np6NDm32CZz/+e9gJ5zDdlmiunifCmUyRool6
lQhnNVSQpugdIpxbnqL5eZ8IF3FzjcbmRCJc0EVB\nGpWLiHAXL5VpKu4kQoi5WA8xEUJMhBATIcRECDERQkyEEBMhxEQIMRFCTIQQEyHERAgxEUJMhBAT\nIcRECDERQkyEEBMhxEQIMRFCTIQQEyHERAgxEUJMhBATIcRECDERQkyEEBMhxEQIMRFCTIQQEyHE\nRAgxEUJMhBATIcRECDERQkyEEBMhxEQIMRFCTIQQEyHERAgxEUJMhBATIcRECDERQkyEEBMhxEQI\nMRFCTIQQEyHERAgxEUJMhBATIcRECLF/ACurdNF4HhlwAAAAAElFTkSuQmCC\n",
"text/plain": [
"<PIL.Image.Image image mode=RGB size=300x300 at 0x7F54CDC755F8>"
]
},
"execution_count": 99,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"drawImage(label, 300)\n"
]
},
{
"cell_type": "code",
"execution_count": 91,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"def ConvBlock(layers, model, filters):\n",
"    \"\"\"Append a VGG-style block: `layers` x (1px zero-pad + 3x3 conv + ReLU),\n",
"    then a 2x2 max-pool with stride 2.\n",
"    \"\"\"\n",
"    for i in range(layers):\n",
"        model.add(ZeroPadding2D((1,1)))\n",
"        model.add(Convolution2D(filters, 3, 3, activation='relu'))\n",
"    model.add(MaxPooling2D((2,2), strides=(2,2)))\n",
"\n",
"def FCBlock(model):\n",
"    \"\"\"Append a 4096-unit fully-connected ReLU layer followed by 50% dropout.\"\"\"\n",
"    model.add(Dense(4096, activation='relu'))\n",
"    model.add(Dropout(0.5))\n"
]
},
{
"cell_type": "code",
"execution_count": 93,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/1\n",
"2/2 [==============================] - 0s - loss: 37.0219 - acc: 0.0000e+00\n"
]
},
{
"data": {
"text/plain": [
"<keras.callbacks.History at 0x7f54c642f160>"
]
},
"execution_count": 93,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from numpy.random import random, permutation\n",
"from scipy import misc, ndimage\n",
"from scipy.ndimage.interpolation import zoom\n",
"\n",
"import keras\n",
"from keras import backend as K\n",
"from keras.utils.data_utils import get_file\n",
"from keras.models import Sequential, Model\n",
"from keras.layers.core import Flatten, Dense, Dropout, Lambda\n",
"from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D\n",
"from keras.optimizers import SGD, RMSprop\n",
"from keras.preprocessing import image\n",
"\n",
"imageSize = 300\n",
"model = Sequential()\n",
"# Scale 0-255 pixel values into [0, 1).\n",
"model.add(Lambda(lambda x: x/256.0, input_shape=(3, imageSize, imageSize)))\n",
"ConvBlock(2, model, 64)\n",
"ConvBlock(2, model, 128)\n",
"ConvBlock(3, model, 256)\n",
"ConvBlock(3, model, 512)\n",
"ConvBlock(3, model, 512)\n",
"model.add(Flatten())\n",
"FCBlock(model)\n",
"FCBlock(model)\n",
"# Each of the 25 outputs is an independent on/off bit (multi-label target),\n",
"# so use per-unit sigmoid + binary cross-entropy. softmax + categorical\n",
"# cross-entropy would force a single-class distribution over the 25 bits.\n",
"model.add(Dense(25, activation='sigmoid'))\n",
"model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])\n",
"\n",
"generatedBatch = traningSet(batchSize=2, imageSize=imageSize)\n",
"model.fit_generator(generatedBatch, samples_per_epoch=2, nb_epoch=1)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.4.5"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment