# coding: utf-8
# In[1]:
from scipy import misc
import os.path
import scipy.ndimage
import numpy as np
import matplotlib.pyplot as plt
get_ipython().magic(u'matplotlib inline')
from statsmodels.tools import categorical
import keras
from keras.models import Sequential
from keras.layers import Dropout, Convolution2D, Reshape, MaxPooling2D, Flatten, Dense
# In[2]:
def read_zyles_image(file):
    """Read one capture, crop the region of interest, and shrink it to 90x120.

    The class label is taken from the name of the image's parent directory.
    """
    img = misc.imread(file)
    img = misc.imresize(img[100:550, 300:900], (90, 120))
    category = os.path.dirname(file).split('/').pop()
    return (img, category)
# In[3]:
# Keep the first class-0 capture as a reference frame; every training image is
# later differenced against it with cv2.subtract(img0, i).
img0, category = read_zyles_image("/Users/pku/temp/dataset/0/20161204133050-1801.jpg")
plt.imshow(img0);
# In[4]:
import os
import fnmatch
import cv2
rootdir = '/Users/pku/temp/dataset'
X = []
y = []
# Walk the dataset directory; each subdirectory name is the class label.
for subdir, dirs, files in os.walk(rootdir):
    for file in fnmatch.filter(files, '*.jpg'):
        (i, c) = read_zyles_image(os.path.join(subdir, file))
        # Difference against the reference frame so only the changed region remains.
        i = cv2.subtract(img0, i)
        X.append(i)
        y.append(int(c))
# In[5]:
# Scale pixel values to [0, 1] and one-hot encode the three class labels.
X = np.array(X) / 255.0
y = np.array(y)
# categorical() returns the original column followed by one dummy column per
# class; keep only the three dummy columns.
y = categorical(y)[:, 1:4]
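# An equivalent encoding with Keras' own helper (a sketch, assuming the labels
# are exactly the integers 0, 1, 2; np_utils ships with Keras 1.x):
# from keras.utils import np_utils
# y = np_utils.to_categorical(y, 3)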
# In[6]:
from sklearn.utils import shuffle
X, y = shuffle(X, y, random_state=0)
# In[7]:
model = Sequential()
model.add(Convolution2D(32, 8, 8, border_mode='same', activation='relu', init='uniform', batch_input_shape=(None, 90, 120, 3)))
model.add(Dropout(0.2))
model.add(Convolution2D(16, 4, 4, border_mode='same', activation='relu', init='uniform'))
model.add(Dropout(0.2))
model.add(Convolution2D(8, 2, 2, border_mode='same', activation='relu', init='uniform'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(300, init='uniform', activation='relu'))
model.add(Dropout(0.1))
model.add(Dense(30, init='uniform', activation='relu'))
model.add(Dense(3, init='uniform', activation='softmax'))
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
model.fit(X, y, nb_epoch=10, batch_size=50, validation_split=0.2, shuffle=True)
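# Print the layer shapes and parameter counts as a quick sanity check.
model.summary()
# Note: categorical_crossentropy is the conventional loss for a 3-way softmax;
# with binary_crossentropy Keras reports per-output binary accuracy, which can
# look higher than the true categorical accuracy.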
# In[8]:
# model = Sequential()
# model.add(Flatten(batch_input_shape=(None, 90, 120, 3)))
# model.add(Dense(3000, init='uniform', activation='relu'))
# model.add(Dropout(0.2))
# model.add(Dense(300, init='uniform', activation='relu'))
# model.add(Dropout(0.1))
# model.add(Dense(30, init='uniform', activation='relu'))
# model.add(Dense(3, init='uniform', activation='softmax'))
# model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
# model.fit(X, y, nb_epoch=15, batch_size=50, validation_split=0.2, shuffle=True)
# In[9]:
model_json = model.to_json()
with open("model.json", "w") as json_file:
json_file.write(model_json)
# serialize weights to HDF5
model.save_weights("model.h5")
print("Saved model to disk")
# In[11]:
from keras.models import model_from_json
json_file = open('model.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
loaded_model = model_from_json(loaded_model_json)
# load weights into new model
loaded_model.load_weights("model.h5")
print("Loaded model from disk")
# evaluate loaded model on test data
loaded_model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
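# A minimal evaluation sketch: the gist keeps no separate test set, so this
# would simply reuse the shuffled training arrays X and y from above.
# score = loaded_model.evaluate(X, y, verbose=0)
# print("%s: %.2f%%" % (loaded_model.metrics_names[1], score[1] * 100))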
# In[12]:
def predict_zyle(file):
    """Preprocess one image the same way as the training data and return the
    index of the most probable class."""
    (t, c) = read_zyles_image(file)
    t = cv2.subtract(img0, t)
    ta = []
    ta.append(t)
    values = loaded_model.predict(np.array(ta) / 255.0)
    return np.argmax(values[0])
# In[13]:
# Classify every capture from 2016-12-06 found under /Users/pku/temp.
for subdir, dirs, files in os.walk('/Users/pku/temp'):
    for file in fnmatch.filter(files, '20161206*.jpg'):
        print("%s: %d" % (file, predict_zyle(os.path.join(subdir, file))))
# In[ ]: