@TimSC
Created April 17, 2020 03:56
Classification of COIL-100 images using Keras
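# Note on the expected input (inferred from GetCoil100FileList below, not part
# of the original gist description): the script assumes the Columbia COIL-100
# dataset has been unpacked into a single directory containing 100 objects x 72
# viewing angles (5 degree steps), i.e. 7200 PNG files named like
#   obj1__0.png, obj1__5.png, ..., obj1__355.png, ..., obj100__355.png
# Each COIL-100 image is a 128x128 RGB image, which matches the
# input_shape=(128, 128, 3) used in GetBasicModel.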
import os
#Work around for https://github.com/tensorflow/tensorflow/issues/24496
os.environ['TF_FORCE_GPU_ALLOW_GROWTH'] = 'true'
# Work around for https://github.com/tensorflow/tensorflow/issues/33024
import tensorflow.compat as compat
compat.v1.disable_eager_execution()
import imageio
from sklearn.model_selection import KFold, train_test_split
from skimage.color import rgb2gray
from matplotlib import pyplot as plt
import tensorflow.keras as keras
import numpy as np

def GetCoil100FileList(pth):
    objects = []
    for i in range(1, 101):
        fileList = []
        for ang in range(0, 360, 5):
            fina = os.path.join(pth, "obj{}__{}.png".format(i, ang))
            if os.path.exists(fina):
                fileList.append(fina)
            else:
                raise RuntimeError("Missing file in coil100: {}".format(fina))
        objects.append(fileList)
    return objects
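
# Hypothetical sanity check (not in the original gist): with a complete
# COIL-100 directory the nested file list holds 100 objects with 72 views each:
#   objects = GetCoil100FileList("/home/tim/Downloads/coil-100")
#   assert len(objects) == 100
#   assert all(len(fileList) == 72 for fileList in objects)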

def LoadCoil100(pth):
    objects = GetCoil100FileList(pth)
    dataX = []
    dataY = []
    for label, obj in enumerate(objects):
        for fina in obj:
            img = imageio.imread(fina)
            dataX.append(img)
            dataY.append(label)
    return dataX, dataY

def ScaleImgData(dataX):
    dataXout = []
    for img in dataX:
        #img2 = rgb2gray(img)
        img2 = np.array(img, dtype=np.float32) / 255.0
        dataXout.append(img2)
    dataXout = np.array(dataXout)
    return dataXout.reshape(dataXout.shape[0], dataXout.shape[1], dataXout.shape[2], 3)
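
# Note: assuming all 7200 COIL-100 images are loaded, ScaleImgData returns a
# float32 array of shape (7200, 128, 128, 3) with pixel values scaled to [0, 1].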

def GetBasicModel():
    # https://machinelearningmastery.com/how-to-develop-a-convolutional-neural-network-from-scratch-for-mnist-handwritten-digit-classification/
    model = keras.models.Sequential()
    model.add(keras.layers.Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_uniform', input_shape=(128, 128, 3)))
    model.add(keras.layers.MaxPooling2D((2, 2)))
    model.add(keras.layers.Flatten())
    model.add(keras.layers.Dense(200, activation='relu', kernel_initializer='he_uniform'))
    model.add(keras.layers.Dense(100, activation='softmax'))
    # compile model
    opt = keras.optimizers.SGD(lr=0.01, momentum=0.9)
    model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy'])
    return model
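
# Rough shape bookkeeping for the model above (added for clarity): the 3x3
# convolution maps the 128x128x3 input to 126x126x32 feature maps, 2x2 max
# pooling halves that to 63x63x32, and Flatten feeds 63*63*32 = 127008 values
# into the Dense(200) layer, followed by a 100-way softmax (one class per
# COIL-100 object).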

# plot diagnostic learning curves
def summarize_diagnostics(histories):
    for i in range(len(histories)):
        # plot loss
        plt.subplot(2, 1, 1)
        plt.title('Cross Entropy Loss')
        plt.plot(histories[i].history['loss'], color='blue', label='train')
        plt.plot(histories[i].history['val_loss'], color='orange', label='test')
        # plot accuracy
        plt.subplot(2, 1, 2)
        plt.title('Classification Accuracy')
        plt.plot(histories[i].history['accuracy'], color='blue', label='train')
        plt.plot(histories[i].history['val_accuracy'], color='orange', label='test')
    plt.show()

def RunTests(dataX, dataY):
    X_trainVal, X_test, y_trainVal, y_test = train_test_split(dataX, dataY, test_size=0.25)
    X_train, X_val, y_train, y_val = train_test_split(X_trainVal, y_trainVal, test_size=0.25)
    model = GetBasicModel()
    history = model.fit(X_train, y_train, epochs=10, batch_size=32, validation_data=(X_val, y_val), verbose=0)
    _, acc = model.evaluate(X_test, y_test, verbose=0)
    print("Performance", acc)
    summarize_diagnostics([history])
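
# Split sizes implied by the two train_test_split calls (assuming 7200 images):
# 25% is held out for test (1800), then 25% of the remaining 5400 for
# validation (1350), leaving 4050 images for training. The splits are random,
# so all 100 object classes normally appear in every subset.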

if __name__ == "__main__":
    dataX, dataY = LoadCoil100("/home/tim/Downloads/coil-100")
    dataX = ScaleImgData(dataX)
    dataY = keras.utils.to_categorical(dataY)
    RunTests(dataX, dataY)