UnilNet_K.py
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 19 12:27:25 2017
@author: mlaib
"""
import numpy as np
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
from keras.layers import Conv2D, Dense, Input, GlobalMaxPooling2D
from keras.layers import BatchNormalization
from keras.models import Model
from keras.optimizers import Adamax
from keras.callbacks import ModelCheckpoint, EarlyStopping, ReduceLROnPlateau
import scipy.io as sc
mat4 = sc.loadmat('D:/Data/sat-4-full.mat')
print(mat4.keys())
train_X = np.moveaxis(mat4['train_x'], 3, 0)
train_Y = np.swapaxes(mat4['train_y'], 1, 0)
test_X = np.moveaxis(mat4['test_x'], 3, 0)
test_Y = np.swapaxes(mat4['test_y'], 1, 0)
print("Data loaded")
X1_train, X1_valid, yy_train, yy_valid = train_test_split(train_X, train_Y, random_state=1, train_size=0.8)
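# Sanity check (a minimal sketch; assumes the SAT-4 .mat file stores 28x28x4 patches
# with the sample axis last, which the moveaxis/swapaxes calls above move to the front):
print(train_X.shape, train_Y.shape)   # expected: (N, 28, 28, 4) and (N, 4)
print(test_X.shape, test_Y.shape)
# Optionally, the raw uint8 bands could be rescaled to [0, 1] before training,
# e.g. train_X = train_X.astype('float32') / 255.0; this script feeds the raw values.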
# plot some images
def show_it(ID, Data=train_X, CL=train_Y):
    """Display the RGB composite of patch `ID` with its one-hot class label in the title."""
    r = Data[ID, :, :, 0]
    g = Data[ID, :, :, 1]
    b = Data[ID, :, :, 2]
    rgbArray = np.zeros((28, 28, 3), 'uint8')
    rgbArray[..., 0] = r
    rgbArray[..., 1] = g
    rgbArray[..., 2] = b
    plt.imshow(rgbArray)
    plt.title('Class ' + str(CL[ID, :]))
    ax = plt.gca()
    ax.ticklabel_format(useOffset=False, style='plain')
    plt.setp(ax.get_xticklabels(), rotation=90)
show_it(ID=10130, Data=train_X, CL=train_Y)
show_it(ID=11, Data=test_X, CL=test_Y)
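# For readability, the one-hot vector in the plot title can be mapped to a class name.
# A minimal sketch; the class order below is an assumption about how the SAT-4 labels
# are encoded and should be checked against the dataset documentation:
SAT4_CLASSES = ['barren land', 'trees', 'grassland', 'none']
def class_name(one_hot):
    return SAT4_CLASSES[int(np.argmax(one_hot))]
print(class_name(train_Y[10130, :]))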
def get_model():
    bn_momentum = 0
    p_activation = "elu"
    input_1 = Input(shape=(28, 28, 4), name="X_1")
    # Normalize the 4-band input, then one small convolutional block
    img_1 = BatchNormalization(momentum=bn_momentum)(input_1)
    img_1 = Conv2D(32, kernel_size=(3, 3), activation=p_activation)(img_1)
    # img_1 = Conv2D(64, kernel_size=(3, 3), activation=p_activation)(img_1)
    img_1 = GlobalMaxPooling2D()(img_1)
    dense_layer = Dense(40, activation=p_activation)(img_1)
    output = Dense(4, activation="softmax")(dense_layer)
    model = Model(input_1, output)
    optimizer = Adamax(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)
    model.compile(loss="categorical_crossentropy", optimizer=optimizer, metrics=["accuracy"])
    return model
model = get_model()
earlyStopping = EarlyStopping(monitor='val_loss', patience=20, verbose=0, mode='min')
mcp_save = ModelCheckpoint('wsat4.hdf5', save_best_only=True, monitor='val_loss', mode='min')
reduce_lr_loss = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=10, verbose=1, epsilon=1e-4, mode='min')
model.summary()
history = model.fit(X1_train, yy_train,
                    epochs=30,
                    validation_data=(X1_valid, yy_valid),
                    batch_size=900,
                    callbacks=[earlyStopping, mcp_save, reduce_lr_loss])
print(history.history.keys())
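# The metric names in history.history depend on the Keras version: older releases
# report 'acc'/'val_acc', newer ones 'accuracy'/'val_accuracy'. A small fallback
# (assumes one of the two spellings is present) keeps the plots below working:
acc_key = 'acc' if 'acc' in history.history else 'accuracy'
val_acc_key = 'val_' + acc_key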
fig = plt.figure()
plt.plot(history.history[acc_key])
plt.plot(history.history[val_acc_key])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
fig = plt.figure()
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='lower left')
model.load_weights(filepath = 'wsat4.hdf5')
print("Test:")
print(model.evaluate(test_X, test_Y, verbose=1, batch_size=400))
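# Beyond overall accuracy, a per-class breakdown can be informative. A minimal sketch
# using scikit-learn (already a dependency via train_test_split); class indices follow
# the one-hot encoding of test_Y:
from sklearn.metrics import confusion_matrix
preds = model.predict(test_X, batch_size=400)
print(confusion_matrix(test_Y.argmax(axis=1), preds.argmax(axis=1)))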
print("The end")