@mehdidc
Created July 12, 2016 16:50
from keras.layers import Dense, Input, Dropout
from keras.models import Sequential
from keras.optimizers import Adadelta
from sklearn.datasets import make_blobs
from keras.utils.np_utils import to_categorical
from keras.callbacks import EarlyStopping, ModelCheckpoint
#from keras.regularizers import l2, l1 # uncomment to experiment with weight regularization
import matplotlib.pyplot as plt
X, y = make_blobs(n_samples=4000, n_features=10) # 3 gaussian clusters by default (centers=3)
# convert the integer class labels to one-hot vectors,
# which is what keras expects. with 3 classes, for instance,
# y must be a matrix of nb_examples rows and 3 columns:
# in each row, column i (counting from zero) is 1 if the
# class is i and 0 otherwise. so class 0 becomes [1 0 0],
# class 1 becomes [0 1 0] and class 2 becomes [0 0 1].
y = to_categorical(y)
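# quick sanity check of the encoding (make_blobs defaults to centers=3, hence 3 classes):
# to_categorical([0, 1, 2]) returns the 3x3 identity matrix
# [[ 1.  0.  0.]
#  [ 0.  1.  0.]
#  [ 0.  0.  1.]]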
# setup training, validation and test set
X_train, y_train = X[0:1000], y[0:1000]
X_valid, y_valid = X[1000:2000], y[1000:2000]
X_test, y_test = X[2000:], y[2000:]
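# resulting shapes: X_train (1000, 10), X_valid (1000, 10), X_test (2000, 10);
# the y matrices have 3 columns each, one per class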
# construct the architecture of the neural net
model = Sequential()
model.add(Dense(500, activation='relu', input_dim=10)) # 500 units in first layer, 10 is nb of input variables
model.add(Dense(500, activation='relu')) # 500 units in second layer
#model.add(Dropout(0.5)) #uncomment to apply dropout after the second layer
model.add(Dense(100, activation='relu')) # 100 units in third layer
#model.add(Dropout(0.5)) #uncomment to apply dropout after the third layer
model.add(Dense(3, activation='softmax')) # 3 units in output layer (nb of classes)
model.summary()
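# summary() prints each layer's output shape and parameter count;
# e.g. the first Dense layer has 10*500 weights + 500 biases = 5500 parameters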
model.compile(loss='categorical_crossentropy',
              optimizer=Adadelta(lr=0.1), # http://keras.io/optimizers/
              metrics=['accuracy'])
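# swapping optimizers is a one-line change, e.g. (keras 1.x API, as used here):
# from keras.optimizers import Adam
# model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=1e-3), metrics=['accuracy'])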
# train the model
batch_size = 128
nb_epoch = 20
model_filename = 'model.h5' # best weights are saved here (HDF5 format, requires h5py)
callbacks = [
    EarlyStopping(monitor='val_acc',
                  patience=10, # stop if val_acc has not improved for 10 consecutive epochs
                  verbose=1,
                  mode='auto'),
    # ModelCheckpoint writes the model to model_filename, keeping only
    # the weights from the epoch with the best validation accuracy
    # (because we monitor 'val_acc' and set save_best_only=True)
    ModelCheckpoint(model_filename, monitor='val_acc',
                    verbose=1,
                    save_best_only=True,
                    mode='auto'),
]
history = model.fit(X_train, y_train,
                    batch_size=batch_size,
                    nb_epoch=nb_epoch,
                    callbacks=callbacks,
                    verbose=1,
                    validation_data=(X_valid, y_valid))
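# history.history is a dict of per-epoch metrics; with metrics=['accuracy'] and
# validation_data set, it has the keys 'loss', 'acc', 'val_loss' and 'val_acc'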
model.load_weights(model_filename) # reload the weights from the epoch with the best validation accuracy
score = model.evaluate(X_test, y_test, verbose=0) # evaluate on test data
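# evaluate returns [test loss, test accuracy], in the order given by the loss and metrics at compile time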
print('Test score:', score[0])
print('Test accuracy:', score[1])
# plot learning curves
plt.plot(history.history['acc'], label='train accuracy')
plt.plot(history.history['val_acc'], label='valid accuracy')
plt.legend()
plt.show()
plt.plot(history.history['loss'], label='train loss')
plt.plot(history.history['val_loss'], label='valid loss')
plt.legend()
plt.show()
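# to keep the figures on disk instead of just displaying them, call e.g.
# plt.savefig('loss_curves.png') before plt.show() (the filename is arbitrary)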