"""
Trains a simple deep NN on the MNIST dataset.
Gets to 98.40% test accuracy after 20 epochs
(there is *a lot* of margin for parameter tuning).
2 seconds per epoch on a K520 GPU.
"""
from __future__ import print_function
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.optimizers import RMSprop
import tensorflow as tf
# Capture the default TensorFlow graph at import time so run() can re-enter it
# later (a common workaround when run() is invoked from another thread, e.g. a
# web server worker).
GRAPH = tf.get_default_graph()
def run():
    with GRAPH.as_default():
        batch_size = 128
        num_classes = 10
        # epochs = 20
        epochs = 4

        # the data, split between train and test sets
        (x_train, y_train), (x_test, y_test) = mnist.load_data()

        x_train = x_train.reshape(60000, 784)
        x_test = x_test.reshape(10000, 784)
        x_train = x_train.astype('float32')
        x_test = x_test.astype('float32')
        x_train /= 255
        x_test /= 255
        print(x_train.shape[0], 'train samples')
        print(x_test.shape[0], 'test samples')

        # convert class vectors to binary class matrices
        y_train = keras.utils.to_categorical(y_train, num_classes)
        y_test = keras.utils.to_categorical(y_test, num_classes)

        model = Sequential()
        model.add(Dense(512, activation='relu', input_shape=(784,)))
        model.add(Dropout(0.2))
        model.add(Dense(512, activation='relu'))
        model.add(Dropout(0.2))
        model.add(Dense(num_classes, activation='softmax'))

        model.summary()

        print('before compiling')
        model.compile(loss='categorical_crossentropy',
                      optimizer=RMSprop(),
                      metrics=['accuracy'])
        print('after compiling')

        history = model.fit(x_train, y_train,
                            batch_size=batch_size,
                            epochs=epochs,
                            verbose=1,
                            validation_data=(x_test, y_test))
        print('after training')

        score = model.evaluate(x_test, y_test, verbose=0)
        print('after evaluating')
        print('Test loss:', score[0])
        print('Test accuracy:', score[1])
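

# A minimal entry point, assuming the script is meant to be executed directly;
# the original gist defines run() but never calls it (it may instead be imported
# and invoked from elsewhere, e.g. a web application).
if __name__ == '__main__':
    run()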