@cadrev
Created February 10, 2016 10:28
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This is an implementation of a denoising autoencoder as
# described in the following paper:
# http://www.jmlr.org/papers/volume11/vincent10a/vincent10a.pdf
#
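# Training proceeds in two stages: greedy layer-wise pretraining, where
# each layer is trained as a denoising autoencoder to reconstruct its
# clean input from a Gaussian-corrupted copy, followed by supervised
# fine-tuning of the stacked encoders with a softmax layer on top.
#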
import numpy as np
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import containers
from keras.layers.core import Dense, AutoEncoder
from keras.layers.noise import GaussianNoise
from keras.optimizers import RMSprop
from keras.utils import np_utils
np.random.seed(1337)
batch_size = 128
nb_classes = 10
nb_epoch = 20
# Encoder stack sizes (784 -> 600 -> 500 -> 400) and the standard
# deviation of the Gaussian corruption used when pretraining each layer.
nb_hidden_layers = [784, 600, 500, 400]
nb_noise = [0.3, 0.2, 0.1]
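# Load MNIST, flatten each 28x28 image to a 784-vector, and scale to [0, 1].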
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = X_train.reshape(60000, 784)
X_test = X_test.reshape(10000, 784)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train /= 255
X_test /= 255
print('Train samples: {}'.format(X_train.shape[0]))
print('Test samples: {}'.format(X_test.shape[0]))
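# One-hot encode the labels for the softmax output layer.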
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)
encoders = []
X_train_tmp = np.copy(X_train)
rms = RMSprop()
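# Greedy layer-wise pretraining: each pass corrupts the current
# representation with Gaussian noise, trains a single-layer autoencoder
# to reconstruct the clean input, keeps the encoder half, and re-encodes
# the data as input for the next layer.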
for i, (n_in, n_out) in enumerate(
        zip(nb_hidden_layers[:-1], nb_hidden_layers[1:]), start=1):
    print('Training the layer {}: Input {} -> Output {}'.format(
        i, n_in, n_out))
    ae = Sequential()
    encoder = containers.Sequential([
        GaussianNoise(nb_noise[i - 1], input_shape=(n_in,)),
        Dense(input_dim=n_in, output_dim=n_out, activation='sigmoid'),
    ])
    decoder = containers.Sequential([
        Dense(input_dim=n_out, output_dim=n_in, activation='sigmoid'),
    ])
    ae.add(AutoEncoder(
        encoder=encoder, decoder=decoder,
        output_reconstruction=False,
    ))
    ae.compile(loss='mean_squared_error', optimizer=rms)
    ae.fit(X_train_tmp, X_train_tmp,
           batch_size=batch_size, nb_epoch=nb_epoch)
    encoders.append(ae.layers[0].encoder)
    X_train_tmp = ae.predict(X_train_tmp)
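# Stack the pretrained encoders and add a softmax output layer for
# supervised fine-tuning on the labels.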
model = Sequential()
for encoder in encoders:
    model.add(encoder)
model.add(Dense(
    input_dim=nb_hidden_layers[-1], output_dim=nb_classes,
    activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer=rms)
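# Baseline before fine-tuning: the encoders are pretrained, but the
# softmax layer still has its random initialization.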
score = model.evaluate(X_test, Y_test, show_accuracy=True, verbose=0)
print('Test score before fine-tuning: {}'.format(score[0]))
print('Test accuracy before fine-tuning: {}'.format(score[1]))
model.fit(
    X_train, Y_train, batch_size=batch_size,
    nb_epoch=nb_epoch, show_accuracy=True,
    validation_data=(X_test, Y_test),
)
score = model.evaluate(X_test, Y_test, show_accuracy=True, verbose=0)
print('Test score after fine-tuning: {}'.format(score[0]))
print('Test accuracy after fine-tuning: {}'.format(score[1]))
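
# ---------------------------------------------------------------------------
# Hedged sketch, not part of the original gist: `containers` and `AutoEncoder`
# belong to the pre-1.0 Keras API used above and were removed in later
# releases. The functions below are one possible translation of the same
# procedure to the modern `tensorflow.keras` functional API. The names
# `pretrain_stack` and `build_classifier` are hypothetical; layer sizes,
# noise levels, and hyperparameters mirror the script above. Imports are
# kept inside the functions so they do not clash with the 0.x-era imports
# at the top of this file.
# ---------------------------------------------------------------------------
def pretrain_stack(x, sizes=(784, 600, 500, 400), noise=(0.3, 0.2, 0.1),
                   epochs=20, batch_size=128):
    from tensorflow import keras
    from tensorflow.keras import layers
    encoders = []
    for (n_in, n_out), sigma in zip(zip(sizes[:-1], sizes[1:]), noise):
        inp = keras.Input(shape=(n_in,))
        corrupted = layers.GaussianNoise(sigma)(inp)  # active in training only
        encode = layers.Dense(n_out, activation='sigmoid')
        decode = layers.Dense(n_in, activation='sigmoid')
        # Train the pair to reconstruct the clean input from the corrupted one.
        ae = keras.Model(inp, decode(encode(corrupted)))
        ae.compile(loss='mean_squared_error', optimizer='rmsprop')
        ae.fit(x, x, batch_size=batch_size, epochs=epochs)
        encoders.append(encode)
        # Re-encode the data (the noise layer is inactive at inference) so the
        # next layer trains on this layer's hidden representation.
        x = keras.Model(inp, encode(inp)).predict(x, batch_size=batch_size)
    return encoders


def build_classifier(encoders, n_in=784, n_classes=10):
    from tensorflow import keras
    from tensorflow.keras import layers
    inp = keras.Input(shape=(n_in,))
    h = inp
    for encode in encoders:
        h = encode(h)  # reuse the pretrained Dense layers
    out = layers.Dense(n_classes, activation='softmax')(h)
    model = keras.Model(inp, out)
    model.compile(loss='categorical_crossentropy', optimizer='rmsprop',
                  metrics=['accuracy'])
    return model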