Skip to content

Instantly share code, notes, and snippets.

@skeeet
Forked from guicho271828/fail.py
Created April 3, 2017 10:27
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save skeeet/ca6549e786f52fcfeaa2812a3e9a4a3e to your computer and use it in GitHub Desktop.
Minimal failure cases; the failure occurs only on the TensorFlow backend.
# --- Case 1: a Sequential model wrapped in a functional Model ----------------
# Fails on the TensorFlow backend (CPU and GPU); works without problems on
# Theano. Builds a dense 784 -> 128 -> 784 autoencoder, trains it briefly on
# flattened MNIST, then round-trips it through save()/load_model() to
# exercise (de)serialization.
import os

from keras.layers import Input, Dense
from keras.models import Model, Sequential
from keras.datasets import mnist
from keras.layers.normalization import BatchNormalization as BN

autoencoder1 = Sequential([
    Dense(128, activation='relu', input_shape=(784,)),
    BN(),
    Dense(784, activation='relu'),
])

# Wrapping the Sequential model in a functional Model is the step that
# triggers the TF-backend failure being reported here.
x = Input(shape=(784,))
y = autoencoder1(x)
autoencoder = Model(input=x, output=y)  # Keras 1.x kwargs (input/output)
autoencoder.compile(optimizer='rmsprop', loss='mse')

################################################################
# Train the autoencoder on MNIST digits (inputs reconstruct themselves,
# so labels are discarded).
(x_train, _), (x_test, _) = mnist.load_data()
x_train = x_train.astype('float32') / 255.
print(x_train.shape)
x_train = x_train.reshape((x_train.shape[0], 784))
x_test = x_test.astype('float32') / 255.
x_test = x_test.reshape((x_test.shape[0], 784))
print('x_train.shape:', x_train.shape)

from keras.callbacks import CSVLogger, ReduceLROnPlateau, EarlyStopping

# CSVLogger does not create missing parent directories; without this the
# fit() call dies with an IOError before training even starts.
os.makedirs("vae-conv-deconv", exist_ok=True)
autoencoder.fit(x_train, x_train,
                shuffle=True,
                nb_epoch=1,  # Keras 1.x spelling of `epochs`
                batch_size=256,
                validation_data=(x_test, x_test),
                callbacks=[CSVLogger("vae-conv-deconv/loss.csv"),
                           # EarlyStopping(patience=6,verbose=1,mode='min'),
                           ReduceLROnPlateau(verbose=1, patience=20,
                                             factor=0.5, mode='min',
                                             epsilon=0.0001)])

# Round-trip through HDF5 to exercise model (de)serialization.
h5 = "ae.h5"
autoencoder.save(h5)
del autoencoder
import keras.models
autoencoder = keras.models.load_model(h5)
autoencoder.summary()
# --- Case 2: same wrapping, but with tf.get_variable_scope().reuse_variables()
# With reuse_variables, model *construction* succeeds on the TensorFlow
# backend, but load_model() still fails afterwards regardless of the
# reuse_variables setting.
import os

from keras.layers import Input, Dense
from keras.models import Model, Sequential
from keras.datasets import mnist
from keras.layers.normalization import BatchNormalization as BN

autoencoder1 = Sequential([
    Dense(128, activation='relu', input_shape=(784,)),
    BN(),
    Dense(784, activation='relu'),
])

### with reuse_variables, construction is ok
import tensorflow as tf
x = Input(shape=(784,))
tf.get_variable_scope().reuse_variables()
y = autoencoder1(x)
autoencoder = Model(input=x, output=y)  # Keras 1.x kwargs (input/output)
autoencoder.compile(optimizer='rmsprop', loss='mse')

################################################################
# Train the autoencoder on MNIST digits (labels discarded).
(x_train, _), (x_test, _) = mnist.load_data()
x_train = x_train.astype('float32') / 255.
print(x_train.shape)
x_train = x_train.reshape((x_train.shape[0], 784))
x_test = x_test.astype('float32') / 255.
x_test = x_test.reshape((x_test.shape[0], 784))
print('x_train.shape:', x_train.shape)

from keras.callbacks import CSVLogger, ReduceLROnPlateau, EarlyStopping

# CSVLogger does not create missing parent directories -- ensure it exists
# so fit() does not fail with an IOError on the log path.
os.makedirs("vae-conv-deconv", exist_ok=True)
autoencoder.fit(x_train, x_train,
                shuffle=True,
                nb_epoch=1,  # Keras 1.x spelling of `epochs`
                batch_size=256,
                validation_data=(x_test, x_test),
                callbacks=[CSVLogger("vae-conv-deconv/loss.csv"),
                           # EarlyStopping(patience=6,verbose=1,mode='min'),
                           ReduceLROnPlateau(verbose=1, patience=20,
                                             factor=0.5, mode='min',
                                             epsilon=0.0001)])

h5 = "ae.h5"
autoencoder.save(h5)
del autoencoder
import keras.models
### fails to load, regardless of reuse_variables
# tf.get_variable_scope().reuse_variables()
autoencoder = keras.models.load_model(h5)
autoencoder.summary()
# --- Case 3: use the Sequential model directly (no Model wrapping) -----------
# This variant works on all backends: the identical architecture is trained
# and save()/load_model() round-trips cleanly, which isolates the failure in
# cases 1-2 to the Sequential-inside-Model wrapping.
import os

from keras.layers import Input, Dense
from keras.models import Model, Sequential
from keras.datasets import mnist
from keras.layers.normalization import BatchNormalization as BN

autoencoder = Sequential([
    Dense(128, activation='relu', input_shape=(784,)),
    BN(),
    Dense(784, activation='relu'),
])
autoencoder.compile(optimizer='rmsprop', loss='mse')

################################################################
# Train the autoencoder on MNIST digits (labels discarded).
(x_train, _), (x_test, _) = mnist.load_data()
x_train = x_train.astype('float32') / 255.
print(x_train.shape)
x_train = x_train.reshape((x_train.shape[0], 784))
x_test = x_test.astype('float32') / 255.
x_test = x_test.reshape((x_test.shape[0], 784))
print('x_train.shape:', x_train.shape)

from keras.callbacks import CSVLogger, ReduceLROnPlateau, EarlyStopping

# CSVLogger does not create missing parent directories -- ensure it exists
# so fit() does not fail with an IOError on the log path.
os.makedirs("vae-conv-deconv", exist_ok=True)
autoencoder.fit(x_train, x_train,
                shuffle=True,
                nb_epoch=1,  # Keras 1.x spelling of `epochs`
                batch_size=256,
                validation_data=(x_test, x_test),
                callbacks=[CSVLogger("vae-conv-deconv/loss.csv"),
                           # EarlyStopping(patience=6,verbose=1,mode='min'),
                           ReduceLROnPlateau(verbose=1, patience=20,
                                             factor=0.5, mode='min',
                                             epsilon=0.0001)])

h5 = "ae.h5"
autoencoder.save(h5)
del autoencoder
import keras.models
autoencoder = keras.models.load_model(h5)
autoencoder.summary()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment