Last active
January 9, 2018 13:06
-
-
Save manashmandal/7fafbcff8e0c56c480efe5e1d4890c93 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def build_deep_conv_autoencoder(img_shape=(44, 44, 3), code_size=32):
    """Build a deep convolutional autoencoder for 44x44x3 images.

    Parameters
    ----------
    img_shape : tuple of int
        (height, width, channels) of the input images. The decoder's
        layer sizes are hard-coded for (44, 44, 3); other shapes will
        produce a decoder whose output shape does not match the input.
    code_size : int, default 32
        Dimensionality of the latent code produced by the encoder.
        (Original code had no default here, which is a SyntaxError —
        a non-default parameter cannot follow a defaulted one.)

    Returns
    -------
    (encoder, decoder) : tuple of keras.models.Sequential
        encoder maps img_shape -> (code_size,);
        decoder maps (code_size,) -> img_shape.
    """
    H, W, C = img_shape

    # Encoder: 4 x (conv + 2x2 max-pool) halving spatial dims each time
    # (44 -> 22 -> 11 -> 5 -> 2), then flatten to the latent code.
    encoder = keras.models.Sequential()
    encoder.add(L.InputLayer(img_shape))
    encoder.add(L.Conv2D(32, kernel_size=(3, 3), strides=1, padding='same', activation='elu'))
    encoder.add(L.MaxPool2D(pool_size=(2, 2)))
    encoder.add(L.Conv2D(64, kernel_size=(3, 3), strides=1, padding='same', activation='elu'))
    encoder.add(L.MaxPool2D(pool_size=(2, 2)))
    encoder.add(L.Conv2D(128, kernel_size=(3, 3), strides=1, padding='same', activation='elu'))
    encoder.add(L.MaxPool2D(pool_size=(2, 2)))
    encoder.add(L.Conv2D(256, kernel_size=(3, 3), strides=1, padding='same', activation='elu'))
    encoder.add(L.MaxPool2D(pool_size=(2, 2)))
    encoder.add(L.Flatten())
    encoder.add(L.Dense(code_size))

    # Decoder: mirror of the encoder. Dense(1024) == 2*2*256 so the
    # Reshape below is consistent; the transposed convs upsample
    # 2 -> 5 -> 11 -> 22 -> 44 (first two use 'valid' padding to hit
    # the odd sizes 5 and 11 exactly).
    decoder = keras.models.Sequential()
    decoder.add(L.InputLayer((code_size,)))
    decoder.add(L.Dense(1024))
    decoder.add(L.Reshape((2, 2, 256)))
    decoder.add(L.Conv2DTranspose(filters=128, kernel_size=(3, 3), strides=2, activation='elu', padding='valid'))
    decoder.add(L.Conv2DTranspose(filters=64, kernel_size=(3, 3), strides=2, activation='elu', padding='valid'))
    decoder.add(L.Conv2DTranspose(filters=32, kernel_size=(3, 3), strides=2, activation='elu', padding='same'))
    # Final layer: linear activation so raw pixel values in [0, 1] can
    # be regressed directly with MSE.
    decoder.add(L.Conv2DTranspose(filters=3, kernel_size=(3, 3), strides=2, activation=None, padding='same'))

    return encoder, decoder
# I should've automatically calculated the shapes but I just prepared the
# code to handle 44, 44, 3 shaped images only.
IMG_DIM = 44

# Load the LFW dataset and normalize raw uint8 pixels to [0, 1] floats.
X, attr = load_lfw_dataset(use_raw=True, dimx=IMG_DIM, dimy=IMG_DIM)
X = X.astype('float32') / 255.0
img_shape = X.shape[1:]
X_train, X_test = train_test_split(X, test_size=0.1, random_state=42)

encoder, decoder = build_deep_conv_autoencoder((IMG_DIM, IMG_DIM, 3), code_size=32)

# Chain encoder and decoder into one end-to-end trainable model.
# NOTE: the original snippet called autoencoder.fit(...) BEFORE
# autoencoder was defined (a NameError), then repeated the whole
# wire/compile/fit sequence — this is the single correctly-ordered version.
inp = L.Input(img_shape)
code = encoder(inp)
reconstruction = decoder(code)

autoencoder = keras.models.Model(inp, reconstruction)
autoencoder.compile('adamax', 'mse')

# Autoencoder objective: reconstruct the input, so x and y are both X_train.
# validation_data is passed as a tuple, the form documented by Keras.
autoencoder.fit(x=X_train, y=X_train, epochs=120,
                validation_data=(X_test, X_test))
""" | |
Train on 11828 samples, validate on 1315 samples | |
Epoch 114/120 | |
11828/11828 [==============================] - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s 
- loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - 5s 429us/step - loss: 0.0043 - val_loss: 0.0056 | |
Epoch 115/120 | |
11828/11828 [==============================] - ETA: 5s - loss: 0.004 - ETA: 5s - loss: 0.004 - ETA: 5s - loss: 0.004 - ETA: 5s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s 
- loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - 5s 451us/step - loss: 0.0043 - val_loss: 0.0056 | |
Epoch 116/120 | |
11828/11828 [==============================] - ETA: 4s - loss: 0.003 - ETA: 5s - loss: 0.004 - ETA: 5s - loss: 0.004 - ETA: 5s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s 
- loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - 5s 442us/step - loss: 0.0043 - val_loss: 0.0056 | |
Epoch 117/120 | |
11828/11828 [==============================] - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s 
- loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - 5s 422us/step - loss: 0.0042 - val_loss: 0.0056 | |
Epoch 118/120 | |
11828/11828 [==============================] - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s 
- loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - 5s 417us/step - loss: 0.0042 - val_loss: 0.0056 | |
Epoch 119/120 | |
11828/11828 [==============================] - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s 
- loss: 0.004 - ETA: 0s - loss: 0.004 - 5s 411us/step - loss: 0.0042 - val_loss: 0.0056 | |
Epoch 120/120 | |
11828/11828 [==============================] - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 4s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 3s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 2s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 1s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s 
- loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - ETA: 0s - loss: 0.004 - 5s 413us/step - loss: 0.0042 - val_loss: 0.0056 | |
""" |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment