Gist by @Dref360, created October 3, 2017 22:55

import numpy as np
from keras.layers import Input, Lambda
from keras.models import Model
from keras.utils import Sequence


class SSEquence(Sequence):
    def __init__(self, value=1):
        self.value = value

    def __len__(self):
        return 100

    def __getitem__(self, item):
        # Two zero-valued inputs and a constant target equal to `value`.
        return [np.zeros((30, 20, 20, 3)), np.zeros((30, 20, 20, 3))], np.ones((30, 20, 20, 3)) * self.value

    def on_epoch_end(self):
        # The target value doubles after every epoch.
        self.value *= 2


# The training loss should stay at 0 (the training Sequence starts with value 0),
# while the validation loss should keep climbing as the validation target doubles
# after each epoch.
inp = Input([20, 20, 3])
inp2 = Input([20, 20, 3])
o = Lambda(lambda i: i[0] + i[1])([inp, inp2])
mod = Model([inp, inp2], o)
mod.compile('adam', 'mse')
mod.fit_generator(SSEquence(0), 100, epochs=10, use_multiprocessing=True,
                  validation_data=SSEquence(2), validation_steps=100)
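
# A minimal standalone sanity check (an addition, not part of the original gist):
# calling on_epoch_end() by hand shows the target value doubling on each call,
# which is the behaviour the fit_generator run above relies on for the validation
# loss to keep rising while the training loss (value 0) stays at 0.
check = SSEquence(2)
for epoch in range(3):
    print('epoch %d: target value = %s' % (epoch, check.value))
    check.on_epoch_end()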