Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
from keras import backend as K
from keras.applications.vgg16 import VGG16
from keras.layers import GlobalAveragePooling2D, Flatten, Dense, Input, Dropout
from keras.models import Model, load_model
from keras.optimizers import Adam, RMSprop, SGD
from keras.preprocessing.image import ImageDataGenerator
# Root of the dogs-vs-cats dataset; the "sample" subset allows quick test runs.
path = 'data/dogscats_redux/sample/'
#path = 'data/dogscats_redux/'
# VGG16 with include_top=True expects 224x224 inputs.
target_size=(224, 224)
batch_size=64
#generate the batches
def get_batches(directory, target_size=target_size, batch_size=batch_size, shuffle=False):
    """Return a Keras directory iterator yielding image batches.

    Parameters
    ----------
    directory : str
        Path containing one sub-directory per class.
    target_size : tuple of int
        Images are resized to this (height, width); defaults to (224, 224) for VGG16.
    batch_size : int
        Number of images per batch.
    shuffle : bool
        Whether to shuffle image order between epochs.

    Returns
    -------
    A `DirectoryIterator` producing (images, one-hot labels) batches
    (class_mode='categorical').
    """
    # No augmentation or rescaling is applied — images pass through as-is.
    datagen = ImageDataGenerator()
    return datagen.flow_from_directory(directory=directory,
                                       target_size=target_size,
                                       batch_size=batch_size,
                                       class_mode='categorical',
                                       shuffle=shuffle)
# Training batches are shuffled each epoch; validation uses a larger batch
# size and a fixed order so evaluation is deterministic.
batches = get_batches(path+'train', shuffle=True)
valid_batches = get_batches(path+'valid', batch_size=batch_size*2, shuffle=False)
# initialize the model
initial_model = VGG16(weights="imagenet", include_top=True)
#finetuning
# Replace VGG16's 1000-way ImageNet classifier with a new softmax head sized
# to our class count, attached to the output of the second-to-last layer.
# (`nb_class`, `nb_sample`, `samples_per_epoch`, `nb_epoch` are Keras 1.x API.)
x = Dense(batches.nb_class, activation='softmax')(initial_model.layers[-2].output)
model = Model(initial_model.input, x)
# we freeze the other layers
# Only the original VGG16 layers are frozen; the new Dense head is not in
# initial_model.layers, so it remains trainable.
for layer in initial_model.layers: layer.trainable=False
opt = Adam(lr=0.001)
model.compile(optimizer=opt,
loss='categorical_crossentropy',
metrics=['accuracy'])
# Phase 1: train only the new classification head for 3 epochs.
model.fit_generator(batches, samples_per_epoch=batches.nb_sample, nb_epoch=3,
validation_data=valid_batches, nb_val_samples=valid_batches.nb_sample)
# Phase 2: keep the first 10 layers frozen, unfreeze everything from layer 10
# onward, and fine-tune with a much smaller learning rate so the pretrained
# weights are only gently adjusted.
for layer in model.layers[:10]:
    layer.trainable = False
for layer in model.layers[10:]:
    layer.trainable = True
# NOTE(review): 10e-5 == 1e-4; if 1e-5 was intended this is 10x too large — confirm.
opt = SGD(lr=10e-5)
# Re-compile so the updated `trainable` flags take effect.
model.compile(optimizer=opt,
              loss='categorical_crossentropy',
              metrics=['accuracy'])
model.fit_generator(batches, samples_per_epoch=batches.nb_sample, nb_epoch=20,
                    validation_data=valid_batches, nb_val_samples=valid_batches.nb_sample)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.