Created
September 17, 2017 17:00
-
-
Save agnesmm/84f2bf535d6e4b3615ebcc13bc2d5ef6 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from keras import backend as K
from keras.applications.vgg16 import VGG16
from keras.layers import GlobalAveragePooling2D, Flatten, Dense, Input, Dropout
from keras.models import Model
from keras.optimizers import Adam
from keras.preprocessing.image import ImageDataGenerator

# --- setup ---
# Root of the dogs-vs-cats dataset; switch to the sample path for a quick run.
path = 'data/dogscats_redux/'
#path = 'data/dogscats_redux/sample/'
target_size = (224, 224)  # VGG16's expected spatial input resolution
batch_size = 64
def get_batches(directory, target_size=target_size, batch_size=batch_size):
    """Return an iterator over image batches from `directory`.

    Labels are one-hot encoded (`class_mode='categorical'`), one class per
    subdirectory. The defaults are bound to the module-level `target_size`
    and `batch_size` at definition time.
    """
    # No augmentation here — just raw images resized to `target_size`.
    datagen = ImageDataGenerator()
    return datagen.flow_from_directory(directory=directory,
                                       target_size=target_size,
                                       batch_size=batch_size,
                                       class_mode='categorical')
batches = get_batches(path + 'train')
# Validation can use larger batches: no gradients are stored, so memory allows it.
valid_batches = get_batches(path + 'valid', batch_size=batch_size*2)
# --- fine-tuning ---
# include_top=False drops VGG16's fully-connected classifier head so we can
# attach our own.
# NOTE(review): shape=(3, 224, 224) is channels-first ('th' dim ordering),
# while flow_from_directory under a TensorFlow backend yields (224, 224, 3)
# — confirm Keras' image_dim_ordering setting matches this shape.
initial_model = VGG16(weights="imagenet", include_top=False,
                      input_tensor=Input(shape=(3, 224, 224)))
x = Flatten()(initial_model.output)
# New softmax classifier sized to the dataset's classes (Keras 1 `nb_class`).
x = Dense(batches.nb_class, activation='softmax')(x)
# Freeze every pretrained VGG16 layer; only the new Dense head remains
# trainable (it is not in initial_model.layers).
model = Model(initial_model.input, x)
for layer in initial_model.layers:
    layer.trainable = False
model.compile(optimizer=Adam(lr=0.001),
              loss='categorical_crossentropy',
              metrics=['accuracy'])
# Train the new head for one epoch (Keras 1 generator API: samples_per_epoch
# counts individual samples, not batches).
model.fit_generator(batches, samples_per_epoch=batches.nb_sample, nb_epoch=1,
                    validation_data=valid_batches, nb_val_samples=valid_batches.nb_sample)

# --- predictions on new data ---
test_batches = get_batches(path + 'test', batch_size=batch_size*2)
preds = model.predict_generator(test_batches, test_batches.nb_sample)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment