@pabloformoso
Created April 27, 2018 22:18
VGG16_Keras_TransferLearning
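# Transfer learning with a pre-trained VGG16 base: load ImageNet weights
# without the classifier head, freeze the early convolutional layers, add
# a new dense classifier on top, and compile the model for fine-tuning.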
from keras import applications
from keras import optimizers
from keras.models import Model
from keras.layers import Dropout, Flatten, Dense
img_width, img_height = 300, 300
train_data_dir = "/train"
validation_data_dir = "/validation"
nb_train_samples = 200
nb_validation_samples = 40
batch_size = 20
epochs = 50
model = applications.VGG16(weights="imagenet", include_top=False, input_shape=(img_width, img_height, 3))
# Freeze the layers we don't want to train; here the first 10 layers stay frozen.
for layer in model.layers[:10]:
    layer.trainable = False
# Add custom classifier layers on top of the frozen base
x = model.output
x = Flatten()(x)
x = Dense(1024, activation="relu")(x)
x = Dropout(0.5)(x)
x = Dense(1024, activation="relu")(x)
predictions = Dense(100, activation="softmax")(x)  # 100 target classes
# Create the final model
model_final = Model(inputs=model.input, outputs=predictions)
# Compile the model
model_final.compile(
    loss="categorical_crossentropy",
    optimizer=optimizers.SGD(learning_rate=1e-4, momentum=0.9),
    metrics=["accuracy"],
)