Skip to content

Instantly share code, notes, and snippets.

@alonlavian
Created April 3, 2019 07:58
Show Gist options
  • Save alonlavian/8f1a454099064ea1ca1dcf5dfb64d8ca to your computer and use it in GitHub Desktop.
Using ResNet50 as a base model for transfer learning
from tensorflow.keras.applications.resnet50 import ResNet50, preprocess_input
from tensorflow.keras.layers import Dense, Activation, Flatten, Dropout
from tensorflow.keras.models import Sequential, Model
# Input resolution fed to the network. With include_top=False, ResNet50
# accepts arbitrary spatial sizes (the stock classifier head would have
# required 224x224).
HEIGHT = 300
WIDTH = 300

# ImageNet-pretrained convolutional base, stripped of its classification
# head so a custom head can be attached for transfer learning.
base_model = ResNet50(
    weights='imagenet',
    include_top=False,
    input_shape=(HEIGHT, WIDTH, 3),
)
def build_finetune_model(base_model, dropout, fc_layers, num_classes):
    """Attach a fresh, trainable classification head to a frozen base model.

    Args:
        base_model: Pretrained Keras model used as a feature extractor;
            all of its layers are frozen here.
        dropout: Dropout rate applied after each new fully-connected layer.
        fc_layers: Iterable of unit counts, one per new fully-connected layer.
        num_classes: Number of target classes. Values <= 2 produce a single
            sigmoid unit (binary classification, matching the original
            behavior); larger values produce a softmax over num_classes.

    Returns:
        A Keras Model mapping base_model.input to the new prediction layer.
    """
    # Freeze the pretrained base so only the new head is trained.
    for layer in base_model.layers:
        layer.trainable = False

    x = base_model.output
    x = Flatten()(x)
    for fc in fc_layers:
        # New FC layer, random init.
        x = Dense(fc, activation='relu')(x)
        x = Dropout(dropout)(x)

    # Fix: the original hard-coded a sigmoid head and silently ignored
    # num_classes. Keep the sigmoid for the binary case (backward
    # compatible with the in-file caller, which passes 2) and generalize
    # to a softmax head for true multi-class problems.
    if num_classes <= 2:
        predictions = Dense(1, activation='sigmoid')(x)
    else:
        predictions = Dense(num_classes, activation='softmax')(x)

    finetune_model = Model(inputs=base_model.input, outputs=predictions)
    return finetune_model
# Target labels for this (binary) classification task.
class_list = ["Alma", "Zohar"]

# Head configuration: two 1024-unit FC layers, each followed by dropout.
FC_LAYERS = [1024, 1024]
dropout = 0.5

# Assemble the frozen base plus the new trainable head.
finetune_model = build_finetune_model(
    base_model,
    dropout=dropout,
    fc_layers=FC_LAYERS,
    num_classes=len(class_list),
)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment