keras_tuner_demo
# -*- coding: utf-8 -*-
'''
This is the demo code for my article about KerasTuner at the link below.
https://duongnt.com/kerastuner
Please install these packages before running this script.
- pip install tensorflow
- pip install keras
- pip install keras-tuner
'''
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.preprocessing import image_dataset_from_directory
import kerastuner as kt  # note: newer keras-tuner releases use "import keras_tuner as kt"

def load_data():
    train_dataset = image_dataset_from_directory(
        directory='dataset/train',
        image_size=(150, 150)
    ).map(lambda data, label: (data / 255., label))
    val_dataset = image_dataset_from_directory(
        directory='dataset/valid',
        image_size=(150, 150)
    ).map(lambda data, label: (data / 255., label))
    test_dataset = image_dataset_from_directory(
        directory='dataset/test',
        image_size=(150, 150)
    ).map(lambda data, label: (data / 255., label))
    return train_dataset, val_dataset, test_dataset

def build_model_baseline():
    inputs = keras.Input(shape=(150, 150, 3), name='input')
    features = layers.SeparableConvolution2D(32, 3, activation='relu')(inputs)
    features = layers.MaxPooling2D((2, 2))(features)
    features = layers.SeparableConvolution2D(64, 3, activation='relu')(features)
    features = layers.MaxPooling2D((2, 2))(features)
    features = layers.SeparableConvolution2D(128, 3, activation='relu')(features)
    features = layers.MaxPooling2D((2, 2))(features)
    flatten = layers.Flatten()(features)
    dense = layers.Dropout(0.5)(flatten)
    dense = layers.Dense(128, activation='relu')(dense)
    dense = layers.Dense(32, activation='relu')(dense)
    outputs = layers.Dense(1, activation='sigmoid', name='output')(dense)
    model = keras.Model(inputs=inputs, outputs=outputs)
    model.compile(loss='binary_crossentropy', optimizer='rmsprop', metrics=['accuracy'])
    return model

def train_baseline():
    train_dataset, val_dataset, test_dataset = load_data()
    model = build_model_baseline()
    model.summary()
    callbacks = [
        # Save the weights that achieve the best validation accuracy.
        keras.callbacks.ModelCheckpoint(
            filepath='gender_prediction_baseline.keras',
            save_best_only=True,
            monitor='val_accuracy'
        )
    ]
    model.fit(train_dataset, epochs=50, validation_data=val_dataset, callbacks=callbacks)
    print(model.evaluate(test_dataset))
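
# The sketch below is not part of the original gist. train_baseline evaluates the
# in-memory model after the final epoch, while the ModelCheckpoint callback saved
# the weights with the best val_accuracy. Reloading that checkpoint before
# evaluating (as start_hyper_tunning does further down) is one way to score the
# best model instead; evaluate_best_baseline is a hypothetical helper name.
def evaluate_best_baseline():
    _, _, test_dataset = load_data()
    best_model = keras.models.load_model('gender_prediction_baseline.keras')
    print(best_model.evaluate(test_dataset))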

class GenderClassificationHyperModel(kt.HyperModel):
    def build(self, hp):
        # Same topology as the baseline, but the depth of each conv block,
        # the size of both dense layers and the dropout rate are tunable.
        inputs = keras.Input(shape=(150, 150, 3), name='input')
        first_depth = hp.Int(name='first_depth', min_value=16, max_value=32, step=16)
        features = layers.SeparableConvolution2D(first_depth, 3, activation='relu')(inputs)
        features = layers.MaxPooling2D((2, 2))(features)
        second_depth = hp.Int(name='second_depth', min_value=32, max_value=64, step=32)
        features = layers.SeparableConvolution2D(second_depth, 3, activation='relu')(features)
        features = layers.MaxPooling2D((2, 2))(features)
        third_depth = hp.Int(name='third_depth', min_value=64, max_value=128, step=64)
        features = layers.SeparableConvolution2D(third_depth, 3, activation='relu')(features)
        features = layers.MaxPooling2D((2, 2))(features)
        flatten = layers.Flatten()(features)
        dropout_rate = hp.Float(name='dropout_rate', min_value=0.3, max_value=0.7, step=0.1)
        dense = layers.Dropout(dropout_rate)(flatten)
        first_dense = hp.Int(name='first_dense', min_value=64, max_value=128, step=64)
        dense = layers.Dense(first_dense, activation='relu')(dense)
        second_dense = hp.Int(name='second_dense', min_value=16, max_value=32, step=16)
        dense = layers.Dense(second_dense, activation='relu')(dense)
        outputs = layers.Dense(1, activation='sigmoid', name='output')(dense)
        model = keras.Model(inputs=inputs, outputs=outputs)
        model.compile(loss='binary_crossentropy', optimizer='rmsprop', metrics=['accuracy'])
        return model

def start_hyper_tunning():
    train_dataset, val_dataset, test_dataset = load_data()
    hp = GenderClassificationHyperModel()
    tuner = kt.BayesianOptimization(hp, objective='val_accuracy',
        max_trials=160, executions_per_trial=1, directory='hypermodel_phase_dummy', overwrite=True
    )
    tuner.search_space_summary()
    tuner.search(train_dataset, batch_size=32, epochs=50,
        validation_data=val_dataset, verbose=2,
        callbacks=[
            keras.callbacks.ModelCheckpoint(
                filepath='gender_prediction_best.keras',
                save_best_only=True,
                monitor='val_accuracy'
            )
        ])
    model = keras.models.load_model('gender_prediction_best.keras')
    print(model.evaluate(test_dataset))
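
# Another optional sketch that is not in the original gist: after a search finishes,
# KerasTuner can report the best hyperparameter combinations it found via
# get_best_hyperparameters. print_best_hyperparameters is a hypothetical helper name;
# pass it the tuner created in start_hyper_tunning or start_hyper_tunning_v2.
def print_best_hyperparameters(tuner, num_trials=1):
    for i, best_hp in enumerate(tuner.get_best_hyperparameters(num_trials=num_trials)):
        print(f'Best trial #{i + 1}: {best_hp.values}')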

class GenderClassificationHyperModelV2(kt.HyperModel):
    def build(self, hp):
        inputs = keras.Input(shape=(150, 150, 3), name='input')
        features = layers.SeparableConvolution2D(32, 3, activation='relu')(inputs)
        features = layers.MaxPooling2D((2, 2))(features)
        if hp.Boolean('three_conv_layer'):
            features = layers.SeparableConvolution2D(64, 3, activation='relu')(features)
            features = layers.MaxPooling2D((2, 2))(features)
            features = layers.SeparableConvolution2D(64, 3, activation='relu')(features)
            features = layers.MaxPooling2D((2, 2))(features)
        else:
            features = layers.SeparableConvolution2D(128, 3, activation='relu')(features)
            features = layers.MaxPooling2D((2, 2))(features)
        flatten = layers.Flatten()(features)
        dropout_rate = hp.Float(name='dropout_rate', min_value=0.5, max_value=0.7, step=0.1)
        dense = layers.Dropout(dropout_rate)(flatten)
        dense = layers.Dense(128, activation='relu')(dense)
        if hp.Boolean('two_dense_layer'):
            dense = layers.Dense(32, activation='relu')(dense)
        outputs = layers.Dense(1, activation='sigmoid', name='output')(dense)
        model = keras.Model(inputs=inputs, outputs=outputs)
        model.compile(loss='binary_crossentropy', optimizer='rmsprop', metrics=['accuracy'])
        return model

def start_hyper_tunning_v2():
    train_dataset, val_dataset, test_dataset = load_data()
    hp = GenderClassificationHyperModelV2()
    tuner = kt.BayesianOptimization(hp, objective='val_accuracy',
        max_trials=48, executions_per_trial=2, directory='hypermodel_phase_2', overwrite=True
    )
    tuner.search_space_summary()
    callbacks = [
        keras.callbacks.ModelCheckpoint(
            filepath='gender_prediction_best_v2.keras',
            save_best_only=True,
            monitor='val_accuracy'
        ),
        # Stop a trial early when val_accuracy has not improved for 10 epochs.
        keras.callbacks.EarlyStopping(
            monitor='val_accuracy', patience=10
        )
    ]
    tuner.search(train_dataset, batch_size=32, epochs=50,
        validation_data=val_dataset, verbose=2,
        callbacks=callbacks)
    model = keras.models.load_model('gender_prediction_best_v2.keras')
    print(model.evaluate(test_dataset))
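
# A minimal entry point, assumed rather than taken from the original gist. Each call
# trains from scratch and can take a long time, so run one phase at a time and
# uncomment the others as needed.
if __name__ == '__main__':
    train_baseline()
    # start_hyper_tunning()
    # start_hyper_tunning_v2()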