Yannick Serge Obam (obeshor)
%tensorboard --logdir logs/hparam_tuning
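The HP_* hyperparameter objects and METRIC_ACCURACY used throughout these snippets are never defined here. A minimal sketch with the TensorBoard HParams API might look like the following; the concrete domains are assumptions, chosen only to match the loops below:

from tensorboard.plugins.hparams import api as hp

# Assumed domains; only the names and the loop structure below appear in the source
HP_NUM_FILTERS = hp.HParam('num_filters', hp.Discrete([64, 128]))
HP_DROPOUT = hp.HParam('dropout', hp.RealInterval(0.1, 0.5))
HP_ACTIVATION = hp.HParam('activation', hp.Discrete(['relu', 'tanh']))
HP_NUM_UNITS = hp.HParam('num_units', hp.Discrete([32, 512]))
HP_OPTIMIZER = hp.HParam('optimizer', hp.Discrete(['adam', 'sgd']))
METRIC_ACCURACY = 'accuracy'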
# Run a few experiments, which will take a few minutes
session_num = 0
for num_filters in HP_NUM_FILTERS.domain.values:
  for dropout_rate in (HP_DROPOUT.domain.min_value, HP_DROPOUT.domain.max_value):
    for activation in HP_ACTIVATION.domain.values:
      for num_units in HP_NUM_UNITS.domain.values:
        for optimizer in HP_OPTIMIZER.domain.values:
          hparams = {
              HP_NUM_FILTERS: num_filters,
              HP_DROPOUT: dropout_rate,
              HP_ACTIVATION: activation,
              HP_NUM_UNITS: num_units,
              HP_OPTIMIZER: optimizer,
          }
          run_name = "run-%d" % session_num
          print('--- Starting trial: %s' % run_name)
          run('logs/hparam_tuning/' + run_name, hparams)
          session_num += 1
def run(run_dir, hparams):
  with tf.summary.create_file_writer(run_dir).as_default():
    hp.hparams(hparams)  # record the values used in this trial
    accuracy = model_builder_HPARAMS(hparams)
    tf.summary.scalar(METRIC_ACCURACY, accuracy, step=1)
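Each call writes one trial's hyperparameter values and final accuracy under run_dir, which is why the %tensorboard command above points at the logs/hparam_tuning parent directory.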
def model_builder_HPARAMS(hparams):
  model = keras.Sequential()
  model.add(keras.layers.Conv2D(64, (3, 3), activation='relu', input_shape=(150, 150, 3)))
  model.add(keras.layers.MaxPooling2D(2, 2))
  # Tune the number of filters for the second Conv2D
  # Choose an optimal value from 64-128
  model.add(keras.layers.Conv2D(hparams[HP_NUM_FILTERS], kernel_size=3, activation='relu'))
  model.add(keras.layers.MaxPooling2D(2, 2))
  # Truncated in the source; assumed completion that uses the remaining hyperparameters
  model.add(keras.layers.Flatten())
  model.add(keras.layers.Dense(hparams[HP_NUM_UNITS], activation=hparams[HP_ACTIVATION]))
  model.add(keras.layers.Dropout(hparams[HP_DROPOUT]))
  model.add(keras.layers.Dense(1, activation='sigmoid'))  # assumed binary head (150x150 RGB inputs)
  model.compile(optimizer=hparams[HP_OPTIMIZER], loss='binary_crossentropy', metrics=['accuracy'])
  model.fit(train_generator, epochs=1, steps_per_epoch=20, verbose=0)
  return model.evaluate(validation_generator, steps=3, verbose=0)[1]  # trial accuracy
def model_builder_tl(hp):
  # Flatten the output layer to 1 dimension
  x = layers.Flatten()(last_output)
  # Tune the number of units in the Dense layer
  # Choose an optimal value between 32-512
  hp_units = hp.Int('units', min_value=32, max_value=512, step=32)
  # Tune the activation function for the Dense layer
  hp_activation = hp.Choice('dense_activation', values=['relu', 'tanh'])  # assumed choices
  # Add a fully connected layer with the tuned units and activation
  x = layers.Dense(hp_units, activation=hp_activation)(x)
  # The source truncates here; assumed completion with a binary sigmoid head
  x = layers.Dense(1, activation='sigmoid')(x)
  model = Model(pre_trained_model.input, x)
  model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
  return model
from tensorflow.keras import layers
from tensorflow.keras import Model
from tensorflow.keras.applications.inception_v3 import InceptionV3

!wget --no-check-certificate \
    https://storage.googleapis.com/mledu-datasets/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5 \
    -O /tmp/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5

local_weights_file = '/tmp/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5'
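The pre_trained_model and last_output names that model_builder_tl relies on are not defined anywhere in these snippets. A plausible setup, following the usual InceptionV3 transfer-learning recipe (the 'mixed7' cut point is an assumption), would be:

pre_trained_model = InceptionV3(input_shape=(150, 150, 3), include_top=False, weights=None)
pre_trained_model.load_weights(local_weights_file)

# Freeze the convolutional base so only the new head is trained
for layer in pre_trained_model.layers:
  layer.trainable = False

# Assumed cut point: take the output of the 'mixed7' block as features
last_output = pre_trained_model.get_layer('mixed7').output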
# Get the optimal hyperparameters
best_hps = tuner.get_best_hyperparameters(num_trials=1)[0]

print(f"""
The hyperparameter search is complete.
The optimal number of filters in the second convolutional layer is {best_hps.get('num_filters')}.
The optimal number of units in the first densely-connected layer is {best_hps.get('units')}.
The optimal dropout rate is {best_hps.get('dropout_1')}, and the optimal learning rate for the optimizer is {best_hps.get('learning_rate')}.
""")
tuner.search(train_generator, epochs=10, steps_per_epoch=20, validation_data=validation_generator, validation_steps=3, verbose=1, callbacks=[ClearTrainingOutput()])
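ClearTrainingOutput is referenced above but never defined in these snippets. In the Keras Tuner tutorials of this era it is a small callback that clears the notebook cell output between trials, roughly:

import IPython

class ClearTrainingOutput(tf.keras.callbacks.Callback):
  def on_train_end(self, *args, **kwargs):
    # Clear the cell output so each trial's logs replace the previous one's
    IPython.display.clear_output(wait=True)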
import keras_tuner as kt  # published as 'kerastuner' in older releases

tuner = kt.Hyperband(model_builder,
                     objective='val_accuracy',
                     max_epochs=10,
                     factor=3,
                     directory='my_dir',
                     project_name='hyper_tuning')
def model_builder(hp):
  model = keras.Sequential()
  model.add(keras.layers.Conv2D(64, (3, 3), activation='relu', input_shape=(150, 150, 3)))
  model.add(keras.layers.MaxPooling2D(2, 2))
  # Tune the number of filters for the second Conv2D
  # Choose an optimal value from 64-128
  hp_filters = hp.Choice('num_filters', values=[64, 128], default=64)
  model.add(keras.layers.Conv2D(filters=hp_filters, kernel_size=3, activation='relu'))
  model.add(keras.layers.MaxPooling2D(2, 2))
  # Truncated in the source; assumed completion defining the 'units', 'dropout_1' and 'learning_rate' hyperparameters read out above
  model.add(keras.layers.Flatten())
  model.add(keras.layers.Dense(hp.Int('units', min_value=32, max_value=512, step=32), activation='relu'))
  model.add(keras.layers.Dropout(hp.Float('dropout_1', min_value=0.1, max_value=0.5, step=0.1)))
  model.add(keras.layers.Dense(1, activation='sigmoid'))  # assumed binary head
  model.compile(optimizer=keras.optimizers.Adam(learning_rate=hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])), loss='binary_crossentropy', metrics=['accuracy'])
  return model
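This model_builder is the hypermodel handed to kt.Hyperband above; the best_hps lookups earlier on the page read back exactly the hyperparameter names registered here.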