from tensorflow import keras


def model_builder(hp):
    model = keras.Sequential()
    model.add(keras.layers.Conv2D(64, (3, 3), activation='relu', input_shape=(150, 150, 3)))
    model.add(keras.layers.MaxPooling2D(2, 2))
    # Tune the number of filters for the second Conv2D layer
    # Choose an optimal value from 64 or 128
    hp_filters = hp.Choice('num_filters', values=[64, 128], default=64)
    model.add(keras.layers.Conv2D(filters=hp_filters, kernel_size=3, activation='relu'))
    model.add(keras.layers.MaxPooling2D(2, 2))
    model.add(keras.layers.Flatten())
    # Tune the dropout rate
    # Choose an optimal value between 0.1 and 0.5
    hp_dropout = hp.Float('dropout_1', min_value=0.1, max_value=0.5, default=0.25, step=0.05)
    model.add(keras.layers.Dropout(rate=hp_dropout))
    # Tune the number of units in the Dense layer
    # Choose an optimal value between 32 and 512
    hp_units = hp.Int('units', min_value=32, max_value=512, step=32)
    # Tune the activation function for the Dense layer
    # Choose from relu, tanh or sigmoid
    hp_activation_dense = hp.Choice('dense_activation', values=['relu', 'tanh', 'sigmoid'], default='relu')
    model.add(keras.layers.Dense(units=hp_units, activation=hp_activation_dense))
    model.add(keras.layers.Dense(3, activation='softmax'))
    # Tune the learning rate for the optimizer
    # Choose an optimal value from 0.01, 0.001 or 0.0001
    hp_learning_rate = hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])
    # The output layer already applies softmax, so the loss must receive
    # probabilities, not logits (from_logits=False)
    model.compile(optimizer=keras.optimizers.Adam(learning_rate=hp_learning_rate),
                  loss=keras.losses.CategoricalCrossentropy(from_logits=False),
                  metrics=['accuracy'])
    return model
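
# A minimal sketch of how this model_builder can be handed to Keras Tuner.
# Assumptions not in the original gist: the keras_tuner package is installed
# (older releases import as `kerastuner`), and train_data / val_data are
# hypothetical, already-prepared datasets yielding (image, one-hot label) batches.

import keras_tuner as kt

tuner = kt.Hyperband(
    model_builder,                 # the hypermodel-building function above
    objective='val_accuracy',      # metric the tuner optimizes
    max_epochs=10,
    directory='tuner_dir',         # where trial results are stored
    project_name='image_classification',
)

# Run the search, then rebuild the model with the best hyperparameters found.
tuner.search(train_data, validation_data=val_data, epochs=10)
best_hps = tuner.get_best_hyperparameters(num_trials=1)[0]
best_model = tuner.hypermodel.build(best_hps)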