Skip to content

Instantly share code, notes, and snippets.

@himanshurawlani
Last active September 5, 2020 20:10
Show Gist options
  • Save himanshurawlani/4388b595ee5ee49305473511abec6b68 to your computer and use it in GitHub Desktop.
An example script to create a hyper model
def FCN_model(config, len_classes=5):
    """Build a classification model whose layer sizes come from a
    hyperparameter ``config`` mapping (a "hyper model" for tuning).

    Args:
        config: Mapping of hyperparameter names to values. Keys read here:
            'conv_block1_filters', 'conv_block2_filters',
            'fc_layer_type' ('dense' selects a dense head; anything else
            selects the convolutional head),
            'pool_type' ('max' selects global max pooling; anything else
            selects global average pooling),
            'fc1_units', 'dropout_rate'.
        len_classes: Number of output classes. Defaults to 5.

    Returns:
        An uncompiled ``tf.keras.Model`` mapping variable-size RGB images
        of shape (H, W, 3) to softmax class probabilities.
    """
    input = tf.keras.layers.Input(shape=(None, None, 3))

    # Adding data augmentation layers
    x = augment_images(input, config)

    # You can create a fixed number of convolutional blocks, or
    # use a loop if the number of layers is itself a hyperparameter.
    x = tf.keras.layers.Conv2D(filters=config['conv_block1_filters'],
                               kernel_size=3, strides=1)(x)
    x = tf.keras.layers.BatchNormalization()(x)
    x = tf.keras.layers.Activation('relu')(x)

    x = tf.keras.layers.Conv2D(filters=config['conv_block2_filters'],
                               kernel_size=3, strides=1)(x)
    x = tf.keras.layers.BatchNormalization()(x)
    x = tf.keras.layers.Activation('relu')(x)

    if config['fc_layer_type'] == 'dense':
        if config['pool_type'] == 'max':
            x = tf.keras.layers.GlobalMaxPooling2D()(x)
        else:
            x = tf.keras.layers.GlobalAveragePooling2D()(x)

        # Fully connected layer 1
        x = tf.keras.layers.Dense(units=config['fc1_units'])(x)
        x = tf.keras.layers.Dropout(config['dropout_rate'])(x)
        x = tf.keras.layers.BatchNormalization()(x)
        x = tf.keras.layers.Activation('relu')(x)

        # Fully connected layer 2 (classification head)
        x = tf.keras.layers.Dense(units=len_classes)(x)
        x = tf.keras.layers.Dropout(config['dropout_rate'])(x)
        x = tf.keras.layers.BatchNormalization()(x)
        predictions = tf.keras.layers.Activation('softmax')(x)
    else:
        # Convolutional head: 1x1 convolutions take the place of the
        # dense layers, then global pooling collapses the spatial
        # dimensions to per-class scores.
        # NOTE(review): the original source elided this branch with "..."
        # placeholders; this is a reconstruction mirroring the dense head
        # above — confirm against the intended configuration.
        x = tf.keras.layers.Conv2D(filters=config['fc1_units'],
                                   kernel_size=1, strides=1)(x)
        x = tf.keras.layers.Dropout(config['dropout_rate'])(x)
        x = tf.keras.layers.BatchNormalization()(x)
        x = tf.keras.layers.Activation('relu')(x)

        x = tf.keras.layers.Conv2D(filters=len_classes,
                                   kernel_size=1, strides=1)(x)
        if config['pool_type'] == 'max':
            x = tf.keras.layers.GlobalMaxPooling2D()(x)
        else:
            x = tf.keras.layers.GlobalAveragePooling2D()(x)
        predictions = tf.keras.layers.Activation('softmax')(x)

    model = tf.keras.Model(inputs=input, outputs=predictions)
    return model
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment