TensorFlow 2.0 Subclassing API implementation of a VGG-like CNN architecture. Link to blog: https://towardsdatascience.com/how-can-i-trust-you-fb433a06256c?source=friends_link&sk=0af208dc53be2a326d2407577184686b
import tensorflow as tf


class MiniVGG(tf.keras.Model):
    """VGG-like CNN: two convolutional blocks followed by a dense classifier."""

    def __init__(self, **kwargs):
        super(MiniVGG, self).__init__()
        # First convolutional block: two 3x3 convolutions with 32 filters each.
        self.conv1_layer_1 = tf.keras.layers.Conv2D(
            filters=32,
            kernel_size=(3, 3),
            activation=tf.nn.relu,
            input_shape=kwargs['input_shape']
        )
        self.conv1_layer_2 = tf.keras.layers.Conv2D(
            filters=32,
            kernel_size=(3, 3),
            activation=tf.nn.relu
        )
        self.pool_layer_1 = tf.keras.layers.MaxPooling2D(
            pool_size=(2, 2)
        )
        # Second convolutional block: two 3x3 convolutions with 64 filters each.
        self.conv2_layer_1 = tf.keras.layers.Conv2D(
            filters=64,
            kernel_size=(3, 3),
            activation=tf.nn.relu
        )
        self.conv2_layer_2 = tf.keras.layers.Conv2D(
            filters=64,
            kernel_size=(3, 3),
            activation=tf.nn.relu
        )
        self.pool_layer_2 = tf.keras.layers.MaxPooling2D(
            pool_size=(2, 2)
        )
        self.dropout_layer_1 = tf.keras.layers.Dropout(rate=0.25)
        # Classifier head: flatten, fully connected layer, then softmax output.
        self.flatten = tf.keras.layers.Flatten()
        self.fc_layer = tf.keras.layers.Dense(
            units=256,
            activation=tf.nn.relu
        )
        self.dropout_layer_2 = tf.keras.layers.Dropout(rate=0.50)
        self.output_layer = tf.keras.layers.Dense(
            units=kwargs['num_classes'],
            activation=tf.nn.softmax
        )

    @tf.function
    def call(self, features):
        # Forward pass: conv block 1 -> conv block 2 -> dropout -> dense classifier.
        activation = self.conv1_layer_1(features)
        activation = self.conv1_layer_2(activation)
        activation = self.pool_layer_1(activation)
        activation = self.conv2_layer_1(activation)
        activation = self.conv2_layer_2(activation)
        activation = self.pool_layer_2(activation)
        activation = self.dropout_layer_1(activation)
        activation = self.flatten(activation)
        activation = self.fc_layer(activation)
        activation = self.dropout_layer_2(activation)
        output = self.output_layer(activation)
        return output
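
A minimal usage sketch, not part of the original gist: assuming 28x28x1 MNIST-style inputs with 10 classes, the model can be trained with the standard Keras workflow. The dataset, optimizer, batch size, and epoch count below are illustrative choices, not values from the blog post.

# Usage sketch -- dataset, optimizer, and training hyperparameters are assumptions.
import tensorflow as tf

(train_features, train_labels), (test_features, test_labels) = \
    tf.keras.datasets.mnist.load_data()

# Scale pixel values to [0, 1] and add a channel dimension for the Conv2D layers.
train_features = train_features.reshape(-1, 28, 28, 1).astype('float32') / 255.0
test_features = test_features.reshape(-1, 28, 28, 1).astype('float32') / 255.0

model = MiniVGG(input_shape=(28, 28, 1), num_classes=10)
model.compile(
    loss=tf.keras.losses.SparseCategoricalCrossentropy(),
    optimizer=tf.keras.optimizers.Adam(learning_rate=1e-3),
    metrics=['accuracy']
)
model.fit(
    train_features,
    train_labels,
    epochs=10,
    batch_size=128,
    validation_data=(test_features, test_labels)
)

Since the output layer already applies a softmax, the loss is used with its default from_logits=False.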