Skip to content

Instantly share code, notes, and snippets.

@amankharwal
Created Nov 25, 2020
Embed
What would you like to do?
# AlexNet-style CNN for binary image classification, built with tf.keras.
# Fix: import TensorFlow itself (the script uses `tf.` throughout) instead of
# the standalone-Keras Adam import, which is incompatible with tf.keras on
# Keras >= 2.4 and was the source of the removed `from keras.optimizers` line.
import tensorflow as tf

# NOTE(review): input is 64x64 RGB, far smaller than AlexNet's canonical
# 227x227 — the 11x11 / stride-4 first conv downsamples very aggressively
# at this resolution; confirm the input size is intended.
model = tf.keras.models.Sequential([
    # 1st conv block: large receptive field + heavy downsampling
    tf.keras.layers.Conv2D(96, (11, 11), strides=(4, 4), activation='relu',
                           input_shape=(64, 64, 3)),
    tf.keras.layers.BatchNormalization(),
    tf.keras.layers.MaxPooling2D(2, strides=(2, 2)),
    # 2nd conv block
    tf.keras.layers.Conv2D(256, (11, 11), strides=(1, 1), activation='relu',
                           padding="same"),
    tf.keras.layers.BatchNormalization(),
    # 3rd conv block
    tf.keras.layers.Conv2D(384, (3, 3), strides=(1, 1), activation='relu',
                           padding="same"),
    tf.keras.layers.BatchNormalization(),
    # 4th conv block
    tf.keras.layers.Conv2D(384, (3, 3), strides=(1, 1), activation='relu',
                           padding="same"),
    tf.keras.layers.BatchNormalization(),
    # 5th conv block
    tf.keras.layers.Conv2D(256, (3, 3), strides=(1, 1), activation='relu',
                           padding="same"),
    tf.keras.layers.BatchNormalization(),
    tf.keras.layers.MaxPooling2D(2, strides=(2, 2)),
    # Classifier head: flatten, two 4096-unit FC layers with dropout,
    # single sigmoid unit for binary classification.
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(4096, activation='relu'),
    tf.keras.layers.Dropout(0.5),
    tf.keras.layers.Dense(4096, activation='relu'),
    tf.keras.layers.Dropout(0.5),
    tf.keras.layers.Dense(1, activation='sigmoid'),
])

# Fix: use tf.keras' own Adam (never mix standalone `keras` objects into a
# tf.keras model) and the current `learning_rate` keyword — `lr` is
# deprecated and removed in recent Keras releases.
model.compile(
    optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
    loss='binary_crossentropy',
    metrics=['accuracy'],
)

# Fix: `Model.fit_generator` was deprecated in TF 2.1 and removed in TF 2.6;
# `Model.fit` accepts Python generators / Sequence objects directly.
# `train_generator` / `validation_generator` are assumed to be defined
# earlier in the full script (not visible in this chunk).
hist = model.fit(train_generator,
                 validation_data=validation_generator,
                 steps_per_epoch=256,
                 validation_steps=256,
                 epochs=50)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment