Gist by @SherazKhan · Last active April 26, 2022
from keras.layers import Conv2D, Dense, Dropout, Flatten, MaxPooling2D, BatchNormalization
from keras.models import Sequential
from keras.optimizers import Adam
from keras import regularizers


def model_cnn_reg(input_shape):
    """Build a small CNN binary classifier with L1/L2 regularization and dropout."""
    model = Sequential()

    # Convolutional feature extractor
    model.add(Conv2D(filters=64, kernel_size=(3, 3), input_shape=input_shape, activation='relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Conv2D(filters=32, kernel_size=(2, 2), activation='relu'))
    model.add(BatchNormalization())
    model.add(Flatten())

    # Dense head with L1/L2 kernel, bias, and activity regularization plus dropout
    model.add(Dense(256, activation='relu',
                    kernel_regularizer=regularizers.l1_l2(l1=3e-4, l2=3e-3),
                    bias_regularizer=regularizers.l2(3e-3),
                    activity_regularizer=regularizers.l2(3e-4)))
    model.add(Dropout(0.5))
    model.add(Dense(128, activation='relu',
                    kernel_regularizer=regularizers.l1_l2(l1=1e-3, l2=1e-2),
                    bias_regularizer=regularizers.l2(1e-2),
                    activity_regularizer=regularizers.l2(1e-3)))
    model.add(Dropout(0.3))

    # Single sigmoid unit for binary classification
    model.add(Dense(1, activation='sigmoid'))

    optimiser = Adam(learning_rate=0.0001)
    model.compile(optimizer=optimiser,
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    return model
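
A minimal usage sketch follows. The input shape, array names, and random toy data are illustrative assumptions, not part of the original gist; substitute your own preprocessed inputs and binary labels.

import numpy as np

# Hypothetical input shape and random toy data purely for illustration.
input_shape = (128, 128, 1)
X_train = np.random.rand(32, *input_shape).astype('float32')
y_train = np.random.randint(0, 2, size=(32, 1))

model = model_cnn_reg(input_shape)
model.summary()

# Short training run on the toy data; binary labels match the sigmoid output.
model.fit(X_train, y_train, epochs=2, batch_size=8, validation_split=0.25)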