import numpy as np
import tensorflow.keras as keras
def build_model(input_shape):
    """Generates a CNN model

    :param input_shape (tuple): Shape of the input set
    :return model: CNN model
    """
    # build network topology
    model = keras.Sequential()

    # 1st conv layer
    model.add(keras.layers.Conv2D(32, (3, 3), activation='relu', input_shape=input_shape))
    model.add(keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding='same'))
    model.add(keras.layers.BatchNormalization())

    # 2nd conv layer
    model.add(keras.layers.Conv2D(32, (3, 3), activation='relu'))
    model.add(keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding='same'))
    model.add(keras.layers.BatchNormalization())

    # 3rd conv layer
    model.add(keras.layers.Conv2D(32, (2, 2), activation='relu'))
    model.add(keras.layers.MaxPooling2D((2, 2), strides=(2, 2), padding='same'))
    model.add(keras.layers.BatchNormalization())

    # flatten output and feed it into dense layer
    model.add(keras.layers.Flatten())
    model.add(keras.layers.Dense(64, activation='relu'))
    model.add(keras.layers.Dropout(0.3))

    # output layer
    model.add(keras.layers.Dense(2, activation='softmax'))

    optimiser = keras.optimizers.Adam(learning_rate=0.0001)
    model.compile(optimizer=optimiser,
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])

    model.summary()

    return model
# Example usage (X_train etc. are assumed to carry a trailing channel axis,
# e.g. X_train = X_train[..., np.newaxis], since Conv2D expects 3D samples):
# input_shape = (X_train.shape[1], X_train.shape[2], 1)  # e.g. (130, 13, 1)
# model = build_model(input_shape)
# history = model.fit(X_train, y_train, validation_data=(X_validation, y_validation), batch_size=32, epochs=30)
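
# Hedged demo sketch: builds and briefly fits the model on synthetic data just to show the
# expected input/label shapes (random arrays stand in for real MFCC features; the names and
# sizes below are illustrative assumptions, not part of the original gist).
if __name__ == "__main__":
    X_demo = np.random.rand(64, 130, 13, 1).astype("float32")  # (samples, time frames, MFCCs, channel)
    y_demo = np.random.randint(0, 2, size=64)                   # integer labels for the 2-class softmax output
    demo_model = build_model(input_shape=X_demo.shape[1:])
    demo_model.fit(X_demo, y_demo, validation_split=0.2, batch_size=8, epochs=1)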