Skip to content

Instantly share code, notes, and snippets.

@L-Lewis
Last active October 2, 2019 12:13
Show Gist options
  • Save L-Lewis/b1d501f7017c63cd994c3e3c18494a40 to your computer and use it in GitHub Desktop.
from keras.layers import Flatten, Input, concatenate
from keras.layers.convolutional import Conv2D, MaxPooling2D
from keras.layers.core import Activation, Dropout, Dense
from keras.layers.normalization import BatchNormalization
from keras.models import Model
def create_cnn(width, height, depth, filters=(16, 32, 64), regularizer=None):
    """
    Build a CNN that maps an image of shape (height, width, depth) to a
    4-node fully-connected output.

    Parameters
    ----------
    width, height, depth : int
        Spatial width, height, and channel count of the input images
        (channels-last ordering).
    filters : tuple of int, optional
        Number of filters for each CONV => RELU => BN => POOL stage;
        one stage is created per entry.
    regularizer : keras regularizer or None, optional
        Kernel regularizer applied to both Dense layers.

    Returns
    -------
    keras.models.Model
        The uncompiled functional-API model.
    """
    # Channels-last input, so BatchNormalization normalizes the last axis.
    input_shape = (height, width, depth)
    chan_dim = -1

    inputs = Input(shape=input_shape)

    # Stack one CONV => RELU => BN => POOL block per filter count.
    # (Initializing x before the loop replaces the original `if i == 0`
    # guard inside the loop — same behavior, no index needed.)
    x = inputs
    for f in filters:
        x = Conv2D(f, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chan_dim)(x)
        x = MaxPooling2D(pool_size=(2, 2))(x)

    # Head: flatten the volume, then FC => RELU => BN => DROPOUT.
    x = Flatten()(x)
    x = Dense(16, kernel_regularizer=regularizer)(x)
    x = Activation("relu")(x)
    x = BatchNormalization(axis=chan_dim)(x)
    x = Dropout(0.5)(x)

    # Final 4-node FC layer — presumably sized to match an MLP branch this
    # CNN is concatenated with elsewhere (the file imports `concatenate`);
    # confirm against the caller.
    x = Dense(4, kernel_regularizer=regularizer)(x)
    x = Activation("relu")(x)

    return Model(inputs, x)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment