@wottpal · Created August 31, 2018 06:14
from keras import models
from keras.layers import (BatchNormalization, Convolution2D, Dense, Dropout,
                          Flatten, MaxPooling2D)


def VGG_16_BN(input_shape):
    """VGG-16-style network with batch normalization after every conv/dense layer.

    Biases are disabled (use_bias=False) because each batch normalization layer
    learns its own offset. The single sigmoid unit makes this a binary classifier.
    """
    model = models.Sequential()

    # Block 1: 2 x conv(64, 3x3) + BN, then 2x2 max pooling
    model.add(Convolution2D(64, (3, 3), input_shape=input_shape, activation='relu', padding='same', use_bias=False))
    model.add(BatchNormalization())
    model.add(Convolution2D(64, (3, 3), activation='relu', padding='same', use_bias=False))
    model.add(BatchNormalization())
    model.add(MaxPooling2D((2, 2), strides=(2, 2)))

    # Block 2: 2 x conv(128, 3x3) + BN, then 2x2 max pooling
    model.add(Convolution2D(128, (3, 3), activation='relu', padding='same', use_bias=False))
    model.add(BatchNormalization())
    model.add(Convolution2D(128, (3, 3), activation='relu', padding='same', use_bias=False))
    model.add(BatchNormalization())
    model.add(MaxPooling2D((2, 2), strides=(2, 2)))

    # Block 3: 3 x conv(256, 3x3) + BN, then 2x2 max pooling
    model.add(Convolution2D(256, (3, 3), activation='relu', padding='same', use_bias=False))
    model.add(BatchNormalization())
    model.add(Convolution2D(256, (3, 3), activation='relu', padding='same', use_bias=False))
    model.add(BatchNormalization())
    model.add(Convolution2D(256, (3, 3), activation='relu', padding='same', use_bias=False))
    model.add(BatchNormalization())
    model.add(MaxPooling2D((2, 2), strides=(2, 2)))

    # Block 4: 3 x conv(512, 3x3) + BN, then 2x2 max pooling
    model.add(Convolution2D(512, (3, 3), activation='relu', padding='same', use_bias=False))
    model.add(BatchNormalization())
    model.add(Convolution2D(512, (3, 3), activation='relu', padding='same', use_bias=False))
    model.add(BatchNormalization())
    model.add(Convolution2D(512, (3, 3), activation='relu', padding='same', use_bias=False))
    model.add(BatchNormalization())
    model.add(MaxPooling2D((2, 2), strides=(2, 2)))

    # Block 5: 3 x conv(512, 3x3) + BN, then 2x2 max pooling
    model.add(Convolution2D(512, (3, 3), activation='relu', padding='same', use_bias=False))
    model.add(BatchNormalization())
    model.add(Convolution2D(512, (3, 3), activation='relu', padding='same', use_bias=False))
    model.add(BatchNormalization())
    model.add(Convolution2D(512, (3, 3), activation='relu', padding='same', use_bias=False))
    model.add(BatchNormalization())
    model.add(MaxPooling2D((2, 2), strides=(2, 2)))

    # Classifier head: two batch-normalized dense layers with dropout,
    # followed by a single sigmoid output for binary classification.
    model.add(Flatten())
    model.add(Dense(4096, activation='relu', use_bias=False))
    model.add(BatchNormalization())
    model.add(Dropout(0.5))
    model.add(Dense(4096, activation='relu', use_bias=False))
    model.add(BatchNormalization())
    model.add(Dropout(0.5))
    model.add(Dense(1, activation='sigmoid'))

    return model
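

# Minimal usage sketch (not part of the original gist): the 224x224x3 input
# shape, Adam optimizer, and binary cross-entropy loss are assumptions chosen
# to match the single sigmoid output, not values taken from the source.
if __name__ == "__main__":
    model = VGG_16_BN(input_shape=(224, 224, 3))
    model.compile(optimizer='adam',
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    model.summary()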