# Import the components needed to build an AlexNet-style network.
# These import paths work in standalone Keras 2.x; in TensorFlow-bundled
# releases the same layers live under tensorflow.keras.layers.
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Conv2D, MaxPooling2D
# Initialize model
alexnet = Sequential()
# Layer 1: 96 11x11 filters over the 150x150x3 input, ReLU, then 2x2 max pooling
alexnet.add(Conv2D(96, (11, 11), input_shape=(150,150,3)))
alexnet.add(Activation('relu'))
alexnet.add(MaxPooling2D(pool_size=(2, 2)))
# Layer 2: 256 5x5 filters with 'same' padding, ReLU, then 2x2 max pooling
alexnet.add(Conv2D(256, (5, 5), padding='same'))
alexnet.add(Activation('relu'))
alexnet.add(MaxPooling2D(pool_size=(2, 2)))
# Layer 3: 512 3x3 filters with 'same' padding, ReLU
alexnet.add(Conv2D(512, (3, 3), padding='same'))
alexnet.add(Activation('relu'))
# Layer 4: 1024 3x3 filters with 'same' padding, ReLU
alexnet.add(Conv2D(1024, (3, 3), padding='same'))
alexnet.add(Activation('relu'))
# Layer 5: 1024 3x3 filters with 'same' padding, ReLU, then 2x2 max pooling
alexnet.add(Conv2D(1024, (3, 3), padding='same'))
alexnet.add(Activation('relu'))
alexnet.add(MaxPooling2D(pool_size=(2, 2)))
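# Shape trace through the convolutional stack above (assuming the default
# 'channels_last' data format and the default stride of 1 in every Conv2D):
#   input             -> 150 x 150 x 3
#   11x11 conv, valid -> 140 x 140 x 96   ; 2x2 pool -> 70 x 70 x 96
#   5x5 conv, same    -> 70 x 70 x 256    ; 2x2 pool -> 35 x 35 x 256
#   3x3 conv, same    -> 35 x 35 x 512
#   3x3 conv, same    -> 35 x 35 x 1024
#   3x3 conv, same    -> 35 x 35 x 1024   ; 2x2 pool -> 17 x 17 x 1024
# Flatten therefore emits 17*17*1024 = 295,936 features, so the Dense(3072)
# layer below alone holds roughly 0.9 billion weights.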
# Layer 6: flatten the feature maps, then a 3072-unit fully connected layer with 50% dropout
alexnet.add(Flatten())
alexnet.add(Dense(3072))
alexnet.add(Activation('relu'))
alexnet.add(Dropout(0.5))
# Layer 7: 4096-unit fully connected layer with 50% dropout
alexnet.add(Dense(4096))
alexnet.add(Activation('relu'))
alexnet.add(Dropout(0.5))
# Layer 8: 1000-way softmax output (one unit per ImageNet class)
alexnet.add(Dense(1000))
alexnet.add(Activation('softmax'))
# Print a per-layer summary of output shapes and parameter counts
alexnet.summary()
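# Minimal sanity-check sketch: compile the model and run one step on random
# data to confirm the shapes line up. The optimizer, loss, and dummy batch
# below are assumptions for illustration, not choices made in the gist.
import numpy as np

alexnet.compile(optimizer='adam',
                loss='categorical_crossentropy',
                metrics=['accuracy'])

# Random batch of 2 RGB images at the expected 150x150 resolution,
# with one-hot labels over the 1000-way softmax output.
dummy_images = np.random.rand(2, 150, 150, 3).astype('float32')
dummy_labels = np.eye(1000)[np.random.randint(0, 1000, size=2)]

# One training step on the dummy batch, then a forward pass.
alexnet.train_on_batch(dummy_images, dummy_labels)
preds = alexnet.predict(dummy_images)
print(preds.shape)  # (2, 1000)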