Last active
April 25, 2018 11:24
-
-
Save johnolafenwa/74088f97dde614c6344eaa0741ca633d to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import keras | |
from keras.layers import * | |
from keras.models import Model | |
# A single resnet module consisting of 1 x 1 conv - 3 x 3 conv and 1 x 1 conv | |
def resnet_identity_module(x, filters, pool=False):
    """A pre-activation ResNet bottleneck module: BN-ReLU-1x1, BN-ReLU-3x3, BN-ReLU-1x1.

    Args:
        x: input tensor (4D feature map from a previous Keras layer).
        filters: number of output channels; the two inner convs use filters // 4.
        pool: if True, downsample spatially by using stride 2 in the first conv
            and project the shortcut with a strided 1x1 conv to match.

    Returns:
        Output tensor of the module (main path added to the shortcut).
    """
    res = x
    stride = 1
    if pool:
        stride = 2
        # Project the shortcut with the same stride so its spatial size
        # matches the downsampled main path before the addition.
        res = Conv2D(filters, kernel_size=1, strides=stride, padding="same")(res)

    # Pre-activation: BN and ReLU come before each conv.
    x = BatchNormalization()(x)
    x = Activation("relu")(x)
    x = Conv2D(filters // 4, kernel_size=1, strides=stride, padding="same")(x)

    x = BatchNormalization()(x)
    x = Activation("relu")(x)
    x = Conv2D(filters // 4, kernel_size=3, strides=1, padding="same")(x)

    x = BatchNormalization()(x)
    x = Activation("relu")(x)
    x = Conv2D(filters, kernel_size=1, strides=1, padding="same")(x)

    x = add([x, res])
    return x
def resnet_first_identity_module(x, filters):
    """The first bottleneck module of the network (no downsampling).

    Unlike `resnet_identity_module` with pool=False, the shortcut is always
    projected with a 1x1 conv, because the incoming tensor's channel count
    (64 from the stem) differs from `filters`.

    Args:
        x: input tensor (4D feature map from a previous Keras layer).
        filters: number of output channels; inner convs use filters // 4.

    Returns:
        Output tensor of the module (main path added to the projected shortcut).
    """
    # Projection shortcut: match channel count without changing spatial size.
    res = Conv2D(filters, kernel_size=1, strides=1, padding="same")(x)

    # Pre-activation bottleneck: BN and ReLU precede each conv.
    x = BatchNormalization()(x)
    x = Activation("relu")(x)
    x = Conv2D(filters // 4, kernel_size=1, strides=1, padding="same")(x)

    x = BatchNormalization()(x)
    x = Activation("relu")(x)
    x = Conv2D(filters // 4, kernel_size=3, strides=1, padding="same")(x)

    x = BatchNormalization()(x)
    x = Activation("relu")(x)
    x = Conv2D(filters, kernel_size=1, strides=1, padding="same")(x)

    x = add([x, res])
    return x
# A resnet block: a stack of N bottleneck modules; optionally the first one
# downsamples (pool=True), the rest never do.
def resnet_block(x, filters, num_layers, pool_first_layer=True):
    """Apply `num_layers` resnet identity modules in sequence.

    Args:
        x: input tensor.
        filters: output channel count passed to every module.
        num_layers: how many modules to stack.
        pool_first_layer: if True, the first module downsamples spatially.

    Returns:
        The tensor produced by the final module.
    """
    for layer_idx in range(num_layers):
        x = resnet_identity_module(
            x,
            filters=filters,
            pool=(pool_first_layer and layer_idx == 0),
        )
    return x
# The Resnet model consisting of Conv - block1 - block2 - block3 - block 4 - FC with Softmax | |
def ResnetV2(input_shape, num_layers=50, num_classes=10):
    """Build a pre-activation ResNet (v2): stem conv - 4 blocks - GAP - softmax FC.

    Args:
        input_shape: shape tuple of the input images, excluding the batch axis.
        num_layers: network depth; must be 50, 101 or 152.
        num_classes: number of softmax output classes.

    Returns:
        A compiled-free keras Model named "Resnetv2{num_layers}".

    Raises:
        ValueError: if num_layers is not one of 50, 101, 152.
    """
    if num_layers not in [50, 101, 152]:
        raise ValueError("Num Layers must be either 50, 101 or 152")

    # Modules per block and output channels per block for each supported depth.
    block_layers = {50: [3, 4, 6, 3],
                    101: [3, 4, 23, 3],
                    152: [3, 8, 36, 3]
                    }
    block_filters = {50: [256, 512, 1024, 2048],
                     101: [256, 512, 1024, 2048],
                     152: [256, 512, 1024, 2048]
                     }
    layers = block_layers[num_layers]
    filters = block_filters[num_layers]

    net_input = Input(input_shape)
    # Since the first layers in the modules are bn and relu, we do not include
    # bn and relu after the first (stem) conv.
    x = Conv2D(64, kernel_size=7, strides=2, padding="same")(net_input)
    x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(x)

    # The very first module needs a projection shortcut (64 -> 256 channels).
    x = resnet_first_identity_module(x, filters[0])
    for i in range(4):
        # Bug fix: use a separate loop variable instead of reassigning the
        # `num_layers` parameter, which previously corrupted the model name
        # below (it always ended in the last block's layer count, e.g. "3").
        block_size = layers[i]
        pool_first = i != 0
        if i == 0:
            # First block: no downsampling, and one module was already
            # emitted above by resnet_first_identity_module.
            block_size = block_size - 1
        x = resnet_block(x, filters=filters[i], num_layers=block_size,
                         pool_first_layer=pool_first)

    # Since the output of the residual unit is an addition of convs, we need
    # to apply bn and relu before global average pooling.
    x = BatchNormalization()(x)
    x = Activation("relu")(x)
    x = GlobalAveragePooling2D()(x)
    x = Dense(num_classes)(x)
    x = Activation("softmax")(x)

    model = Model(inputs=net_input, outputs=x, name="Resnetv2{}".format(num_layers))
    return model
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment