DenseNet implementation in Keras, used in https://predictiveprogrammer.com/famous-convolutional-neural-network-architectures-2/
from keras import layers as ls
from keras import models
from keras.regularizers import l2

def bn_relu_conv(in_tensor,
                 filters,
                 kernel_size,
                 strides=1,
                 padding='same',
                 weight_decay=1e-4):
    # Pre-activation ordering (BN -> ReLU -> Conv), as used throughout DenseNet
    bn = ls.BatchNormalization()(in_tensor)
    relu = ls.Activation('relu')(bn)
    conv = ls.Conv2D(filters, kernel_size,
                     strides=strides, padding=padding,
                     kernel_initializer='he_normal',
                     kernel_regularizer=l2(weight_decay),
                     use_bias=False)(relu)
    return conv

def _pre_densenet(in_tensor, k):
    # Stem: 7x7 conv with 2k filters (stride 2), then 3x3 max pooling (stride 2)
    conv = bn_relu_conv(in_tensor, 2*k, 7, 2)
    pool = ls.MaxPool2D(3, strides=2, padding='same')(conv)
    return pool

def _post_densenet(in_tensor, n_classes):
    # End operations, applied after all the dense blocks
    pool = ls.GlobalAvgPool2D()(in_tensor)
    preds = ls.Dense(n_classes, activation='softmax')(pool)
    return preds

def _conv_block(in_tensor, filters, bottleneck=False, dropout_rate=None):
    # Optional 1x1 bottleneck (DenseNet-B) producing 4k feature maps,
    # followed by the 3x3 conv that outputs k feature maps
    if bottleneck:
        in_tensor = bn_relu_conv(in_tensor, 4 * filters, 1)
    act = bn_relu_conv(in_tensor, filters, 3)
    if dropout_rate:
        act = ls.Dropout(dropout_rate)(act)
    return act

def dense_block(in_tensor, n_layers, k, bottleneck=False, dropout_rate=None):
    # Each layer's k new feature maps are concatenated with all previous ones.
    # dropout_rate is threaded through to each conv block (it was previously
    # accepted by the model builders but never applied).
    x = in_tensor
    for _ in range(n_layers):
        conv_block = _conv_block(x, k, bottleneck, dropout_rate)
        x = ls.Concatenate()([x, conv_block])
    return x

def transition_block(in_tensor, reduction):
    # Compress the number of feature maps by `reduction` (DenseNet-C),
    # then halve the spatial resolution with average pooling
    reduced_conv = bn_relu_conv(in_tensor,
                                int(in_tensor.get_shape().as_list()[-1] * reduction), 1)
    pool = ls.AvgPool2D(2, strides=2)(reduced_conv)
    return pool

def _create_densenet(in_shape,
                     n_classes,
                     blocks,
                     k=32,
                     bottleneck=False,
                     dropout_rate=None,
                     reduction=0.5,
                     include_top=True):
    in_ = ls.Input(in_shape)
    downsampled_in = _pre_densenet(in_, k)
    dense_block1 = dense_block(downsampled_in, blocks[0], k, bottleneck, dropout_rate)
    transition1 = transition_block(dense_block1, reduction)
    dense_block2 = dense_block(transition1, blocks[1], k, bottleneck, dropout_rate)
    transition2 = transition_block(dense_block2, reduction)
    dense_block3 = dense_block(transition2, blocks[2], k, bottleneck, dropout_rate)
    transition3 = transition_block(dense_block3, reduction)
    dense_block4 = dense_block(transition3, blocks[3], k, bottleneck, dropout_rate)
    bn = ls.BatchNormalization()(dense_block4)
    act = ls.Activation('relu')(bn)
    if include_top:
        preds = _post_densenet(act, n_classes)
    else:
        preds = act

    model = models.Model(in_, preds)
    return model

def densenet121(in_shape,
                n_classes=1000,
                k=32,
                bottleneck=True,
                dropout_rate=None,
                reduction=0.5,
                include_top=True):
    # DenseNet-121: dense blocks of 6, 12, 24 and 16 layers
    return _create_densenet(in_shape, n_classes,
                            [6, 12, 24, 16], k,
                            bottleneck, dropout_rate,
                            reduction, include_top)

def densenet169(in_shape,
                n_classes=1000,
                k=32,
                bottleneck=True,
                dropout_rate=None,
                reduction=0.5,
                include_top=True):
    # DenseNet-169: dense blocks of 6, 12, 32 and 32 layers
    return _create_densenet(in_shape, n_classes,
                            [6, 12, 32, 32], k,
                            bottleneck, dropout_rate,
                            reduction, include_top)

def densenet201(in_shape,
                n_classes=1000,
                k=32,
                bottleneck=True,
                dropout_rate=None,
                reduction=0.5,
                include_top=True):
    # DenseNet-201: dense blocks of 6, 12, 48 and 32 layers
    return _create_densenet(in_shape, n_classes,
                            [6, 12, 48, 32], k,
                            bottleneck, dropout_rate,
                            reduction, include_top)
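
A minimal usage sketch, assuming Keras 2.x with a TensorFlow backend (matching the `from keras import ...` style above). The 224x224x3 input shape is the standard ImageNet configuration from the DenseNet paper; the optimizer and loss here are placeholders, not part of the gist:

# Build DenseNet-121 for 224x224 RGB inputs and inspect the layer graph.
model = densenet121((224, 224, 3))
model.compile(optimizer='sgd',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
model.summary()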