@aRe00t
Created November 3, 2017 06:10
DenseNet with BC (bottleneck layers and compression) for CIFAR-10, in MXNet Gluon
import math

from mxnet import init
from mxnet.gluon import nn
from mxnet.gluon.model_zoo.custom_layers import HybridConcurrent, Identity


def sigma(kernel_size, channels):
    """He-style standard deviation for conv weights: sqrt(2 / (k * k * out_channels))."""
    return math.sqrt(2. / (kernel_size * kernel_size * channels))


class DenseNet(nn.HybridSequential):
    """DenseNet-BC for CIFAR-10, after https://github.com/liuzhuang13/DenseNet
    """

    def __init__(self, depth, growth, dropout=0.2, reduction=0.5, bottleneck=True, bn_factor=4):
        super(DenseNet, self).__init__(prefix='')
        self.depth = depth
        self.growth = growth
        self.dropout = dropout
        self.reduction = reduction
        self.bottleneck = bottleneck
        self.bn_factor = bn_factor
        self.build_cifar10()

    def build_cifar10(self):
        # Layers per dense block; a bottleneck layer counts as two convolutions.
        layers = (self.depth - 4) // 3
        if self.bottleneck:
            layers //= 2
        features = init_channels = 2 * self.growth
        with self.name_scope():
            self.add(nn.Conv2D(init_channels, kernel_size=3, padding=1, use_bias=False,
                               weight_initializer=init.Normal(sigma(3, init_channels))))
            for i in range(3):
                self.add(self.make_dense_block(i, layers))
                # Compression: each transition reduces the channel count by `reduction`.
                features = int((features + layers * self.growth) * self.reduction)
                # The last "transition" is just global 8x8 average pooling before the classifier.
                self.add(self.make_transition(features, 8 if i == 2 else 0))
            self.add(nn.Dense(10))

    def make_dense_block(self, stage, layers):
        out = nn.HybridSequential(prefix='stage%d_' % stage)
        with out.name_scope():
            for _ in range(layers):
                out.add(self.make_dense_layer())
        return out

    def make_dense_layer(self):
        # Optional BN-ReLU-Conv(1x1) bottleneck followed by BN-ReLU-Conv(3x3),
        # producing `growth` new feature maps.
        net = nn.HybridSequential(prefix='')
        net.add(nn.BatchNorm())
        net.add(nn.Activation('relu'))
        if self.bottleneck:
            net.add(nn.Conv2D(self.bn_factor * self.growth, kernel_size=1, use_bias=False,
                              weight_initializer=init.Normal(sigma(1, self.bn_factor * self.growth))))
            if self.dropout > 0:
                net.add(nn.Dropout(self.dropout))
            net.add(nn.BatchNorm())
            net.add(nn.Activation('relu'))
        net.add(nn.Conv2D(self.growth, kernel_size=3, padding=1, use_bias=False,
                          weight_initializer=init.Normal(sigma(3, self.growth))))
        if self.dropout > 0:
            net.add(nn.Dropout(self.dropout))
        # Concatenate the layer input (identity) with the new features along the channel axis.
        out = HybridConcurrent(concat_dim=1, prefix='')
        out.add(Identity())
        out.add(net)
        return out

    def make_transition(self, channels, last_pool_size=0):
        out = nn.HybridSequential(prefix='')
        out.add(nn.BatchNorm())
        out.add(nn.Activation('relu'))
        if last_pool_size:
            # Final transition: global average pooling only.
            out.add(nn.AvgPool2D(pool_size=last_pool_size))
        else:
            out.add(nn.Conv2D(channels, kernel_size=1, use_bias=False,
                              weight_initializer=init.Normal(sigma(1, channels))))
            if self.dropout > 0:
                out.add(nn.Dropout(self.dropout))
            out.add(nn.AvgPool2D(pool_size=2, strides=2))
        return out
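
Below is a minimal usage sketch, not part of the original gist: it assumes the MXNet release the import above targets (one where mxnet.gluon.model_zoo.custom_layers exists), and the hyperparameters (depth 100, growth rate 12) and random CIFAR-10-sized batch are purely illustrative.

import mxnet as mx
from mxnet import nd

# DenseNet-BC with depth 100 and growth rate 12 (illustrative choice).
net = DenseNet(depth=100, growth=12)
# The conv layers carry their own Normal(sigma) initializers; Xavier covers the rest (e.g. the Dense head).
net.initialize(mx.init.Xavier(), ctx=mx.cpu())
net.hybridize()

# Dummy CIFAR-10-sized batch: 2 images, 3 channels, 32x32 pixels.
x = nd.random_normal(shape=(2, 3, 32, 32))
y = net(x)
print(y.shape)  # expected: (2, 10)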