Created June 6, 2017 20:26
Copy-pasting code from https://github.com/andreasveit/densenet-pytorch so it fits into https://github.com/felixgwu/img_classification_pk_pytorch
import math
import torch
import torch.nn as nn
import torch.nn.functional as F

class BasicBlock(nn.Module):
    # Pre-activation BN-ReLU-Conv(3x3) layer; its output is concatenated with
    # its input along the channel dimension (DenseNet connectivity).
    def __init__(self, in_planes, out_planes, dropRate=0.0):
        super(BasicBlock, self).__init__()
        self.bn1 = nn.BatchNorm2d(in_planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv1 = nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=1,
                               padding=1, bias=False)
        self.droprate = dropRate

    def forward(self, x):
        out = self.conv1(self.relu(self.bn1(x)))
        if self.droprate > 0:
            out = F.dropout(out, p=self.droprate, training=self.training)
        return torch.cat([x, out], 1)

class BottleneckBlock(nn.Module):
    # Bottleneck variant: a 1x1 conv (to 4 * out_planes channels) followed by a
    # 3x3 conv, both pre-activated; the output is concatenated with the input.
    def __init__(self, in_planes, out_planes, dropRate=0.0):
        super(BottleneckBlock, self).__init__()
        inter_planes = out_planes * 4
        self.bn1 = nn.BatchNorm2d(in_planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv1 = nn.Conv2d(in_planes, inter_planes, kernel_size=1, stride=1,
                               padding=0, bias=False)
        self.bn2 = nn.BatchNorm2d(inter_planes)
        self.conv2 = nn.Conv2d(inter_planes, out_planes, kernel_size=3, stride=1,
                               padding=1, bias=False)
        self.droprate = dropRate

    def forward(self, x):
        out = self.conv1(self.relu(self.bn1(x)))
        if self.droprate > 0:
            out = F.dropout(out, p=self.droprate, inplace=False, training=self.training)
        out = self.conv2(self.relu(self.bn2(out)))
        if self.droprate > 0:
            out = F.dropout(out, p=self.droprate, inplace=False, training=self.training)
        return torch.cat([x, out], 1)

class TransitionBlock(nn.Module):
    # Transition between dense blocks: 1x1 conv to reduce channels, then
    # 2x2 average pooling to halve the spatial resolution.
    def __init__(self, in_planes, out_planes, dropRate=0.0):
        super(TransitionBlock, self).__init__()
        self.bn1 = nn.BatchNorm2d(in_planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv1 = nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=1,
                               padding=0, bias=False)
        self.droprate = dropRate

    def forward(self, x):
        out = self.conv1(self.relu(self.bn1(x)))
        if self.droprate > 0:
            out = F.dropout(out, p=self.droprate, inplace=False, training=self.training)
        return F.avg_pool2d(out, 2)

class DenseBlock(nn.Module):
    # Stacks nb_layers blocks; layer i sees in_planes + i * growth_rate channels
    # because each block concatenates its input with its output.
    def __init__(self, nb_layers, in_planes, growth_rate, block, dropRate=0.0):
        super(DenseBlock, self).__init__()
        self.layer = self._make_layer(block, in_planes, growth_rate, nb_layers, dropRate)

    def _make_layer(self, block, in_planes, growth_rate, nb_layers, dropRate):
        layers = []
        for i in range(nb_layers):
            layers.append(block(in_planes + i * growth_rate, growth_rate, dropRate))
        return nn.Sequential(*layers)

    def forward(self, x):
        return self.layer(x)

class DenseNet3(nn.Module):
    def __init__(self, depth, num_classes, growth_rate=12,
                 reduction=0.5, bottleneck=True, dropRate=0.0):
        super(DenseNet3, self).__init__()
        in_planes = 2 * growth_rate
        n = (depth - 4) // 3
        if bottleneck:
            # each bottleneck layer counts as two convolutions (1x1 + 3x3)
            n = n // 2
            block = BottleneckBlock
        else:
            block = BasicBlock
        # 1st conv before any dense block
        self.conv1 = nn.Conv2d(3, in_planes, kernel_size=3, stride=1,
                               padding=1, bias=False)
        # 1st block
        self.block1 = DenseBlock(n, in_planes, growth_rate, block, dropRate)
        in_planes = int(in_planes + n * growth_rate)
        self.trans1 = TransitionBlock(in_planes, int(math.floor(in_planes * reduction)), dropRate=dropRate)
        in_planes = int(math.floor(in_planes * reduction))
        # 2nd block
        self.block2 = DenseBlock(n, in_planes, growth_rate, block, dropRate)
        in_planes = int(in_planes + n * growth_rate)
        self.trans2 = TransitionBlock(in_planes, int(math.floor(in_planes * reduction)), dropRate=dropRate)
        in_planes = int(math.floor(in_planes * reduction))
        # 3rd block
        self.block3 = DenseBlock(n, in_planes, growth_rate, block, dropRate)
        in_planes = int(in_planes + n * growth_rate)
        # global average pooling and classifier
        self.bn1 = nn.BatchNorm2d(in_planes)
        self.relu = nn.ReLU(inplace=True)
        self.fc = nn.Linear(in_planes, num_classes)
        self.in_planes = in_planes

        # He-style initialization for convolutions, constant initialization for BatchNorm
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                m.bias.data.zero_()

    def forward(self, x):
        out = self.conv1(x)
        out = self.trans1(self.block1(out))
        out = self.trans2(self.block2(out))
        out = self.block3(out)
        out = self.relu(self.bn1(out))
        out = F.avg_pool2d(out, 8)  # 8x8 global average pool (assumes 32x32 inputs)
        out = out.view(-1, self.in_planes)
        return self.fc(out)

def createModel(data, depth=100, growth_rate=12, num_classes=10, drop_rate=0,
                num_init_features=24, compression=0.5, bn_size=4, **kwargs):
    assert (depth - 4) % 3 == 0, 'depth should be of the form 3N + 4'
    # avgpool_size is computed for the framework's interface but unused here
    # (DenseNet3.forward hard-codes an 8x8 pool); num_init_features is likewise
    # unused, and bn_size only toggles bottleneck layers via bn_size > 0.
    avgpool_size = 7 if data == 'imagenet' else 8
    N = (depth - 4) // 3
    suffix = '-'
    if bn_size > 0:
        N //= 2
        suffix += 'B'
        use_bn = True
    else:
        use_bn = False
    if compression < 1.:
        suffix += 'C'
    if suffix == '-':
        suffix = ''
    print('Create DenseNet{}-{:d} for {}'.format(suffix, depth, data))
    return DenseNet3(depth, num_classes, growth_rate,
                     compression, use_bn, drop_rate)
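
For reference, a minimal usage sketch (not part of the original gist): it builds the CIFAR-style DenseNet-BC-100 via createModel and runs a dummy batch through it. The 'cifar10' string and the 2x3x32x32 input shape are illustrative assumptions; createModel itself only checks whether data == 'imagenet'.

if __name__ == '__main__':
    # Hypothetical smoke test: any non-'imagenet' string works for `data`.
    model = createModel(data='cifar10', depth=100, growth_rate=12, num_classes=10)
    x = torch.randn(2, 3, 32, 32)   # two 32x32 RGB images
    logits = model(x)
    print(logits.size())            # expected: torch.Size([2, 10])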