resnet18.py
'''ResNet in PyTorch.

For Pre-activation ResNet, see 'preact_resnet.py'.

Reference:
[1] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun.
    Deep Residual Learning for Image Recognition. arXiv:1512.03385
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
from inplace_abn import ABN, InPlaceABN

# Use InPlaceABN in place of nn.BatchNorm2d: it fuses batch norm with a
# leaky-ReLU activation and recomputes intermediates in-place to save memory.
Norm = InPlaceABN
act = 'leaky_relu'
act_param = 0.01
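
# Sketch (not in the original gist): Norm(C, activation=act, activation_param=act_param)
# computes approximately the same function as the unfused pair below, minus the
# in-place memory savings; setting Norm = ABN keeps the same interface unfused.
#   unfused = nn.Sequential(nn.BatchNorm2d(C), nn.LeakyReLU(act_param, inplace=True))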

class BasicBlock(nn.Module):
    expansion = 1

    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        # self.bn1 = nn.BatchNorm2d(planes)
        self.bn1 = Norm(planes, activation=act, activation_param=act_param)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
        # self.bn2 = nn.BatchNorm2d(planes)
        self.bn2 = Norm(planes, activation=act, activation_param=act_param)
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion*planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
                # nn.BatchNorm2d(self.expansion*planes)
                Norm(self.expansion*planes, activation=act, activation_param=act_param)
            )

    def forward(self, x):
        # Norm already applies the leaky-ReLU activation, so there is no
        # separate F.relu between the convolutions.
        out = self.bn1(self.conv1(x))
        out = self.bn2(self.conv2(out))
        out = out + self.shortcut(x)
        out = F.relu(out, inplace=True)
        return out
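
# Example (added for illustration): a strided BasicBlock halves the spatial
# size while the 1x1 shortcut conv matches shapes, e.g.
#   blk = BasicBlock(64, 128, stride=2)
#   blk(torch.randn(1, 64, 32, 32)).shape  # -> torch.Size([1, 128, 16, 16])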

class Bottleneck(nn.Module):
    expansion = 4

    def __init__(self, in_planes, planes, stride=1):
        super(Bottleneck, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
        # self.bn1 = nn.BatchNorm2d(planes)
        self.bn1 = Norm(planes, activation=act, activation_param=act_param)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        # self.bn2 = nn.BatchNorm2d(planes)
        self.bn2 = Norm(planes, activation=act, activation_param=act_param)
        self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False)
        # self.bn3 = nn.BatchNorm2d(self.expansion*planes)
        self.bn3 = Norm(self.expansion*planes, activation=act, activation_param=act_param)
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion*planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
                # nn.BatchNorm2d(self.expansion*planes)
                Norm(self.expansion*planes, activation=act, activation_param=act_param)
            )

    def forward(self, x):
        out = self.bn1(self.conv1(x))
        out = self.bn2(self.conv2(out))
        out = self.bn3(self.conv3(out))
        out = out + self.shortcut(x)
        out = F.relu(out, inplace=True)
        return out
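
# Example (added for illustration): Bottleneck expands channels 4x via the
# final 1x1 conv, e.g.
#   blk = Bottleneck(64, 64, stride=1)
#   blk(torch.randn(1, 64, 32, 32)).shape  # -> torch.Size([1, 256, 32, 32])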

class ResNet(nn.Module):
    def __init__(self, block, num_blocks, num_classes=10):
        super(ResNet, self).__init__()
        self.in_planes = 64
        # CIFAR-style stem: 3x3 conv with stride 1 instead of the ImageNet 7x7/stride-2 stem.
        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
        # self.bn1 = nn.BatchNorm2d(64)
        self.bn1 = Norm(64, activation=act, activation_param=act_param)
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.linear = nn.Linear(512*block.expansion, num_classes)
        # Weight initialization kept disabled, as in the original gist:
        '''
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm, InPlaceABN)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)

        # Zero-initialize the last BN in each residual branch, so that the
        # residual branch starts with zeros and each residual block behaves
        # like an identity. This improves the model by 0.2~0.3% according to
        # https://arxiv.org/abs/1706.02677
        for m in self.modules():
            if isinstance(m, Bottleneck):
                nn.init.constant_(m.bn3.weight, 0)
            elif isinstance(m, BasicBlock):
                nn.init.constant_(m.bn2.weight, 0)
        '''

    def _make_layer(self, block, planes, num_blocks, stride):
        # Only the first block may downsample; the rest keep stride 1,
        # e.g. num_blocks=2, stride=2 gives strides [2, 1].
        strides = [stride] + [1]*(num_blocks-1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        out = self.bn1(self.conv1(x))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        out = F.avg_pool2d(out, 4)  # 4x4 pooling assumes 32x32 (CIFAR) inputs
        out = out.view(out.size(0), -1)
        out = self.linear(out)
        return out

def ResNet18():
    return ResNet(BasicBlock, [2, 2, 2, 2])
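
# Added sketch (not in the original gist): deeper variants under the standard
# He et al. block counts, plus a minimal smoke test. Note that InPlaceABN may
# require a CUDA build of inplace_abn to run.
def ResNet34():
    return ResNet(BasicBlock, [3, 4, 6, 3])

def ResNet50():
    return ResNet(Bottleneck, [3, 4, 6, 3])

def test():
    net = ResNet18()
    y = net(torch.randn(1, 3, 32, 32))
    print(y.size())  # expected: torch.Size([1, 10])

if __name__ == '__main__':
    test()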