@quanhua92 · Created June 18, 2019
"""
This is the implementation of AlexNet which is modified from [Jeicaoyu's AlexNet].
Note:
- The number of Conv2d filters now matches with the original paper.
- Use PyTorch's Local Response Normalization layer which is implemented in Jan 2018. [PR #4667]
- This is for educational purpose only. We don't have pretrained weights for this model.
References:
- Jeicaoyu's AlexNet Model: [jiecaoyu](https://github.com/jiecaoyu/pytorch_imagenet/blob/984a2a988ba17b37e1173dd2518fa0f4dc4a1879/networks/model_list/alexnet.py)
- PR #4667: https://github.com/pytorch/pytorch/pull/4667
"""
import torch.nn as nn


class AlexNet(nn.Module):
    def __init__(self, num_classes=1000):
        super(AlexNet, self).__init__()
        self.features = nn.Sequential(
            # conv1: 96 filters, 11x11, stride 4 (expects a 3 x 227 x 227 input)
            nn.Conv2d(3, 96, kernel_size=11, stride=4, padding=0),
            nn.ReLU(inplace=True),
            nn.LocalResponseNorm(size=5, alpha=0.0001, beta=0.75),
            nn.MaxPool2d(kernel_size=3, stride=2),
            # conv2: 256 filters, 5x5, split into 2 groups as in the paper
            nn.Conv2d(96, 256, kernel_size=5, padding=2, groups=2),
            nn.ReLU(inplace=True),
            nn.LocalResponseNorm(size=5, alpha=0.0001, beta=0.75),
            nn.MaxPool2d(kernel_size=3, stride=2),
            # conv3: 384 filters, 3x3, no grouping
            nn.Conv2d(256, 384, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            # conv4: 384 filters, 3x3, 2 groups
            nn.Conv2d(384, 384, kernel_size=3, padding=1, groups=2),
            nn.ReLU(inplace=True),
            # conv5: 256 filters, 3x3, 2 groups
            nn.Conv2d(384, 256, kernel_size=3, padding=1, groups=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2),
        )
        self.classifier = nn.Sequential(
            # The feature extractor outputs 256 x 6 x 6 for a 227 x 227 input
            nn.Linear(256 * 6 * 6, 4096),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            nn.Linear(4096, 4096),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            nn.Linear(4096, num_classes),
        )

    def forward(self, x):
        x = self.features(x)
        # Flatten the conv feature maps before the fully connected layers
        x = x.view(x.size(0), 256 * 6 * 6)
        x = self.classifier(x)
        return x
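

# --- Usage sketch (not part of the original gist; a minimal smoke test) ---
# With a 227 x 227 input, the feature extractor yields the 256 x 6 x 6 map
# that the classifier expects, and the model outputs num_classes logits:
# conv1 gives (227 - 11) / 4 + 1 = 55, and the strided pools take
# 55 -> 27 -> 13 -> 6.
if __name__ == "__main__":
    import torch

    model = AlexNet(num_classes=1000)
    dummy = torch.randn(1, 3, 227, 227)  # one random RGB image
    feats = model.features(dummy)
    print(feats.shape)  # torch.Size([1, 256, 6, 6])
    logits = model(dummy)
    print(logits.shape)  # torch.Size([1, 1000])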