Skip to content

Instantly share code, notes, and snippets.

@collinarnett
Created May 13, 2020 04:28
Show Gist options
  • Save collinarnett/72b5026b614d50c30d7798bef8ea5c14 to your computer and use it in GitHub Desktop.
64x64 Architecture
# 64x64
class Generator64(nn.Module):
    """DCGAN-style generator producing 1-channel 64x64 images.

    Maps a latent vector shaped (N, 100, 1, 1) through a stack of
    transposed convolutions (1x1 -> 4x4 -> 8x8 -> 16x16 -> 32x32 -> 64x64)
    to an image shaped (N, 1, 64, 64).

    Fixes vs. the original: the fourth stage's BatchNorm had no activation
    after it (inconsistent with every other stage), and the output had no
    squashing non-linearity — a Tanh is added so outputs lie in [-1, 1],
    the standard DCGAN convention for images normalized to that range.
    """

    def __init__(self, ngpu):
        super(Generator64, self).__init__()
        self.ngpu = ngpu  # number of GPUs (kept for caller compatibility)
        self.main = nn.Sequential(
            # latent 1x1 -> 4x4
            nn.ConvTranspose2d(100, 512, kernel_size=4, stride=1, padding=0),
            nn.BatchNorm2d(512),
            nn.LeakyReLU(0.2),
            # 4x4 -> 8x8
            nn.ConvTranspose2d(512, 256, kernel_size=4, stride=2, padding=1),
            nn.BatchNorm2d(256),
            nn.LeakyReLU(0.2),
            # 8x8 -> 16x16
            nn.ConvTranspose2d(256, 128, kernel_size=4, stride=2, padding=1),
            nn.BatchNorm2d(128),
            nn.LeakyReLU(0.2),
            # 16x16 -> 32x32
            nn.ConvTranspose2d(128, 64, kernel_size=4, stride=2, padding=1),
            nn.BatchNorm2d(64),
            nn.LeakyReLU(0.2),  # was missing in the original
            # 32x32 -> 64x64 output image
            nn.ConvTranspose2d(64, 1, kernel_size=4, stride=2, padding=1),
            nn.Tanh(),  # squash output to [-1, 1]
        )

    def forward(self, input):
        """Map latent (N, 100, 1, 1) -> image (N, 1, 64, 64)."""
        return self.main(input)
class Discriminator64(nn.Module):
    """DCGAN-style discriminator for 1-channel 64x64 images.

    Down-samples an input shaped (N, 1, 64, 64) through four stride-2
    convolutions (64x64 -> 32 -> 16 -> 8 -> 4), then reduces the 4x4 map
    to a single sigmoid score shaped (N, 1, 1, 1).
    """

    def __init__(self, ngpu):
        super(Discriminator64, self).__init__()
        self.ngpu = ngpu  # number of GPUs (kept for caller compatibility)

        stages = [
            # input stage 64x64 -> 32x32; no BatchNorm on the first layer
            nn.Conv2d(1, 64, kernel_size=4, stride=2, padding=1),
            nn.LeakyReLU(0.2),
            nn.Dropout(0.1),
        ]
        # intermediate stages: halve spatial size, double channels each time
        for in_ch, out_ch in ((64, 128), (128, 256), (256, 512)):
            stages += [
                nn.Conv2d(in_ch, out_ch, kernel_size=4, stride=2, padding=1),
                nn.BatchNorm2d(out_ch),
                nn.LeakyReLU(0.2),
                nn.Dropout(0.1),
            ]
        stages += [
            # 4x4 -> 1x1 score, squashed to a probability
            nn.Conv2d(512, 1, kernel_size=4, stride=1, padding=0),
            nn.Sigmoid(),
        ]
        self.main = nn.Sequential(*stages)

    def forward(self, input):
        """Return a real/fake probability shaped (N, 1, 1, 1)."""
        return self.main(input)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment