Skip to content

Instantly share code, notes, and snippets.

@chmodsss
Created December 30, 2019 19:16
Show Gist options
  • Save chmodsss/644396713b51cab066a112f4bae41647 to your computer and use it in GitHub Desktop.
Save chmodsss/644396713b51cab066a112f4bae41647 to your computer and use it in GitHub Desktop.
Discriminator and Generator networks for a GAN
class Discriminator(nn.Module):
    """GAN discriminator: scores a flattened 784-dim input as real (~1) or fake (~0).

    Architecture: 784 -> 256 -> 128 -> 1, with LeakyReLU activations and
    Dropout on the hidden layers, and a Sigmoid on the output so the result
    is a probability in [0, 1].
    """

    def __init__(self):
        super().__init__()
        # Hidden layers use LeakyReLU(0.2) (keeps gradients flowing for
        # negative pre-activations) and Dropout(0.3) for regularization.
        self.layer1 = nn.Sequential(
            nn.Linear(784, 256),
            nn.LeakyReLU(0.2),
            nn.Dropout(0.3),
        )
        self.layer2 = nn.Sequential(
            nn.Linear(256, 128),
            nn.LeakyReLU(0.2),
            nn.Dropout(0.3),
        )
        # Output head: single logit squashed to a probability.
        self.layer_out = nn.Sequential(
            nn.Linear(128, 1),
            nn.Sigmoid(),
        )

    def forward(self, x):
        """Map x of shape (batch, 784) to (batch, 1) real/fake probabilities."""
        return self.layer_out(self.layer2(self.layer1(x)))
class Generator(nn.Module):
    """GAN generator: maps a 128-dim noise vector to a 784-dim sample.

    Architecture: a progressively widening MLP, 128 -> 256 -> 512 -> 1024 -> 784,
    with LeakyReLU activations on the hidden layers and a Tanh on the output so
    every generated value lies in [-1, 1].
    """

    def __init__(self):
        super().__init__()
        # Widening hidden stack; LeakyReLU(0.2) matches the discriminator.
        self.layer1 = nn.Sequential(nn.Linear(128, 256), nn.LeakyReLU(0.2))
        self.layer2 = nn.Sequential(nn.Linear(256, 512), nn.LeakyReLU(0.2))
        self.layer3 = nn.Sequential(nn.Linear(512, 1024), nn.LeakyReLU(0.2))
        # Tanh bounds the output to [-1, 1] (pair with inputs scaled the same way).
        self.layer_out = nn.Sequential(nn.Linear(1024, 784), nn.Tanh())

    def forward(self, x):
        """Transform noise x of shape (batch, 128) into (batch, 784) samples."""
        out = self.layer1(x)
        out = self.layer2(out)
        out = self.layer3(out)
        return self.layer_out(out)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment