Skip to content

Instantly share code, notes, and snippets.

@wolfecameron
Created January 4, 2023 13:55
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
Save wolfecameron/67e6637d076a22ea5ecb5098fea80aa0 to your computer and use it in GitHub Desktop.
import torch
class FFNN(torch.nn.Module):
    """A simple feed-forward neural network (MLP) with ReLU activations.

    Architecture: input -> [Linear -> ReLU] x num_layers -> Linear -> output.
    Every hidden layer has ``hidden_size`` units; there is no activation
    after the final output layer.
    """

    def __init__(self, input_size, hidden_size, output_size, num_layers):
        """
        Args:
            input_size: number of input features.
            hidden_size: width of every hidden layer.
            output_size: number of output features.
            num_layers: number of hidden (Linear + ReLU) layers; must be >= 1.
        """
        super().__init__()
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.num_layers = num_layers

        # Build the module list locally and assign the finished Sequential
        # once: intermediate plain-list attributes on an nn.Module are not
        # registered as submodules and are an easy source of bugs.
        modules = [
            torch.nn.Linear(self.input_size, self.hidden_size),
            torch.nn.ReLU(),
        ]
        # First hidden layer is built above, so add num_layers - 1 more.
        for _ in range(self.num_layers - 1):
            modules.append(torch.nn.Linear(self.hidden_size, self.hidden_size))
            modules.append(torch.nn.ReLU())
        # Output projection: no trailing activation.
        modules.append(torch.nn.Linear(self.hidden_size, self.output_size))
        self.layers = torch.nn.Sequential(*modules)

    def forward(self, x):
        """Apply the network to ``x`` of shape (..., input_size).

        Returns a tensor of shape (..., output_size).
        """
        return self.layers(x)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment