@prateekjoshi565
Created July 17, 2020 17:25
import torch.nn as nn

class BERT_Arch(nn.Module):

    def __init__(self, bert):
        super(BERT_Arch, self).__init__()
        self.bert = bert
        # dropout layer
        self.dropout = nn.Dropout(0.1)
        # relu activation function
        self.relu = nn.ReLU()
        # dense layer 1
        self.fc1 = nn.Linear(768, 512)
        # dense layer 2 (output layer)
        self.fc2 = nn.Linear(512, 2)
        # log-softmax activation (pairs with nn.NLLLoss during training)
        self.softmax = nn.LogSoftmax(dim=1)

    # define the forward pass
    def forward(self, sent_id, mask):
        # pass the inputs to BERT; the second return value is the pooled
        # [CLS] representation (return_dict=False keeps the tuple-style
        # output on newer transformers versions)
        _, cls_hs = self.bert(sent_id, attention_mask=mask,
                              return_dict=False)
        x = self.fc1(cls_hs)
        x = self.relu(x)
        x = self.dropout(x)
        # output layer
        x = self.fc2(x)
        # apply log-softmax activation
        x = self.softmax(x)
        return x
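
For context, here is a minimal usage sketch. It assumes the wrapped backbone is the Hugging Face bert-base-uncased checkpoint (whose 768-dimensional pooled output matches fc1) and that the task has two classes; the example sentences and checkpoint choice are illustrative, not part of the original gist.

# Minimal usage sketch (assumption: bert-base-uncased backbone from
# Hugging Face transformers; toy two-sentence batch for illustration).
import torch
from transformers import AutoModel, AutoTokenizer

bert = AutoModel.from_pretrained("bert-base-uncased")
tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")

model = BERT_Arch(bert)

# tokenize a toy batch and run a forward pass
batch = tokenizer(["example message one", "example message two"],
                  padding=True, truncation=True, return_tensors="pt")
with torch.no_grad():
    log_probs = model(batch["input_ids"], batch["attention_mask"])
print(log_probs.shape)  # torch.Size([2, 2]): log-probabilities over 2 classes

Because the head ends in nn.LogSoftmax, training code would typically use nn.NLLLoss on these outputs rather than nn.CrossEntropyLoss, which expects raw logits.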