Skip to content

Instantly share code, notes, and snippets.

@SpellOnYou
Last active December 7, 2021 16:21
cs224n-2019w-a3
# Assignment3, 2-(e)
## ParserModel.__init__
# Two-layer feed-forward scorer: concatenated feature embeddings -> hidden -> class logits.
self.embed_to_hidden = nn.Linear(self.n_features * self.embed_size, self.hidden_size)
self.hidden_to_logits = nn.Linear(self.hidden_size, self.n_classes)
# Xavier-uniform init on both weight matrices (in-place); biases keep nn.Linear's default init.
nn.init.xavier_uniform_(self.embed_to_hidden.weight)
nn.init.xavier_uniform_(self.hidden_to_logits.weight)
self.dropout = nn.Dropout(dropout_prob)
## ParserModel.embedding_lookup
# Look up embeddings for the (batch, n_features) index tensor, then flatten
# each example's feature embeddings into one row vector of size n_features*embed_size.
x = self.pretrained_embeddings(t)
x = x.view(x.shape[0], -1)
## ParserModel.forward
# embeddings -> affine -> ReLU -> dropout -> logits
# (no softmax here: CrossEntropyLoss consumes raw logits)
embeddings = self.embedding_lookup(t)
h = self.embed_to_hidden(embeddings).relu()
logits = self.hidden_to_logits(self.dropout(h))
# Assignment3, 2-(c)
## PartialParse.__init__
# Initial parser configuration: ROOT alone on the stack, the whole sentence in
# the buffer (copied so parsing never mutates the caller's list), no arcs yet.
self.stack = ['ROOT']
self.buffer = list(self.sentence)
self.dependencies = []
## PartialParse.parse_step
# Apply one transition to the configuration. A dependency is a (head, dependent) pair.
# Fix: the original compared strings with `is` (identity), which only works because
# CPython interns short literals and is a SyntaxWarning since Python 3.8; use `==`.
if transition == 'S':
    # SHIFT: move the first word of the buffer onto the stack.
    self.stack.append(self.buffer.pop(0))
elif transition == 'LA':
    # LEFT-ARC: second-from-top becomes a dependent of the top of the stack.
    self.dependencies.append((self.stack[-1], self.stack.pop(-2)))
else:
    # RIGHT-ARC: top of the stack becomes a dependent of the second-from-top.
    self.dependencies.append((self.stack[-2], self.stack.pop(-1)))
# Assignment3, 2-(d)
## minibatch_parse
# One PartialParse per input sentence; step them through batch_size at a time.
partial_parses = [PartialParse(sent) for sent in sentences]
# Work list aliasing the same parse objects; it shrinks as parses finish.
unfinished_parses = list(partial_parses)
while unfinished_parses:
    batch = unfinished_parses[:batch_size]
    # The model predicts one transition per parse in the batch.
    for parse, transition in zip(batch, model.predict(batch)):
        parse.parse_step(transition)
        # A parse is done when only ROOT remains on the stack and the buffer is empty.
        if len(parse.stack) == 1 and not parse.buffer:
            unfinished_parses.remove(parse)
# Collect results in the original sentence order.
dependencies = [parse.dependencies for parse in partial_parses]
# Assignment3, 2-(e)
## train
# Adam over all model parameters; `lr` comes from the surrounding skeleton's
# train() signature.
optimizer = optim.Adam(parser.model.parameters(), lr = lr)
# CrossEntropyLoss applies log-softmax internally, so it pairs with the raw
# logits the model's forward() produces.
loss_func = nn.CrossEntropyLoss()
## train_for_epoch
# Forward pass on one minibatch of feature indices, then backprop + update.
out = parser.model(train_x)
loss = loss_func(out, train_y)
# NOTE(review): no optimizer.zero_grad() is visible in this snippet —
# presumably the surrounding skeleton calls it before backward(); confirm,
# otherwise gradients accumulate across minibatches.
loss.backward()
optimizer.step()
@SpellOnYou
Copy link
Author

indentation is ignored

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment