@jihunchoi
jihunchoi / masked_cross_entropy.py
Last active January 22, 2024 19:20
PyTorch workaround for masking cross entropy loss
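Before the gist's own code, here is a minimal sketch (assuming current PyTorch; the names masked_nll, logits, targets, and mask are illustrative and this is not necessarily the exact approach used later in the gist) of how a per-position length mask, such as the one produced by _sequence_mask below, is typically applied to get a masked cross-entropy loss: compute per-token losses without reduction, zero out padded positions, and normalize by the number of real tokens.

import torch
import torch.nn.functional as F

def masked_nll(logits, targets, mask):
    # logits: (batch, seq, vocab); targets: (batch, seq) class indices;
    # mask: (batch, seq) with 1 at real tokens and 0 at padding (illustrative names).
    mask = mask.float()
    # Per-token negative log-likelihood, kept unreduced so it can be masked.
    nll = F.cross_entropy(
        logits.view(-1, logits.size(-1)),  # (batch * seq, vocab)
        targets.view(-1),                  # (batch * seq,)
        reduction='none',
    ).view(targets.size())                 # back to (batch, seq)
    # Zero out padded positions and average over real tokens only.
    return (nll * mask).sum() / mask.sum()

In current PyTorch a similar effect can often be obtained with the ignore_index argument of F.cross_entropy; the explicit mask shown here mirrors the workaround style of this gist.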
import torch
from torch.autograd import Variable


def _sequence_mask(sequence_length, max_len=None):
    # Build a (batch_size, max_len) mask whose entry (i, j) is 1 iff j < sequence_length[i].
    if max_len is None:
        max_len = sequence_length.data.max()
    batch_size = sequence_length.size(0)
    # torch.arange replaces the deprecated torch.range(0, max_len - 1).
    seq_range = torch.arange(0, max_len).long()
    seq_range_expand = seq_range.unsqueeze(0).expand(batch_size, max_len)
    seq_range_expand = Variable(seq_range_expand)
    if sequence_length.is_cuda:
        seq_range_expand = seq_range_expand.cuda()
    seq_length_expand = (sequence_length.unsqueeze(1)