nn.embedding backward grad
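A minimal PyTorch script showing how gradients accumulate into the nn.Embedding weight matrix after calling backward() on a scalar loss built from a sum of looked-up embeddings.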
import torch
import torch.nn as nn

# an Embedding module containing 6 embedding vectors of size 3
embedding = nn.Embedding(6, 3)

# a batch of 2 samples with 3 indices each
input = torch.LongTensor([[1, 2, 3], [0, 2, 3]])

# sum every element of the looked-up embeddings so the loss is a scalar
loss = torch.sum(embedding(input))

print("embedding")
print(embedding.weight)

print("input")
print(input)

print("forward pass")
print(embedding(input))

print("loss")
print(loss)

loss.backward()

print("embedding backward grad")
print(embedding.weight.grad)
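Because the loss is a plain sum, every looked-up element contributes a gradient of 1, so each row of embedding.weight.grad ends up holding the number of times that row's index appears in input (indices 2 and 3 appear twice, 0 and 1 once, 4 and 5 never). A small sketch to check this, assuming the variables from the script above are still in scope:

# counts[i] = how many times index i occurs in `input`
counts = torch.bincount(input.flatten(), minlength=embedding.num_embeddings).float()
# the gradient should be that count repeated across the embedding dimension
expected = counts.unsqueeze(1).expand(-1, embedding.embedding_dim)
print(torch.allclose(embedding.weight.grad, expected))  # expected: True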