2D input comparison of autograd.softmax (PyTorch vs SINGA)
softmaxtest_pytorch.py output:
(pytorch_p36) ubuntu@ip-172-31-23-185:~$ python softmaxtest_pytorch.py
x = [[ 1.  2.  3.  4.]
 [ 5.  6.  7.  8.]
 [ 9. 10. 11. 12.]]
dy = [[ 1.  2.  3.  4.]
 [ 5.  6.  7.  8.]
 [ 9. 10. 11. 12.]]
dim = 0
y = [[3.2932044e-04 3.2932044e-04 3.2932044e-04 3.2932044e-04]
 [1.7980287e-02 1.7980287e-02 1.7980287e-02 1.7980287e-02]
 [9.8169041e-01 9.8169041e-01 9.8169041e-01 9.8169041e-01]]
dx = [[-0.00261001 -0.00261001 -0.00261001 -0.00261001]
 [-0.07058062 -0.07058062 -0.07058062 -0.07058062]
 [ 0.0731905   0.07319049  0.07319048  0.07319046]]
dim = 1
y = [[0.0320586  0.08714432 0.23688284 0.6439143 ]
 [0.0320586  0.08714432 0.23688284 0.6439143 ]
 [0.0320586  0.08714432 0.23688284 0.6439143 ]]
dx = [[-0.08252098 -0.13268624 -0.11931103  0.32407805]
 [-0.1504916  -0.20065685 -0.18728164  0.25610745]
 [-0.00672047 -0.05688574 -0.04351053  0.3998785 ]]
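
The dim=0 values and the dim=1 y above follow from the standard softmax Jacobian-vector product, dx = y * (dy - sum(dy * y) along the softmax dim). A minimal NumPy sketch of that check is below; note that the printed dim=1 dx differs from the analytic value because the test script calls backward() twice without clearing the grad, so that dx also contains the accumulated dim=0 dx.

import numpy as np

def softmax(x, axis):
    e = np.exp(x - x.max(axis=axis, keepdims=True))  # shift for numerical stability
    return e / e.sum(axis=axis, keepdims=True)

def softmax_jvp(dy, y, axis):
    # dx = y * (dy - sum(dy * y) along the softmax axis)
    return y * (dy - (dy * y).sum(axis=axis, keepdims=True))

x = np.arange(1, 13, dtype=np.float32).reshape(3, 4)
dy = x.copy()
for axis in (0, 1):
    y = softmax(x, axis)
    print("axis =", axis)
    print("y =", y)
    print("dx =", softmax_jvp(dy, y, axis))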
softmaxtest_singa.py output:
ubuntu@ip-172-31-24-48:~$ python3 softmaxtest_singa.py
x = [[ 1.  2.  3.  4.]
 [ 5.  6.  7.  8.]
 [ 9. 10. 11. 12.]]
dy = [[ 1.  2.  3.  4.]
 [ 5.  6.  7.  8.]
 [ 9. 10. 11. 12.]]
dim = 0
y = [[1.0557552e-05 2.8698403e-05 7.8010344e-05 2.1205410e-04]
 [5.7642284e-04 1.5668797e-03 4.2592203e-03 1.1577762e-02]
 [3.1471618e-02 8.5548729e-02 2.3254557e-01 6.3212436e-01]]
dx = [[ 1.0545408e-05  5.7363799e-05  2.3394130e-04  8.4797252e-04]
 [ 2.8044584e-03  9.1901887e-03  2.9240739e-02  9.1062337e-02]
 [-7.1825355e-02 -1.0969275e-01 -6.5630198e-02  4.5372248e-01]]
dim = 1
Traceback (most recent call last):
  File "softmaxtest_singa.py", line 31, in <module>
    dx = y.creator.backward(dy.data)  # CTensor
  File "/home/ubuntu/singa/build/python/singa/autograd.py", line 867, in backward
    out_1 = np.einsum("ki,ki->ki", grad, output)
  File "/usr/local/lib/python3.5/dist-packages/numpy/core/einsumfunc.py", line 1346, in einsum
    return c_einsum(*operands, **kwargs)
ValueError: operands could not be broadcast together with remapped shapes [original->remapped]: (4,3)->(4,3) (3,4)->(3,4)
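
Two things stand out in this run. First, the axis=0 forward disagrees with PyTorch: SINGA's y sums to 1 over the whole 3x4 matrix (e.g. e^1 / (e^1 + ... + e^12) ≈ 1.0557e-05), so the softmax was effectively taken over the flattened input rather than down each column. Second, the axis=1 backward crashes because the two operands reach np.einsum("ki,ki->ki", ...) with transposed shapes, (4,3) against (3,4). A backward written directly in terms of the softmax axis needs neither einsum nor a transpose; a minimal sketch of such a backward follows (not necessarily SINGA's actual fix).

import numpy as np

def softmax_backward(grad, output, axis):
    # Jacobian-vector product of softmax along `axis`:
    # dx = y * (dy - sum(dy * y) along the softmax axis).
    # Both operands keep their original (3, 4) shape, so the
    # (4,3) vs (3,4) broadcast failure in the traceback cannot occur.
    s = (grad * output).sum(axis=axis, keepdims=True)
    return output * (grad - s)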
softmaxtest_pytorch.py:
import torch
import torch.nn as nn
import numpy as np

x = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]).astype(np.float32).reshape((3, 4))
dynumpy = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]).astype(np.float32).reshape((3, 4))
print("x =", x)
print("dy =", dynumpy)

input_tensor = torch.tensor(x, dtype=torch.float32)
dy = torch.tensor(dynumpy, dtype=torch.float32)
input_tensor.requires_grad_(True)

dim = 0
print("dim =", dim)
softmax = nn.Softmax(dim=dim)  # softmax down each column
y = softmax(input_tensor)
y.backward(dy)
print("y =", y.data.numpy())
print("dx =", input_tensor.grad.numpy())

dim = 1
print("dim =", dim)
softmax = nn.Softmax(dim=dim)  # softmax along each row
y = softmax(input_tensor)
y.backward(dy)  # grads accumulate: this dx includes the dim=0 contribution above
print("y =", y.data.numpy())
print("dx =", input_tensor.grad.numpy())
softmaxtest_singa.py:
from singa import tensor
from singa import autograd
from singa import device
import numpy as np

autograd.training = True
dev = device.get_default_device()

x = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]).astype(np.float32).reshape((3, 4))
dynumpy = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]).astype(np.float32).reshape((3, 4))
print("x =", x)
print("dy =", dynumpy)

input_tensor = tensor.Tensor(data=x, device=dev)
dy = tensor.Tensor(data=dynumpy, device=dev)

dim = 0
print("dim =", dim)
y = autograd.softmax(input_tensor, axis=dim)  # PyTensor
dx = y.creator.backward(dy.data)  # CTensor
print("y =", tensor.to_numpy(tensor.from_raw_tensor(y.data)))
print("dx =", tensor.to_numpy(tensor.from_raw_tensor(dx)))

dim = 1
print("dim =", dim)
y = autograd.softmax(input_tensor, axis=dim)  # PyTensor
dx = y.creator.backward(dy.data)  # CTensor: raises the ValueError shown above
print("y =", tensor.to_numpy(tensor.from_raw_tensor(y.data)))
print("dx =", tensor.to_numpy(tensor.from_raw_tensor(dx)))