Created
December 2, 2021 19:04
-
-
Save wonjoolee95/65bab9af0f3b90aeb34fbecb7de7d992 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
====================================================================== | |
ERROR: test_rnn_retain_variables_xla_float64 (__main__.TestNNDeviceTypeXLA) | |
---------------------------------------------------------------------- | |
Traceback (most recent call last): | |
File "/usr/local/google/home/wonjoo/anaconda3/lib/python3.8/site-packages/torch/testing/_comparison.py", line 1062, in assert_equal | |
pair.compare() | |
File "/usr/local/google/home/wonjoo/anaconda3/lib/python3.8/site-packages/torch/testing/_comparison.py", line 605, in compare | |
self._compare_values(actual, expected) | |
File "/usr/local/google/home/wonjoo/anaconda3/lib/python3.8/site-packages/torch/testing/_comparison.py", line 699, in _compare_values | |
compare_fn(actual, expected, rtol=self.rtol, atol=self.atol, equal_nan=self.equal_nan) | |
File "/usr/local/google/home/wonjoo/anaconda3/lib/python3.8/site-packages/torch/testing/_comparison.py", line 817, in _compare_regular_values | |
mismatches = ~comparison_fn(actual, expected, rtol=rtol, atol=atol, equal_nan=equal_nan) | |
File "/usr/local/google/home/wonjoo/anaconda3/lib/python3.8/site-packages/torch/testing/_comparison.py", line 854, in _isclose_xla | |
mask = a.to(torch.bool) & b.to(torch.bool) | |
RuntimeError: /usr/local/google/home/wonjoo/pytorch/xla/torch_xla/csrc/tensor_methods.cpp:304 : Check failed: xla::ShapeUtil::ElementIsIntegral(shape) || shape.element_type() == xla::PrimitiveType::PRED | |
*** Begin stack trace *** | |
tensorflow::CurrentStackTrace[abi:cxx11]() | |
torch_xla::XLATensor::bitwise_and(torch_xla::XLATensor const&, torch_xla::XLATensor const&) | |
torch_xla::XLANativeFunctions::bitwise_and(at::Tensor const&, at::Tensor const&) | |
at::_ops::bitwise_and_Tensor::call(at::Tensor const&, at::Tensor const&) | |
at::bitwise_and(at::Tensor const&, at::Tensor const&) | |
at::native::__and__(at::Tensor const&, at::Tensor const&) | |
at::_ops::__and___Tensor::call(at::Tensor const&, at::Tensor const&) | |
_PyObject_FastCall_Prepend | |
PyNumber_And | |
_PyEval_EvalFrameDefault | |
_PyEval_EvalCodeWithName | |
_PyEval_EvalCodeWithName | |
_PyFunction_Vectorcall | |
_PyFunction_Vectorcall | |
_PyEval_EvalCodeWithName | |
_PyFunction_Vectorcall | |
_PyEval_EvalCodeWithName | |
_PyFunction_Vectorcall | |
PyObject_Call | |
_PyEval_EvalFrameDefault | |
_PyEval_EvalCodeWithName | |
_PyFunction_Vectorcall | |
_PyFunction_Vectorcall | |
_PyEval_EvalCodeWithName | |
_PyFunction_Vectorcall | |
PyObject_Call | |
_PyEval_EvalFrameDefault | |
_PyEval_EvalCodeWithName | |
_PyFunction_Vectorcall | |
_PyEval_EvalCodeWithName | |
_PyEval_EvalCodeWithName | |
_PyEval_EvalCodeWithName | |
_PyEval_EvalCodeWithName | |
_PyFunction_Vectorcall | |
PyObject_Call | |
_PyEval_EvalFrameDefault | |
_PyEval_EvalCodeWithName | |
_PyObject_Call_Prepend | |
_PyObject_MakeTpCall | |
_PyEval_EvalFrameDefault | |
_PyEval_EvalCodeWithName | |
_PyFunction_Vectorcall | |
PyObject_Call | |
_PyEval_EvalFrameDefault | |
_PyEval_EvalCodeWithName | |
_PyObject_Call_Prepend | |
_PyObject_MakeTpCall | |
_PyEval_EvalFrameDefault | |
_PyEval_EvalCodeWithName | |
_PyFunction_Vectorcall | |
PyObject_Call | |
_PyEval_EvalFrameDefault | |
_PyEval_EvalCodeWithName | |
_PyObject_Call_Prepend | |
_PyObject_MakeTpCall | |
_PyEval_EvalFrameDefault | |
_PyFunction_Vectorcall | |
_PyFunction_Vectorcall | |
_PyEval_EvalCodeWithName | |
_PyFunction_Vectorcall | |
_PyObject_Call_Prepend | |
_PyObject_MakeTpCall | |
_PyEval_EvalFrameDefault | |
_PyEval_EvalCodeWithName | |
PyEval_EvalCode | |
PyRun_SimpleFileExFlags | |
Py_BytesMain | |
*** End stack trace *** | |
Operator __and__ is only supported for integer or boolean type tensors, got: f64[5,6,10]{2,1,0} | |
The above exception was the direct cause of the following exception: | |
Traceback (most recent call last): | |
File "/usr/local/google/home/wonjoo/anaconda3/lib/python3.8/site-packages/torch/testing/_internal/common_device_type.py", line 381, in instantiated_test | |
raise rte | |
File "/usr/local/google/home/wonjoo/anaconda3/lib/python3.8/site-packages/torch/testing/_internal/common_device_type.py", line 376, in instantiated_test | |
result = test(self, **param_kwargs) | |
File "/usr/local/google/home/wonjoo/pytorch/xla/test/../../test/test_nn.py", line 15971, in test_rnn_retain_variables | |
self._test_rnn_retain_variables(device, dtype) | |
File "/usr/local/google/home/wonjoo/pytorch/xla/test/../../test/test_nn.py", line 15966, in _test_rnn_retain_variables | |
self.assertEqual(grads, grads2) | |
File "/usr/local/google/home/wonjoo/pytorch/xla/test/pytorch_test_base.py", line 610, in assertEqual | |
return DeviceTypeTestBase.assertEqual(self, x, y, *args, **kwargs) | |
File "/usr/local/google/home/wonjoo/anaconda3/lib/python3.8/site-packages/torch/testing/_internal/common_utils.py", line 2004, in assertEqual | |
assert_equal( | |
File "/usr/local/google/home/wonjoo/anaconda3/lib/python3.8/site-packages/torch/testing/_comparison.py", line 1068, in assert_equal | |
raise RuntimeError( | |
RuntimeError: Comparing | |
TensorOrArrayPair( | |
id=(0,), | |
actual=tensor([[[ 2.1482e-02, 6.6671e-02, 1.5202e-02, 3.1239e-02, -1.2335e-01, | |
2.2080e-02, 4.2628e-02, 5.5614e-02, 1.2596e-01, -1.0200e-01], | |
[ 6.6665e-02, 9.3709e-02, -2.5195e-02, 7.6823e-02, -1.4017e-01, | |
3.7660e-02, -3.4559e-02, 6.2994e-02, 1.4740e-01, -1.0911e-01], | |
[ 1.2236e-02, 3.3914e-02, 2.0139e-02, 8.6304e-02, -1.5441e-01, | |
4.6683e-02, 2.1014e-03, 8.1676e-02, 1.3075e-01, -1.1148e-01], | |
[ 1.6670e-02, 4.8481e-03, 6.6260e-02, 4.7871e-02, -1.5319e-01, | |
5.4655e-02, 5.8243e-02, 5.2448e-02, 1.0466e-01, -1.8466e-01], | |
[ 4.0890e-02, 7.4031e-02, 3.4232e-02, 5.3748e-02, -1.0732e-01, | |
6.0856e-02, 5.2508e-02, 6.4979e-02, 1.0742e-01, -1.6613e-01], | |
[ 1.5815e-02, 6.3482e-02, 8.3798e-02, -2.6072e-02, -1.5854e-01, | |
5.0857e-02, 1.1184e-01, 3.1886e-03, 3.1781e-02, -1.7608e-01]], | |
[[-2.7875e-02, 4.0316e-02, 1.4813e-03, 4.7940e-02, -1.2574e-01, | |
-2.9045e-02, -6.3594e-03, 3.2919e-02, 8.6431e-02, -9.3862e-02], | |
[ 1.4751e-02, 1.0250e-02, 8.0218e-02, 3.6516e-02, -1.0510e-01, | |
-3.7467e-02, 2.1349e-02, 3.1667e-02, 1.1200e-01, -5.9675e-02], | |
[ 2.5078e-02, 4.5642e-02, 5.8048e-02, 3.8989e-02, -1.3810e-01, | |
9.6030e-03, 3.4022e-02, 6.3427e-02, 1.0015e-01, -8.8575e-02], | |
[ 1.5424e-02, -8.5428e-03, 9.6672e-02, 3.2726e-02, -9.6151e-02, | |
5.3122e-02, 5.7432e-02, 6.7217e-02, 8.5237e-02, -1.4943e-01], | |
[ 5.3847e-02, 5.6021e-02, 4.0440e-02, -1.2914e-02, -1.3117e-01, | |
-2.2860e-04, 4.8631e-02, 8.5770e-04, 2.9270e-02, -1.3900e-01], | |
[-8.5834e-03, 4.6160e-02, 5.9390e-02, 1.7667e-02, -1.0305e-01, | |
3.3183e-02, 7.7939e-02, 1.1797e-02, 7.2167e-02, -1.3576e-01]], | |
[[ 5.0445e-02, -3.2376e-02, 4.9327e-02, -1.2480e-02, -9.2677e-02, | |
1.5601e-02, 5.4991e-02, 3.7303e-02, 1.4710e-02, -1.5356e-01], | |
[ 1.1089e-02, 1.5667e-02, 3.6754e-02, 5.0605e-02, -6.3232e-02, | |
3.2226e-02, 3.0841e-02, 6.3293e-02, 9.5882e-02, -1.0360e-01], | |
[ 3.9332e-02, 2.7130e-02, 7.9292e-02, 2.7974e-02, -1.1478e-01, | |
3.4477e-02, 4.6639e-03, 4.5145e-02, 8.7582e-02, -7.8226e-02], | |
[-1.1988e-03, 6.1262e-02, 2.5192e-02, 4.6423e-02, -6.1332e-02, | |
-3.0328e-03, 5.5394e-03, 4.6386e-02, 9.6711e-02, -7.5974e-02], | |
[ 5.5044e-02, -1.7626e-03, 5.5595e-02, 3.3201e-02, -1.2260e-01, | |
-7.6357e-03, -4.3598e-03, 5.0671e-02, 9.2793e-02, -1.0036e-01], | |
[ 1.8151e-03, 5.1956e-02, 2.8330e-02, 1.6451e-02, -6.1553e-02, | |
2.0999e-02, 6.0088e-02, 2.0066e-02, 7.0601e-02, -1.3477e-01]], | |
[[ 3.5371e-02, 3.2387e-03, -1.9111e-03, 1.7190e-02, -9.4386e-02, | |
-2.7908e-02, -9.6138e-03, 3.2429e-02, 4.6134e-02, -6.6973e-02], | |
[-1.7130e-04, 2.1127e-03, 5.6856e-02, 2.6178e-02, -4.9824e-02, | |
-5.5776e-03, 1.2376e-02, 2.7418e-02, 7.9513e-02, -5.4756e-02], | |
[ 9.6062e-03, 1.2599e-02, 4.5086e-02, 1.7629e-02, -6.3150e-02, | |
-2.1658e-02, 1.4488e-02, 1.6142e-02, 5.7147e-02, -6.0021e-02], | |
[ 7.6090e-03, 1.9441e-02, 1.4322e-02, 2.6235e-02, -6.4189e-02, | |
-1.4463e-02, 1.2109e-02, 1.6807e-02, 6.8416e-02, -7.0903e-02], | |
[ 1.1019e-02, 3.1348e-02, 5.0524e-02, 2.1060e-02, -7.3195e-02, | |
-1.4854e-02, 1.3345e-02, 3.0575e-02, 4.5496e-02, -4.8461e-02], | |
[ 3.5963e-02, 9.0773e-03, 3.7191e-02, -1.5240e-02, -4.6526e-02, | |
-1.9192e-03, 3.7673e-02, 7.8276e-03, 3.0304e-02, -9.8969e-02]], | |
[[ 8.1676e-03, 2.0470e-02, 3.1533e-02, -1.0316e-03, -3.0028e-02, | |
-1.9095e-02, 2.3235e-02, 6.5387e-03, 2.2541e-02, -1.2757e-02], | |
[ 4.2487e-03, 5.7311e-03, 1.5324e-02, 2.2746e-02, -2.2264e-02, | |
5.7180e-03, -1.1162e-02, 2.2939e-02, 4.2777e-02, -4.5310e-02], | |
[ 1.6756e-02, 1.6117e-02, 2.6414e-02, -3.9545e-03, -2.3355e-02, | |
-3.0997e-03, 3.7366e-02, 4.8634e-03, 1.3865e-02, -4.7384e-02], | |
[ 9.3144e-03, 4.0886e-03, 1.2286e-02, 1.0644e-03, -3.2047e-02, | |
-2.1600e-02, -4.0605e-04, 8.7454e-03, 3.8266e-02, -3.3877e-02], | |
[ 1.6430e-02, -4.3319e-03, 2.0794e-02, 1.0169e-02, -3.5891e-02, | |
-1.4310e-03, 1.8626e-02, 1.5835e-02, 1.6215e-02, -4.8231e-02], | |
[-1.8604e-03, -6.2205e-03, 2.1708e-02, 2.5453e-02, -1.7012e-02, | |
-4.1217e-03, 1.2869e-02, 9.0593e-04, 3.9667e-02, -4.0885e-02]]], | |
device='xla:0'), | |
expected=tensor([[[ 2.1482e-02, 6.6671e-02, 1.5202e-02, 3.1239e-02, -1.2335e-01, | |
2.2080e-02, 4.2628e-02, 5.5614e-02, 1.2596e-01, -1.0200e-01], | |
[ 6.6665e-02, 9.3709e-02, -2.5195e-02, 7.6823e-02, -1.4017e-01, | |
3.7660e-02, -3.4559e-02, 6.2994e-02, 1.4740e-01, -1.0911e-01], | |
[ 1.2236e-02, 3.3914e-02, 2.0139e-02, 8.6304e-02, -1.5441e-01, | |
4.6683e-02, 2.1014e-03, 8.1676e-02, 1.3075e-01, -1.1148e-01], | |
[ 1.6670e-02, 4.8481e-03, 6.6260e-02, 4.7871e-02, -1.5319e-01, | |
5.4655e-02, 5.8243e-02, 5.2448e-02, 1.0466e-01, -1.8466e-01], | |
[ 4.0890e-02, 7.4031e-02, 3.4232e-02, 5.3748e-02, -1.0732e-01, | |
6.0856e-02, 5.2508e-02, 6.4979e-02, 1.0742e-01, -1.6613e-01], | |
[ 1.5815e-02, 6.3482e-02, 8.3798e-02, -2.6072e-02, -1.5854e-01, | |
5.0857e-02, 1.1184e-01, 3.1886e-03, 3.1781e-02, -1.7608e-01]], | |
[[-2.7875e-02, 4.0316e-02, 1.4813e-03, 4.7940e-02, -1.2574e-01, | |
-2.9045e-02, -6.3594e-03, 3.2919e-02, 8.6431e-02, -9.3862e-02], | |
[ 1.4751e-02, 1.0250e-02, 8.0218e-02, 3.6516e-02, -1.0510e-01, | |
-3.7467e-02, 2.1349e-02, 3.1667e-02, 1.1200e-01, -5.9675e-02], | |
[ 2.5078e-02, 4.5642e-02, 5.8048e-02, 3.8989e-02, -1.3810e-01, | |
9.6030e-03, 3.4022e-02, 6.3427e-02, 1.0015e-01, -8.8575e-02], | |
[ 1.5424e-02, -8.5428e-03, 9.6672e-02, 3.2726e-02, -9.6151e-02, | |
5.3122e-02, 5.7432e-02, 6.7217e-02, 8.5237e-02, -1.4943e-01], | |
[ 5.3847e-02, 5.6021e-02, 4.0440e-02, -1.2914e-02, -1.3117e-01, | |
-2.2860e-04, 4.8631e-02, 8.5770e-04, 2.9270e-02, -1.3900e-01], | |
[-8.5834e-03, 4.6160e-02, 5.9390e-02, 1.7667e-02, -1.0305e-01, | |
3.3183e-02, 7.7939e-02, 1.1797e-02, 7.2167e-02, -1.3576e-01]], | |
[[ 5.0445e-02, -3.2376e-02, 4.9327e-02, -1.2480e-02, -9.2677e-02, | |
1.5601e-02, 5.4991e-02, 3.7303e-02, 1.4710e-02, -1.5356e-01], | |
[ 1.1089e-02, 1.5667e-02, 3.6754e-02, 5.0605e-02, -6.3232e-02, | |
3.2226e-02, 3.0841e-02, 6.3293e-02, 9.5882e-02, -1.0360e-01], | |
[ 3.9332e-02, 2.7130e-02, 7.9292e-02, 2.7974e-02, -1.1478e-01, | |
3.4477e-02, 4.6639e-03, 4.5145e-02, 8.7582e-02, -7.8226e-02], | |
[-1.1988e-03, 6.1262e-02, 2.5192e-02, 4.6423e-02, -6.1332e-02, | |
-3.0328e-03, 5.5394e-03, 4.6386e-02, 9.6711e-02, -7.5974e-02], | |
[ 5.5044e-02, -1.7626e-03, 5.5595e-02, 3.3201e-02, -1.2260e-01, | |
-7.6357e-03, -4.3598e-03, 5.0671e-02, 9.2793e-02, -1.0036e-01], | |
[ 1.8151e-03, 5.1956e-02, 2.8330e-02, 1.6451e-02, -6.1553e-02, | |
2.0999e-02, 6.0088e-02, 2.0066e-02, 7.0601e-02, -1.3477e-01]], | |
[[ 3.5371e-02, 3.2387e-03, -1.9111e-03, 1.7190e-02, -9.4386e-02, | |
-2.7908e-02, -9.6138e-03, 3.2429e-02, 4.6134e-02, -6.6973e-02], | |
[-1.7130e-04, 2.1127e-03, 5.6856e-02, 2.6178e-02, -4.9824e-02, | |
-5.5776e-03, 1.2376e-02, 2.7418e-02, 7.9513e-02, -5.4756e-02], | |
[ 9.6062e-03, 1.2599e-02, 4.5086e-02, 1.7629e-02, -6.3150e-02, | |
-2.1658e-02, 1.4488e-02, 1.6142e-02, 5.7147e-02, -6.0021e-02], | |
[ 7.6090e-03, 1.9441e-02, 1.4322e-02, 2.6235e-02, -6.4189e-02, | |
-1.4463e-02, 1.2109e-02, 1.6807e-02, 6.8416e-02, -7.0903e-02], | |
[ 1.1019e-02, 3.1348e-02, 5.0524e-02, 2.1060e-02, -7.3195e-02, | |
-1.4854e-02, 1.3345e-02, 3.0575e-02, 4.5496e-02, -4.8461e-02], | |
[ 3.5963e-02, 9.0773e-03, 3.7191e-02, -1.5240e-02, -4.6526e-02, | |
-1.9192e-03, 3.7673e-02, 7.8276e-03, 3.0304e-02, -9.8969e-02]], | |
[[ 8.1676e-03, 2.0470e-02, 3.1533e-02, -1.0316e-03, -3.0028e-02, | |
-1.9095e-02, 2.3235e-02, 6.5387e-03, 2.2541e-02, -1.2757e-02], | |
[ 4.2487e-03, 5.7311e-03, 1.5324e-02, 2.2746e-02, -2.2264e-02, | |
5.7180e-03, -1.1162e-02, 2.2939e-02, 4.2777e-02, -4.5310e-02], | |
[ 1.6756e-02, 1.6117e-02, 2.6414e-02, -3.9545e-03, -2.3355e-02, | |
-3.0997e-03, 3.7366e-02, 4.8634e-03, 1.3865e-02, -4.7384e-02], | |
[ 9.3144e-03, 4.0886e-03, 1.2286e-02, 1.0644e-03, -3.2047e-02, | |
-2.1600e-02, -4.0605e-04, 8.7454e-03, 3.8266e-02, -3.3877e-02], | |
[ 1.6430e-02, -4.3319e-03, 2.0794e-02, 1.0169e-02, -3.5891e-02, | |
-1.4310e-03, 1.8626e-02, 1.5835e-02, 1.6215e-02, -4.8231e-02], | |
[-1.8604e-03, -6.2205e-03, 2.1708e-02, 2.5453e-02, -1.7012e-02, | |
-4.1217e-03, 1.2869e-02, 9.0593e-04, 3.9667e-02, -4.0885e-02]]], | |
device='xla:0'), | |
rtol=0.001, | |
atol=0.001, | |
equal_nan=True, | |
check_device=False, | |
check_dtype=True, | |
check_layout=False, | |
check_stride=False, | |
check_is_coalesced=False, | |
) | |
resulted in the unexpected exception above. If you are a user and see this message during normal operation please file an issue at https://github.com/pytorch/pytorch/issues. If you are a developer and working on the comparison functions, please catch the previous error and raise an expressive `ErrorMeta` instead. | |
---------------------------------------------------------------------- | |
Ran 416 tests in 5604.936s | |
FAILED (errors=1, skipped=281) |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment