Skip to content

Instantly share code, notes, and snippets.

@ilia-cher
Created September 16, 2019 22:48
Show Gist options
  • Save ilia-cher/f925ace8ed45d9dff400abf2ea176a31 to your computer and use it in GitHub Desktop.
=================================== FAILURES ===================================
_____________________ TestOperators.test_quantized_linear ______________________
mod = <torch._C.Function object at 0x7f93329439e8>
inputs = (tensor([[0.9231, 0.3115, 0.7289, 0.8404, 0.4845, 0.9304, 0.4694, 0.5023, 0.0949,
0.8016, 0.9406, 0.9025],
...9277, 0.5878, 0.3807, 0.4685, 0.2934,
0.2472, 0.2436, 0.8439, 0.9898, 0.7428, 0.9442, 0.7064, 0.7519, 0.7751]))
running_what = 'trace'
def run_mod_and_filter_tensor_outputs(mod, inputs, running_what):
try:
> outs = wrap_retval(mod(*_clone_inputs(inputs)))
E RuntimeError: [22:33:25] /root/project/tvm/src/relay/backend/graph_plan_memory.cc:299: Check failed: pval != nullptr: Cannot allocate memory symbolic tensor shape [(nullptr)]
E Stack trace:
E [bt] (0) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(dmlc::LogMessageFatal::~LogMessageFatal()+0x43) [0x7f9346600f83]
E [bt] (1) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::StorageAllocator::GetMemorySize(tvm::relay::StorageToken*)+0x180) [0x7f9346c2a220]
E [bt] (2) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::StorageAllocator::CreateToken(tvm::relay::ExprNode const*, bool)+0x1c9) [0x7f9346c2d109]
E [bt] (3) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::StorageAllocator::VisitExpr_(tvm::relay::CallNode const*)+0x1b6) [0x7f9346c2dbf6]
E [bt] (4) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::ExprFunctor<void (tvm::relay::Expr const&)>::VisitExpr(tvm::relay::Expr const&)+0xc5) [0x7f9346b20c05]
E [bt] (5) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::ExprVisitor::VisitExpr(tvm::relay::Expr const&)+0x7f) [0x7f9346cc6c4f]
E [bt] (6) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::StorageAllocaBaseVisitor::GetToken(tvm::relay::Expr const&)+0x28) [0x7f9346c29fa8]
E [bt] (7) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::StorageAllocaBaseVisitor::VisitExpr_(tvm::relay::TupleGetItemNode const*)+0x33) [0x7f9346c2c633]
E [bt] (8) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::ExprFunctor<void (tvm::relay::Expr const&)>::VisitExpr(tvm::relay::Expr const&)+0xc5) [0x7f9346b20c05]
E
E
E The above operation failed in interpreter, with the following stack trace:
env/lib/python3.6/site-packages/torch/jit/__init__.py:615: RuntimeError
During handling of the above exception, another exception occurred:
args = (<test.test_operators.TestOperators testMethod=test_quantized_linear>,)
kwargs = {'out_features': 18, 'shape': [16, 12]}, _ = 0, k = 'out_features'
def f_impl(*args, **kwargs):
for _ in range(examples):
for k in kwargs_:
kwargs[k] = kwargs_[k]()
try:
fn(*args, **kwargs)
except Exception as e:
print("Inputs:", kwargs)
> raise (e)
test/util.py:61:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
test/util.py:58: in f_impl
fn(*args, **kwargs)
test/test_operators.py:345: in test_quantized_linear
ref_out, tvm_out = self.runBoth(fbgemm_quantized_linear, input, weight, bias)
test/util.py:79: in runBoth
trace_tvm = torch.jit.trace(func, inputs)
env/lib/python3.6/site-packages/torch/jit/__init__.py:875: in trace
_check_trace([example_inputs], func, traced, check_tolerance, _force_outplace, False, _module_class)
env/lib/python3.6/site-packages/torch/autograd/grad_mode.py:49: in decorate_no_grad
return func(*args, **kwargs)
env/lib/python3.6/site-packages/torch/jit/__init__.py:653: in _check_trace
traced_outs = run_mod_and_filter_tensor_outputs(traced_func, inputs, 'trace')
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
mod = <torch._C.Function object at 0x7f93329439e8>
inputs = (tensor([[0.9231, 0.3115, 0.7289, 0.8404, 0.4845, 0.9304, 0.4694, 0.5023, 0.0949,
0.8016, 0.9406, 0.9025],
...9277, 0.5878, 0.3807, 0.4685, 0.2934,
0.2472, 0.2436, 0.8439, 0.9898, 0.7428, 0.9442, 0.7064, 0.7519, 0.7751]))
running_what = 'trace'
def run_mod_and_filter_tensor_outputs(mod, inputs, running_what):
try:
outs = wrap_retval(mod(*_clone_inputs(inputs)))
outs = [out for out in outs if isinstance(out, torch.Tensor)]
return outs
except Exception as e:
raise TracingCheckError(*graph_diagnostic_info(),
extra_msg='Encountered an exception while running the ' + running_what +
> ' with test inputs.\nException:\n' + indent(str(e)))
E torch.jit.TracingCheckError: Tracing failed sanity checks!
E Encountered an exception while running the trace with test inputs.
E Exception:
E [22:33:25] /root/project/tvm/src/relay/backend/graph_plan_memory.cc:299: Check failed: pval != nullptr: Cannot allocate memory symbolic tensor shape [(nullptr)]
E Stack trace:
E [bt] (0) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(dmlc::LogMessageFatal::~LogMessageFatal()+0x43) [0x7f9346600f83]
E [bt] (1) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::StorageAllocator::GetMemorySize(tvm::relay::StorageToken*)+0x180) [0x7f9346c2a220]
E [bt] (2) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::StorageAllocator::CreateToken(tvm::relay::ExprNode const*, bool)+0x1c9) [0x7f9346c2d109]
E [bt] (3) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::StorageAllocator::VisitExpr_(tvm::relay::CallNode const*)+0x1b6) [0x7f9346c2dbf6]
E [bt] (4) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::ExprFunctor<void (tvm::relay::Expr const&)>::VisitExpr(tvm::relay::Expr const&)+0xc5) [0x7f9346b20c05]
E [bt] (5) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::ExprVisitor::VisitExpr(tvm::relay::Expr const&)+0x7f) [0x7f9346cc6c4f]
E [bt] (6) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::StorageAllocaBaseVisitor::GetToken(tvm::relay::Expr const&)+0x28) [0x7f9346c29fa8]
E [bt] (7) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::StorageAllocaBaseVisitor::VisitExpr_(tvm::relay::TupleGetItemNode const*)+0x33) [0x7f9346c2c633]
E [bt] (8) /root/project/env/lib/python3.6/site-packages/tvm-0.6.dev0-py3.6-linux-x86_64.egg/tvm/libtvm.so(tvm::relay::ExprFunctor<void (tvm::relay::Expr const&)>::VisitExpr(tvm::relay::Expr const&)+0xc5) [0x7f9346b20c05]
E
E
E The above operation failed in interpreter, with the following stack trace:
env/lib/python3.6/site-packages/torch/jit/__init__.py:621: TracingCheckError
----------------------------- Captured stdout call -----------------------------
Inputs: {'shape': [16, 12], 'out_features': 18}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment