ANTLR runtime and generated code versions disagree: 4.8!=4.7.2
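
The graph below is the inlined TorchScript IR of a two-layer stacked LSTM built from LayerNormLSTMCell (the __torch__.custom_lstms.StackedLSTM module named in the first line; its layers "0" and "1" are LSTMLayer instances). A minimal sketch of how such a dump can be reproduced, assuming custom_lstms.py is PyTorch's stacked-LSTM example (benchmarks/fastrnns/custom_lstms.py) with its script_lnlstm helper; the sizes are illustrative, except that the normalized-shape constants 16 and 4 in the aten::layer_norm calls suggest hidden_size = 4:

import torch
from custom_lstms import script_lnlstm  # assumed: PyTorch's custom_lstms.py example on the path

# Illustrative sizes; num_layers = 2 matches the two LSTMLayer attributes in the graph.
input_size, hidden_size, num_layers = 4, 4, 2
seq_len, batch = 5, 3

# script_lnlstm builds a StackedLSTM of LSTMLayer(LayerNormLSTMCell) as a ScriptModule.
rnn = script_lnlstm(input_size, hidden_size, num_layers)

inp = torch.randn(seq_len, batch, input_size)
states = [(torch.randn(batch, hidden_size), torch.randn(batch, hidden_size))
          for _ in range(num_layers)]

out, out_states = rnn(inp, states)  # forward(input, states) -> (output, output_states)
print(rnn.graph)                    # prints IR in the form shown below
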
graph(%self : __torch__.custom_lstms.StackedLSTM,
%input.1 : Tensor,
%states.1 : (Tensor, Tensor)[]):
%i.2 : int = prim::Constant[value=0]() # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:80:8
%i.3 : int = prim::Constant[value=1]() # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:80:8
%output_states.1 : (Tensor, Tensor)[] = prim::ListConstruct()
%6 : __torch__.torch.nn.modules.container.ModuleList = prim::GetAttr[name="layers"](%self)
%7 : __torch__.custom_lstms.LSTMLayer = prim::GetAttr[name="0"](%6)
%8 : __torch__.custom_lstms.LSTMLayer = prim::GetAttr[name="1"](%6)
%state.1 : (Tensor, Tensor) = aten::__getitem__(%states.1, %i.2) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:81:20
%22 : bool = prim::Constant[value=1]() # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:54:8
%23 : int = prim::Constant[value=0]() # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:54:34
%outputs.2 : Tensor[] = prim::ListConstruct()
%25 : int = aten::size(%input.1, %23) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:54:23
%outputs.4 : Tensor[], %state.2 : (Tensor, Tensor) = prim::Loop(%25, %22, %outputs.2, %state.1) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:54:8
block0(%i.4 : int, %outputs.7 : Tensor[], %state.7 : (Tensor, Tensor)):
%31 : __torch__.custom_lstms.LayerNormLSTMCell = prim::GetAttr[name="cell"](%7)
%32 : Tensor = aten::select(%input.1, %23, %i.4) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:55:35
%33 : int = prim::Constant[value=4]() # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:32:60
%34 : int = prim::Constant[value=1]() # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:32:63
%hx.2 : Tensor, %cx.2 : Tensor = prim::TupleUnpack(%state.7)
%37 : __torch__.torch.nn.modules.normalization.LayerNorm = prim::GetAttr[name="layernorm_i"](%31)
%38 : Tensor = prim::GetAttr[name="weight_ih"](%31)
%39 : Tensor = aten::t(%38) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:29:50
%40 : Tensor = aten::mm(%32, %39) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:29:34
%41 : Function = prim::Constant[name="layer_norm"]()
%42 : float = prim::Constant[value=1.0000000000000001e-05]() # /home/masa/projects/deep/pytorch/torch/nn/modules/normalization.py:153:66
%43 : int = prim::Constant[value=16]() # /home/masa/projects/deep/pytorch/torch/nn/modules/normalization.py:153:19
%44 : Tensor = prim::GetAttr[name="weight"](%37)
%45 : Tensor = prim::GetAttr[name="bias"](%37)
%46 : int[] = prim::ListConstruct(%43)
%47 : bool = prim::Constant[value=1]() # /home/masa/projects/deep/pytorch/torch/nn/functional.py:1938:28
%igates.2 : Tensor = aten::layer_norm(%40, %46, %44, %45, %42, %47) # /home/masa/projects/deep/pytorch/torch/nn/functional.py:1937:11
%49 : __torch__.torch.nn.modules.normalization.LayerNorm = prim::GetAttr[name="layernorm_h"](%31)
%50 : Tensor = prim::GetAttr[name="weight_hh"](%31)
%51 : Tensor = aten::t(%50) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:30:47
%52 : Tensor = aten::mm(%hx.2, %51) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:30:34
%53 : Function = prim::Constant[name="layer_norm"]()
%54 : float = prim::Constant[value=1.0000000000000001e-05]() # /home/masa/projects/deep/pytorch/torch/nn/modules/normalization.py:153:66
%55 : int = prim::Constant[value=16]() # /home/masa/projects/deep/pytorch/torch/nn/modules/normalization.py:153:19
%56 : Tensor = prim::GetAttr[name="weight"](%49)
%57 : Tensor = prim::GetAttr[name="bias"](%49)
%58 : int[] = prim::ListConstruct(%55)
%59 : bool = prim::Constant[value=1]() # /home/masa/projects/deep/pytorch/torch/nn/functional.py:1938:28
%hgates.2 : Tensor = aten::layer_norm(%52, %58, %56, %57, %54, %59) # /home/masa/projects/deep/pytorch/torch/nn/functional.py:1937:11
%gates.2 : Tensor = aten::add(%igates.2, %hgates.2, %34) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:31:16
%62 : Tensor[] = aten::chunk(%gates.2, %33, %34) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:32:48
%ingate.2 : Tensor, %forgetgate.2 : Tensor, %cellgate.2 : Tensor, %outgate.2 : Tensor = prim::ListUnpack(%62)
%ingate.4 : Tensor = aten::sigmoid(%ingate.2) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:34:17
%forgetgate.4 : Tensor = aten::sigmoid(%forgetgate.2) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:35:21
%cellgate.4 : Tensor = aten::tanh(%cellgate.2) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:36:19
%outgate.4 : Tensor = aten::sigmoid(%outgate.2) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:37:18
%71 : __torch__.torch.nn.modules.normalization.___torch_mangle_0.LayerNorm = prim::GetAttr[name="layernorm_c"](%31)
%72 : Tensor = aten::mul(%forgetgate.4, %cx.2) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:39:31
%73 : Tensor = aten::mul(%ingate.4, %cellgate.4) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:39:51
%74 : Tensor = aten::add(%72, %73, %34) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:39:31
%75 : Function = prim::Constant[name="layer_norm"]()
%76 : float = prim::Constant[value=1.0000000000000001e-05]() # /home/masa/projects/deep/pytorch/torch/nn/modules/normalization.py:153:66
%77 : int = prim::Constant[value=4]() # /home/masa/projects/deep/pytorch/torch/nn/modules/normalization.py:153:19
%78 : Tensor = prim::GetAttr[name="weight"](%71)
%79 : Tensor = prim::GetAttr[name="bias"](%71)
%80 : int[] = prim::ListConstruct(%77)
%81 : bool = prim::Constant[value=1]() # /home/masa/projects/deep/pytorch/torch/nn/functional.py:1938:28
%cy.2 : Tensor = aten::layer_norm(%74, %80, %78, %79, %76, %81) # /home/masa/projects/deep/pytorch/torch/nn/functional.py:1937:11
%83 : Tensor = aten::tanh(%cy.2) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:40:23
%hy.2 : Tensor = aten::mul(%outgate.4, %83) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:40:13
%85 : (Tensor, Tensor) = prim::TupleConstruct(%hy.2, %cy.2)
%86 : (Tensor, (Tensor, Tensor)) = prim::TupleConstruct(%hy.2, %85)
%out.2 : Tensor, %state.5 : (Tensor, Tensor) = prim::TupleUnpack(%86)
%89 : Tensor[] = prim::ListConstruct(%out.2)
%outputs.5 : Tensor[] = aten::add_(%outputs.7, %89) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:56:12
-> (%22, %outputs.5, %state.5)
%91 : Tensor = aten::stack(%outputs.4, %23) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:57:15
%92 : (Tensor, (Tensor, Tensor)) = prim::TupleConstruct(%91, %state.2)
%output.2 : Tensor, %out_state.1 : (Tensor, Tensor) = prim::TupleUnpack(%92)
%13 : (Tensor, Tensor)[] = prim::ListConstruct(%out_state.1)
%output_states.3 : (Tensor, Tensor)[] = aten::add_(%output_states.1, %13) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:83:12
%state.4 : (Tensor, Tensor) = aten::__getitem__(%states.1, %i.3) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:81:20
%93 : bool = prim::Constant[value=1]() # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:54:8
%94 : int = prim::Constant[value=0]() # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:54:34
%outputs.1 : Tensor[] = prim::ListConstruct()
%96 : int = aten::size(%output.2, %94) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:54:23
%outputs : Tensor[], %state : (Tensor, Tensor) = prim::Loop(%96, %93, %outputs.1, %state.4) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:54:8
block0(%i.1 : int, %outputs.6 : Tensor[], %state.6 : (Tensor, Tensor)):
%102 : __torch__.custom_lstms.LayerNormLSTMCell = prim::GetAttr[name="cell"](%8)
%103 : Tensor = aten::select(%output.2, %94, %i.1) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:55:35
%104 : int = prim::Constant[value=4]() # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:32:60
%105 : int = prim::Constant[value=1]() # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:32:63
%hx.1 : Tensor, %cx.1 : Tensor = prim::TupleUnpack(%state.6)
%108 : __torch__.torch.nn.modules.normalization.LayerNorm = prim::GetAttr[name="layernorm_i"](%102)
%109 : Tensor = prim::GetAttr[name="weight_ih"](%102)
%110 : Tensor = aten::t(%109) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:29:50
%111 : Tensor = aten::mm(%103, %110) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:29:34
%112 : Function = prim::Constant[name="layer_norm"]()
%113 : float = prim::Constant[value=1.0000000000000001e-05]() # /home/masa/projects/deep/pytorch/torch/nn/modules/normalization.py:153:66
%114 : int = prim::Constant[value=16]() # /home/masa/projects/deep/pytorch/torch/nn/modules/normalization.py:153:19
%115 : Tensor = prim::GetAttr[name="weight"](%108)
%116 : Tensor = prim::GetAttr[name="bias"](%108)
%117 : int[] = prim::ListConstruct(%114)
%118 : bool = prim::Constant[value=1]() # /home/masa/projects/deep/pytorch/torch/nn/functional.py:1938:28
%igates.1 : Tensor = aten::layer_norm(%111, %117, %115, %116, %113, %118) # /home/masa/projects/deep/pytorch/torch/nn/functional.py:1937:11
%120 : __torch__.torch.nn.modules.normalization.LayerNorm = prim::GetAttr[name="layernorm_h"](%102)
%121 : Tensor = prim::GetAttr[name="weight_hh"](%102)
%122 : Tensor = aten::t(%121) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:30:47
%123 : Tensor = aten::mm(%hx.1, %122) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:30:34
%124 : Function = prim::Constant[name="layer_norm"]()
%125 : float = prim::Constant[value=1.0000000000000001e-05]() # /home/masa/projects/deep/pytorch/torch/nn/modules/normalization.py:153:66
%126 : int = prim::Constant[value=16]() # /home/masa/projects/deep/pytorch/torch/nn/modules/normalization.py:153:19
%127 : Tensor = prim::GetAttr[name="weight"](%120)
%128 : Tensor = prim::GetAttr[name="bias"](%120)
%129 : int[] = prim::ListConstruct(%126)
%130 : bool = prim::Constant[value=1]() # /home/masa/projects/deep/pytorch/torch/nn/functional.py:1938:28
%hgates.1 : Tensor = aten::layer_norm(%123, %129, %127, %128, %125, %130) # /home/masa/projects/deep/pytorch/torch/nn/functional.py:1937:11
%gates.1 : Tensor = aten::add(%igates.1, %hgates.1, %105) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:31:16
%133 : Tensor[] = aten::chunk(%gates.1, %104, %105) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:32:48
%ingate.1 : Tensor, %forgetgate.1 : Tensor, %cellgate.1 : Tensor, %outgate.1 : Tensor = prim::ListUnpack(%133)
%ingate.3 : Tensor = aten::sigmoid(%ingate.1) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:34:17
%forgetgate.3 : Tensor = aten::sigmoid(%forgetgate.1) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:35:21
%cellgate.3 : Tensor = aten::tanh(%cellgate.1) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:36:19
%outgate.3 : Tensor = aten::sigmoid(%outgate.1) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:37:18
%142 : __torch__.torch.nn.modules.normalization.___torch_mangle_0.LayerNorm = prim::GetAttr[name="layernorm_c"](%102)
%143 : Tensor = aten::mul(%forgetgate.3, %cx.1) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:39:31
%144 : Tensor = aten::mul(%ingate.3, %cellgate.3) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:39:51
%145 : Tensor = aten::add(%143, %144, %105) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:39:31
%146 : Function = prim::Constant[name="layer_norm"]()
%147 : float = prim::Constant[value=1.0000000000000001e-05]() # /home/masa/projects/deep/pytorch/torch/nn/modules/normalization.py:153:66
%148 : int = prim::Constant[value=4]() # /home/masa/projects/deep/pytorch/torch/nn/modules/normalization.py:153:19
%149 : Tensor = prim::GetAttr[name="weight"](%142)
%150 : Tensor = prim::GetAttr[name="bias"](%142)
%151 : int[] = prim::ListConstruct(%148)
%152 : bool = prim::Constant[value=1]() # /home/masa/projects/deep/pytorch/torch/nn/functional.py:1938:28
%cy.1 : Tensor = aten::layer_norm(%145, %151, %149, %150, %147, %152) # /home/masa/projects/deep/pytorch/torch/nn/functional.py:1937:11
%154 : Tensor = aten::tanh(%cy.1) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:40:23
%hy.1 : Tensor = aten::mul(%outgate.3, %154) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:40:13
%156 : (Tensor, Tensor) = prim::TupleConstruct(%hy.1, %cy.1)
%157 : (Tensor, (Tensor, Tensor)) = prim::TupleConstruct(%hy.1, %156)
%out.1 : Tensor, %state.3 : (Tensor, Tensor) = prim::TupleUnpack(%157)
%160 : Tensor[] = prim::ListConstruct(%out.1)
%outputs.3 : Tensor[] = aten::add_(%outputs.6, %160) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:56:12
-> (%93, %outputs.3, %state.3)
%162 : Tensor = aten::stack(%outputs, %94) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:57:15
%163 : (Tensor, (Tensor, Tensor)) = prim::TupleConstruct(%162, %state)
%output.4 : Tensor, %out_state.3 : (Tensor, Tensor) = prim::TupleUnpack(%163)
%19 : (Tensor, Tensor)[] = prim::ListConstruct(%out_state.3)
%output_states.5 : (Tensor, Tensor)[] = aten::add_(%output_states.3, %19) # /home/masa/projects/dev/torchscript-to-tvm/custom_lstms.py:83:12
%21 : (Tensor, (Tensor, Tensor)[]) = prim::TupleConstruct(%output.4, %output_states.5)
return (%21)
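
Each prim::Loop block above (one per layer) is the inlined body of LayerNormLSTMCell.forward applied to a single time step: aten::mm against the transposed weight_ih/weight_hh, layer norm on the two gate pre-activations, a four-way aten::chunk into the gates, and a final layer norm on the updated cell state. A sketch of the equivalent eager computation, with names chosen to mirror the IR rather than the author's exact code (ln_i, ln_h, ln_c stand in for the layernorm_i/layernorm_h/layernorm_c submodules):

import torch

def layernorm_lstm_cell(x, hx, cx, weight_ih, weight_hh, ln_i, ln_h, ln_c):
    # aten::t + aten::mm + aten::layer_norm on the input and hidden projections
    igates = ln_i(torch.mm(x, weight_ih.t()))
    hgates = ln_h(torch.mm(hx, weight_hh.t()))
    gates = igates + hgates                     # aten::add
    # aten::chunk(%gates, 4, 1) followed by prim::ListUnpack
    ingate, forgetgate, cellgate, outgate = gates.chunk(4, 1)
    ingate = torch.sigmoid(ingate)
    forgetgate = torch.sigmoid(forgetgate)
    cellgate = torch.tanh(cellgate)
    outgate = torch.sigmoid(outgate)
    # layer norm over the new cell state (layernorm_c), then the hidden update
    cy = ln_c(forgetgate * cx + ingate * cellgate)
    hy = outgate * torch.tanh(cy)
    return hy, cy
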