@vivekkhandelwal1
Created February 15, 2022 16:56
This file has been truncated.
Args: ./build/bin/torch-mlir-opt --torchscript-module-to-torch-backend-pipeline --torch-backend-to-linalg-on-tensors-backend-pipeline -debug -print-ir-after-all ../../misc/frontend.mlir
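This invokes torch-mlir's two-stage lowering: --torchscript-module-to-torch-backend-pipeline brings the imported TorchScript module (frontend.mlir) down to the torch backend contract, and --torch-backend-to-linalg-on-tensors-backend-pipeline then lowers those torch ops onto linalg-on-tensors. The -debug and -print-ir-after-all flags produce the dialect-loading trace and the per-pass IR dumps that follow.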
Load new dialect in Context builtin
Load new dialect in Context builtin
Load new dialect in Context torch
Load new dialect in Context std
Load new dialect in Context arith
Load new dialect in Context affine
Load new dialect in Context linalg
Load new dialect in Context math
Load new dialect in Context memref
Load new dialect in Context tensor
Load new dialect in Context scf
Load new dialect in Context torch_c
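The functions in the dump below are TorchScript imports of fairseq's incremental-state machinery on MultiheadAttention, duplicated once per mangled module copy. As a reading aid, here is a sketch of the Python the IR encodes; it is paraphrased from fairseq (method and attribute names such as _incremental_state_id follow the IR, but treat it as indicative, not the verbatim source):

# Reorder each cached tensor in the attention input buffer along dim 0,
# as reorder_incremental_state below does via torch.prim.Loop / torch.prim.If.
def reorder_incremental_state(self, incremental_state, new_order):
    input_buffer = self._get_input_buffer(incremental_state)
    for k in input_buffer.keys():
        input_buffer_k = input_buffer[k]
        if input_buffer_k is not None:
            # Encoder-decoder attention: the cache is already ordered when the
            # cached batch size matches new_order, so stop early (this break is
            # the pair of booleans threaded through torch.prim.If in the IR).
            if (self.encoder_decoder_attention
                    and input_buffer_k.size(0) == new_order.size(0)):
                break
            input_buffer[k] = input_buffer_k.index_select(0, new_order)
    return self._set_input_buffer(incremental_state, input_buffer)

# Helpers mirrored by the *_incremental_state functions in the dump:
def _get_full_incremental_state_key(self, key):
    return "{}.{}".format(self._incremental_state_id, key)

def get_incremental_state(self, incremental_state, key):
    full_key = self._get_full_incremental_state_key(key)
    if incremental_state is None or full_key not in incremental_state:
        return None
    return incremental_state[full_key]

def _get_input_buffer(self, incremental_state):
    result = self.get_incremental_state(incremental_state, "attn_state")
    return result if result is not None else {}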
// -----// IR Dump After SymbolDCE //----- //
module attributes {torch.debug_module_name = "XLMR_model"} {
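// Note: each MultiheadAttention stores its cache as
// dict<str, dict<str, Optional[Tensor]>>, keyed by
// "{_incremental_state_id}.attn_state" (see _get_full_incremental_state_key).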
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention.reorder_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.tensor) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%true_14 = torch.constant.bool true
%false_15 = torch.constant.bool false
%none_16 = torch.constant.none
%int0_17 = torch.constant.int 0
%int9223372036854775807 = torch.constant.int 9223372036854775807
%int1_18 = torch.constant.int 1
%373 = torch.prim.Uninitialized : !torch.bool
%374 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%375 = torch.prim.CallMethod %arg0["_get_input_buffer"] (%374) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%376 = torch.aten.keys.str %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> -> !torch.list<!torch.str>
%377 = torch.aten.len.t %376 : !torch.list<!torch.str> -> !torch.int
%378 = torch.aten.gt.int %377, %int0_17 : !torch.int, !torch.int -> !torch.bool
%379 = torch.prim.Loop %int9223372036854775807, %378, init(%int0_17) {
^bb0(%arg3: !torch.int, %arg4: !torch.int):
%381 = torch.aten.__getitem__.t %376, %arg4 : !torch.list<!torch.str>, !torch.int -> !torch.str
%382 = torch.aten.__getitem__.Dict_str %375, %381 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str -> !torch.optional<!torch.tensor>
%383 = torch.aten.__isnot__ %382, %none_16 : !torch.optional<!torch.tensor>, !torch.none -> !torch.bool
%384:2 = torch.prim.If %383 -> (!torch.bool, !torch.bool) {
%389 = torch.prim.unchecked_cast %382 : !torch.optional<!torch.tensor> -> !torch.tensor
%390 = torch.prim.GetAttr %arg0["encoder_decoder_attention"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention"> -> !torch.bool
%391 = torch.prim.If %390 -> (!torch.bool) {
%393 = torch.aten.size.int %389, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%394 = torch.aten.size.int %arg2, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%395 = torch.aten.eq.int %393, %394 : !torch.int, !torch.int -> !torch.bool
torch.prim.If.yield %395 : !torch.bool
} else {
torch.prim.If.yield %false_15 : !torch.bool
}
%392:2 = torch.prim.If %391 -> (!torch.bool, !torch.bool) {
torch.prim.If.yield %true_14, %false_15 : !torch.bool, !torch.bool
} else {
%393 = torch.aten.index_select %389, %int0_17, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
torch.aten._set_item.str %375, %381, %393 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str, !torch.tensor
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
torch.prim.If.yield %392#0, %392#1 : !torch.bool, !torch.bool
} else {
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
%385 = torch.prim.If %384#0 -> (!torch.bool) {
torch.prim.If.yield %384#1 : !torch.bool
} else {
torch.prim.If.yield %true_14 : !torch.bool
}
%386 = torch.aten.add.int %arg4, %int1_18 : !torch.int, !torch.int -> !torch.int
%387 = torch.aten.lt.int %386, %377 : !torch.int, !torch.int -> !torch.bool
%388 = torch.aten.__and__.bool %387, %385 : !torch.bool, !torch.bool -> !torch.bool
torch.prim.Loop.condition %388, iter(%386 : !torch.int)
} : (!torch.int, !torch.bool, !torch.int) -> !torch.int
%380 = torch.prim.CallMethod %arg0["_set_input_buffer"] (%arg1, %375) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention">, (!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %380 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention._get_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>> {
%none_14 = torch.constant.none
%str_15 = torch.constant.str "attn_state"
%373 = torch.prim.CallMethod %arg0["get_incremental_state"] (%arg1, %str_15) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%374 = torch.aten.__isnot__ %373, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.none -> !torch.bool
%375 = torch.prim.If %374 -> (!torch.dict<!torch.str, !torch.optional<!torch.tensor>>) {
%376 = torch.prim.unchecked_cast %373 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
} else {
%376 = torch.prim.DictConstruct keys() values() -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
return %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention.get_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> {
%true_14 = torch.constant.bool true
%none_15 = torch.constant.none
%373 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention">, (!torch.str) -> !torch.str
%374 = torch.aten.__is__ %arg1, %none_15 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%375:2 = torch.prim.If %374 -> (!torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
torch.prim.If.yield %true_14, %arg1 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
%377 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__contains__.str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.bool
%379 = torch.aten.__not__ %378 : !torch.bool -> !torch.bool
%380 = torch.derefine %377 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %379, %380 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
%376 = torch.prim.If %375#0 -> (!torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>) {
%377 = torch.derefine %none_15 : !torch.none to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
} else {
%377 = torch.prim.unchecked_cast %375#1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__getitem__.Dict_str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%379 = torch.derefine %378 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %379 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
return %376 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention._get_full_incremental_state_key(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention">, %arg1: !torch.str) -> !torch.str {
%str_14 = torch.constant.str "{}.{}"
%373 = torch.prim.GetAttr %arg0["_incremental_state_id"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention"> -> !torch.str
%374 = torch.aten.format(%str_14, %373, %arg1) : !torch.str, !torch.str, !torch.str -> !torch.str
return %374 : !torch.str
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention._set_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%str_14 = torch.constant.str "attn_state"
%373 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%374 = torch.prim.CallMethod %arg0["set_incremental_state"] (%373, %str_14, %arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention.set_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str, %arg3: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%none_14 = torch.constant.none
%373 = torch.aten.__isnot__ %arg1, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%374 = torch.prim.If %373 -> (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
%375 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%376 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention">, (!torch.str) -> !torch.str
torch.aten._set_item.str %375, %376, %arg3 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%377 = torch.derefine %375 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
torch.prim.If.yield %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
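// The functions below repeat the same six methods for the remaining mangled
// MultiheadAttention copies (___torch_mangle_347, _360, _373, _386); only the
// !torch.nn.Module type in the signatures differs.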
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention.reorder_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.tensor) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%true_14 = torch.constant.bool true
%false_15 = torch.constant.bool false
%none_16 = torch.constant.none
%int0_17 = torch.constant.int 0
%int9223372036854775807 = torch.constant.int 9223372036854775807
%int1_18 = torch.constant.int 1
%373 = torch.prim.Uninitialized : !torch.bool
%374 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%375 = torch.prim.CallMethod %arg0["_get_input_buffer"] (%374) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%376 = torch.aten.keys.str %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> -> !torch.list<!torch.str>
%377 = torch.aten.len.t %376 : !torch.list<!torch.str> -> !torch.int
%378 = torch.aten.gt.int %377, %int0_17 : !torch.int, !torch.int -> !torch.bool
%379 = torch.prim.Loop %int9223372036854775807, %378, init(%int0_17) {
^bb0(%arg3: !torch.int, %arg4: !torch.int):
%381 = torch.aten.__getitem__.t %376, %arg4 : !torch.list<!torch.str>, !torch.int -> !torch.str
%382 = torch.aten.__getitem__.Dict_str %375, %381 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str -> !torch.optional<!torch.tensor>
%383 = torch.aten.__isnot__ %382, %none_16 : !torch.optional<!torch.tensor>, !torch.none -> !torch.bool
%384:2 = torch.prim.If %383 -> (!torch.bool, !torch.bool) {
%389 = torch.prim.unchecked_cast %382 : !torch.optional<!torch.tensor> -> !torch.tensor
%390 = torch.prim.GetAttr %arg0["encoder_decoder_attention"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention"> -> !torch.bool
%391 = torch.prim.If %390 -> (!torch.bool) {
%393 = torch.aten.size.int %389, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%394 = torch.aten.size.int %arg2, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%395 = torch.aten.eq.int %393, %394 : !torch.int, !torch.int -> !torch.bool
torch.prim.If.yield %395 : !torch.bool
} else {
torch.prim.If.yield %false_15 : !torch.bool
}
%392:2 = torch.prim.If %391 -> (!torch.bool, !torch.bool) {
torch.prim.If.yield %true_14, %false_15 : !torch.bool, !torch.bool
} else {
%393 = torch.aten.index_select %389, %int0_17, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
torch.aten._set_item.str %375, %381, %393 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str, !torch.tensor
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
torch.prim.If.yield %392#0, %392#1 : !torch.bool, !torch.bool
} else {
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
%385 = torch.prim.If %384#0 -> (!torch.bool) {
torch.prim.If.yield %384#1 : !torch.bool
} else {
torch.prim.If.yield %true_14 : !torch.bool
}
%386 = torch.aten.add.int %arg4, %int1_18 : !torch.int, !torch.int -> !torch.int
%387 = torch.aten.lt.int %386, %377 : !torch.int, !torch.int -> !torch.bool
%388 = torch.aten.__and__.bool %387, %385 : !torch.bool, !torch.bool -> !torch.bool
torch.prim.Loop.condition %388, iter(%386 : !torch.int)
} : (!torch.int, !torch.bool, !torch.int) -> !torch.int
%380 = torch.prim.CallMethod %arg0["_set_input_buffer"] (%arg1, %375) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention">, (!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %380 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention._get_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>> {
%none_14 = torch.constant.none
%str_15 = torch.constant.str "attn_state"
%373 = torch.prim.CallMethod %arg0["get_incremental_state"] (%arg1, %str_15) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%374 = torch.aten.__isnot__ %373, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.none -> !torch.bool
%375 = torch.prim.If %374 -> (!torch.dict<!torch.str, !torch.optional<!torch.tensor>>) {
%376 = torch.prim.unchecked_cast %373 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
} else {
%376 = torch.prim.DictConstruct keys() values() -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
return %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention.get_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> {
%true_14 = torch.constant.bool true
%none_15 = torch.constant.none
%373 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention">, (!torch.str) -> !torch.str
%374 = torch.aten.__is__ %arg1, %none_15 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%375:2 = torch.prim.If %374 -> (!torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
torch.prim.If.yield %true_14, %arg1 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
%377 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__contains__.str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.bool
%379 = torch.aten.__not__ %378 : !torch.bool -> !torch.bool
%380 = torch.derefine %377 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %379, %380 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
%376 = torch.prim.If %375#0 -> (!torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>) {
%377 = torch.derefine %none_15 : !torch.none to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
} else {
%377 = torch.prim.unchecked_cast %375#1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__getitem__.Dict_str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%379 = torch.derefine %378 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %379 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
return %376 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention._get_full_incremental_state_key(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention">, %arg1: !torch.str) -> !torch.str {
%str_14 = torch.constant.str "{}.{}"
%373 = torch.prim.GetAttr %arg0["_incremental_state_id"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention"> -> !torch.str
%374 = torch.aten.format(%str_14, %373, %arg1) : !torch.str, !torch.str, !torch.str -> !torch.str
return %374 : !torch.str
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention._set_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%str_14 = torch.constant.str "attn_state"
%373 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%374 = torch.prim.CallMethod %arg0["set_incremental_state"] (%373, %str_14, %arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention.set_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str, %arg3: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%none_14 = torch.constant.none
%373 = torch.aten.__isnot__ %arg1, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%374 = torch.prim.If %373 -> (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
%375 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%376 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention">, (!torch.str) -> !torch.str
torch.aten._set_item.str %375, %376, %arg3 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%377 = torch.derefine %375 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
torch.prim.If.yield %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention.reorder_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.tensor) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%true_14 = torch.constant.bool true
%false_15 = torch.constant.bool false
%none_16 = torch.constant.none
%int0_17 = torch.constant.int 0
%int9223372036854775807 = torch.constant.int 9223372036854775807
%int1_18 = torch.constant.int 1
%373 = torch.prim.Uninitialized : !torch.bool
%374 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%375 = torch.prim.CallMethod %arg0["_get_input_buffer"] (%374) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%376 = torch.aten.keys.str %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> -> !torch.list<!torch.str>
%377 = torch.aten.len.t %376 : !torch.list<!torch.str> -> !torch.int
%378 = torch.aten.gt.int %377, %int0_17 : !torch.int, !torch.int -> !torch.bool
%379 = torch.prim.Loop %int9223372036854775807, %378, init(%int0_17) {
^bb0(%arg3: !torch.int, %arg4: !torch.int):
%381 = torch.aten.__getitem__.t %376, %arg4 : !torch.list<!torch.str>, !torch.int -> !torch.str
%382 = torch.aten.__getitem__.Dict_str %375, %381 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str -> !torch.optional<!torch.tensor>
%383 = torch.aten.__isnot__ %382, %none_16 : !torch.optional<!torch.tensor>, !torch.none -> !torch.bool
%384:2 = torch.prim.If %383 -> (!torch.bool, !torch.bool) {
%389 = torch.prim.unchecked_cast %382 : !torch.optional<!torch.tensor> -> !torch.tensor
%390 = torch.prim.GetAttr %arg0["encoder_decoder_attention"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention"> -> !torch.bool
%391 = torch.prim.If %390 -> (!torch.bool) {
%393 = torch.aten.size.int %389, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%394 = torch.aten.size.int %arg2, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%395 = torch.aten.eq.int %393, %394 : !torch.int, !torch.int -> !torch.bool
torch.prim.If.yield %395 : !torch.bool
} else {
torch.prim.If.yield %false_15 : !torch.bool
}
%392:2 = torch.prim.If %391 -> (!torch.bool, !torch.bool) {
torch.prim.If.yield %true_14, %false_15 : !torch.bool, !torch.bool
} else {
%393 = torch.aten.index_select %389, %int0_17, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
torch.aten._set_item.str %375, %381, %393 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str, !torch.tensor
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
torch.prim.If.yield %392#0, %392#1 : !torch.bool, !torch.bool
} else {
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
%385 = torch.prim.If %384#0 -> (!torch.bool) {
torch.prim.If.yield %384#1 : !torch.bool
} else {
torch.prim.If.yield %true_14 : !torch.bool
}
%386 = torch.aten.add.int %arg4, %int1_18 : !torch.int, !torch.int -> !torch.int
%387 = torch.aten.lt.int %386, %377 : !torch.int, !torch.int -> !torch.bool
%388 = torch.aten.__and__.bool %387, %385 : !torch.bool, !torch.bool -> !torch.bool
torch.prim.Loop.condition %388, iter(%386 : !torch.int)
} : (!torch.int, !torch.bool, !torch.int) -> !torch.int
%380 = torch.prim.CallMethod %arg0["_set_input_buffer"] (%arg1, %375) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention">, (!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %380 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention._get_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>> {
%none_14 = torch.constant.none
%str_15 = torch.constant.str "attn_state"
%373 = torch.prim.CallMethod %arg0["get_incremental_state"] (%arg1, %str_15) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%374 = torch.aten.__isnot__ %373, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.none -> !torch.bool
%375 = torch.prim.If %374 -> (!torch.dict<!torch.str, !torch.optional<!torch.tensor>>) {
%376 = torch.prim.unchecked_cast %373 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
} else {
%376 = torch.prim.DictConstruct keys() values() -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
return %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention.get_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> {
%true_14 = torch.constant.bool true
%none_15 = torch.constant.none
%373 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention">, (!torch.str) -> !torch.str
%374 = torch.aten.__is__ %arg1, %none_15 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%375:2 = torch.prim.If %374 -> (!torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
torch.prim.If.yield %true_14, %arg1 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
%377 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__contains__.str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.bool
%379 = torch.aten.__not__ %378 : !torch.bool -> !torch.bool
%380 = torch.derefine %377 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %379, %380 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
%376 = torch.prim.If %375#0 -> (!torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>) {
%377 = torch.derefine %none_15 : !torch.none to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
} else {
%377 = torch.prim.unchecked_cast %375#1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__getitem__.Dict_str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%379 = torch.derefine %378 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %379 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
return %376 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention._get_full_incremental_state_key(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention">, %arg1: !torch.str) -> !torch.str {
%str_14 = torch.constant.str "{}.{}"
%373 = torch.prim.GetAttr %arg0["_incremental_state_id"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention"> -> !torch.str
%374 = torch.aten.format(%str_14, %373, %arg1) : !torch.str, !torch.str, !torch.str -> !torch.str
return %374 : !torch.str
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention._set_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%str_14 = torch.constant.str "attn_state"
%373 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%374 = torch.prim.CallMethod %arg0["set_incremental_state"] (%373, %str_14, %arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention.set_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str, %arg3: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%none_14 = torch.constant.none
%373 = torch.aten.__isnot__ %arg1, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%374 = torch.prim.If %373 -> (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
%375 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%376 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention">, (!torch.str) -> !torch.str
torch.aten._set_item.str %375, %376, %arg3 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%377 = torch.derefine %375 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
torch.prim.If.yield %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention.reorder_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.tensor) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%true_14 = torch.constant.bool true
%false_15 = torch.constant.bool false
%none_16 = torch.constant.none
%int0_17 = torch.constant.int 0
%int9223372036854775807 = torch.constant.int 9223372036854775807
%int1_18 = torch.constant.int 1
%373 = torch.prim.Uninitialized : !torch.bool
%374 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%375 = torch.prim.CallMethod %arg0["_get_input_buffer"] (%374) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%376 = torch.aten.keys.str %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> -> !torch.list<!torch.str>
%377 = torch.aten.len.t %376 : !torch.list<!torch.str> -> !torch.int
%378 = torch.aten.gt.int %377, %int0_17 : !torch.int, !torch.int -> !torch.bool
%379 = torch.prim.Loop %int9223372036854775807, %378, init(%int0_17) {
^bb0(%arg3: !torch.int, %arg4: !torch.int):
%381 = torch.aten.__getitem__.t %376, %arg4 : !torch.list<!torch.str>, !torch.int -> !torch.str
%382 = torch.aten.__getitem__.Dict_str %375, %381 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str -> !torch.optional<!torch.tensor>
%383 = torch.aten.__isnot__ %382, %none_16 : !torch.optional<!torch.tensor>, !torch.none -> !torch.bool
%384:2 = torch.prim.If %383 -> (!torch.bool, !torch.bool) {
%389 = torch.prim.unchecked_cast %382 : !torch.optional<!torch.tensor> -> !torch.tensor
%390 = torch.prim.GetAttr %arg0["encoder_decoder_attention"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention"> -> !torch.bool
%391 = torch.prim.If %390 -> (!torch.bool) {
%393 = torch.aten.size.int %389, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%394 = torch.aten.size.int %arg2, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%395 = torch.aten.eq.int %393, %394 : !torch.int, !torch.int -> !torch.bool
torch.prim.If.yield %395 : !torch.bool
} else {
torch.prim.If.yield %false_15 : !torch.bool
}
%392:2 = torch.prim.If %391 -> (!torch.bool, !torch.bool) {
torch.prim.If.yield %true_14, %false_15 : !torch.bool, !torch.bool
} else {
%393 = torch.aten.index_select %389, %int0_17, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
torch.aten._set_item.str %375, %381, %393 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str, !torch.tensor
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
torch.prim.If.yield %392#0, %392#1 : !torch.bool, !torch.bool
} else {
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
%385 = torch.prim.If %384#0 -> (!torch.bool) {
torch.prim.If.yield %384#1 : !torch.bool
} else {
torch.prim.If.yield %true_14 : !torch.bool
}
%386 = torch.aten.add.int %arg4, %int1_18 : !torch.int, !torch.int -> !torch.int
%387 = torch.aten.lt.int %386, %377 : !torch.int, !torch.int -> !torch.bool
%388 = torch.aten.__and__.bool %387, %385 : !torch.bool, !torch.bool -> !torch.bool
torch.prim.Loop.condition %388, iter(%386 : !torch.int)
} : (!torch.int, !torch.bool, !torch.int) -> !torch.int
%380 = torch.prim.CallMethod %arg0["_set_input_buffer"] (%arg1, %375) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention">, (!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %380 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention._get_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>> {
%none_14 = torch.constant.none
%str_15 = torch.constant.str "attn_state"
%373 = torch.prim.CallMethod %arg0["get_incremental_state"] (%arg1, %str_15) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%374 = torch.aten.__isnot__ %373, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.none -> !torch.bool
%375 = torch.prim.If %374 -> (!torch.dict<!torch.str, !torch.optional<!torch.tensor>>) {
%376 = torch.prim.unchecked_cast %373 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
} else {
%376 = torch.prim.DictConstruct keys() values() -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
return %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention.get_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> {
%true_14 = torch.constant.bool true
%none_15 = torch.constant.none
%373 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention">, (!torch.str) -> !torch.str
%374 = torch.aten.__is__ %arg1, %none_15 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%375:2 = torch.prim.If %374 -> (!torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
torch.prim.If.yield %true_14, %arg1 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
%377 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__contains__.str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.bool
%379 = torch.aten.__not__ %378 : !torch.bool -> !torch.bool
%380 = torch.derefine %377 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %379, %380 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
%376 = torch.prim.If %375#0 -> (!torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>) {
%377 = torch.derefine %none_15 : !torch.none to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
} else {
%377 = torch.prim.unchecked_cast %375#1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__getitem__.Dict_str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%379 = torch.derefine %378 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %379 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
return %376 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention._get_full_incremental_state_key(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention">, %arg1: !torch.str) -> !torch.str {
%str_14 = torch.constant.str "{}.{}"
%373 = torch.prim.GetAttr %arg0["_incremental_state_id"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention"> -> !torch.str
%374 = torch.aten.format(%str_14, %373, %arg1) : !torch.str, !torch.str, !torch.str -> !torch.str
return %374 : !torch.str
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention._set_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%str_14 = torch.constant.str "attn_state"
%373 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%374 = torch.prim.CallMethod %arg0["set_incremental_state"] (%373, %str_14, %arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention.set_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str, %arg3: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%none_14 = torch.constant.none
%373 = torch.aten.__isnot__ %arg1, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%374 = torch.prim.If %373 -> (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
%375 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%376 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention">, (!torch.str) -> !torch.str
torch.aten._set_item.str %375, %376, %arg3 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%377 = torch.derefine %375 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
torch.prim.If.yield %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention.reorder_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.tensor) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%true_14 = torch.constant.bool true
%false_15 = torch.constant.bool false
%none_16 = torch.constant.none
%int0_17 = torch.constant.int 0
%int9223372036854775807 = torch.constant.int 9223372036854775807
%int1_18 = torch.constant.int 1
%373 = torch.prim.Uninitialized : !torch.bool
%374 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%375 = torch.prim.CallMethod %arg0["_get_input_buffer"] (%374) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%376 = torch.aten.keys.str %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> -> !torch.list<!torch.str>
%377 = torch.aten.len.t %376 : !torch.list<!torch.str> -> !torch.int
%378 = torch.aten.gt.int %377, %int0_17 : !torch.int, !torch.int -> !torch.bool
%379 = torch.prim.Loop %int9223372036854775807, %378, init(%int0_17) {
^bb0(%arg3: !torch.int, %arg4: !torch.int):
%381 = torch.aten.__getitem__.t %376, %arg4 : !torch.list<!torch.str>, !torch.int -> !torch.str
%382 = torch.aten.__getitem__.Dict_str %375, %381 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str -> !torch.optional<!torch.tensor>
%383 = torch.aten.__isnot__ %382, %none_16 : !torch.optional<!torch.tensor>, !torch.none -> !torch.bool
%384:2 = torch.prim.If %383 -> (!torch.bool, !torch.bool) {
%389 = torch.prim.unchecked_cast %382 : !torch.optional<!torch.tensor> -> !torch.tensor
%390 = torch.prim.GetAttr %arg0["encoder_decoder_attention"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention"> -> !torch.bool
%391 = torch.prim.If %390 -> (!torch.bool) {
%393 = torch.aten.size.int %389, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%394 = torch.aten.size.int %arg2, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%395 = torch.aten.eq.int %393, %394 : !torch.int, !torch.int -> !torch.bool
torch.prim.If.yield %395 : !torch.bool
} else {
torch.prim.If.yield %false_15 : !torch.bool
}
%392:2 = torch.prim.If %391 -> (!torch.bool, !torch.bool) {
torch.prim.If.yield %true_14, %false_15 : !torch.bool, !torch.bool
} else {
%393 = torch.aten.index_select %389, %int0_17, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
torch.aten._set_item.str %375, %381, %393 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str, !torch.tensor
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
torch.prim.If.yield %392#0, %392#1 : !torch.bool, !torch.bool
} else {
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
%385 = torch.prim.If %384#0 -> (!torch.bool) {
torch.prim.If.yield %384#1 : !torch.bool
} else {
torch.prim.If.yield %true_14 : !torch.bool
}
%386 = torch.aten.add.int %arg4, %int1_18 : !torch.int, !torch.int -> !torch.int
%387 = torch.aten.lt.int %386, %377 : !torch.int, !torch.int -> !torch.bool
%388 = torch.aten.__and__.bool %387, %385 : !torch.bool, !torch.bool -> !torch.bool
torch.prim.Loop.condition %388, iter(%386 : !torch.int)
} : (!torch.int, !torch.bool, !torch.int) -> !torch.int
%380 = torch.prim.CallMethod %arg0["_set_input_buffer"] (%arg1, %375) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">, (!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %380 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
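  // A minimal Python sketch of the control flow above, reconstructed from
  // the IR (hypothetical names; not the fairseq source verbatim):
  //
  //   input_buffer = self._get_input_buffer(incremental_state)
  //   for k in input_buffer.keys():
  //       buf = input_buffer[k]
  //       if buf is not None:
  //           if self.encoder_decoder_attention and buf.size(0) == new_order.size(0):
  //               break
  //           input_buffer[k] = buf.index_select(0, new_order)
  //   return self._set_input_buffer(incremental_state, input_buffer)
  //
  // _get_input_buffer: fetches the "attn_state" dict from the incremental
  // state via get_incremental_state, falling back to an empty dict.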
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention._get_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>> {
%none_14 = torch.constant.none
%str_15 = torch.constant.str "attn_state"
%373 = torch.prim.CallMethod %arg0["get_incremental_state"] (%arg1, %str_15) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%374 = torch.aten.__isnot__ %373, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.none -> !torch.bool
%375 = torch.prim.If %374 -> (!torch.dict<!torch.str, !torch.optional<!torch.tensor>>) {
%376 = torch.prim.unchecked_cast %373 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
} else {
%376 = torch.prim.DictConstruct keys() values() -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
return %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
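  // get_incremental_state: resolves the fully qualified key; returns None
  // when the state dict is absent or lacks the key, otherwise the stored
  // per-module buffer dict.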
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention.get_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> {
%true_14 = torch.constant.bool true
%none_15 = torch.constant.none
%373 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">, (!torch.str) -> !torch.str
%374 = torch.aten.__is__ %arg1, %none_15 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%375:2 = torch.prim.If %374 -> (!torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
torch.prim.If.yield %true_14, %arg1 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
%377 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__contains__.str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.bool
%379 = torch.aten.__not__ %378 : !torch.bool -> !torch.bool
%380 = torch.derefine %377 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %379, %380 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
%376 = torch.prim.If %375#0 -> (!torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>) {
%377 = torch.derefine %none_15 : !torch.none to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
} else {
%377 = torch.prim.unchecked_cast %375#1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__getitem__.Dict_str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%379 = torch.derefine %378 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %379 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
return %376 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
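  // _get_full_incremental_state_key: formats "{}.{}" from the module's
  // _incremental_state_id and the given key.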
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention._get_full_incremental_state_key(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">, %arg1: !torch.str) -> !torch.str {
%str_14 = torch.constant.str "{}.{}"
%373 = torch.prim.GetAttr %arg0["_incremental_state_id"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention"> -> !torch.str
%374 = torch.aten.format(%str_14, %373, %arg1) : !torch.str, !torch.str, !torch.str -> !torch.str
return %374 : !torch.str
}
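  // _set_input_buffer: stores the buffer back under "attn_state" via
  // set_incremental_state.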
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention._set_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%str_14 = torch.constant.str "attn_state"
%373 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%374 = torch.prim.CallMethod %arg0["set_incremental_state"] (%373, %str_14, %arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
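  // set_incremental_state: when the incremental state is non-None, writes
  // the value under the fully qualified key and returns the updated dict;
  // otherwise the None input is passed through.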
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention.set_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str, %arg3: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%none_14 = torch.constant.none
%373 = torch.aten.__isnot__ %arg1, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%374 = torch.prim.If %373 -> (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
%375 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%376 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">, (!torch.str) -> !torch.str
torch.aten._set_item.str %375, %376, %arg3 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%377 = torch.derefine %375 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
torch.prim.If.yield %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
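  // The six functions below repeat the same incremental-state helpers,
  // specialized for the ___torch_mangle_399 MultiheadAttention instance.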
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention.reorder_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.tensor) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%true_14 = torch.constant.bool true
%false_15 = torch.constant.bool false
%none_16 = torch.constant.none
%int0_17 = torch.constant.int 0
%int9223372036854775807 = torch.constant.int 9223372036854775807
%int1_18 = torch.constant.int 1
%373 = torch.prim.Uninitialized : !torch.bool
%374 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%375 = torch.prim.CallMethod %arg0["_get_input_buffer"] (%374) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%376 = torch.aten.keys.str %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> -> !torch.list<!torch.str>
%377 = torch.aten.len.t %376 : !torch.list<!torch.str> -> !torch.int
%378 = torch.aten.gt.int %377, %int0_17 : !torch.int, !torch.int -> !torch.bool
%379 = torch.prim.Loop %int9223372036854775807, %378, init(%int0_17) {
^bb0(%arg3: !torch.int, %arg4: !torch.int):
%381 = torch.aten.__getitem__.t %376, %arg4 : !torch.list<!torch.str>, !torch.int -> !torch.str
%382 = torch.aten.__getitem__.Dict_str %375, %381 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str -> !torch.optional<!torch.tensor>
%383 = torch.aten.__isnot__ %382, %none_16 : !torch.optional<!torch.tensor>, !torch.none -> !torch.bool
%384:2 = torch.prim.If %383 -> (!torch.bool, !torch.bool) {
%389 = torch.prim.unchecked_cast %382 : !torch.optional<!torch.tensor> -> !torch.tensor
%390 = torch.prim.GetAttr %arg0["encoder_decoder_attention"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention"> -> !torch.bool
%391 = torch.prim.If %390 -> (!torch.bool) {
%393 = torch.aten.size.int %389, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%394 = torch.aten.size.int %arg2, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%395 = torch.aten.eq.int %393, %394 : !torch.int, !torch.int -> !torch.bool
torch.prim.If.yield %395 : !torch.bool
} else {
torch.prim.If.yield %false_15 : !torch.bool
}
%392:2 = torch.prim.If %391 -> (!torch.bool, !torch.bool) {
torch.prim.If.yield %true_14, %false_15 : !torch.bool, !torch.bool
} else {
%393 = torch.aten.index_select %389, %int0_17, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
torch.aten._set_item.str %375, %381, %393 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str, !torch.tensor
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
torch.prim.If.yield %392#0, %392#1 : !torch.bool, !torch.bool
} else {
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
%385 = torch.prim.If %384#0 -> (!torch.bool) {
torch.prim.If.yield %384#1 : !torch.bool
} else {
torch.prim.If.yield %true_14 : !torch.bool
}
%386 = torch.aten.add.int %arg4, %int1_18 : !torch.int, !torch.int -> !torch.int
%387 = torch.aten.lt.int %386, %377 : !torch.int, !torch.int -> !torch.bool
%388 = torch.aten.__and__.bool %387, %385 : !torch.bool, !torch.bool -> !torch.bool
torch.prim.Loop.condition %388, iter(%386 : !torch.int)
} : (!torch.int, !torch.bool, !torch.int) -> !torch.int
%380 = torch.prim.CallMethod %arg0["_set_input_buffer"] (%arg1, %375) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">, (!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %380 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention._get_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>> {
%none_14 = torch.constant.none
%str_15 = torch.constant.str "attn_state"
%373 = torch.prim.CallMethod %arg0["get_incremental_state"] (%arg1, %str_15) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%374 = torch.aten.__isnot__ %373, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.none -> !torch.bool
%375 = torch.prim.If %374 -> (!torch.dict<!torch.str, !torch.optional<!torch.tensor>>) {
%376 = torch.prim.unchecked_cast %373 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
} else {
%376 = torch.prim.DictConstruct keys() values() -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
return %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention.get_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> {
%true_14 = torch.constant.bool true
%none_15 = torch.constant.none
%373 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">, (!torch.str) -> !torch.str
%374 = torch.aten.__is__ %arg1, %none_15 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%375:2 = torch.prim.If %374 -> (!torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
torch.prim.If.yield %true_14, %arg1 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
%377 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__contains__.str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.bool
%379 = torch.aten.__not__ %378 : !torch.bool -> !torch.bool
%380 = torch.derefine %377 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %379, %380 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
%376 = torch.prim.If %375#0 -> (!torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>) {
%377 = torch.derefine %none_15 : !torch.none to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
} else {
%377 = torch.prim.unchecked_cast %375#1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__getitem__.Dict_str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%379 = torch.derefine %378 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %379 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
return %376 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention._get_full_incremental_state_key(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">, %arg1: !torch.str) -> !torch.str {
%str_14 = torch.constant.str "{}.{}"
%373 = torch.prim.GetAttr %arg0["_incremental_state_id"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention"> -> !torch.str
%374 = torch.aten.format(%str_14, %373, %arg1) : !torch.str, !torch.str, !torch.str -> !torch.str
return %374 : !torch.str
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention._set_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%str_14 = torch.constant.str "attn_state"
%373 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%374 = torch.prim.CallMethod %arg0["set_incremental_state"] (%373, %str_14, %arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention.set_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str, %arg3: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%none_14 = torch.constant.none
%373 = torch.aten.__isnot__ %arg1, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%374 = torch.prim.If %373 -> (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
%375 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%376 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">, (!torch.str) -> !torch.str
torch.aten._set_item.str %375, %376, %arg3 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%377 = torch.derefine %375 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
torch.prim.If.yield %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
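  // Same six helpers again, specialized for ___torch_mangle_412.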
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention.reorder_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.tensor) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%true_14 = torch.constant.bool true
%false_15 = torch.constant.bool false
%none_16 = torch.constant.none
%int0_17 = torch.constant.int 0
%int9223372036854775807 = torch.constant.int 9223372036854775807
%int1_18 = torch.constant.int 1
%373 = torch.prim.Uninitialized : !torch.bool
%374 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%375 = torch.prim.CallMethod %arg0["_get_input_buffer"] (%374) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%376 = torch.aten.keys.str %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> -> !torch.list<!torch.str>
%377 = torch.aten.len.t %376 : !torch.list<!torch.str> -> !torch.int
%378 = torch.aten.gt.int %377, %int0_17 : !torch.int, !torch.int -> !torch.bool
%379 = torch.prim.Loop %int9223372036854775807, %378, init(%int0_17) {
^bb0(%arg3: !torch.int, %arg4: !torch.int):
%381 = torch.aten.__getitem__.t %376, %arg4 : !torch.list<!torch.str>, !torch.int -> !torch.str
%382 = torch.aten.__getitem__.Dict_str %375, %381 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str -> !torch.optional<!torch.tensor>
%383 = torch.aten.__isnot__ %382, %none_16 : !torch.optional<!torch.tensor>, !torch.none -> !torch.bool
%384:2 = torch.prim.If %383 -> (!torch.bool, !torch.bool) {
%389 = torch.prim.unchecked_cast %382 : !torch.optional<!torch.tensor> -> !torch.tensor
%390 = torch.prim.GetAttr %arg0["encoder_decoder_attention"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention"> -> !torch.bool
%391 = torch.prim.If %390 -> (!torch.bool) {
%393 = torch.aten.size.int %389, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%394 = torch.aten.size.int %arg2, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%395 = torch.aten.eq.int %393, %394 : !torch.int, !torch.int -> !torch.bool
torch.prim.If.yield %395 : !torch.bool
} else {
torch.prim.If.yield %false_15 : !torch.bool
}
%392:2 = torch.prim.If %391 -> (!torch.bool, !torch.bool) {
torch.prim.If.yield %true_14, %false_15 : !torch.bool, !torch.bool
} else {
%393 = torch.aten.index_select %389, %int0_17, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
torch.aten._set_item.str %375, %381, %393 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str, !torch.tensor
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
torch.prim.If.yield %392#0, %392#1 : !torch.bool, !torch.bool
} else {
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
%385 = torch.prim.If %384#0 -> (!torch.bool) {
torch.prim.If.yield %384#1 : !torch.bool
} else {
torch.prim.If.yield %true_14 : !torch.bool
}
%386 = torch.aten.add.int %arg4, %int1_18 : !torch.int, !torch.int -> !torch.int
%387 = torch.aten.lt.int %386, %377 : !torch.int, !torch.int -> !torch.bool
%388 = torch.aten.__and__.bool %387, %385 : !torch.bool, !torch.bool -> !torch.bool
torch.prim.Loop.condition %388, iter(%386 : !torch.int)
} : (!torch.int, !torch.bool, !torch.int) -> !torch.int
%380 = torch.prim.CallMethod %arg0["_set_input_buffer"] (%arg1, %375) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">, (!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %380 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention._get_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>> {
%none_14 = torch.constant.none
%str_15 = torch.constant.str "attn_state"
%373 = torch.prim.CallMethod %arg0["get_incremental_state"] (%arg1, %str_15) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%374 = torch.aten.__isnot__ %373, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.none -> !torch.bool
%375 = torch.prim.If %374 -> (!torch.dict<!torch.str, !torch.optional<!torch.tensor>>) {
%376 = torch.prim.unchecked_cast %373 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
} else {
%376 = torch.prim.DictConstruct keys() values() -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
return %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention.get_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> {
%true_14 = torch.constant.bool true
%none_15 = torch.constant.none
%373 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">, (!torch.str) -> !torch.str
%374 = torch.aten.__is__ %arg1, %none_15 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%375:2 = torch.prim.If %374 -> (!torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
torch.prim.If.yield %true_14, %arg1 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
%377 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__contains__.str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.bool
%379 = torch.aten.__not__ %378 : !torch.bool -> !torch.bool
%380 = torch.derefine %377 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %379, %380 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
%376 = torch.prim.If %375#0 -> (!torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>) {
%377 = torch.derefine %none_15 : !torch.none to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
} else {
%377 = torch.prim.unchecked_cast %375#1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__getitem__.Dict_str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%379 = torch.derefine %378 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %379 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
return %376 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention._get_full_incremental_state_key(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">, %arg1: !torch.str) -> !torch.str {
%str_14 = torch.constant.str "{}.{}"
%373 = torch.prim.GetAttr %arg0["_incremental_state_id"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention"> -> !torch.str
%374 = torch.aten.format(%str_14, %373, %arg1) : !torch.str, !torch.str, !torch.str -> !torch.str
return %374 : !torch.str
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention._set_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%str_14 = torch.constant.str "attn_state"
%373 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%374 = torch.prim.CallMethod %arg0["set_incremental_state"] (%373, %str_14, %arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention.set_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str, %arg3: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%none_14 = torch.constant.none
%373 = torch.aten.__isnot__ %arg1, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%374 = torch.prim.If %373 -> (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
%375 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%376 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">, (!torch.str) -> !torch.str
torch.aten._set_item.str %375, %376, %arg3 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%377 = torch.derefine %375 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
torch.prim.If.yield %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
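  // Same six helpers again, specialized for ___torch_mangle_425.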
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention.reorder_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.tensor) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%true_14 = torch.constant.bool true
%false_15 = torch.constant.bool false
%none_16 = torch.constant.none
%int0_17 = torch.constant.int 0
%int9223372036854775807 = torch.constant.int 9223372036854775807
%int1_18 = torch.constant.int 1
%373 = torch.prim.Uninitialized : !torch.bool
%374 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%375 = torch.prim.CallMethod %arg0["_get_input_buffer"] (%374) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%376 = torch.aten.keys.str %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> -> !torch.list<!torch.str>
%377 = torch.aten.len.t %376 : !torch.list<!torch.str> -> !torch.int
%378 = torch.aten.gt.int %377, %int0_17 : !torch.int, !torch.int -> !torch.bool
%379 = torch.prim.Loop %int9223372036854775807, %378, init(%int0_17) {
^bb0(%arg3: !torch.int, %arg4: !torch.int):
%381 = torch.aten.__getitem__.t %376, %arg4 : !torch.list<!torch.str>, !torch.int -> !torch.str
%382 = torch.aten.__getitem__.Dict_str %375, %381 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str -> !torch.optional<!torch.tensor>
%383 = torch.aten.__isnot__ %382, %none_16 : !torch.optional<!torch.tensor>, !torch.none -> !torch.bool
%384:2 = torch.prim.If %383 -> (!torch.bool, !torch.bool) {
%389 = torch.prim.unchecked_cast %382 : !torch.optional<!torch.tensor> -> !torch.tensor
%390 = torch.prim.GetAttr %arg0["encoder_decoder_attention"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention"> -> !torch.bool
%391 = torch.prim.If %390 -> (!torch.bool) {
%393 = torch.aten.size.int %389, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%394 = torch.aten.size.int %arg2, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%395 = torch.aten.eq.int %393, %394 : !torch.int, !torch.int -> !torch.bool
torch.prim.If.yield %395 : !torch.bool
} else {
torch.prim.If.yield %false_15 : !torch.bool
}
%392:2 = torch.prim.If %391 -> (!torch.bool, !torch.bool) {
torch.prim.If.yield %true_14, %false_15 : !torch.bool, !torch.bool
} else {
%393 = torch.aten.index_select %389, %int0_17, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
torch.aten._set_item.str %375, %381, %393 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str, !torch.tensor
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
torch.prim.If.yield %392#0, %392#1 : !torch.bool, !torch.bool
} else {
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
%385 = torch.prim.If %384#0 -> (!torch.bool) {
torch.prim.If.yield %384#1 : !torch.bool
} else {
torch.prim.If.yield %true_14 : !torch.bool
}
%386 = torch.aten.add.int %arg4, %int1_18 : !torch.int, !torch.int -> !torch.int
%387 = torch.aten.lt.int %386, %377 : !torch.int, !torch.int -> !torch.bool
%388 = torch.aten.__and__.bool %387, %385 : !torch.bool, !torch.bool -> !torch.bool
torch.prim.Loop.condition %388, iter(%386 : !torch.int)
} : (!torch.int, !torch.bool, !torch.int) -> !torch.int
%380 = torch.prim.CallMethod %arg0["_set_input_buffer"] (%arg1, %375) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">, (!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %380 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention._get_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>> {
%none_14 = torch.constant.none
%str_15 = torch.constant.str "attn_state"
%373 = torch.prim.CallMethod %arg0["get_incremental_state"] (%arg1, %str_15) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%374 = torch.aten.__isnot__ %373, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.none -> !torch.bool
%375 = torch.prim.If %374 -> (!torch.dict<!torch.str, !torch.optional<!torch.tensor>>) {
%376 = torch.prim.unchecked_cast %373 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
} else {
%376 = torch.prim.DictConstruct keys() values() -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
return %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention.get_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> {
%true_14 = torch.constant.bool true
%none_15 = torch.constant.none
%373 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">, (!torch.str) -> !torch.str
%374 = torch.aten.__is__ %arg1, %none_15 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%375:2 = torch.prim.If %374 -> (!torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
torch.prim.If.yield %true_14, %arg1 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
%377 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__contains__.str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.bool
%379 = torch.aten.__not__ %378 : !torch.bool -> !torch.bool
%380 = torch.derefine %377 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %379, %380 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
%376 = torch.prim.If %375#0 -> (!torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>) {
%377 = torch.derefine %none_15 : !torch.none to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
} else {
%377 = torch.prim.unchecked_cast %375#1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__getitem__.Dict_str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%379 = torch.derefine %378 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %379 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
return %376 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention._get_full_incremental_state_key(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">, %arg1: !torch.str) -> !torch.str {
%str_14 = torch.constant.str "{}.{}"
%373 = torch.prim.GetAttr %arg0["_incremental_state_id"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention"> -> !torch.str
%374 = torch.aten.format(%str_14, %373, %arg1) : !torch.str, !torch.str, !torch.str -> !torch.str
return %374 : !torch.str
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention._set_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%str_14 = torch.constant.str "attn_state"
%373 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%374 = torch.prim.CallMethod %arg0["set_incremental_state"] (%373, %str_14, %arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention.set_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str, %arg3: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%none_14 = torch.constant.none
%373 = torch.aten.__isnot__ %arg1, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%374 = torch.prim.If %373 -> (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
%375 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%376 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">, (!torch.str) -> !torch.str
torch.aten._set_item.str %375, %376, %arg3 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%377 = torch.derefine %375 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
torch.prim.If.yield %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
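  // Same six helpers again, specialized for ___torch_mangle_438.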
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention.reorder_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.tensor) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%true_14 = torch.constant.bool true
%false_15 = torch.constant.bool false
%none_16 = torch.constant.none
%int0_17 = torch.constant.int 0
%int9223372036854775807 = torch.constant.int 9223372036854775807
%int1_18 = torch.constant.int 1
%373 = torch.prim.Uninitialized : !torch.bool
%374 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%375 = torch.prim.CallMethod %arg0["_get_input_buffer"] (%374) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%376 = torch.aten.keys.str %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> -> !torch.list<!torch.str>
%377 = torch.aten.len.t %376 : !torch.list<!torch.str> -> !torch.int
%378 = torch.aten.gt.int %377, %int0_17 : !torch.int, !torch.int -> !torch.bool
%379 = torch.prim.Loop %int9223372036854775807, %378, init(%int0_17) {
^bb0(%arg3: !torch.int, %arg4: !torch.int):
%381 = torch.aten.__getitem__.t %376, %arg4 : !torch.list<!torch.str>, !torch.int -> !torch.str
%382 = torch.aten.__getitem__.Dict_str %375, %381 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str -> !torch.optional<!torch.tensor>
%383 = torch.aten.__isnot__ %382, %none_16 : !torch.optional<!torch.tensor>, !torch.none -> !torch.bool
%384:2 = torch.prim.If %383 -> (!torch.bool, !torch.bool) {
%389 = torch.prim.unchecked_cast %382 : !torch.optional<!torch.tensor> -> !torch.tensor
%390 = torch.prim.GetAttr %arg0["encoder_decoder_attention"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention"> -> !torch.bool
%391 = torch.prim.If %390 -> (!torch.bool) {
%393 = torch.aten.size.int %389, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%394 = torch.aten.size.int %arg2, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%395 = torch.aten.eq.int %393, %394 : !torch.int, !torch.int -> !torch.bool
torch.prim.If.yield %395 : !torch.bool
} else {
torch.prim.If.yield %false_15 : !torch.bool
}
%392:2 = torch.prim.If %391 -> (!torch.bool, !torch.bool) {
torch.prim.If.yield %true_14, %false_15 : !torch.bool, !torch.bool
} else {
%393 = torch.aten.index_select %389, %int0_17, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
torch.aten._set_item.str %375, %381, %393 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str, !torch.tensor
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
torch.prim.If.yield %392#0, %392#1 : !torch.bool, !torch.bool
} else {
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
%385 = torch.prim.If %384#0 -> (!torch.bool) {
torch.prim.If.yield %384#1 : !torch.bool
} else {
torch.prim.If.yield %true_14 : !torch.bool
}
%386 = torch.aten.add.int %arg4, %int1_18 : !torch.int, !torch.int -> !torch.int
%387 = torch.aten.lt.int %386, %377 : !torch.int, !torch.int -> !torch.bool
%388 = torch.aten.__and__.bool %387, %385 : !torch.bool, !torch.bool -> !torch.bool
torch.prim.Loop.condition %388, iter(%386 : !torch.int)
} : (!torch.int, !torch.bool, !torch.int) -> !torch.int
%380 = torch.prim.CallMethod %arg0["_set_input_buffer"] (%arg1, %375) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">, (!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %380 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention._get_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>> {
%none_14 = torch.constant.none
%str_15 = torch.constant.str "attn_state"
%373 = torch.prim.CallMethod %arg0["get_incremental_state"] (%arg1, %str_15) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%374 = torch.aten.__isnot__ %373, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.none -> !torch.bool
%375 = torch.prim.If %374 -> (!torch.dict<!torch.str, !torch.optional<!torch.tensor>>) {
%376 = torch.prim.unchecked_cast %373 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
} else {
%376 = torch.prim.DictConstruct keys() values() -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
return %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention.get_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> {
%true_14 = torch.constant.bool true
%none_15 = torch.constant.none
%373 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">, (!torch.str) -> !torch.str
%374 = torch.aten.__is__ %arg1, %none_15 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%375:2 = torch.prim.If %374 -> (!torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
torch.prim.If.yield %true_14, %arg1 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
%377 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__contains__.str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.bool
%379 = torch.aten.__not__ %378 : !torch.bool -> !torch.bool
%380 = torch.derefine %377 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %379, %380 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
%376 = torch.prim.If %375#0 -> (!torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>) {
%377 = torch.derefine %none_15 : !torch.none to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
} else {
%377 = torch.prim.unchecked_cast %375#1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__getitem__.Dict_str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%379 = torch.derefine %378 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %379 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
return %376 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
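  // _get_full_incremental_state_key: prefixes the caller's key with the module's
  // unique `_incremental_state_id` via "{}.{}".format(...). In fairseq's Python
  // source this helper is roughly:
  //     def _get_full_incremental_state_key(self, key: str) -> str:
  //         return "{}.{}".format(self._incremental_state_id, key)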
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention._get_full_incremental_state_key(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">, %arg1: !torch.str) -> !torch.str {
%str_14 = torch.constant.str "{}.{}"
%373 = torch.prim.GetAttr %arg0["_incremental_state_id"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention"> -> !torch.str
%374 = torch.aten.format(%str_14, %373, %arg1) : !torch.str, !torch.str, !torch.str -> !torch.str
return %374 : !torch.str
}
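  // _set_input_buffer: thin wrapper that stores the attention key/value cache
  // under the "attn_state" entry by delegating to set_incremental_state.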
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention._set_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%str_14 = torch.constant.str "attn_state"
%373 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%374 = torch.prim.CallMethod %arg0["set_incremental_state"] (%373, %str_14, %arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
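  // set_incremental_state: writes the buffer under the fully-qualified key when
  // an incremental-state dictionary was supplied; a None dictionary passes
  // through unchanged.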
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention.set_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str, %arg3: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%none_14 = torch.constant.none
%373 = torch.aten.__isnot__ %arg1, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%374 = torch.prim.If %373 -> (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
%375 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%376 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">, (!torch.str) -> !torch.str
torch.aten._set_item.str %375, %376, %arg3 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%377 = torch.derefine %375 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
torch.prim.If.yield %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
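  // The three groups that follow (___torch_mangle_451, _464, _477) are
  // structurally identical clones of the helpers above; TorchScript emits one
  // mangled class per MultiheadAttention instance. reorder_incremental_state
  // walks every cached tensor in the input buffer and gathers it along dim 0
  // with index_select(%arg2); for encoder-decoder attention whose cached batch
  // size already equals new_order's, the prim.Loop breaks early without
  // reordering (the bool pair threaded through the If ops encodes that break).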
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention.reorder_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.tensor) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%true_14 = torch.constant.bool true
%false_15 = torch.constant.bool false
%none_16 = torch.constant.none
%int0_17 = torch.constant.int 0
%int9223372036854775807 = torch.constant.int 9223372036854775807
%int1_18 = torch.constant.int 1
%373 = torch.prim.Uninitialized : !torch.bool
%374 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%375 = torch.prim.CallMethod %arg0["_get_input_buffer"] (%374) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%376 = torch.aten.keys.str %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> -> !torch.list<!torch.str>
%377 = torch.aten.len.t %376 : !torch.list<!torch.str> -> !torch.int
%378 = torch.aten.gt.int %377, %int0_17 : !torch.int, !torch.int -> !torch.bool
%379 = torch.prim.Loop %int9223372036854775807, %378, init(%int0_17) {
^bb0(%arg3: !torch.int, %arg4: !torch.int):
%381 = torch.aten.__getitem__.t %376, %arg4 : !torch.list<!torch.str>, !torch.int -> !torch.str
%382 = torch.aten.__getitem__.Dict_str %375, %381 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str -> !torch.optional<!torch.tensor>
%383 = torch.aten.__isnot__ %382, %none_16 : !torch.optional<!torch.tensor>, !torch.none -> !torch.bool
%384:2 = torch.prim.If %383 -> (!torch.bool, !torch.bool) {
%389 = torch.prim.unchecked_cast %382 : !torch.optional<!torch.tensor> -> !torch.tensor
%390 = torch.prim.GetAttr %arg0["encoder_decoder_attention"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention"> -> !torch.bool
%391 = torch.prim.If %390 -> (!torch.bool) {
%393 = torch.aten.size.int %389, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%394 = torch.aten.size.int %arg2, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%395 = torch.aten.eq.int %393, %394 : !torch.int, !torch.int -> !torch.bool
torch.prim.If.yield %395 : !torch.bool
} else {
torch.prim.If.yield %false_15 : !torch.bool
}
%392:2 = torch.prim.If %391 -> (!torch.bool, !torch.bool) {
torch.prim.If.yield %true_14, %false_15 : !torch.bool, !torch.bool
} else {
%393 = torch.aten.index_select %389, %int0_17, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
torch.aten._set_item.str %375, %381, %393 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str, !torch.tensor
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
torch.prim.If.yield %392#0, %392#1 : !torch.bool, !torch.bool
} else {
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
%385 = torch.prim.If %384#0 -> (!torch.bool) {
torch.prim.If.yield %384#1 : !torch.bool
} else {
torch.prim.If.yield %true_14 : !torch.bool
}
%386 = torch.aten.add.int %arg4, %int1_18 : !torch.int, !torch.int -> !torch.int
%387 = torch.aten.lt.int %386, %377 : !torch.int, !torch.int -> !torch.bool
%388 = torch.aten.__and__.bool %387, %385 : !torch.bool, !torch.bool -> !torch.bool
torch.prim.Loop.condition %388, iter(%386 : !torch.int)
} : (!torch.int, !torch.bool, !torch.int) -> !torch.int
%380 = torch.prim.CallMethod %arg0["_set_input_buffer"] (%arg1, %375) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">, (!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %380 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention._get_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>> {
%none_14 = torch.constant.none
%str_15 = torch.constant.str "attn_state"
%373 = torch.prim.CallMethod %arg0["get_incremental_state"] (%arg1, %str_15) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%374 = torch.aten.__isnot__ %373, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.none -> !torch.bool
%375 = torch.prim.If %374 -> (!torch.dict<!torch.str, !torch.optional<!torch.tensor>>) {
%376 = torch.prim.unchecked_cast %373 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
} else {
%376 = torch.prim.DictConstruct keys() values() -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
return %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention.get_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> {
%true_14 = torch.constant.bool true
%none_15 = torch.constant.none
%373 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">, (!torch.str) -> !torch.str
%374 = torch.aten.__is__ %arg1, %none_15 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%375:2 = torch.prim.If %374 -> (!torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
torch.prim.If.yield %true_14, %arg1 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
%377 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__contains__.str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.bool
%379 = torch.aten.__not__ %378 : !torch.bool -> !torch.bool
%380 = torch.derefine %377 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %379, %380 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
%376 = torch.prim.If %375#0 -> (!torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>) {
%377 = torch.derefine %none_15 : !torch.none to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
} else {
%377 = torch.prim.unchecked_cast %375#1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__getitem__.Dict_str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%379 = torch.derefine %378 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %379 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
return %376 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention._get_full_incremental_state_key(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">, %arg1: !torch.str) -> !torch.str {
%str_14 = torch.constant.str "{}.{}"
%373 = torch.prim.GetAttr %arg0["_incremental_state_id"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention"> -> !torch.str
%374 = torch.aten.format(%str_14, %373, %arg1) : !torch.str, !torch.str, !torch.str -> !torch.str
return %374 : !torch.str
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention._set_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%str_14 = torch.constant.str "attn_state"
%373 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%374 = torch.prim.CallMethod %arg0["set_incremental_state"] (%373, %str_14, %arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention.set_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str, %arg3: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%none_14 = torch.constant.none
%373 = torch.aten.__isnot__ %arg1, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%374 = torch.prim.If %373 -> (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
%375 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%376 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">, (!torch.str) -> !torch.str
torch.aten._set_item.str %375, %376, %arg3 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%377 = torch.derefine %375 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
torch.prim.If.yield %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
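  // Next clone of the same incremental-state helpers, specialized for
  // ___torch_mangle_464.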
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention.reorder_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.tensor) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%true_14 = torch.constant.bool true
%false_15 = torch.constant.bool false
%none_16 = torch.constant.none
%int0_17 = torch.constant.int 0
%int9223372036854775807 = torch.constant.int 9223372036854775807
%int1_18 = torch.constant.int 1
%373 = torch.prim.Uninitialized : !torch.bool
%374 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%375 = torch.prim.CallMethod %arg0["_get_input_buffer"] (%374) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%376 = torch.aten.keys.str %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> -> !torch.list<!torch.str>
%377 = torch.aten.len.t %376 : !torch.list<!torch.str> -> !torch.int
%378 = torch.aten.gt.int %377, %int0_17 : !torch.int, !torch.int -> !torch.bool
%379 = torch.prim.Loop %int9223372036854775807, %378, init(%int0_17) {
^bb0(%arg3: !torch.int, %arg4: !torch.int):
%381 = torch.aten.__getitem__.t %376, %arg4 : !torch.list<!torch.str>, !torch.int -> !torch.str
%382 = torch.aten.__getitem__.Dict_str %375, %381 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str -> !torch.optional<!torch.tensor>
%383 = torch.aten.__isnot__ %382, %none_16 : !torch.optional<!torch.tensor>, !torch.none -> !torch.bool
%384:2 = torch.prim.If %383 -> (!torch.bool, !torch.bool) {
%389 = torch.prim.unchecked_cast %382 : !torch.optional<!torch.tensor> -> !torch.tensor
%390 = torch.prim.GetAttr %arg0["encoder_decoder_attention"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention"> -> !torch.bool
%391 = torch.prim.If %390 -> (!torch.bool) {
%393 = torch.aten.size.int %389, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%394 = torch.aten.size.int %arg2, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%395 = torch.aten.eq.int %393, %394 : !torch.int, !torch.int -> !torch.bool
torch.prim.If.yield %395 : !torch.bool
} else {
torch.prim.If.yield %false_15 : !torch.bool
}
%392:2 = torch.prim.If %391 -> (!torch.bool, !torch.bool) {
torch.prim.If.yield %true_14, %false_15 : !torch.bool, !torch.bool
} else {
%393 = torch.aten.index_select %389, %int0_17, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
torch.aten._set_item.str %375, %381, %393 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str, !torch.tensor
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
torch.prim.If.yield %392#0, %392#1 : !torch.bool, !torch.bool
} else {
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
%385 = torch.prim.If %384#0 -> (!torch.bool) {
torch.prim.If.yield %384#1 : !torch.bool
} else {
torch.prim.If.yield %true_14 : !torch.bool
}
%386 = torch.aten.add.int %arg4, %int1_18 : !torch.int, !torch.int -> !torch.int
%387 = torch.aten.lt.int %386, %377 : !torch.int, !torch.int -> !torch.bool
%388 = torch.aten.__and__.bool %387, %385 : !torch.bool, !torch.bool -> !torch.bool
torch.prim.Loop.condition %388, iter(%386 : !torch.int)
} : (!torch.int, !torch.bool, !torch.int) -> !torch.int
%380 = torch.prim.CallMethod %arg0["_set_input_buffer"] (%arg1, %375) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">, (!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %380 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention._get_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>> {
%none_14 = torch.constant.none
%str_15 = torch.constant.str "attn_state"
%373 = torch.prim.CallMethod %arg0["get_incremental_state"] (%arg1, %str_15) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%374 = torch.aten.__isnot__ %373, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.none -> !torch.bool
%375 = torch.prim.If %374 -> (!torch.dict<!torch.str, !torch.optional<!torch.tensor>>) {
%376 = torch.prim.unchecked_cast %373 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
} else {
%376 = torch.prim.DictConstruct keys() values() -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
return %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention.get_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> {
%true_14 = torch.constant.bool true
%none_15 = torch.constant.none
%373 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">, (!torch.str) -> !torch.str
%374 = torch.aten.__is__ %arg1, %none_15 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%375:2 = torch.prim.If %374 -> (!torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
torch.prim.If.yield %true_14, %arg1 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
%377 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__contains__.str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.bool
%379 = torch.aten.__not__ %378 : !torch.bool -> !torch.bool
%380 = torch.derefine %377 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %379, %380 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
%376 = torch.prim.If %375#0 -> (!torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>) {
%377 = torch.derefine %none_15 : !torch.none to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
} else {
%377 = torch.prim.unchecked_cast %375#1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__getitem__.Dict_str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%379 = torch.derefine %378 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %379 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
return %376 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention._get_full_incremental_state_key(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">, %arg1: !torch.str) -> !torch.str {
%str_14 = torch.constant.str "{}.{}"
%373 = torch.prim.GetAttr %arg0["_incremental_state_id"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention"> -> !torch.str
%374 = torch.aten.format(%str_14, %373, %arg1) : !torch.str, !torch.str, !torch.str -> !torch.str
return %374 : !torch.str
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention._set_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%str_14 = torch.constant.str "attn_state"
%373 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%374 = torch.prim.CallMethod %arg0["set_incremental_state"] (%373, %str_14, %arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention.set_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str, %arg3: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%none_14 = torch.constant.none
%373 = torch.aten.__isnot__ %arg1, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%374 = torch.prim.If %373 -> (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
%375 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%376 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">, (!torch.str) -> !torch.str
torch.aten._set_item.str %375, %376, %arg3 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%377 = torch.derefine %375 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
torch.prim.If.yield %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
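  // Final clone of the same incremental-state helpers, specialized for
  // ___torch_mangle_477.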
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention.reorder_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.tensor) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%true_14 = torch.constant.bool true
%false_15 = torch.constant.bool false
%none_16 = torch.constant.none
%int0_17 = torch.constant.int 0
%int9223372036854775807 = torch.constant.int 9223372036854775807
%int1_18 = torch.constant.int 1
%373 = torch.prim.Uninitialized : !torch.bool
%374 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%375 = torch.prim.CallMethod %arg0["_get_input_buffer"] (%374) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%376 = torch.aten.keys.str %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> -> !torch.list<!torch.str>
%377 = torch.aten.len.t %376 : !torch.list<!torch.str> -> !torch.int
%378 = torch.aten.gt.int %377, %int0_17 : !torch.int, !torch.int -> !torch.bool
%379 = torch.prim.Loop %int9223372036854775807, %378, init(%int0_17) {
^bb0(%arg3: !torch.int, %arg4: !torch.int):
%381 = torch.aten.__getitem__.t %376, %arg4 : !torch.list<!torch.str>, !torch.int -> !torch.str
%382 = torch.aten.__getitem__.Dict_str %375, %381 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str -> !torch.optional<!torch.tensor>
%383 = torch.aten.__isnot__ %382, %none_16 : !torch.optional<!torch.tensor>, !torch.none -> !torch.bool
%384:2 = torch.prim.If %383 -> (!torch.bool, !torch.bool) {
%389 = torch.prim.unchecked_cast %382 : !torch.optional<!torch.tensor> -> !torch.tensor
%390 = torch.prim.GetAttr %arg0["encoder_decoder_attention"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention"> -> !torch.bool
%391 = torch.prim.If %390 -> (!torch.bool) {
%393 = torch.aten.size.int %389, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%394 = torch.aten.size.int %arg2, %int0_17 : !torch.tensor, !torch.int -> !torch.int
%395 = torch.aten.eq.int %393, %394 : !torch.int, !torch.int -> !torch.bool
torch.prim.If.yield %395 : !torch.bool
} else {
torch.prim.If.yield %false_15 : !torch.bool
}
%392:2 = torch.prim.If %391 -> (!torch.bool, !torch.bool) {
torch.prim.If.yield %true_14, %false_15 : !torch.bool, !torch.bool
} else {
%393 = torch.aten.index_select %389, %int0_17, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
torch.aten._set_item.str %375, %381, %393 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>, !torch.str, !torch.tensor
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
torch.prim.If.yield %392#0, %392#1 : !torch.bool, !torch.bool
} else {
torch.prim.If.yield %false_15, %373 : !torch.bool, !torch.bool
}
%385 = torch.prim.If %384#0 -> (!torch.bool) {
torch.prim.If.yield %384#1 : !torch.bool
} else {
torch.prim.If.yield %true_14 : !torch.bool
}
%386 = torch.aten.add.int %arg4, %int1_18 : !torch.int, !torch.int -> !torch.int
%387 = torch.aten.lt.int %386, %377 : !torch.int, !torch.int -> !torch.bool
%388 = torch.aten.__and__.bool %387, %385 : !torch.bool, !torch.bool -> !torch.bool
torch.prim.Loop.condition %388, iter(%386 : !torch.int)
} : (!torch.int, !torch.bool, !torch.int) -> !torch.int
%380 = torch.prim.CallMethod %arg0["_set_input_buffer"] (%arg1, %375) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">, (!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %380 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention._get_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>> {
%none_14 = torch.constant.none
%str_15 = torch.constant.str "attn_state"
%373 = torch.prim.CallMethod %arg0["get_incremental_state"] (%arg1, %str_15) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%374 = torch.aten.__isnot__ %373, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.none -> !torch.bool
%375 = torch.prim.If %374 -> (!torch.dict<!torch.str, !torch.optional<!torch.tensor>>) {
%376 = torch.prim.unchecked_cast %373 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
} else {
%376 = torch.prim.DictConstruct keys() values() -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
torch.prim.If.yield %376 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
return %375 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention.get_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str) -> !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>> {
%true_14 = torch.constant.bool true
%none_15 = torch.constant.none
%373 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">, (!torch.str) -> !torch.str
%374 = torch.aten.__is__ %arg1, %none_15 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%375:2 = torch.prim.If %374 -> (!torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
torch.prim.If.yield %true_14, %arg1 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
%377 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__contains__.str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.bool
%379 = torch.aten.__not__ %378 : !torch.bool -> !torch.bool
%380 = torch.derefine %377 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %379, %380 : !torch.bool, !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
%376 = torch.prim.If %375#0 -> (!torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>) {
%377 = torch.derefine %none_15 : !torch.none to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
} else {
%377 = torch.prim.unchecked_cast %375#1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%378 = torch.aten.__getitem__.Dict_str %377, %373 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str -> !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%379 = torch.derefine %378 : !torch.dict<!torch.str, !torch.optional<!torch.tensor>> to !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
torch.prim.If.yield %379 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
return %376 : !torch.optional<!torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention._get_full_incremental_state_key(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">, %arg1: !torch.str) -> !torch.str {
%str_14 = torch.constant.str "{}.{}"
%373 = torch.prim.GetAttr %arg0["_incremental_state_id"] : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention"> -> !torch.str
%374 = torch.aten.format(%str_14, %373, %arg1) : !torch.str, !torch.str, !torch.str -> !torch.str
return %374 : !torch.str
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention._set_input_buffer(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">, %arg1: !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, %arg2: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%str_14 = torch.constant.str "attn_state"
%373 = torch.derefine %arg1 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
%374 = torch.prim.CallMethod %arg0["set_incremental_state"] (%373, %str_14, %arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">, (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
func private @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention.set_incremental_state(%arg0: !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">, %arg1: !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, %arg2: !torch.str, %arg3: !torch.dict<!torch.str, !torch.optional<!torch.tensor>>) -> !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> {
%none_14 = torch.constant.none
%373 = torch.aten.__isnot__ %arg1, %none_14 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>, !torch.none -> !torch.bool
%374 = torch.prim.If %373 -> (!torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>) {
%375 = torch.prim.unchecked_cast %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>> -> !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>
%376 = torch.prim.CallMethod %arg0["_get_full_incremental_state_key"] (%arg2) : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">, (!torch.str) -> !torch.str
torch.aten._set_item.str %375, %376, %arg3 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>, !torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>
%377 = torch.derefine %375 : !torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>> to !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
torch.prim.If.yield %377 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
} else {
torch.prim.If.yield %arg1 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
return %374 : !torch.optional<!torch.dict<!torch.str, !torch.dict<!torch.str, !torch.optional<!torch.tensor>>>>
}
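  // ModuleList.__len__ is compiled down to a constant: the sentence encoder
  // holds twelve transformer layers (consistent with the XLM-R base
  // architecture), so the length is baked in as `int 12`.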
func private @__torch__.torch.nn.modules.container.___torch_mangle_485.ModuleList.__len__(%arg0: !torch.nn.Module<"__torch__.torch.nn.modules.container.___torch_mangle_485.ModuleList">) -> !torch.int {
%int12_14 = torch.constant.int 12
return %int12_14 : !torch.int
}
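  // reorder_encoder_out: rebuilds the encoder-out dictionary with each field
  // gathered along its batch dimension by new_order (%arg2). The batch axis
  // differs per field: "encoder_out" and "encoder_states" are time-major
  // (T x B x C) and are indexed along dim 1, while the padding mask, embedding,
  // tokens, and lengths are indexed along dim 0. Empty lists propagate as
  // empty lists rather than being gathered.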
func private @__torch__.fairseq.models.transformer.transformer_encoder.___torch_mangle_486.TransformerEncoder.reorder_encoder_out(%arg0: !torch.nn.Module<"__torch__.fairseq.models.transformer.transformer_encoder.___torch_mangle_486.TransformerEncoder">, %arg1: !torch.dict<!torch.str, !torch.list<!torch.tensor>>, %arg2: !torch.tensor) -> !torch.dict<!torch.str, !torch.list<!torch.tensor>> {
%true_14 = torch.constant.bool true
%str_15 = torch.constant.str "encoder_states"
%str_16 = torch.constant.str "src_lengths"
%str_17 = torch.constant.str "src_tokens"
%str_18 = torch.constant.str "encoder_embedding"
%str_19 = torch.constant.str "encoder_padding_mask"
%str_20 = torch.constant.str "encoder_out"
%int0_21 = torch.constant.int 0
%int1_22 = torch.constant.int 1
%int9223372036854775807 = torch.constant.int 9223372036854775807
%373 = torch.aten.__getitem__.Dict_str %arg1, %str_20 : !torch.dict<!torch.str, !torch.list<!torch.tensor>>, !torch.str -> !torch.list<!torch.tensor>
%374 = torch.aten.len.t %373 : !torch.list<!torch.tensor> -> !torch.int
%375 = torch.aten.eq.int %374, %int0_21 : !torch.int, !torch.int -> !torch.bool
%376 = torch.prim.If %375 -> (!torch.list<!torch.tensor>) {
%397 = torch.prim.ListConstruct : () -> !torch.list<!torch.tensor>
torch.prim.If.yield %397 : !torch.list<!torch.tensor>
} else {
%397 = torch.aten.__getitem__.Dict_str %arg1, %str_20 : !torch.dict<!torch.str, !torch.list<!torch.tensor>>, !torch.str -> !torch.list<!torch.tensor>
%398 = torch.aten.__getitem__.t %397, %int0_21 : !torch.list<!torch.tensor>, !torch.int -> !torch.tensor
%399 = torch.aten.index_select %398, %int1_22, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
%400 = torch.prim.ListConstruct %399 : (!torch.tensor) -> !torch.list<!torch.tensor>
torch.prim.If.yield %400 : !torch.list<!torch.tensor>
}
%377 = torch.aten.__getitem__.Dict_str %arg1, %str_19 : !torch.dict<!torch.str, !torch.list<!torch.tensor>>, !torch.str -> !torch.list<!torch.tensor>
%378 = torch.aten.len.t %377 : !torch.list<!torch.tensor> -> !torch.int
%379 = torch.aten.eq.int %378, %int0_21 : !torch.int, !torch.int -> !torch.bool
%380 = torch.prim.If %379 -> (!torch.list<!torch.tensor>) {
%397 = torch.prim.ListConstruct : () -> !torch.list<!torch.tensor>
torch.prim.If.yield %397 : !torch.list<!torch.tensor>
} else {
%397 = torch.aten.__getitem__.Dict_str %arg1, %str_19 : !torch.dict<!torch.str, !torch.list<!torch.tensor>>, !torch.str -> !torch.list<!torch.tensor>
%398 = torch.aten.__getitem__.t %397, %int0_21 : !torch.list<!torch.tensor>, !torch.int -> !torch.tensor
%399 = torch.aten.index_select %398, %int0_21, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
%400 = torch.prim.ListConstruct %399 : (!torch.tensor) -> !torch.list<!torch.tensor>
torch.prim.If.yield %400 : !torch.list<!torch.tensor>
}
%381 = torch.aten.__getitem__.Dict_str %arg1, %str_18 : !torch.dict<!torch.str, !torch.list<!torch.tensor>>, !torch.str -> !torch.list<!torch.tensor>
%382 = torch.aten.len.t %381 : !torch.list<!torch.tensor> -> !torch.int
%383 = torch.aten.eq.int %382, %int0_21 : !torch.int, !torch.int -> !torch.bool
%384 = torch.prim.If %383 -> (!torch.list<!torch.tensor>) {
%397 = torch.prim.ListConstruct : () -> !torch.list<!torch.tensor>
torch.prim.If.yield %397 : !torch.list<!torch.tensor>
} else {
%397 = torch.aten.__getitem__.Dict_str %arg1, %str_18 : !torch.dict<!torch.str, !torch.list<!torch.tensor>>, !torch.str -> !torch.list<!torch.tensor>
%398 = torch.aten.__getitem__.t %397, %int0_21 : !torch.list<!torch.tensor>, !torch.int -> !torch.tensor
%399 = torch.aten.index_select %398, %int0_21, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
%400 = torch.prim.ListConstruct %399 : (!torch.tensor) -> !torch.list<!torch.tensor>
torch.prim.If.yield %400 : !torch.list<!torch.tensor>
}
%385 = torch.aten.__getitem__.Dict_str %arg1, %str_17 : !torch.dict<!torch.str, !torch.list<!torch.tensor>>, !torch.str -> !torch.list<!torch.tensor>
%386 = torch.aten.len.t %385 : !torch.list<!torch.tensor> -> !torch.int
%387 = torch.aten.eq.int %386, %int0_21 : !torch.int, !torch.int -> !torch.bool
%388 = torch.prim.If %387 -> (!torch.list<!torch.tensor>) {
%397 = torch.prim.ListConstruct : () -> !torch.list<!torch.tensor>
torch.prim.If.yield %397 : !torch.list<!torch.tensor>
} else {
%397 = torch.aten.__getitem__.Dict_str %arg1, %str_17 : !torch.dict<!torch.str, !torch.list<!torch.tensor>>, !torch.str -> !torch.list<!torch.tensor>
%398 = torch.aten.__getitem__.t %397, %int0_21 : !torch.list<!torch.tensor>, !torch.int -> !torch.tensor
%399 = torch.aten.index_select %398, %int0_21, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
%400 = torch.prim.ListConstruct %399 : (!torch.tensor) -> !torch.list<!torch.tensor>
torch.prim.If.yield %400 : !torch.list<!torch.tensor>
}
%389 = torch.aten.__getitem__.Dict_str %arg1, %str_16 : !torch.dict<!torch.str, !torch.list<!torch.tensor>>, !torch.str -> !torch.list<!torch.tensor>
%390 = torch.aten.len.t %389 : !torch.list<!torch.tensor> -> !torch.int
%391 = torch.aten.eq.int %390, %int0_21 : !torch.int, !torch.int -> !torch.bool
%392 = torch.prim.If %391 -> (!torch.list<!torch.tensor>) {
%397 = torch.prim.ListConstruct : () -> !torch.list<!torch.tensor>
torch.prim.If.yield %397 : !torch.list<!torch.tensor>
} else {
%397 = torch.aten.__getitem__.Dict_str %arg1, %str_16 : !torch.dict<!torch.str, !torch.list<!torch.tensor>>, !torch.str -> !torch.list<!torch.tensor>
%398 = torch.aten.__getitem__.t %397, %int0_21 : !torch.list<!torch.tensor>, !torch.int -> !torch.tensor
%399 = torch.aten.index_select %398, %int0_21, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
%400 = torch.prim.ListConstruct %399 : (!torch.tensor) -> !torch.list<!torch.tensor>
torch.prim.If.yield %400 : !torch.list<!torch.tensor>
}
%393 = torch.aten.__getitem__.Dict_str %arg1, %str_15 : !torch.dict<!torch.str, !torch.list<!torch.tensor>>, !torch.str -> !torch.list<!torch.tensor>
%394 = torch.aten.len.t %393 : !torch.list<!torch.tensor> -> !torch.int
%395 = torch.aten.gt.int %394, %int0_21 : !torch.int, !torch.int -> !torch.bool
torch.prim.If %395 -> () {
%397 = torch.aten.len.t %393 : !torch.list<!torch.tensor> -> !torch.int
%398 = torch.prim.ListConstruct %int9223372036854775807, %397 : (!torch.int, !torch.int) -> !torch.list<!torch.int>
%399 = torch.prim.min.self_int %398 : !torch.list<!torch.int> -> !torch.int
torch.prim.Loop %399, %true_14, init() {
^bb0(%arg3: !torch.int):
%400 = torch.aten.__getitem__.t %393, %arg3 : !torch.list<!torch.tensor>, !torch.int -> !torch.tensor
%401 = torch.aten.index_select %400, %int1_22, %arg2 : !torch.tensor, !torch.int, !torch.tensor -> !torch.tensor
%402 = torch.aten._set_item.t %393, %arg3, %401 : !torch.list<!torch.tensor>, !torch.int, !torch.tensor -> !torch.list<!torch.tensor>
torch.prim.Loop.condition %true_14, iter()
} : (!torch.int, !torch.bool) -> ()
torch.prim.If.yield
} else {
torch.prim.If.yield
}
%396 = torch.prim.DictConstruct keys(%str_20, %str_19, %str_18, %str_15, %str_17, %str_16 : !torch.str, !torch.str, !torch.str, !torch.str, !torch.str, !torch.str) values(%376, %380, %384, %393, %388, %392 : !torch.list<!torch.tensor>, !torch.list<!torch.tensor>, !torch.list<!torch.tensor>, !torch.list<!torch.tensor>, !torch.list<!torch.tensor>, !torch.list<!torch.tensor>) -> !torch.dict<!torch.str, !torch.list<!torch.tensor>>
return %396 : !torch.dict<!torch.str, !torch.list<!torch.tensor>>
}
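  // XLMR_model.forward: the traced entry point does not use the token-id
  // argument in the code that survives here; it materializes a fixed 8-token
  // sentence by writing eight scalar literals (0, 3642, 54, 398, 1884, 70,
  // 13452, 2; in fairseq's vocabulary 0 is <s> and 2 is </s>) into a fresh
  // tensor created with scalar-type code 3 (si32), then casts it to code 4
  // (si64), moves it to the CPU device, and detaches it before returning.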
func private @__torch__.build_tools.torchscript_e2e_heavydep_tests.xlmr.___torch_mangle_494.XLMR_model.forward(%arg0: !torch.nn.Module<"__torch__.build_tools.torchscript_e2e_heavydep_tests.xlmr.___torch_mangle_494.XLMR_model">, %arg1: !torch.tensor {torch.type_bound = !torch.vtensor<[?,?],si64>}) -> !torch.tensor {
%true_14 = torch.constant.bool true
%373 = torch.tensor.literal(dense<2> : tensor<si32>) : !torch.tensor<[],si32>
%374 = torch.tensor.literal(dense<13452> : tensor<si32>) : !torch.tensor<[],si32>
%375 = torch.tensor.literal(dense<70> : tensor<si32>) : !torch.tensor<[],si32>
%376 = torch.tensor.literal(dense<1884> : tensor<si32>) : !torch.tensor<[],si32>
%377 = torch.tensor.literal(dense<398> : tensor<si32>) : !torch.tensor<[],si32>
%378 = torch.tensor.literal(dense<54> : tensor<si32>) : !torch.tensor<[],si32>
%379 = torch.tensor.literal(dense<3642> : tensor<si32>) : !torch.tensor<[],si32>
%false_15 = torch.constant.bool false
%380 = torch.tensor.literal(dense<0> : tensor<si32>) : !torch.tensor<[],si32>
%none_16 = torch.constant.none
%cpu = torch.constant.device "cpu"
%int8_17 = torch.constant.int 8
%int3 = torch.constant.int 3
%int0_18 = torch.constant.int 0
%int1_19 = torch.constant.int 1
%int2 = torch.constant.int 2
%int4 = torch.constant.int 4
%int5 = torch.constant.int 5
%int6 = torch.constant.int 6
%int7 = torch.constant.int 7
%381 = torch.prim.ListConstruct %int8_17 : (!torch.int) -> !torch.list<!torch.int>
%382 = torch.aten.empty.memory_format %381, %int3, %int0_18, %cpu, %none_16, %none_16 : !torch.list<!torch.int>, !torch.int, !torch.int, !torch.Device, !torch.none, !torch.none -> !torch.tensor
%383 = torch.aten.select.int %382, %int0_18, %int0_18 : !torch.tensor, !torch.int, !torch.int -> !torch.tensor
%384 = torch.aten.copy_ %383, %380, %false_15 : !torch.tensor, !torch.tensor<[],si32>, !torch.bool -> !torch.tensor
%385 = torch.aten.select.int %382, %int0_18, %int1_19 : !torch.tensor, !torch.int, !torch.int -> !torch.tensor
%386 = torch.aten.copy_ %385, %379, %false_15 : !torch.tensor, !torch.tensor<[],si32>, !torch.bool -> !torch.tensor
%387 = torch.aten.select.int %382, %int0_18, %int2 : !torch.tensor, !torch.int, !torch.int -> !torch.tensor
%388 = torch.aten.copy_ %387, %378, %false_15 : !torch.tensor, !torch.tensor<[],si32>, !torch.bool -> !torch.tensor
%389 = torch.aten.select.int %382, %int0_18, %int3 : !torch.tensor, !torch.int, !torch.int -> !torch.tensor
%390 = torch.aten.copy_ %389, %377, %false_15 : !torch.tensor, !torch.tensor<[],si32>, !torch.bool -> !torch.tensor
%391 = torch.aten.select.int %382, %int0_18, %int4 : !torch.tensor, !torch.int, !torch.int -> !torch.tensor
%392 = torch.aten.copy_ %391, %376, %false_15 : !torch.tensor, !torch.tensor<[],si32>, !torch.bool -> !torch.tensor
%393 = torch.aten.select.int %382, %int0_18, %int5 : !torch.tensor, !torch.int, !torch.int -> !torch.tensor
%394 = torch.aten.copy_ %393, %375, %false_15 : !torch.tensor, !torch.tensor<[],si32>, !torch.bool -> !torch.tensor
%395 = torch.aten.select.int %382, %int0_18, %int6 : !torch.tensor, !torch.int, !torch.int -> !torch.tensor
%396 = torch.aten.copy_ %395, %374, %false_15 : !torch.tensor, !torch.tensor<[],si32>, !torch.bool -> !torch.tensor
%397 = torch.aten.select.int %382, %int0_18, %int7 : !torch.tensor, !torch.int, !torch.int -> !torch.tensor
%398 = torch.aten.copy_ %397, %373, %false_15 : !torch.tensor, !torch.tensor<[],si32>, !torch.bool -> !torch.tensor
%399 = torch.aten.to.dtype %382, %int4, %false_15, %false_15, %none_16 : !torch.tensor, !torch.int, !torch.bool, !torch.bool, !torch.none -> !torch.tensor
%400 = torch.aten.detach %399 : !torch.tensor -> !torch.tensor
%401 = torch.aten.to.device %400, %cpu, %int4, %false_15, %true_14, %none_16 : !torch.tensor, !torch.Device, !torch.int, !torch.bool, !torch.bool, !torch.none -> !torch.tensor
%402 = torch.aten.detach %401 : !torch.tensor -> !torch.tensor
return %402 : !torch.tensor
}
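  // From here on the dump shows the serialized module hierarchy rather than
  // executable code: torch.class_type ops declare each TorchScript class's
  // attributes and methods, and torch.nn_module ops instantiate them. The
  // ownership chain is XLMR_model -> RobertaHubInterface -> RobertaModel ->
  // RobertaEncoder -> TransformerEncoder, plus the encoder's dropout,
  // embedding, positional-embedding, layer-norm, and layer-list submodules.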
torch.class_type @__torch__.build_tools.torchscript_e2e_heavydep_tests.xlmr.___torch_mangle_494.XLMR_model {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "model" : !torch.nn.Module<"__torch__.fairseq.models.roberta.hub_interface.___torch_mangle_493.RobertaHubInterface">
torch.method "forward", @__torch__.build_tools.torchscript_e2e_heavydep_tests.xlmr.___torch_mangle_494.XLMR_model.forward
}
%true = torch.constant.bool true
%none = torch.constant.none
torch.class_type @__torch__.fairseq.models.roberta.hub_interface.___torch_mangle_493.RobertaHubInterface {
torch.attr private "_float_tensor" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "model" : !torch.nn.Module<"__torch__.fairseq.models.roberta.model.___torch_mangle_492.RobertaModel">
}
%0 = torch.tensor.literal(dense<0.000000e+00> : tensor<1xf32>) : !torch.tensor<[1],f32>
%false = torch.constant.bool false
torch.class_type @__torch__.fairseq.models.roberta.model.___torch_mangle_492.RobertaModel {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "encoder" : !torch.nn.Module<"__torch__.fairseq.models.roberta.model.___torch_mangle_490.RobertaEncoder">
torch.attr private "classification_heads" : !torch.nn.Module<"__torch__.torch.nn.modules.container.___torch_mangle_491.ModuleDict">
}
torch.class_type @__torch__.fairseq.models.roberta.model.___torch_mangle_490.RobertaEncoder {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "sentence_encoder" : !torch.nn.Module<"__torch__.fairseq.models.transformer.transformer_encoder.___torch_mangle_486.TransformerEncoder">
torch.attr private "lm_head" : !torch.nn.Module<"__torch__.fairseq.models.roberta.model.___torch_mangle_489.RobertaLMHead">
}
torch.class_type @__torch__.fairseq.models.transformer.transformer_encoder.___torch_mangle_486.TransformerEncoder {
torch.attr private "version" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "encoder_layerdrop" : !torch.int
torch.attr private "return_fc" : !torch.bool
torch.attr private "padding_idx" : !torch.int
torch.attr private "max_source_positions" : !torch.int
torch.attr private "embed_scale" : !torch.float
torch.attr private "quant_noise" : !torch.none
torch.attr private "num_layers" : !torch.int
torch.attr private "layer_norm" : !torch.none
torch.attr private "normalize" : !torch.bool
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_325.FairseqDropout">
torch.attr private "embed_tokens" : !torch.nn.Module<"__torch__.torch.nn.modules.sparse.___torch_mangle_326.Embedding">
torch.attr private "embed_positions" : !torch.nn.Module<"__torch__.fairseq.modules.learned_positional_embedding.___torch_mangle_327.LearnedPositionalEmbedding">
torch.attr private "layernorm_embedding" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_328.LayerNorm">
torch.attr private "layers" : !torch.nn.Module<"__torch__.torch.nn.modules.container.___torch_mangle_485.ModuleList">
torch.method private "reorder_encoder_out", @__torch__.fairseq.models.transformer.transformer_encoder.___torch_mangle_486.TransformerEncoder.reorder_encoder_out
}
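// The TransformerEncoder class carries the embedding stack (token embedding,
// learned positional embedding, embedding LayerNorm) plus the 12-layer
// ModuleList; following the leaves-first pattern of this dump, its own
// nn_module instance is presumably constructed after all of its children.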
%1 = torch.tensor.literal(dense<1.000000e+00> : tensor<1xf32>) : !torch.tensor<[1],f32>
%int0 = torch.constant.int 0
%int1 = torch.constant.int 1
%int512 = torch.constant.int 512
%float1.000000e00 = torch.constant.float 1.000000e+00
%int12 = torch.constant.int 12
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_325.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%float1.000000e-01 = torch.constant.float 1.000000e-01
%str = torch.constant.str "TransformerEncoder"
%2 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_325.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.sparse.___torch_mangle_326.Embedding {
torch.attr private "weight" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%3 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<250002x768xf32>) : !torch.tensor<[250002,768],f32>
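// opaque<"elided_large_const", "0xDEADBEEF"> payloads are printer
// placeholders: large weight tensors are elided from the dump while their
// shape and dtype are kept (here the 250002x768 token-embedding table).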
%4 = torch.nn_module {
torch.slot "weight", %3 : !torch.tensor<[250002,768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.sparse.___torch_mangle_326.Embedding">
torch.class_type @__torch__.fairseq.modules.learned_positional_embedding.___torch_mangle_327.LearnedPositionalEmbedding {
torch.attr private "weight" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
torch.attr private "onnx_trace" : !torch.bool
torch.attr private "max_positions" : !torch.int
}
%5 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<514x768xf32>) : !torch.tensor<[514,768],f32>
%6 = torch.nn_module {
torch.slot "weight", %5 : !torch.tensor<[514,768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "onnx_trace", %false : !torch.bool
torch.slot "max_positions", %int512 : !torch.int
} : !torch.nn.Module<"__torch__.fairseq.modules.learned_positional_embedding.___torch_mangle_327.LearnedPositionalEmbedding">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_328.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%7 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%8 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%9 = torch.nn_module {
torch.slot "weight", %7 : !torch.tensor<[768],f32>
torch.slot "bias", %8 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_328.LayerNorm">
torch.class_type @__torch__.torch.nn.modules.container.___torch_mangle_485.ModuleList {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
torch.attr private "0" : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_341.TransformerEncoderLayerBase">
torch.attr private "1" : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_354.TransformerEncoderLayerBase">
torch.attr private "2" : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_367.TransformerEncoderLayerBase">
torch.attr private "3" : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_380.TransformerEncoderLayerBase">
torch.attr private "4" : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_393.TransformerEncoderLayerBase">
torch.attr private "5" : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_406.TransformerEncoderLayerBase">
torch.attr private "6" : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_419.TransformerEncoderLayerBase">
torch.attr private "7" : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_432.TransformerEncoderLayerBase">
torch.attr private "8" : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_445.TransformerEncoderLayerBase">
torch.attr private "9" : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_458.TransformerEncoderLayerBase">
torch.attr private "10" : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_471.TransformerEncoderLayerBase">
torch.attr private "11" : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_484.TransformerEncoderLayerBase">
torch.method private "__len__", @__torch__.torch.nn.modules.container.___torch_mangle_485.ModuleList.__len__
}
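// The ModuleList holds the 12 encoder layers as attrs "0" through "11".
// Each layer gets its own mangled TransformerEncoderLayerBase type because
// TorchScript assigns every instance a distinct concrete type, even though
// the layers are structurally identical.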
torch.class_type @__torch__.fairseq.modules.transformer_layer.___torch_mangle_341.TransformerEncoderLayerBase {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "return_fc" : !torch.bool
torch.attr private "embed_dim" : !torch.int
torch.attr private "quant_noise" : !torch.int
torch.attr private "quant_noise_block_size" : !torch.int
torch.attr private "normalize_before" : !torch.bool
torch.attr private "self_attn" : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention">
torch.attr private "self_attn_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_335.LayerNorm">
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_336.FairseqDropout">
torch.attr private "activation_dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_337.FairseqDropout">
torch.attr private "fc1" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_338.Linear">
torch.attr private "fc2" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_339.Linear">
torch.attr private "final_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_340.LayerNorm">
}
%int768 = torch.constant.int 768
%int8 = torch.constant.int 8
torch.class_type @__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "_incremental_state_id" : !torch.str
torch.attr private "embed_dim" : !torch.int
torch.attr private "kdim" : !torch.int
torch.attr private "vdim" : !torch.int
torch.attr private "qkv_same_dim" : !torch.bool
torch.attr private "num_heads" : !torch.int
torch.attr private "head_dim" : !torch.int
torch.attr private "scaling" : !torch.float
torch.attr private "self_attention" : !torch.bool
torch.attr private "encoder_decoder_attention" : !torch.bool
torch.attr private "bias_k" : !torch.none
torch.attr private "bias_v" : !torch.none
torch.attr private "add_zero_attn" : !torch.bool
torch.attr private "onnx_trace" : !torch.bool
torch.attr private "skip_embed_dim_check" : !torch.bool
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_329.FairseqDropout">
torch.attr private "k_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_330.Linear">
torch.attr private "v_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_331.Linear">
torch.attr private "q_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_332.Linear">
torch.attr private "out_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_333.Linear">
torch.method private "reorder_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention.reorder_incremental_state
torch.method private "_get_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention._get_input_buffer
torch.method private "get_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention.get_incremental_state
torch.method private "_get_full_incremental_state_key", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention._get_full_incremental_state_key
torch.method private "_set_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention._set_input_buffer
torch.method private "set_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention.set_incremental_state
}
%str_0 = torch.constant.str "8e70f881-f39a-4593-9294-ec3ba3460657"
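// "_incremental_state_id" is a per-instance UUID; fairseq presumably uses it
// to key this attention module's entries in the shared incremental-state
// dict (see the reorder/get/set_incremental_state methods above).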
%int64 = torch.constant.int 64
%float1.250000e-01 = torch.constant.float 1.250000e-01
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_329.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%str_1 = torch.constant.str "MultiheadAttention"
%10 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_1 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_329.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_330.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%11 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%12 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%13 = torch.nn_module {
torch.slot "weight", %11 : !torch.tensor<[768,768],f32>
torch.slot "bias", %12 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_330.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_331.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%14 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%15 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%16 = torch.nn_module {
torch.slot "weight", %14 : !torch.tensor<[768,768],f32>
torch.slot "bias", %15 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_331.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_332.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%17 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%18 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%19 = torch.nn_module {
torch.slot "weight", %17 : !torch.tensor<[768,768],f32>
torch.slot "bias", %18 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_332.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_333.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%20 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%21 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%22 = torch.nn_module {
torch.slot "weight", %20 : !torch.tensor<[768,768],f32>
torch.slot "bias", %21 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_333.Linear">
%23 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "_incremental_state_id", %str_0 : !torch.str
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "kdim", %int768 : !torch.int
torch.slot "vdim", %int768 : !torch.int
torch.slot "qkv_same_dim", %true : !torch.bool
torch.slot "num_heads", %int12 : !torch.int
torch.slot "head_dim", %int64 : !torch.int
torch.slot "scaling", %float1.250000e-01 : !torch.float
torch.slot "self_attention", %true : !torch.bool
torch.slot "encoder_decoder_attention", %false : !torch.bool
torch.slot "bias_k", %none : !torch.none
torch.slot "bias_v", %none : !torch.none
torch.slot "add_zero_attn", %false : !torch.bool
torch.slot "onnx_trace", %false : !torch.bool
torch.slot "skip_embed_dim_check", %false : !torch.bool
torch.slot "dropout_module", %10 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_329.FairseqDropout">
torch.slot "k_proj", %13 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_330.Linear">
torch.slot "v_proj", %16 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_331.Linear">
torch.slot "q_proj", %19 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_332.Linear">
torch.slot "out_proj", %22 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_333.Linear">
} : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_335.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%24 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%25 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%26 = torch.nn_module {
torch.slot "weight", %24 : !torch.tensor<[768],f32>
torch.slot "bias", %25 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_335.LayerNorm">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_336.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%str_2 = torch.constant.str "TransformerEncoderLayerBase"
%27 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_336.FairseqDropout">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_337.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%float0.000000e00 = torch.constant.float 0.000000e+00
%28 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float0.000000e00 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_337.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_338.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%29 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>) : !torch.tensor<[3072,768],f32>
%30 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>) : !torch.tensor<[3072],f32>
%31 = torch.nn_module {
torch.slot "weight", %29 : !torch.tensor<[3072,768],f32>
torch.slot "bias", %30 : !torch.tensor<[3072],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_338.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_339.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%32 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>) : !torch.tensor<[768,3072],f32>
%33 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%34 = torch.nn_module {
torch.slot "weight", %32 : !torch.tensor<[768,3072],f32>
torch.slot "bias", %33 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_339.Linear">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_340.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%35 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%36 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%37 = torch.nn_module {
torch.slot "weight", %35 : !torch.tensor<[768],f32>
torch.slot "bias", %36 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_340.LayerNorm">
%38 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "return_fc", %false : !torch.bool
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "quant_noise", %int0 : !torch.int
torch.slot "quant_noise_block_size", %int8 : !torch.int
torch.slot "normalize_before", %false : !torch.bool
torch.slot "self_attn", %23 : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_334.MultiheadAttention">
torch.slot "self_attn_layer_norm", %26 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_335.LayerNorm">
torch.slot "dropout_module", %27 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_336.FairseqDropout">
torch.slot "activation_dropout_module", %28 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_337.FairseqDropout">
torch.slot "fc1", %31 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_338.Linear">
torch.slot "fc2", %34 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_339.Linear">
torch.slot "final_layer_norm", %37 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_340.LayerNorm">
} : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_341.TransformerEncoderLayerBase">
torch.class_type @__torch__.fairseq.modules.transformer_layer.___torch_mangle_354.TransformerEncoderLayerBase {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "return_fc" : !torch.bool
torch.attr private "embed_dim" : !torch.int
torch.attr private "quant_noise" : !torch.int
torch.attr private "quant_noise_block_size" : !torch.int
torch.attr private "normalize_before" : !torch.bool
torch.attr private "self_attn" : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention">
torch.attr private "self_attn_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_348.LayerNorm">
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_349.FairseqDropout">
torch.attr private "activation_dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_350.FairseqDropout">
torch.attr private "fc1" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_351.Linear">
torch.attr private "fc2" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_352.Linear">
torch.attr private "final_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_353.LayerNorm">
}
torch.class_type @__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "_incremental_state_id" : !torch.str
torch.attr private "embed_dim" : !torch.int
torch.attr private "kdim" : !torch.int
torch.attr private "vdim" : !torch.int
torch.attr private "qkv_same_dim" : !torch.bool
torch.attr private "num_heads" : !torch.int
torch.attr private "head_dim" : !torch.int
torch.attr private "scaling" : !torch.float
torch.attr private "self_attention" : !torch.bool
torch.attr private "encoder_decoder_attention" : !torch.bool
torch.attr private "bias_k" : !torch.none
torch.attr private "bias_v" : !torch.none
torch.attr private "add_zero_attn" : !torch.bool
torch.attr private "onnx_trace" : !torch.bool
torch.attr private "skip_embed_dim_check" : !torch.bool
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_342.FairseqDropout">
torch.attr private "k_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_343.Linear">
torch.attr private "v_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_344.Linear">
torch.attr private "q_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_345.Linear">
torch.attr private "out_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_346.Linear">
torch.method private "reorder_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention.reorder_incremental_state
torch.method private "_get_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention._get_input_buffer
torch.method private "get_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention.get_incremental_state
torch.method private "_get_full_incremental_state_key", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention._get_full_incremental_state_key
torch.method private "_set_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention._set_input_buffer
torch.method private "set_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention.set_incremental_state
}
%str_3 = torch.constant.str "5fb1204c-7e86-48ec-9980-831aa47e6a51"
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_342.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%39 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_1 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_342.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_343.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%40 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%41 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%42 = torch.nn_module {
torch.slot "weight", %40 : !torch.tensor<[768,768],f32>
torch.slot "bias", %41 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_343.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_344.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%43 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%44 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%45 = torch.nn_module {
torch.slot "weight", %43 : !torch.tensor<[768,768],f32>
torch.slot "bias", %44 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_344.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_345.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%46 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%47 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%48 = torch.nn_module {
torch.slot "weight", %46 : !torch.tensor<[768,768],f32>
torch.slot "bias", %47 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_345.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_346.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%49 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%50 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%51 = torch.nn_module {
torch.slot "weight", %49 : !torch.tensor<[768,768],f32>
torch.slot "bias", %50 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_346.Linear">
%52 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "_incremental_state_id", %str_3 : !torch.str
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "kdim", %int768 : !torch.int
torch.slot "vdim", %int768 : !torch.int
torch.slot "qkv_same_dim", %true : !torch.bool
torch.slot "num_heads", %int12 : !torch.int
torch.slot "head_dim", %int64 : !torch.int
torch.slot "scaling", %float1.250000e-01 : !torch.float
torch.slot "self_attention", %true : !torch.bool
torch.slot "encoder_decoder_attention", %false : !torch.bool
torch.slot "bias_k", %none : !torch.none
torch.slot "bias_v", %none : !torch.none
torch.slot "add_zero_attn", %false : !torch.bool
torch.slot "onnx_trace", %false : !torch.bool
torch.slot "skip_embed_dim_check", %false : !torch.bool
torch.slot "dropout_module", %39 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_342.FairseqDropout">
torch.slot "k_proj", %42 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_343.Linear">
torch.slot "v_proj", %45 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_344.Linear">
torch.slot "q_proj", %48 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_345.Linear">
torch.slot "out_proj", %51 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_346.Linear">
} : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_348.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%53 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%54 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%55 = torch.nn_module {
torch.slot "weight", %53 : !torch.tensor<[768],f32>
torch.slot "bias", %54 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_348.LayerNorm">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_349.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%56 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_349.FairseqDropout">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_350.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%57 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float0.000000e00 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_350.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_351.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%58 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>) : !torch.tensor<[3072,768],f32>
%59 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>) : !torch.tensor<[3072],f32>
%60 = torch.nn_module {
torch.slot "weight", %58 : !torch.tensor<[3072,768],f32>
torch.slot "bias", %59 : !torch.tensor<[3072],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_351.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_352.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%61 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>) : !torch.tensor<[768,3072],f32>
%62 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%63 = torch.nn_module {
torch.slot "weight", %61 : !torch.tensor<[768,3072],f32>
torch.slot "bias", %62 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_352.Linear">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_353.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%64 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%65 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%66 = torch.nn_module {
torch.slot "weight", %64 : !torch.tensor<[768],f32>
torch.slot "bias", %65 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_353.LayerNorm">
%67 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "return_fc", %false : !torch.bool
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "quant_noise", %int0 : !torch.int
torch.slot "quant_noise_block_size", %int8 : !torch.int
torch.slot "normalize_before", %false : !torch.bool
torch.slot "self_attn", %52 : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_347.MultiheadAttention">
torch.slot "self_attn_layer_norm", %55 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_348.LayerNorm">
torch.slot "dropout_module", %56 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_349.FairseqDropout">
torch.slot "activation_dropout_module", %57 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_350.FairseqDropout">
torch.slot "fc1", %60 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_351.Linear">
torch.slot "fc2", %63 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_352.Linear">
torch.slot "final_layer_norm", %66 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_353.LayerNorm">
} : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_354.TransformerEncoderLayerBase">
torch.class_type @__torch__.fairseq.modules.transformer_layer.___torch_mangle_367.TransformerEncoderLayerBase {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "return_fc" : !torch.bool
torch.attr private "embed_dim" : !torch.int
torch.attr private "quant_noise" : !torch.int
torch.attr private "quant_noise_block_size" : !torch.int
torch.attr private "normalize_before" : !torch.bool
torch.attr private "self_attn" : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention">
torch.attr private "self_attn_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_361.LayerNorm">
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_362.FairseqDropout">
torch.attr private "activation_dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_363.FairseqDropout">
torch.attr private "fc1" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_364.Linear">
torch.attr private "fc2" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_365.Linear">
torch.attr private "final_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_366.LayerNorm">
}
torch.class_type @__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "_incremental_state_id" : !torch.str
torch.attr private "embed_dim" : !torch.int
torch.attr private "kdim" : !torch.int
torch.attr private "vdim" : !torch.int
torch.attr private "qkv_same_dim" : !torch.bool
torch.attr private "num_heads" : !torch.int
torch.attr private "head_dim" : !torch.int
torch.attr private "scaling" : !torch.float
torch.attr private "self_attention" : !torch.bool
torch.attr private "encoder_decoder_attention" : !torch.bool
torch.attr private "bias_k" : !torch.none
torch.attr private "bias_v" : !torch.none
torch.attr private "add_zero_attn" : !torch.bool
torch.attr private "onnx_trace" : !torch.bool
torch.attr private "skip_embed_dim_check" : !torch.bool
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_355.FairseqDropout">
torch.attr private "k_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_356.Linear">
torch.attr private "v_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_357.Linear">
torch.attr private "q_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_358.Linear">
torch.attr private "out_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_359.Linear">
torch.method private "reorder_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention.reorder_incremental_state
torch.method private "_get_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention._get_input_buffer
torch.method private "get_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention.get_incremental_state
torch.method private "_get_full_incremental_state_key", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention._get_full_incremental_state_key
torch.method private "_set_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention._set_input_buffer
torch.method private "set_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention.set_incremental_state
}
%str_4 = torch.constant.str "e14a126b-3a57-4929-8f3c-bcb626d46264"
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_355.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%68 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_1 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_355.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_356.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%69 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%70 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%71 = torch.nn_module {
torch.slot "weight", %69 : !torch.tensor<[768,768],f32>
torch.slot "bias", %70 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_356.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_357.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%72 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%73 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%74 = torch.nn_module {
torch.slot "weight", %72 : !torch.tensor<[768,768],f32>
torch.slot "bias", %73 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_357.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_358.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%75 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%76 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%77 = torch.nn_module {
torch.slot "weight", %75 : !torch.tensor<[768,768],f32>
torch.slot "bias", %76 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_358.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_359.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%78 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%79 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%80 = torch.nn_module {
torch.slot "weight", %78 : !torch.tensor<[768,768],f32>
torch.slot "bias", %79 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_359.Linear">
%81 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "_incremental_state_id", %str_4 : !torch.str
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "kdim", %int768 : !torch.int
torch.slot "vdim", %int768 : !torch.int
torch.slot "qkv_same_dim", %true : !torch.bool
torch.slot "num_heads", %int12 : !torch.int
torch.slot "head_dim", %int64 : !torch.int
torch.slot "scaling", %float1.250000e-01 : !torch.float
torch.slot "self_attention", %true : !torch.bool
torch.slot "encoder_decoder_attention", %false : !torch.bool
torch.slot "bias_k", %none : !torch.none
torch.slot "bias_v", %none : !torch.none
torch.slot "add_zero_attn", %false : !torch.bool
torch.slot "onnx_trace", %false : !torch.bool
torch.slot "skip_embed_dim_check", %false : !torch.bool
torch.slot "dropout_module", %68 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_355.FairseqDropout">
torch.slot "k_proj", %71 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_356.Linear">
torch.slot "v_proj", %74 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_357.Linear">
torch.slot "q_proj", %77 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_358.Linear">
torch.slot "out_proj", %80 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_359.Linear">
} : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_361.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%82 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%83 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%84 = torch.nn_module {
torch.slot "weight", %82 : !torch.tensor<[768],f32>
torch.slot "bias", %83 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_361.LayerNorm">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_362.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%85 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_362.FairseqDropout">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_363.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%86 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float0.000000e00 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_363.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_364.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%87 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>) : !torch.tensor<[3072,768],f32>
%88 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>) : !torch.tensor<[3072],f32>
%89 = torch.nn_module {
torch.slot "weight", %87 : !torch.tensor<[3072,768],f32>
torch.slot "bias", %88 : !torch.tensor<[3072],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_364.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_365.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%90 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>) : !torch.tensor<[768,3072],f32>
%91 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%92 = torch.nn_module {
torch.slot "weight", %90 : !torch.tensor<[768,3072],f32>
torch.slot "bias", %91 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_365.Linear">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_366.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%93 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%94 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%95 = torch.nn_module {
torch.slot "weight", %93 : !torch.tensor<[768],f32>
torch.slot "bias", %94 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_366.LayerNorm">
%96 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "return_fc", %false : !torch.bool
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "quant_noise", %int0 : !torch.int
torch.slot "quant_noise_block_size", %int8 : !torch.int
torch.slot "normalize_before", %false : !torch.bool
torch.slot "self_attn", %81 : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_360.MultiheadAttention">
torch.slot "self_attn_layer_norm", %84 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_361.LayerNorm">
torch.slot "dropout_module", %85 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_362.FairseqDropout">
torch.slot "activation_dropout_module", %86 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_363.FairseqDropout">
torch.slot "fc1", %89 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_364.Linear">
torch.slot "fc2", %92 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_365.Linear">
torch.slot "final_layer_norm", %95 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_366.LayerNorm">
} : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_367.TransformerEncoderLayerBase">
torch.class_type @__torch__.fairseq.modules.transformer_layer.___torch_mangle_380.TransformerEncoderLayerBase {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "return_fc" : !torch.bool
torch.attr private "embed_dim" : !torch.int
torch.attr private "quant_noise" : !torch.int
torch.attr private "quant_noise_block_size" : !torch.int
torch.attr private "normalize_before" : !torch.bool
torch.attr private "self_attn" : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention">
torch.attr private "self_attn_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_374.LayerNorm">
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_375.FairseqDropout">
torch.attr private "activation_dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_376.FairseqDropout">
torch.attr private "fc1" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_377.Linear">
torch.attr private "fc2" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_378.Linear">
torch.attr private "final_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_379.LayerNorm">
}
torch.class_type @__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "_incremental_state_id" : !torch.str
torch.attr private "embed_dim" : !torch.int
torch.attr private "kdim" : !torch.int
torch.attr private "vdim" : !torch.int
torch.attr private "qkv_same_dim" : !torch.bool
torch.attr private "num_heads" : !torch.int
torch.attr private "head_dim" : !torch.int
torch.attr private "scaling" : !torch.float
torch.attr private "self_attention" : !torch.bool
torch.attr private "encoder_decoder_attention" : !torch.bool
torch.attr private "bias_k" : !torch.none
torch.attr private "bias_v" : !torch.none
torch.attr private "add_zero_attn" : !torch.bool
torch.attr private "onnx_trace" : !torch.bool
torch.attr private "skip_embed_dim_check" : !torch.bool
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_368.FairseqDropout">
torch.attr private "k_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_369.Linear">
torch.attr private "v_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_370.Linear">
torch.attr private "q_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_371.Linear">
torch.attr private "out_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_372.Linear">
torch.method private "reorder_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention.reorder_incremental_state
torch.method private "_get_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention._get_input_buffer
torch.method private "get_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention.get_incremental_state
torch.method private "_get_full_incremental_state_key", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention._get_full_incremental_state_key
torch.method private "_set_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention._set_input_buffer
torch.method private "set_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention.set_incremental_state
}
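// Each mangled MultiheadAttention class carries its own private copies of the
// incremental-state helpers listed above (reorder_incremental_state,
// _get_input_buffer, etc.); the TorchScript import specializes these per
// mangled class rather than sharing one definition across layers.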
%str_5 = torch.constant.str "f1cc5e06-c931-4349-848c-5d5a8ed67fc7"
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_368.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%97 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_1 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_368.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_369.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%98 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%99 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%100 = torch.nn_module {
torch.slot "weight", %98 : !torch.tensor<[768,768],f32>
torch.slot "bias", %99 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_369.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_370.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%101 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%102 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%103 = torch.nn_module {
torch.slot "weight", %101 : !torch.tensor<[768,768],f32>
torch.slot "bias", %102 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_370.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_371.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%104 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%105 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%106 = torch.nn_module {
torch.slot "weight", %104 : !torch.tensor<[768,768],f32>
torch.slot "bias", %105 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_371.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_372.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%107 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%108 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%109 = torch.nn_module {
torch.slot "weight", %107 : !torch.tensor<[768,768],f32>
torch.slot "bias", %108 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_372.Linear">
%110 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "_incremental_state_id", %str_5 : !torch.str
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "kdim", %int768 : !torch.int
torch.slot "vdim", %int768 : !torch.int
torch.slot "qkv_same_dim", %true : !torch.bool
torch.slot "num_heads", %int12 : !torch.int
torch.slot "head_dim", %int64 : !torch.int
torch.slot "scaling", %float1.250000e-01 : !torch.float
torch.slot "self_attention", %true : !torch.bool
torch.slot "encoder_decoder_attention", %false : !torch.bool
torch.slot "bias_k", %none : !torch.none
torch.slot "bias_v", %none : !torch.none
torch.slot "add_zero_attn", %false : !torch.bool
torch.slot "onnx_trace", %false : !torch.bool
torch.slot "skip_embed_dim_check", %false : !torch.bool
torch.slot "dropout_module", %97 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_368.FairseqDropout">
torch.slot "k_proj", %100 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_369.Linear">
torch.slot "v_proj", %103 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_370.Linear">
torch.slot "q_proj", %106 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_371.Linear">
torch.slot "out_proj", %109 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_372.Linear">
} : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention">
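// Attention hyperparameters as scripted: head_dim = embed_dim // num_heads
// = 768 // 12 = 64, so scaling = head_dim ** -0.5 = 0.125 (printed above as
// %float1.250000e-01). A minimal sketch of the Python construction this
// instance corresponds to, assuming fairseq's MultiheadAttention constructor
// signature, would be roughly:
//   MultiheadAttention(embed_dim=768, num_heads=12, dropout=0.1, self_attention=True)
// matching the dropout_module p, self_attention, and projection slots above.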
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_374.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%111 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%112 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%113 = torch.nn_module {
torch.slot "weight", %111 : !torch.tensor<[768],f32>
torch.slot "bias", %112 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_374.LayerNorm">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_375.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%114 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_375.FairseqDropout">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_376.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%115 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float0.000000e00 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_376.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_377.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%116 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>) : !torch.tensor<[3072,768],f32>
%117 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>) : !torch.tensor<[3072],f32>
%118 = torch.nn_module {
torch.slot "weight", %116 : !torch.tensor<[3072,768],f32>
torch.slot "bias", %117 : !torch.tensor<[3072],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_377.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_378.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%119 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>) : !torch.tensor<[768,3072],f32>
%120 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%121 = torch.nn_module {
torch.slot "weight", %119 : !torch.tensor<[768,3072],f32>
torch.slot "bias", %120 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_378.Linear">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_379.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%122 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%123 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%124 = torch.nn_module {
torch.slot "weight", %122 : !torch.tensor<[768],f32>
torch.slot "bias", %123 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_379.LayerNorm">
%125 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "return_fc", %false : !torch.bool
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "quant_noise", %int0 : !torch.int
torch.slot "quant_noise_block_size", %int8 : !torch.int
torch.slot "normalize_before", %false : !torch.bool
torch.slot "self_attn", %110 : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_373.MultiheadAttention">
torch.slot "self_attn_layer_norm", %113 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_374.LayerNorm">
torch.slot "dropout_module", %114 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_375.FairseqDropout">
torch.slot "activation_dropout_module", %115 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_376.FairseqDropout">
torch.slot "fc1", %118 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_377.Linear">
torch.slot "fc2", %121 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_378.Linear">
torch.slot "final_layer_norm", %124 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_379.LayerNorm">
} : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_380.TransformerEncoderLayerBase">
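// The layer's feed-forward block is visible in the fc1/fc2 weights above:
// fc1 maps 768 -> 3072 (tensor<3072x768xf32>) and fc2 maps 3072 -> 768
// (tensor<768x3072xf32>), the usual 4x hidden expansion for a transformer FFN.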
torch.class_type @__torch__.fairseq.modules.transformer_layer.___torch_mangle_393.TransformerEncoderLayerBase {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "return_fc" : !torch.bool
torch.attr private "embed_dim" : !torch.int
torch.attr private "quant_noise" : !torch.int
torch.attr private "quant_noise_block_size" : !torch.int
torch.attr private "normalize_before" : !torch.bool
torch.attr private "self_attn" : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">
torch.attr private "self_attn_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_387.LayerNorm">
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_388.FairseqDropout">
torch.attr private "activation_dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_389.FairseqDropout">
torch.attr private "fc1" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_390.Linear">
torch.attr private "fc2" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_391.Linear">
torch.attr private "final_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_392.LayerNorm">
}
torch.class_type @__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "_incremental_state_id" : !torch.str
torch.attr private "embed_dim" : !torch.int
torch.attr private "kdim" : !torch.int
torch.attr private "vdim" : !torch.int
torch.attr private "qkv_same_dim" : !torch.bool
torch.attr private "num_heads" : !torch.int
torch.attr private "head_dim" : !torch.int
torch.attr private "scaling" : !torch.float
torch.attr private "self_attention" : !torch.bool
torch.attr private "encoder_decoder_attention" : !torch.bool
torch.attr private "bias_k" : !torch.none
torch.attr private "bias_v" : !torch.none
torch.attr private "add_zero_attn" : !torch.bool
torch.attr private "onnx_trace" : !torch.bool
torch.attr private "skip_embed_dim_check" : !torch.bool
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_381.FairseqDropout">
torch.attr private "k_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_382.Linear">
torch.attr private "v_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_383.Linear">
torch.attr private "q_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_384.Linear">
torch.attr private "out_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_385.Linear">
torch.method private "reorder_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention.reorder_incremental_state
torch.method private "_get_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention._get_input_buffer
torch.method private "get_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention.get_incremental_state
torch.method private "_get_full_incremental_state_key", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention._get_full_incremental_state_key
torch.method private "_set_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention._set_input_buffer
torch.method private "set_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention.set_incremental_state
}
%str_6 = torch.constant.str "379b1b12-9487-4011-bb04-7adba4ec7e46"
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_381.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%126 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_1 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_381.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_382.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%127 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%128 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%129 = torch.nn_module {
torch.slot "weight", %127 : !torch.tensor<[768,768],f32>
torch.slot "bias", %128 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_382.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_383.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%130 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%131 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%132 = torch.nn_module {
torch.slot "weight", %130 : !torch.tensor<[768,768],f32>
torch.slot "bias", %131 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_383.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_384.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%133 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%134 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%135 = torch.nn_module {
torch.slot "weight", %133 : !torch.tensor<[768,768],f32>
torch.slot "bias", %134 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_384.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_385.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%136 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%137 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%138 = torch.nn_module {
torch.slot "weight", %136 : !torch.tensor<[768,768],f32>
torch.slot "bias", %137 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_385.Linear">
%139 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "_incremental_state_id", %str_6 : !torch.str
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "kdim", %int768 : !torch.int
torch.slot "vdim", %int768 : !torch.int
torch.slot "qkv_same_dim", %true : !torch.bool
torch.slot "num_heads", %int12 : !torch.int
torch.slot "head_dim", %int64 : !torch.int
torch.slot "scaling", %float1.250000e-01 : !torch.float
torch.slot "self_attention", %true : !torch.bool
torch.slot "encoder_decoder_attention", %false : !torch.bool
torch.slot "bias_k", %none : !torch.none
torch.slot "bias_v", %none : !torch.none
torch.slot "add_zero_attn", %false : !torch.bool
torch.slot "onnx_trace", %false : !torch.bool
torch.slot "skip_embed_dim_check", %false : !torch.bool
torch.slot "dropout_module", %126 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_381.FairseqDropout">
torch.slot "k_proj", %129 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_382.Linear">
torch.slot "v_proj", %132 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_383.Linear">
torch.slot "q_proj", %135 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_384.Linear">
torch.slot "out_proj", %138 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_385.Linear">
} : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_387.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%140 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%141 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%142 = torch.nn_module {
torch.slot "weight", %140 : !torch.tensor<[768],f32>
torch.slot "bias", %141 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_387.LayerNorm">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_388.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%143 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_388.FairseqDropout">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_389.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%144 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float0.000000e00 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_389.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_390.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%145 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>) : !torch.tensor<[3072,768],f32>
%146 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>) : !torch.tensor<[3072],f32>
%147 = torch.nn_module {
torch.slot "weight", %145 : !torch.tensor<[3072,768],f32>
torch.slot "bias", %146 : !torch.tensor<[3072],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_390.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_391.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%148 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>) : !torch.tensor<[768,3072],f32>
%149 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%150 = torch.nn_module {
torch.slot "weight", %148 : !torch.tensor<[768,3072],f32>
torch.slot "bias", %149 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_391.Linear">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_392.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%151 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%152 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%153 = torch.nn_module {
torch.slot "weight", %151 : !torch.tensor<[768],f32>
torch.slot "bias", %152 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_392.LayerNorm">
%154 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "return_fc", %false : !torch.bool
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "quant_noise", %int0 : !torch.int
torch.slot "quant_noise_block_size", %int8 : !torch.int
torch.slot "normalize_before", %false : !torch.bool
torch.slot "self_attn", %139 : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">
torch.slot "self_attn_layer_norm", %142 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_387.LayerNorm">
torch.slot "dropout_module", %143 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_388.FairseqDropout">
torch.slot "activation_dropout_module", %144 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_389.FairseqDropout">
torch.slot "fc1", %147 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_390.Linear">
torch.slot "fc2", %150 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_391.Linear">
torch.slot "final_layer_norm", %153 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_392.LayerNorm">
} : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_393.TransformerEncoderLayerBase">
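// Layer ___torch_mangle_393 above is structurally identical to the preceding
// layers; besides the weight literals, the only per-layer state is the fresh
// _incremental_state_id UUID (%str_6 here).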
torch.class_type @__torch__.fairseq.modules.transformer_layer.___torch_mangle_406.TransformerEncoderLayerBase {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "return_fc" : !torch.bool
torch.attr private "embed_dim" : !torch.int
torch.attr private "quant_noise" : !torch.int
torch.attr private "quant_noise_block_size" : !torch.int
torch.attr private "normalize_before" : !torch.bool
torch.attr private "self_attn" : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">
torch.attr private "self_attn_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_400.LayerNorm">
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_401.FairseqDropout">
torch.attr private "activation_dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_402.FairseqDropout">
torch.attr private "fc1" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_403.Linear">
torch.attr private "fc2" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_404.Linear">
torch.attr private "final_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_405.LayerNorm">
}
torch.class_type @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "_incremental_state_id" : !torch.str
torch.attr private "embed_dim" : !torch.int
torch.attr private "kdim" : !torch.int
torch.attr private "vdim" : !torch.int
torch.attr private "qkv_same_dim" : !torch.bool
torch.attr private "num_heads" : !torch.int
torch.attr private "head_dim" : !torch.int
torch.attr private "scaling" : !torch.float
torch.attr private "self_attention" : !torch.bool
torch.attr private "encoder_decoder_attention" : !torch.bool
torch.attr private "bias_k" : !torch.none
torch.attr private "bias_v" : !torch.none
torch.attr private "add_zero_attn" : !torch.bool
torch.attr private "onnx_trace" : !torch.bool
torch.attr private "skip_embed_dim_check" : !torch.bool
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_394.FairseqDropout">
torch.attr private "k_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_395.Linear">
torch.attr private "v_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_396.Linear">
torch.attr private "q_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_397.Linear">
torch.attr private "out_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_398.Linear">
torch.method private "reorder_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention.reorder_incremental_state
torch.method private "_get_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention._get_input_buffer
torch.method private "get_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention.get_incremental_state
torch.method private "_get_full_incremental_state_key", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention._get_full_incremental_state_key
torch.method private "_set_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention._set_input_buffer
torch.method private "set_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention.set_incremental_state
}
%str_7 = torch.constant.str "8e59ad02-eef7-4705-82dd-33e369f53f1e"
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_394.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%155 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_1 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_394.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_395.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%156 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%157 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%158 = torch.nn_module {
torch.slot "weight", %156 : !torch.tensor<[768,768],f32>
torch.slot "bias", %157 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_395.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_396.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%159 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%160 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%161 = torch.nn_module {
torch.slot "weight", %159 : !torch.tensor<[768,768],f32>
torch.slot "bias", %160 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_396.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_397.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%162 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%163 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%164 = torch.nn_module {
torch.slot "weight", %162 : !torch.tensor<[768,768],f32>
torch.slot "bias", %163 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_397.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_398.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%165 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%166 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%167 = torch.nn_module {
torch.slot "weight", %165 : !torch.tensor<[768,768],f32>
torch.slot "bias", %166 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_398.Linear">
%168 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "_incremental_state_id", %str_7 : !torch.str
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "kdim", %int768 : !torch.int
torch.slot "vdim", %int768 : !torch.int
torch.slot "qkv_same_dim", %true : !torch.bool
torch.slot "num_heads", %int12 : !torch.int
torch.slot "head_dim", %int64 : !torch.int
torch.slot "scaling", %float1.250000e-01 : !torch.float
torch.slot "self_attention", %true : !torch.bool
torch.slot "encoder_decoder_attention", %false : !torch.bool
torch.slot "bias_k", %none : !torch.none
torch.slot "bias_v", %none : !torch.none
torch.slot "add_zero_attn", %false : !torch.bool
torch.slot "onnx_trace", %false : !torch.bool
torch.slot "skip_embed_dim_check", %false : !torch.bool
torch.slot "dropout_module", %155 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_394.FairseqDropout">
torch.slot "k_proj", %158 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_395.Linear">
torch.slot "v_proj", %161 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_396.Linear">
torch.slot "q_proj", %164 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_397.Linear">
torch.slot "out_proj", %167 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_398.Linear">
} : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_400.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%169 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%170 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%171 = torch.nn_module {
torch.slot "weight", %169 : !torch.tensor<[768],f32>
torch.slot "bias", %170 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_400.LayerNorm">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_401.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%172 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_401.FairseqDropout">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_402.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%173 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float0.000000e00 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_402.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_403.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%174 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>) : !torch.tensor<[3072,768],f32>
%175 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>) : !torch.tensor<[3072],f32>
%176 = torch.nn_module {
torch.slot "weight", %174 : !torch.tensor<[3072,768],f32>
torch.slot "bias", %175 : !torch.tensor<[3072],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_403.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_404.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%177 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>) : !torch.tensor<[768,3072],f32>
%178 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%179 = torch.nn_module {
torch.slot "weight", %177 : !torch.tensor<[768,3072],f32>
torch.slot "bias", %178 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_404.Linear">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_405.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%180 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%181 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%182 = torch.nn_module {
torch.slot "weight", %180 : !torch.tensor<[768],f32>
torch.slot "bias", %181 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_405.LayerNorm">
%183 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "return_fc", %false : !torch.bool
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "quant_noise", %int0 : !torch.int
torch.slot "quant_noise_block_size", %int8 : !torch.int
torch.slot "normalize_before", %false : !torch.bool
torch.slot "self_attn", %168 : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">
torch.slot "self_attn_layer_norm", %171 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_400.LayerNorm">
torch.slot "dropout_module", %172 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_401.FairseqDropout">
torch.slot "activation_dropout_module", %173 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_402.FairseqDropout">
torch.slot "fc1", %176 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_403.Linear">
torch.slot "fc2", %179 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_404.Linear">
torch.slot "final_layer_norm", %182 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_405.LayerNorm">
} : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_406.TransformerEncoderLayerBase">
torch.class_type @__torch__.fairseq.modules.transformer_layer.___torch_mangle_419.TransformerEncoderLayerBase {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "return_fc" : !torch.bool
torch.attr private "embed_dim" : !torch.int
torch.attr private "quant_noise" : !torch.int
torch.attr private "quant_noise_block_size" : !torch.int
torch.attr private "normalize_before" : !torch.bool
torch.attr private "self_attn" : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">
torch.attr private "self_attn_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_413.LayerNorm">
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_414.FairseqDropout">
torch.attr private "activation_dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_415.FairseqDropout">
torch.attr private "fc1" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_416.Linear">
torch.attr private "fc2" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_417.Linear">
torch.attr private "final_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_418.LayerNorm">
}
torch.class_type @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "_incremental_state_id" : !torch.str
torch.attr private "embed_dim" : !torch.int
torch.attr private "kdim" : !torch.int
torch.attr private "vdim" : !torch.int
torch.attr private "qkv_same_dim" : !torch.bool
torch.attr private "num_heads" : !torch.int
torch.attr private "head_dim" : !torch.int
torch.attr private "scaling" : !torch.float
torch.attr private "self_attention" : !torch.bool
torch.attr private "encoder_decoder_attention" : !torch.bool
torch.attr private "bias_k" : !torch.none
torch.attr private "bias_v" : !torch.none
torch.attr private "add_zero_attn" : !torch.bool
torch.attr private "onnx_trace" : !torch.bool
torch.attr private "skip_embed_dim_check" : !torch.bool
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_407.FairseqDropout">
torch.attr private "k_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_408.Linear">
torch.attr private "v_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_409.Linear">
torch.attr private "q_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_410.Linear">
torch.attr private "out_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_411.Linear">
torch.method private "reorder_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention.reorder_incremental_state
torch.method private "_get_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention._get_input_buffer
torch.method private "get_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention.get_incremental_state
torch.method private "_get_full_incremental_state_key", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention._get_full_incremental_state_key
torch.method private "_set_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention._set_input_buffer
torch.method private "set_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention.set_incremental_state
}
%str_8 = torch.constant.str "e9741474-b15a-4c0a-95bb-750a08d131c6"
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_407.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%184 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_1 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_407.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_408.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%185 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%186 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%187 = torch.nn_module {
torch.slot "weight", %185 : !torch.tensor<[768,768],f32>
torch.slot "bias", %186 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_408.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_409.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%188 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%189 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%190 = torch.nn_module {
torch.slot "weight", %188 : !torch.tensor<[768,768],f32>
torch.slot "bias", %189 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_409.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_410.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%191 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%192 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%193 = torch.nn_module {
torch.slot "weight", %191 : !torch.tensor<[768,768],f32>
torch.slot "bias", %192 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_410.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_411.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%194 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%195 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%196 = torch.nn_module {
torch.slot "weight", %194 : !torch.tensor<[768,768],f32>
torch.slot "bias", %195 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_411.Linear">
%197 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "_incremental_state_id", %str_8 : !torch.str
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "kdim", %int768 : !torch.int
torch.slot "vdim", %int768 : !torch.int
torch.slot "qkv_same_dim", %true : !torch.bool
torch.slot "num_heads", %int12 : !torch.int
torch.slot "head_dim", %int64 : !torch.int
torch.slot "scaling", %float1.250000e-01 : !torch.float
torch.slot "self_attention", %true : !torch.bool
torch.slot "encoder_decoder_attention", %false : !torch.bool
torch.slot "bias_k", %none : !torch.none
torch.slot "bias_v", %none : !torch.none
torch.slot "add_zero_attn", %false : !torch.bool
torch.slot "onnx_trace", %false : !torch.bool
torch.slot "skip_embed_dim_check", %false : !torch.bool
torch.slot "dropout_module", %184 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_407.FairseqDropout">
torch.slot "k_proj", %187 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_408.Linear">
torch.slot "v_proj", %190 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_409.Linear">
torch.slot "q_proj", %193 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_410.Linear">
torch.slot "out_proj", %196 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_411.Linear">
} : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_413.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%198 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%199 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%200 = torch.nn_module {
torch.slot "weight", %198 : !torch.tensor<[768],f32>
torch.slot "bias", %199 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_413.LayerNorm">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_414.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%201 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_414.FairseqDropout">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_415.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%202 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float0.000000e00 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_415.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_416.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%203 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>) : !torch.tensor<[3072,768],f32>
%204 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>) : !torch.tensor<[3072],f32>
%205 = torch.nn_module {
torch.slot "weight", %203 : !torch.tensor<[3072,768],f32>
torch.slot "bias", %204 : !torch.tensor<[3072],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_416.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_417.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%206 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>) : !torch.tensor<[768,3072],f32>
%207 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%208 = torch.nn_module {
torch.slot "weight", %206 : !torch.tensor<[768,3072],f32>
torch.slot "bias", %207 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_417.Linear">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_418.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%209 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%210 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%211 = torch.nn_module {
torch.slot "weight", %209 : !torch.tensor<[768],f32>
torch.slot "bias", %210 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_418.LayerNorm">
%212 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "return_fc", %false : !torch.bool
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "quant_noise", %int0 : !torch.int
torch.slot "quant_noise_block_size", %int8 : !torch.int
torch.slot "normalize_before", %false : !torch.bool
torch.slot "self_attn", %197 : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">
torch.slot "self_attn_layer_norm", %200 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_413.LayerNorm">
torch.slot "dropout_module", %201 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_414.FairseqDropout">
torch.slot "activation_dropout_module", %202 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_415.FairseqDropout">
torch.slot "fc1", %205 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_416.Linear">
torch.slot "fc2", %208 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_417.Linear">
torch.slot "final_layer_norm", %211 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_418.LayerNorm">
} : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_419.TransformerEncoderLayerBase">
torch.class_type @__torch__.fairseq.modules.transformer_layer.___torch_mangle_432.TransformerEncoderLayerBase {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "return_fc" : !torch.bool
torch.attr private "embed_dim" : !torch.int
torch.attr private "quant_noise" : !torch.int
torch.attr private "quant_noise_block_size" : !torch.int
torch.attr private "normalize_before" : !torch.bool
torch.attr private "self_attn" : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">
torch.attr private "self_attn_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_426.LayerNorm">
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_427.FairseqDropout">
torch.attr private "activation_dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_428.FairseqDropout">
torch.attr private "fc1" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_429.Linear">
torch.attr private "fc2" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_430.Linear">
torch.attr private "final_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_431.LayerNorm">
}
torch.class_type @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "_incremental_state_id" : !torch.str
torch.attr private "embed_dim" : !torch.int
torch.attr private "kdim" : !torch.int
torch.attr private "vdim" : !torch.int
torch.attr private "qkv_same_dim" : !torch.bool
torch.attr private "num_heads" : !torch.int
torch.attr private "head_dim" : !torch.int
torch.attr private "scaling" : !torch.float
torch.attr private "self_attention" : !torch.bool
torch.attr private "encoder_decoder_attention" : !torch.bool
torch.attr private "bias_k" : !torch.none
torch.attr private "bias_v" : !torch.none
torch.attr private "add_zero_attn" : !torch.bool
torch.attr private "onnx_trace" : !torch.bool
torch.attr private "skip_embed_dim_check" : !torch.bool
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_420.FairseqDropout">
torch.attr private "k_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_421.Linear">
torch.attr private "v_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_422.Linear">
torch.attr private "q_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_423.Linear">
torch.attr private "out_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_424.Linear">
torch.method private "reorder_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention.reorder_incremental_state
torch.method private "_get_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention._get_input_buffer
torch.method private "get_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention.get_incremental_state
torch.method private "_get_full_incremental_state_key", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention._get_full_incremental_state_key
torch.method private "_set_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention._set_input_buffer
torch.method private "set_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention.set_incremental_state
}
%str_9 = torch.constant.str "00f45d82-b5c3-4f9f-954d-f85f6e01089f"
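// Each MultiheadAttention instance carries a unique UUID string as its
// _incremental_state_id; per the get/set_incremental_state and
// _get_full_incremental_state_key methods listed above, this UUID namespaces
// the instance's entries in the shared incremental-state dictionary.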
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_420.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%213 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_1 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_420.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_421.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%214 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%215 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%216 = torch.nn_module {
torch.slot "weight", %214 : !torch.tensor<[768,768],f32>
torch.slot "bias", %215 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_421.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_422.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%217 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%218 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%219 = torch.nn_module {
torch.slot "weight", %217 : !torch.tensor<[768,768],f32>
torch.slot "bias", %218 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_422.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_423.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%220 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%221 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%222 = torch.nn_module {
torch.slot "weight", %220 : !torch.tensor<[768,768],f32>
torch.slot "bias", %221 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_423.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_424.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%223 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%224 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%225 = torch.nn_module {
torch.slot "weight", %223 : !torch.tensor<[768,768],f32>
torch.slot "bias", %224 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_424.Linear">
%226 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "_incremental_state_id", %str_9 : !torch.str
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "kdim", %int768 : !torch.int
torch.slot "vdim", %int768 : !torch.int
torch.slot "qkv_same_dim", %true : !torch.bool
torch.slot "num_heads", %int12 : !torch.int
torch.slot "head_dim", %int64 : !torch.int
torch.slot "scaling", %float1.250000e-01 : !torch.float
torch.slot "self_attention", %true : !torch.bool
torch.slot "encoder_decoder_attention", %false : !torch.bool
torch.slot "bias_k", %none : !torch.none
torch.slot "bias_v", %none : !torch.none
torch.slot "add_zero_attn", %false : !torch.bool
torch.slot "onnx_trace", %false : !torch.bool
torch.slot "skip_embed_dim_check", %false : !torch.bool
torch.slot "dropout_module", %213 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_420.FairseqDropout">
torch.slot "k_proj", %216 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_421.Linear">
torch.slot "v_proj", %219 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_422.Linear">
torch.slot "q_proj", %222 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_423.Linear">
torch.slot "out_proj", %225 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_424.Linear">
} : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_426.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%227 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%228 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%229 = torch.nn_module {
torch.slot "weight", %227 : !torch.tensor<[768],f32>
torch.slot "bias", %228 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_426.LayerNorm">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_427.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%230 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_427.FairseqDropout">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_428.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%231 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float0.000000e00 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_428.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_429.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%232 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>) : !torch.tensor<[3072,768],f32>
%233 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>) : !torch.tensor<[3072],f32>
%234 = torch.nn_module {
torch.slot "weight", %232 : !torch.tensor<[3072,768],f32>
torch.slot "bias", %233 : !torch.tensor<[3072],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_429.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_430.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%235 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>) : !torch.tensor<[768,3072],f32>
%236 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%237 = torch.nn_module {
torch.slot "weight", %235 : !torch.tensor<[768,3072],f32>
torch.slot "bias", %236 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_430.Linear">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_431.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%238 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%239 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%240 = torch.nn_module {
torch.slot "weight", %238 : !torch.tensor<[768],f32>
torch.slot "bias", %239 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_431.LayerNorm">
%241 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "return_fc", %false : !torch.bool
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "quant_noise", %int0 : !torch.int
torch.slot "quant_noise_block_size", %int8 : !torch.int
torch.slot "normalize_before", %false : !torch.bool
torch.slot "self_attn", %226 : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">
torch.slot "self_attn_layer_norm", %229 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_426.LayerNorm">
torch.slot "dropout_module", %230 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_427.FairseqDropout">
torch.slot "activation_dropout_module", %231 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_428.FairseqDropout">
torch.slot "fc1", %234 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_429.Linear">
torch.slot "fc2", %237 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_430.Linear">
torch.slot "final_layer_norm", %240 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_431.LayerNorm">
} : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_432.TransformerEncoderLayerBase">
torch.class_type @__torch__.fairseq.modules.transformer_layer.___torch_mangle_445.TransformerEncoderLayerBase {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "return_fc" : !torch.bool
torch.attr private "embed_dim" : !torch.int
torch.attr private "quant_noise" : !torch.int
torch.attr private "quant_noise_block_size" : !torch.int
torch.attr private "normalize_before" : !torch.bool
torch.attr private "self_attn" : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">
torch.attr private "self_attn_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_439.LayerNorm">
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_440.FairseqDropout">
torch.attr private "activation_dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_441.FairseqDropout">
torch.attr private "fc1" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_442.Linear">
torch.attr private "fc2" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_443.Linear">
torch.attr private "final_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_444.LayerNorm">
}
torch.class_type @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "_incremental_state_id" : !torch.str
torch.attr private "embed_dim" : !torch.int
torch.attr private "kdim" : !torch.int
torch.attr private "vdim" : !torch.int
torch.attr private "qkv_same_dim" : !torch.bool
torch.attr private "num_heads" : !torch.int
torch.attr private "head_dim" : !torch.int
torch.attr private "scaling" : !torch.float
torch.attr private "self_attention" : !torch.bool
torch.attr private "encoder_decoder_attention" : !torch.bool
torch.attr private "bias_k" : !torch.none
torch.attr private "bias_v" : !torch.none
torch.attr private "add_zero_attn" : !torch.bool
torch.attr private "onnx_trace" : !torch.bool
torch.attr private "skip_embed_dim_check" : !torch.bool
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_433.FairseqDropout">
torch.attr private "k_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_434.Linear">
torch.attr private "v_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_435.Linear">
torch.attr private "q_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_436.Linear">
torch.attr private "out_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_437.Linear">
torch.method private "reorder_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention.reorder_incremental_state
torch.method private "_get_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention._get_input_buffer
torch.method private "get_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention.get_incremental_state
torch.method private "_get_full_incremental_state_key", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention._get_full_incremental_state_key
torch.method private "_set_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention._set_input_buffer
torch.method private "set_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention.set_incremental_state
}
%str_10 = torch.constant.str "4607f39e-47c6-4df9-b858-b4c8ec506265"
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_433.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%242 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_1 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_433.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_434.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%243 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%244 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%245 = torch.nn_module {
torch.slot "weight", %243 : !torch.tensor<[768,768],f32>
torch.slot "bias", %244 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_434.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_435.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%246 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%247 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%248 = torch.nn_module {
torch.slot "weight", %246 : !torch.tensor<[768,768],f32>
torch.slot "bias", %247 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_435.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_436.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%249 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%250 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%251 = torch.nn_module {
torch.slot "weight", %249 : !torch.tensor<[768,768],f32>
torch.slot "bias", %250 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_436.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_437.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%252 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%253 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%254 = torch.nn_module {
torch.slot "weight", %252 : !torch.tensor<[768,768],f32>
torch.slot "bias", %253 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_437.Linear">
%255 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "_incremental_state_id", %str_10 : !torch.str
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "kdim", %int768 : !torch.int
torch.slot "vdim", %int768 : !torch.int
torch.slot "qkv_same_dim", %true : !torch.bool
torch.slot "num_heads", %int12 : !torch.int
torch.slot "head_dim", %int64 : !torch.int
torch.slot "scaling", %float1.250000e-01 : !torch.float
torch.slot "self_attention", %true : !torch.bool
torch.slot "encoder_decoder_attention", %false : !torch.bool
torch.slot "bias_k", %none : !torch.none
torch.slot "bias_v", %none : !torch.none
torch.slot "add_zero_attn", %false : !torch.bool
torch.slot "onnx_trace", %false : !torch.bool
torch.slot "skip_embed_dim_check", %false : !torch.bool
torch.slot "dropout_module", %242 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_433.FairseqDropout">
torch.slot "k_proj", %245 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_434.Linear">
torch.slot "v_proj", %248 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_435.Linear">
torch.slot "q_proj", %251 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_436.Linear">
torch.slot "out_proj", %254 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_437.Linear">
} : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_439.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%256 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%257 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%258 = torch.nn_module {
torch.slot "weight", %256 : !torch.tensor<[768],f32>
torch.slot "bias", %257 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_439.LayerNorm">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_440.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%259 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_440.FairseqDropout">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_441.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%260 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float0.000000e00 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_441.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_442.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%261 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>) : !torch.tensor<[3072,768],f32>
%262 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>) : !torch.tensor<[3072],f32>
%263 = torch.nn_module {
torch.slot "weight", %261 : !torch.tensor<[3072,768],f32>
torch.slot "bias", %262 : !torch.tensor<[3072],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_442.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_443.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%264 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>) : !torch.tensor<[768,3072],f32>
%265 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%266 = torch.nn_module {
torch.slot "weight", %264 : !torch.tensor<[768,3072],f32>
torch.slot "bias", %265 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_443.Linear">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_444.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%267 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%268 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%269 = torch.nn_module {
torch.slot "weight", %267 : !torch.tensor<[768],f32>
torch.slot "bias", %268 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_444.LayerNorm">
%270 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "return_fc", %false : !torch.bool
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "quant_noise", %int0 : !torch.int
torch.slot "quant_noise_block_size", %int8 : !torch.int
torch.slot "normalize_before", %false : !torch.bool
torch.slot "self_attn", %255 : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">
torch.slot "self_attn_layer_norm", %258 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_439.LayerNorm">
torch.slot "dropout_module", %259 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_440.FairseqDropout">
torch.slot "activation_dropout_module", %260 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_441.FairseqDropout">
torch.slot "fc1", %263 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_442.Linear">
torch.slot "fc2", %266 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_443.Linear">
torch.slot "final_layer_norm", %269 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_444.LayerNorm">
} : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_445.TransformerEncoderLayerBase">
torch.class_type @__torch__.fairseq.modules.transformer_layer.___torch_mangle_458.TransformerEncoderLayerBase {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "return_fc" : !torch.bool
torch.attr private "embed_dim" : !torch.int
torch.attr private "quant_noise" : !torch.int
torch.attr private "quant_noise_block_size" : !torch.int
torch.attr private "normalize_before" : !torch.bool
torch.attr private "self_attn" : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">
torch.attr private "self_attn_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_452.LayerNorm">
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_453.FairseqDropout">
torch.attr private "activation_dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_454.FairseqDropout">
torch.attr private "fc1" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_455.Linear">
torch.attr private "fc2" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_456.Linear">
torch.attr private "final_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_457.LayerNorm">
}
torch.class_type @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "_incremental_state_id" : !torch.str
torch.attr private "embed_dim" : !torch.int
torch.attr private "kdim" : !torch.int
torch.attr private "vdim" : !torch.int
torch.attr private "qkv_same_dim" : !torch.bool
torch.attr private "num_heads" : !torch.int
torch.attr private "head_dim" : !torch.int
torch.attr private "scaling" : !torch.float
torch.attr private "self_attention" : !torch.bool
torch.attr private "encoder_decoder_attention" : !torch.bool
torch.attr private "bias_k" : !torch.none
torch.attr private "bias_v" : !torch.none
torch.attr private "add_zero_attn" : !torch.bool
torch.attr private "onnx_trace" : !torch.bool
torch.attr private "skip_embed_dim_check" : !torch.bool
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_446.FairseqDropout">
torch.attr private "k_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_447.Linear">
torch.attr private "v_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_448.Linear">
torch.attr private "q_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_449.Linear">
torch.attr private "out_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_450.Linear">
torch.method private "reorder_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention.reorder_incremental_state
torch.method private "_get_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention._get_input_buffer
torch.method private "get_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention.get_incremental_state
torch.method private "_get_full_incremental_state_key", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention._get_full_incremental_state_key
torch.method private "_set_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention._set_input_buffer
torch.method private "set_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention.set_incremental_state
}
%str_11 = torch.constant.str "5ef25f15-cb4f-4ea7-bae1-3fff9241ed78"
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_446.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%271 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_1 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_446.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_447.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%272 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%273 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%274 = torch.nn_module {
torch.slot "weight", %272 : !torch.tensor<[768,768],f32>
torch.slot "bias", %273 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_447.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_448.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%275 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%276 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%277 = torch.nn_module {
torch.slot "weight", %275 : !torch.tensor<[768,768],f32>
torch.slot "bias", %276 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_448.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_449.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%278 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%279 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%280 = torch.nn_module {
torch.slot "weight", %278 : !torch.tensor<[768,768],f32>
torch.slot "bias", %279 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_449.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_450.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%281 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%282 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%283 = torch.nn_module {
torch.slot "weight", %281 : !torch.tensor<[768,768],f32>
torch.slot "bias", %282 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_450.Linear">
%284 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "_incremental_state_id", %str_11 : !torch.str
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "kdim", %int768 : !torch.int
torch.slot "vdim", %int768 : !torch.int
torch.slot "qkv_same_dim", %true : !torch.bool
torch.slot "num_heads", %int12 : !torch.int
torch.slot "head_dim", %int64 : !torch.int
torch.slot "scaling", %float1.250000e-01 : !torch.float
torch.slot "self_attention", %true : !torch.bool
torch.slot "encoder_decoder_attention", %false : !torch.bool
torch.slot "bias_k", %none : !torch.none
torch.slot "bias_v", %none : !torch.none
torch.slot "add_zero_attn", %false : !torch.bool
torch.slot "onnx_trace", %false : !torch.bool
torch.slot "skip_embed_dim_check", %false : !torch.bool
torch.slot "dropout_module", %271 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_446.FairseqDropout">
torch.slot "k_proj", %274 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_447.Linear">
torch.slot "v_proj", %277 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_448.Linear">
torch.slot "q_proj", %280 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_449.Linear">
torch.slot "out_proj", %283 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_450.Linear">
} : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_452.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%285 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%286 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%287 = torch.nn_module {
torch.slot "weight", %285 : !torch.tensor<[768],f32>
torch.slot "bias", %286 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_452.LayerNorm">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_453.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%288 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_453.FairseqDropout">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_454.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%289 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float0.000000e00 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_454.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_455.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%290 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>) : !torch.tensor<[3072,768],f32>
%291 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>) : !torch.tensor<[3072],f32>
%292 = torch.nn_module {
torch.slot "weight", %290 : !torch.tensor<[3072,768],f32>
torch.slot "bias", %291 : !torch.tensor<[3072],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_455.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_456.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%293 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>) : !torch.tensor<[768,3072],f32>
%294 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%295 = torch.nn_module {
torch.slot "weight", %293 : !torch.tensor<[768,3072],f32>
torch.slot "bias", %294 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_456.Linear">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_457.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%296 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%297 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%298 = torch.nn_module {
torch.slot "weight", %296 : !torch.tensor<[768],f32>
torch.slot "bias", %297 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_457.LayerNorm">
%299 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "return_fc", %false : !torch.bool
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "quant_noise", %int0 : !torch.int
torch.slot "quant_noise_block_size", %int8 : !torch.int
torch.slot "normalize_before", %false : !torch.bool
torch.slot "self_attn", %284 : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">
torch.slot "self_attn_layer_norm", %287 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_452.LayerNorm">
torch.slot "dropout_module", %288 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_453.FairseqDropout">
torch.slot "activation_dropout_module", %289 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_454.FairseqDropout">
torch.slot "fc1", %292 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_455.Linear">
torch.slot "fc2", %295 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_456.Linear">
torch.slot "final_layer_norm", %298 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_457.LayerNorm">
} : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_458.TransformerEncoderLayerBase">
torch.class_type @__torch__.fairseq.modules.transformer_layer.___torch_mangle_471.TransformerEncoderLayerBase {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "return_fc" : !torch.bool
torch.attr private "embed_dim" : !torch.int
torch.attr private "quant_noise" : !torch.int
torch.attr private "quant_noise_block_size" : !torch.int
torch.attr private "normalize_before" : !torch.bool
torch.attr private "self_attn" : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">
torch.attr private "self_attn_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_465.LayerNorm">
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_466.FairseqDropout">
torch.attr private "activation_dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_467.FairseqDropout">
torch.attr private "fc1" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_468.Linear">
torch.attr private "fc2" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_469.Linear">
torch.attr private "final_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_470.LayerNorm">
}
torch.class_type @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "_incremental_state_id" : !torch.str
torch.attr private "embed_dim" : !torch.int
torch.attr private "kdim" : !torch.int
torch.attr private "vdim" : !torch.int
torch.attr private "qkv_same_dim" : !torch.bool
torch.attr private "num_heads" : !torch.int
torch.attr private "head_dim" : !torch.int
torch.attr private "scaling" : !torch.float
torch.attr private "self_attention" : !torch.bool
torch.attr private "encoder_decoder_attention" : !torch.bool
torch.attr private "bias_k" : !torch.none
torch.attr private "bias_v" : !torch.none
torch.attr private "add_zero_attn" : !torch.bool
torch.attr private "onnx_trace" : !torch.bool
torch.attr private "skip_embed_dim_check" : !torch.bool
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_459.FairseqDropout">
torch.attr private "k_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_460.Linear">
torch.attr private "v_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_461.Linear">
torch.attr private "q_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_462.Linear">
torch.attr private "out_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_463.Linear">
torch.method private "reorder_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention.reorder_incremental_state
torch.method private "_get_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention._get_input_buffer
torch.method private "get_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention.get_incremental_state
torch.method private "_get_full_incremental_state_key", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention._get_full_incremental_state_key
torch.method private "_set_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention._set_input_buffer
torch.method private "set_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention.set_incremental_state
}
%str_12 = torch.constant.str "1c6c1dc4-850d-4190-b099-fec9c17f71ba"
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_459.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%300 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_1 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_459.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_460.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%301 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%302 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%303 = torch.nn_module {
torch.slot "weight", %301 : !torch.tensor<[768,768],f32>
torch.slot "bias", %302 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_460.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_461.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%304 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%305 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%306 = torch.nn_module {
torch.slot "weight", %304 : !torch.tensor<[768,768],f32>
torch.slot "bias", %305 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_461.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_462.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%307 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%308 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%309 = torch.nn_module {
torch.slot "weight", %307 : !torch.tensor<[768,768],f32>
torch.slot "bias", %308 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_462.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_463.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%310 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%311 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%312 = torch.nn_module {
torch.slot "weight", %310 : !torch.tensor<[768,768],f32>
torch.slot "bias", %311 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_463.Linear">
%313 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "_incremental_state_id", %str_12 : !torch.str
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "kdim", %int768 : !torch.int
torch.slot "vdim", %int768 : !torch.int
torch.slot "qkv_same_dim", %true : !torch.bool
torch.slot "num_heads", %int12 : !torch.int
torch.slot "head_dim", %int64 : !torch.int
torch.slot "scaling", %float1.250000e-01 : !torch.float
torch.slot "self_attention", %true : !torch.bool
torch.slot "encoder_decoder_attention", %false : !torch.bool
torch.slot "bias_k", %none : !torch.none
torch.slot "bias_v", %none : !torch.none
torch.slot "add_zero_attn", %false : !torch.bool
torch.slot "onnx_trace", %false : !torch.bool
torch.slot "skip_embed_dim_check", %false : !torch.bool
torch.slot "dropout_module", %300 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_459.FairseqDropout">
torch.slot "k_proj", %303 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_460.Linear">
torch.slot "v_proj", %306 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_461.Linear">
torch.slot "q_proj", %309 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_462.Linear">
torch.slot "out_proj", %312 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_463.Linear">
} : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_465.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%314 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%315 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%316 = torch.nn_module {
torch.slot "weight", %314 : !torch.tensor<[768],f32>
torch.slot "bias", %315 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_465.LayerNorm">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_466.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%317 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_466.FairseqDropout">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_467.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%318 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float0.000000e00 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_467.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_468.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%319 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>) : !torch.tensor<[3072,768],f32>
%320 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>) : !torch.tensor<[3072],f32>
%321 = torch.nn_module {
torch.slot "weight", %319 : !torch.tensor<[3072,768],f32>
torch.slot "bias", %320 : !torch.tensor<[3072],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_468.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_469.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%322 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>) : !torch.tensor<[768,3072],f32>
%323 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%324 = torch.nn_module {
torch.slot "weight", %322 : !torch.tensor<[768,3072],f32>
torch.slot "bias", %323 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_469.Linear">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_470.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%325 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%326 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%327 = torch.nn_module {
torch.slot "weight", %325 : !torch.tensor<[768],f32>
torch.slot "bias", %326 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_470.LayerNorm">
%328 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "return_fc", %false : !torch.bool
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "quant_noise", %int0 : !torch.int
torch.slot "quant_noise_block_size", %int8 : !torch.int
torch.slot "normalize_before", %false : !torch.bool
torch.slot "self_attn", %313 : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">
torch.slot "self_attn_layer_norm", %316 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_465.LayerNorm">
torch.slot "dropout_module", %317 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_466.FairseqDropout">
torch.slot "activation_dropout_module", %318 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_467.FairseqDropout">
torch.slot "fc1", %321 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_468.Linear">
torch.slot "fc2", %324 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_469.Linear">
torch.slot "final_layer_norm", %327 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_470.LayerNorm">
} : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_471.TransformerEncoderLayerBase">
torch.class_type @__torch__.fairseq.modules.transformer_layer.___torch_mangle_484.TransformerEncoderLayerBase {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "return_fc" : !torch.bool
torch.attr private "embed_dim" : !torch.int
torch.attr private "quant_noise" : !torch.int
torch.attr private "quant_noise_block_size" : !torch.int
torch.attr private "normalize_before" : !torch.bool
torch.attr private "self_attn" : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">
torch.attr private "self_attn_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_478.LayerNorm">
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_479.FairseqDropout">
torch.attr private "activation_dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_480.FairseqDropout">
torch.attr private "fc1" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_481.Linear">
torch.attr private "fc2" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_482.Linear">
torch.attr private "final_layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_483.LayerNorm">
}
torch.class_type @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "_incremental_state_id" : !torch.str
torch.attr private "embed_dim" : !torch.int
torch.attr private "kdim" : !torch.int
torch.attr private "vdim" : !torch.int
torch.attr private "qkv_same_dim" : !torch.bool
torch.attr private "num_heads" : !torch.int
torch.attr private "head_dim" : !torch.int
torch.attr private "scaling" : !torch.float
torch.attr private "self_attention" : !torch.bool
torch.attr private "encoder_decoder_attention" : !torch.bool
torch.attr private "bias_k" : !torch.none
torch.attr private "bias_v" : !torch.none
torch.attr private "add_zero_attn" : !torch.bool
torch.attr private "onnx_trace" : !torch.bool
torch.attr private "skip_embed_dim_check" : !torch.bool
torch.attr private "dropout_module" : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_472.FairseqDropout">
torch.attr private "k_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_473.Linear">
torch.attr private "v_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_474.Linear">
torch.attr private "q_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_475.Linear">
torch.attr private "out_proj" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_476.Linear">
torch.method private "reorder_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention.reorder_incremental_state
torch.method private "_get_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention._get_input_buffer
torch.method private "get_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention.get_incremental_state
torch.method private "_get_full_incremental_state_key", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention._get_full_incremental_state_key
torch.method private "_set_input_buffer", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention._set_input_buffer
torch.method private "set_incremental_state", @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention.set_incremental_state
}
%str_13 = torch.constant.str "e61886ec-52ce-4444-95f7-246157985b51"
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_472.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%329 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_1 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_472.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_473.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%330 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%331 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%332 = torch.nn_module {
torch.slot "weight", %330 : !torch.tensor<[768,768],f32>
torch.slot "bias", %331 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_473.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_474.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%333 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%334 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%335 = torch.nn_module {
torch.slot "weight", %333 : !torch.tensor<[768,768],f32>
torch.slot "bias", %334 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_474.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_475.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%336 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%337 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%338 = torch.nn_module {
torch.slot "weight", %336 : !torch.tensor<[768,768],f32>
torch.slot "bias", %337 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_475.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_476.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%339 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%340 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%341 = torch.nn_module {
torch.slot "weight", %339 : !torch.tensor<[768,768],f32>
torch.slot "bias", %340 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_476.Linear">
%342 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "_incremental_state_id", %str_13 : !torch.str
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "kdim", %int768 : !torch.int
torch.slot "vdim", %int768 : !torch.int
torch.slot "qkv_same_dim", %true : !torch.bool
torch.slot "num_heads", %int12 : !torch.int
torch.slot "head_dim", %int64 : !torch.int
torch.slot "scaling", %float1.250000e-01 : !torch.float
torch.slot "self_attention", %true : !torch.bool
torch.slot "encoder_decoder_attention", %false : !torch.bool
torch.slot "bias_k", %none : !torch.none
torch.slot "bias_v", %none : !torch.none
torch.slot "add_zero_attn", %false : !torch.bool
torch.slot "onnx_trace", %false : !torch.bool
torch.slot "skip_embed_dim_check", %false : !torch.bool
torch.slot "dropout_module", %329 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_472.FairseqDropout">
torch.slot "k_proj", %332 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_473.Linear">
torch.slot "v_proj", %335 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_474.Linear">
torch.slot "q_proj", %338 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_475.Linear">
torch.slot "out_proj", %341 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_476.Linear">
} : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_478.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%343 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%344 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%345 = torch.nn_module {
torch.slot "weight", %343 : !torch.tensor<[768],f32>
torch.slot "bias", %344 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_478.LayerNorm">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_479.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%346 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float1.000000e-01 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_479.FairseqDropout">
torch.class_type @__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_480.FairseqDropout {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "p" : !torch.float
torch.attr private "module_name" : !torch.str
torch.attr private "apply_during_inference" : !torch.bool
}
%347 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "p", %float0.000000e00 : !torch.float
torch.slot "module_name", %str_2 : !torch.str
torch.slot "apply_during_inference", %false : !torch.bool
} : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_480.FairseqDropout">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_481.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%348 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>) : !torch.tensor<[3072,768],f32>
%349 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>) : !torch.tensor<[3072],f32>
%350 = torch.nn_module {
torch.slot "weight", %348 : !torch.tensor<[3072,768],f32>
torch.slot "bias", %349 : !torch.tensor<[3072],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_481.Linear">
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_482.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%351 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>) : !torch.tensor<[768,3072],f32>
%352 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%353 = torch.nn_module {
torch.slot "weight", %351 : !torch.tensor<[768,3072],f32>
torch.slot "bias", %352 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_482.Linear">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_483.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.none
}
%354 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%355 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%356 = torch.nn_module {
torch.slot "weight", %354 : !torch.tensor<[768],f32>
torch.slot "bias", %355 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_483.LayerNorm">
%357 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "return_fc", %false : !torch.bool
torch.slot "embed_dim", %int768 : !torch.int
torch.slot "quant_noise", %int0 : !torch.int
torch.slot "quant_noise_block_size", %int8 : !torch.int
torch.slot "normalize_before", %false : !torch.bool
torch.slot "self_attn", %342 : !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">
torch.slot "self_attn_layer_norm", %345 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_478.LayerNorm">
torch.slot "dropout_module", %346 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_479.FairseqDropout">
torch.slot "activation_dropout_module", %347 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_480.FairseqDropout">
torch.slot "fc1", %350 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_481.Linear">
torch.slot "fc2", %353 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_482.Linear">
torch.slot "final_layer_norm", %356 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_483.LayerNorm">
} : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_484.TransformerEncoderLayerBase">
%358 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "0", %38 : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_341.TransformerEncoderLayerBase">
torch.slot "1", %67 : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_354.TransformerEncoderLayerBase">
torch.slot "2", %96 : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_367.TransformerEncoderLayerBase">
torch.slot "3", %125 : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_380.TransformerEncoderLayerBase">
torch.slot "4", %154 : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_393.TransformerEncoderLayerBase">
torch.slot "5", %183 : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_406.TransformerEncoderLayerBase">
torch.slot "6", %212 : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_419.TransformerEncoderLayerBase">
torch.slot "7", %241 : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_432.TransformerEncoderLayerBase">
torch.slot "8", %270 : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_445.TransformerEncoderLayerBase">
torch.slot "9", %299 : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_458.TransformerEncoderLayerBase">
torch.slot "10", %328 : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_471.TransformerEncoderLayerBase">
torch.slot "11", %357 : !torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_484.TransformerEncoderLayerBase">
} : !torch.nn.Module<"__torch__.torch.nn.modules.container.___torch_mangle_485.ModuleList">
%359 = torch.nn_module {
torch.slot "version", %1 : !torch.tensor<[1],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "encoder_layerdrop", %int0 : !torch.int
torch.slot "return_fc", %false : !torch.bool
torch.slot "padding_idx", %int1 : !torch.int
torch.slot "max_source_positions", %int512 : !torch.int
torch.slot "embed_scale", %float1.000000e00 : !torch.float
torch.slot "quant_noise", %none : !torch.none
torch.slot "num_layers", %int12 : !torch.int
torch.slot "layer_norm", %none : !torch.none
torch.slot "normalize", %false : !torch.bool
torch.slot "dropout_module", %2 : !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_325.FairseqDropout">
torch.slot "embed_tokens", %4 : !torch.nn.Module<"__torch__.torch.nn.modules.sparse.___torch_mangle_326.Embedding">
torch.slot "embed_positions", %6 : !torch.nn.Module<"__torch__.fairseq.modules.learned_positional_embedding.___torch_mangle_327.LearnedPositionalEmbedding">
torch.slot "layernorm_embedding", %9 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_328.LayerNorm">
torch.slot "layers", %358 : !torch.nn.Module<"__torch__.torch.nn.modules.container.___torch_mangle_485.ModuleList">
} : !torch.nn.Module<"__torch__.fairseq.models.transformer.transformer_encoder.___torch_mangle_486.TransformerEncoder">
torch.class_type @__torch__.fairseq.models.roberta.model.___torch_mangle_489.RobertaLMHead {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
torch.attr private "dense" : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_487.Linear">
torch.attr private "layer_norm" : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_488.LayerNorm">
}
%360 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<250002xf32>) : !torch.tensor<[250002],f32>
torch.class_type @__torch__.torch.nn.modules.linear.___torch_mangle_487.Linear {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
}
%361 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>) : !torch.tensor<[768,768],f32>
%362 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%363 = torch.nn_module {
torch.slot "weight", %361 : !torch.tensor<[768,768],f32>
torch.slot "bias", %362 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_487.Linear">
torch.class_type @__torch__.torch.nn.modules.normalization.___torch_mangle_488.LayerNorm {
torch.attr private "weight" : !torch.tensor
torch.attr private "bias" : !torch.tensor
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
}
%364 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%365 = torch.tensor.literal(opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>) : !torch.tensor<[768],f32>
%366 = torch.nn_module {
torch.slot "weight", %364 : !torch.tensor<[768],f32>
torch.slot "bias", %365 : !torch.tensor<[768],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_488.LayerNorm">
%367 = torch.nn_module {
torch.slot "weight", %3 : !torch.tensor<[250002,768],f32>
torch.slot "bias", %360 : !torch.tensor<[250002],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "dense", %363 : !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_487.Linear">
torch.slot "layer_norm", %366 : !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_488.LayerNorm">
} : !torch.nn.Module<"__torch__.fairseq.models.roberta.model.___torch_mangle_489.RobertaLMHead">
%368 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "sentence_encoder", %359 : !torch.nn.Module<"__torch__.fairseq.models.transformer.transformer_encoder.___torch_mangle_486.TransformerEncoder">
torch.slot "lm_head", %367 : !torch.nn.Module<"__torch__.fairseq.models.roberta.model.___torch_mangle_489.RobertaLMHead">
} : !torch.nn.Module<"__torch__.fairseq.models.roberta.model.___torch_mangle_490.RobertaEncoder">
torch.class_type @__torch__.torch.nn.modules.container.___torch_mangle_491.ModuleDict {
torch.attr private "training" : !torch.bool
torch.attr private "_is_full_backward_hook" : !torch.optional<!torch.bool>
}
%369 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.torch.nn.modules.container.___torch_mangle_491.ModuleDict">
%370 = torch.nn_module {
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "encoder", %368 : !torch.nn.Module<"__torch__.fairseq.models.roberta.model.___torch_mangle_490.RobertaEncoder">
torch.slot "classification_heads", %369 : !torch.nn.Module<"__torch__.torch.nn.modules.container.___torch_mangle_491.ModuleDict">
} : !torch.nn.Module<"__torch__.fairseq.models.roberta.model.___torch_mangle_492.RobertaModel">
%371 = torch.nn_module {
torch.slot "_float_tensor", %0 : !torch.tensor<[1],f32>
torch.slot "training", %false : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "model", %370 : !torch.nn.Module<"__torch__.fairseq.models.roberta.model.___torch_mangle_492.RobertaModel">
} : !torch.nn.Module<"__torch__.fairseq.models.roberta.hub_interface.___torch_mangle_493.RobertaHubInterface">
%372 = torch.nn_module {
torch.slot "training", %true : !torch.bool
torch.slot "_is_full_backward_hook", %none : !torch.none
torch.slot "model", %371 : !torch.nn.Module<"__torch__.fairseq.models.roberta.hub_interface.___torch_mangle_493.RobertaHubInterface">
} : !torch.nn.Module<"__torch__.build_tools.torchscript_e2e_heavydep_tests.xlmr.___torch_mangle_494.XLMR_model">
}
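// Note on the trace that follows: these lines are emitted because the
// pipeline was invoked with -debug, and appear to come from MLIR's greedy
// pattern-rewrite driver probing its rewrite patterns against each
// operation of the module printed above.  A block ending in
// "-> failure : pattern failed to match" only means that candidate
// pattern declined to fire on that operation; it is expected debug noise,
// not an error.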
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5b6e70) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5b5c50) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b6ed0) {
"torch.slot"(%400) {name = "model"} : (!torch.nn.Module<"__torch__.fairseq.models.roberta.hub_interface.___torch_mangle_493.RobertaHubInterface">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b5d80) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b5d00) {
"torch.slot"(%0) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5b5bf0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5b5900) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b5b60) {
"torch.slot"(%399) {name = "model"} : (!torch.nn.Module<"__torch__.fairseq.models.roberta.model.___torch_mangle_492.RobertaModel">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b5a90) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b5a10) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b5990) {
"torch.slot"(%2) {name = "_float_tensor"} : (!torch.tensor<[1],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5b58a0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5b5590) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b5810) {
"torch.slot"(%398) {name = "classification_heads"} : (!torch.nn.Module<"__torch__.torch.nn.modules.container.___torch_mangle_491.ModuleDict">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b5740) {
"torch.slot"(%397) {name = "encoder"} : (!torch.nn.Module<"__torch__.fairseq.models.roberta.model.___torch_mangle_490.RobertaEncoder">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b5660) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b55e0) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5b5530) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5b5380) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b5450) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b53d0) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5b37c0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5b5160) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5b52e0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5b5210) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5b3fd0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5b3db0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b4030) {
"torch.slot"(%396) {name = "lm_head"} : (!torch.nn.Module<"__torch__.fairseq.models.roberta.model.___torch_mangle_489.RobertaLMHead">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b3f40) {
"torch.slot"(%388) {name = "sentence_encoder"} : (!torch.nn.Module<"__torch__.fairseq.models.transformer.transformer_encoder.___torch_mangle_486.TransformerEncoder">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b3ec0) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b3e40) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5b3d50) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5b38e0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b3c60) {
"torch.slot"(%395) {name = "layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_488.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b3be0) {
"torch.slot"(%392) {name = "dense"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_487.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b3b10) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b3a90) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b3a10) {
"torch.slot"(%389) {name = "bias"} : (!torch.tensor<[250002],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b3970) {
"torch.slot"(%13) {name = "weight"} : (!torch.tensor<[250002,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5b3880) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f55ee10) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5b3740) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55efc0) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55ef40) {
"torch.slot"(%394) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55eec0) {
"torch.slot"(%393) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f55ed70) {
%394 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f55ed10) {
%393 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f55e990) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f55eab0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55ecb0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55ebe0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55eb90) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55eb40) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f55ea50) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f55e760) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55e910) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55e890) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55e810) {
"torch.slot"(%391) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55def0) {
"torch.slot"(%390) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f55e6c0) {
%391 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f55e660) {
%390 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f55e5f0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f55e400) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55e5a0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55e4f0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55e4a0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55e450) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f55e360) {
%389 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<250002xf32>} : () -> !torch.tensor<[250002],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f55c320) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f55df70) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55e230) {
"torch.attr"() {isPrivate, name = "layer_norm", type = !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_488.LayerNorm">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55e1e0) {
"torch.attr"() {isPrivate, name = "dense", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_487.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55e190) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55e0c0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55e070) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55e020) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f55dbc0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f55d500) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55de70) {
"torch.slot"(%387) {name = "layers"} : (!torch.nn.Module<"__torch__.torch.nn.modules.container.___torch_mangle_485.ModuleList">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55ddf0) {
"torch.slot"(%19) {name = "layernorm_embedding"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_328.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55dd70) {
"torch.slot"(%16) {name = "embed_positions"} : (!torch.nn.Module<"__torch__.fairseq.modules.learned_positional_embedding.___torch_mangle_327.LearnedPositionalEmbedding">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55dcf0) {
"torch.slot"(%14) {name = "embed_tokens"} : (!torch.nn.Module<"__torch__.torch.nn.modules.sparse.___torch_mangle_326.Embedding">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55dc20) {
"torch.slot"(%12) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_325.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55db30) {
"torch.slot"(%3) {name = "normalize"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55dab0) {
"torch.slot"(%1) {name = "layer_norm"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55da30) {
"torch.slot"(%9) {name = "num_layers"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d9b0) {
"torch.slot"(%1) {name = "quant_noise"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d930) {
"torch.slot"(%8) {name = "embed_scale"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d8b0) {
"torch.slot"(%7) {name = "max_source_positions"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d830) {
"torch.slot"(%6) {name = "padding_idx"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d7b0) {
"torch.slot"(%3) {name = "return_fc"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d730) {
"torch.slot"(%5) {name = "encoder_layerdrop"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d6b0) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d630) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d5b0) {
"torch.slot"(%4) {name = "version"} : (!torch.tensor<[1],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f55ced0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f55cd10) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d430) {
"torch.slot"(%386) {name = "11"} : (!torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_484.TransformerEncoderLayerBase">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d3b0) {
"torch.slot"(%356) {name = "10"} : (!torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_471.TransformerEncoderLayerBase">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d330) {
"torch.slot"(%326) {name = "9"} : (!torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_458.TransformerEncoderLayerBase">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d2b0) {
"torch.slot"(%296) {name = "8"} : (!torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_445.TransformerEncoderLayerBase">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d230) {
"torch.slot"(%266) {name = "7"} : (!torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_432.TransformerEncoderLayerBase">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d1b0) {
"torch.slot"(%236) {name = "6"} : (!torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_419.TransformerEncoderLayerBase">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d130) {
"torch.slot"(%206) {name = "5"} : (!torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_406.TransformerEncoderLayerBase">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d0b0) {
"torch.slot"(%176) {name = "4"} : (!torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_393.TransformerEncoderLayerBase">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55d030) {
"torch.slot"(%146) {name = "3"} : (!torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_380.TransformerEncoderLayerBase">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55cfb0) {
"torch.slot"(%116) {name = "2"} : (!torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_367.TransformerEncoderLayerBase">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55cf30) {
"torch.slot"(%86) {name = "1"} : (!torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_354.TransformerEncoderLayerBase">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55ce40) {
"torch.slot"(%56) {name = "0"} : (!torch.nn.Module<"__torch__.fairseq.modules.transformer_layer.___torch_mangle_341.TransformerEncoderLayerBase">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55cdc0) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55cc90) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f55c860) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f55c440) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55cc10) {
"torch.slot"(%385) {name = "final_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_483.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55cb90) {
"torch.slot"(%382) {name = "fc2"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_482.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55cb10) {
"torch.slot"(%379) {name = "fc1"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_481.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55ca90) {
"torch.slot"(%376) {name = "activation_dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_480.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55ca10) {
"torch.slot"(%375) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_479.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55c990) {
"torch.slot"(%374) {name = "self_attn_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_478.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55c8c0) {
"torch.slot"(%371) {name = "self_attn"} : (!torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55c7d0) {
"torch.slot"(%3) {name = "normalize_before"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55c750) {
"torch.slot"(%21) {name = "quant_noise_block_size"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55c6d0) {
"torch.slot"(%5) {name = "quant_noise"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55c650) {
"torch.slot"(%20) {name = "embed_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55c5d0) {
"torch.slot"(%3) {name = "return_fc"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55c550) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55c4d0) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f55c3e0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f55c050) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55c2a0) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55c220) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55c1a0) {
"torch.slot"(%384) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55c120) {
"torch.slot"(%383) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5ac640) {
%384 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5ac5e0) {
%383 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5ac2e0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5ac400) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5ac580) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5ac530) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5ac4e0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5ac490) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5ac3a0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5ac010) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5ac260) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5ac1e0) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5ac160) {
"torch.slot"(%381) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5ac0e0) {
"torch.slot"(%380) {name = "weight"} : (!torch.tensor<[768,3072],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5abf10) {
%381 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5abeb0) {
%380 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>} : () -> !torch.tensor<[768,3072],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5abbb0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5abcd0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5abe50) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5abe00) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5abdb0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5abd60) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5abc70) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5ab8e0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5abb30) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5abab0) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5aba30) {
"torch.slot"(%378) {name = "bias"} : (!torch.tensor<[3072],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5ab9b0) {
"torch.slot"(%377) {name = "weight"} : (!torch.tensor<[3072,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5ab800) {
%378 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>} : () -> !torch.tensor<[3072],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5ab7a0) {
%377 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>} : () -> !torch.tensor<[3072,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5ab550) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5ab600) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5ab740) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5ab6f0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5ab6a0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5ab650) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5ab4f0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5ab210) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5ab460) {
"torch.slot"(%3) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5ab3e0) {
"torch.slot"(%43) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5ab360) {
"torch.slot"(%45) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5ab2e0) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5ab260) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5a9e70) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5a9f20) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5ab170) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5ab120) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5ab0d0) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5ab080) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a9f70) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5a9e10) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5a9b30) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a9d80) {
"torch.slot"(%3) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a9d00) {
"torch.slot"(%43) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a9c80) {
"torch.slot"(%10) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a9c00) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a9b80) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5a9720) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5a9840) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a9a90) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a9a40) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a99f0) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a99a0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a98d0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5a97e0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5a9450) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a96a0) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a9620) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a95a0) {
"torch.slot"(%373) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a9520) {
"torch.slot"(%372) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a9350) {
%373 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a92f0) {
%372 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5a8380) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5a90f0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a9290) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a9240) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a91f0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a91a0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5a8dc0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5a84a0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a9070) {
"torch.slot"(%370) {name = "out_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_476.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8ff0) {
"torch.slot"(%367) {name = "q_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_475.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8f70) {
"torch.slot"(%364) {name = "v_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_474.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8ef0) {
"torch.slot"(%361) {name = "k_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_473.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8e20) {
"torch.slot"(%358) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_472.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8d30) {
"torch.slot"(%3) {name = "skip_embed_dim_check"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8cb0) {
"torch.slot"(%3) {name = "onnx_trace"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8c30) {
"torch.slot"(%3) {name = "add_zero_attn"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8bb0) {
"torch.slot"(%1) {name = "bias_v"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8b30) {
"torch.slot"(%1) {name = "bias_k"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8ab0) {
"torch.slot"(%3) {name = "encoder_decoder_attention"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8a30) {
"torch.slot"(%0) {name = "self_attention"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a89b0) {
"torch.slot"(%24) {name = "scaling"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8930) {
"torch.slot"(%23) {name = "head_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a88b0) {
"torch.slot"(%9) {name = "num_heads"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8830) {
"torch.slot"(%0) {name = "qkv_same_dim"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a87b0) {
"torch.slot"(%20) {name = "vdim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8730) {
"torch.slot"(%20) {name = "kdim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a86b0) {
"torch.slot"(%20) {name = "embed_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8630) {
"torch.slot"(%357) {name = "_incremental_state_id"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a85b0) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8530) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5a8440) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5a80b0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8300) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8280) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8200) {
"torch.slot"(%369) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8180) {
"torch.slot"(%368) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a7fb0) {
%369 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a7f50) {
%368 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5a7c50) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5a7d70) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a7ef0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a7ea0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a7e50) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a7e00) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5a7d10) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5a7980) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a7bd0) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a7b50) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a7ad0) {
"torch.slot"(%366) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a7a50) {
"torch.slot"(%365) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a7880) {
%366 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a7820) {
%365 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5a7520) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5a7640) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a77c0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a7770) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a7720) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a76d0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5a75e0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5a7250) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a74a0) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a7420) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a73a0) {
"torch.slot"(%363) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a7320) {
"torch.slot"(%362) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a7150) {
%363 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a70f0) {
%362 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5a6df0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5a6f10) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a7090) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a7040) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a6ff0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a6fa0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5a6eb0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5a6b20) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a6d70) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a6cf0) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a6c70) {
"torch.slot"(%360) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a6bf0) {
"torch.slot"(%359) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a6a40) {
%360 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a69e0) {
%359 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5a6790) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5a6840) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a6980) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a6930) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a68e0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a6890) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5a51f0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5a6540) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a6710) {
"torch.slot"(%3) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a6690) {
"torch.slot"(%25) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a6610) {
"torch.slot"(%10) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a6590) {
"torch.slot"(%1) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a5160) {
"torch.slot"(%3) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5a6200) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5a62b0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a64a0) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a6450) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a6400) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a63b0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a6300) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.constant.str'(0x56429f5a61b0) {
%357 = "torch.constant.str"() {value = "e61886ec-52ce-4444-95f7-246157985b51"} : () -> !torch.str
} -> success : operation was folded
//===-------------------------------------------===//
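// Editor's note: the block just above is the one visit in this stretch where the driver reports success. Everything else here (torch.attr, torch.slot, torch.nn_module, torch.class_type, torch.method, torch.tensor.literal) has no matching rewrite pattern at this pipeline stage, so each visit logs "failure : pattern failed to match"; the torch.constant.str op instead constant-folds. The sketch below is a toy model of that driver loop, NOT MLIR's GreedyPatternRewriteDriver and not torch-mlir code; every name in it is made up for illustration. It only shows the shape of the trace: visit each op, try folding first, then try patterns, and log one of the three outcomes seen in this file.
//
// from dataclasses import dataclass, field
//
// @dataclass
// class Op:
//     name: str                      # e.g. "torch.constant.str" (hypothetical stand-in)
//     attrs: dict = field(default_factory=dict)
//
// def try_fold(op: Op) -> bool:
//     # Toy rule: constant-like ops fold to their value attribute; nothing else folds.
//     return op.name == "torch.constant.str" and "value" in op.attrs
//
// def try_patterns(op: Op, patterns) -> bool:
//     # Each pattern returns True if it rewrote the op.
//     return any(p(op) for p in patterns)
//
// def greedy_rewrite(worklist, patterns):
//     for op in worklist:
//         print(f"Processing operation : '{op.name}'({id(op):#x}) {{")
//         if try_fold(op):
//             print("} -> success : operation was folded")
//         elif try_patterns(op, patterns):
//             print("} -> success : pattern applied")
//         else:
//             print("} -> failure : pattern failed to match")
//
// # With no patterns registered for module-metadata ops, the trace is
// # dominated by failures, exactly as in the surrounding log.
// greedy_rewrite(
//     [Op("torch.attr", {"name": "weight"}),
//      Op("torch.constant.str", {"value": "e61886ec-..."}),
//      Op("torch.slot", {"name": "training"})],
//     patterns=[],
// )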
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a8630) {
"torch.slot"(%0) {name = "_incremental_state_id"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.constant.str'(0x56429f5d4730) {
%0 = "torch.constant.str"() {value = "e61886ec-52ce-4444-95f7-246157985b51"} : () -> !torch.str
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5a57e0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5a5890) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f5a6150) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention.set_incremental_state, isPrivate, name = "set_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f5a6100) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention._set_input_buffer, isPrivate, name = "_set_input_buffer"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f5a60b0) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention._get_full_incremental_state_key, isPrivate, name = "_get_full_incremental_state_key"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f5a6060) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention.get_incremental_state, isPrivate, name = "get_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f5a6010) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention._get_input_buffer, isPrivate, name = "_get_input_buffer"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f5a5fc0) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention.reorder_incremental_state, isPrivate, name = "reorder_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5f70) {
"torch.attr"() {isPrivate, name = "out_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_476.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5f20) {
"torch.attr"() {isPrivate, name = "q_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_475.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5ed0) {
"torch.attr"() {isPrivate, name = "v_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_474.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5e80) {
"torch.attr"() {isPrivate, name = "k_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_473.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5e30) {
"torch.attr"() {isPrivate, name = "dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_472.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5de0) {
"torch.attr"() {isPrivate, name = "skip_embed_dim_check", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5d90) {
"torch.attr"() {isPrivate, name = "onnx_trace", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5d40) {
"torch.attr"() {isPrivate, name = "add_zero_attn", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5cf0) {
"torch.attr"() {isPrivate, name = "bias_v", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5ca0) {
"torch.attr"() {isPrivate, name = "bias_k", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5c50) {
"torch.attr"() {isPrivate, name = "encoder_decoder_attention", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5c00) {
"torch.attr"() {isPrivate, name = "self_attention", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5bb0) {
"torch.attr"() {isPrivate, name = "scaling", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5b60) {
"torch.attr"() {isPrivate, name = "head_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5b10) {
"torch.attr"() {isPrivate, name = "num_heads", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5ac0) {
"torch.attr"() {isPrivate, name = "qkv_same_dim", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5a70) {
"torch.attr"() {isPrivate, name = "vdim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5a20) {
"torch.attr"() {isPrivate, name = "kdim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a59d0) {
"torch.attr"() {isPrivate, name = "embed_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5980) {
"torch.attr"() {isPrivate, name = "_incremental_state_id", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5930) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a58e0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5a47f0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5a5250) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5790) {
"torch.attr"() {isPrivate, name = "final_layer_norm", type = !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_483.LayerNorm">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5740) {
"torch.attr"() {isPrivate, name = "fc2", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_482.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a56f0) {
"torch.attr"() {isPrivate, name = "fc1", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_481.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a56a0) {
"torch.attr"() {isPrivate, name = "activation_dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_480.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5650) {
"torch.attr"() {isPrivate, name = "dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_479.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5600) {
"torch.attr"() {isPrivate, name = "self_attn_layer_norm", type = !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_478.LayerNorm">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a55b0) {
"torch.attr"() {isPrivate, name = "self_attn", type = !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_477.MultiheadAttention">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5560) {
"torch.attr"() {isPrivate, name = "normalize_before", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5510) {
"torch.attr"() {isPrivate, name = "quant_noise_block_size", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a54c0) {
"torch.attr"() {isPrivate, name = "quant_noise", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5470) {
"torch.attr"() {isPrivate, name = "embed_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5420) {
"torch.attr"() {isPrivate, name = "return_fc", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a53d0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a5300) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5a4d30) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5a4910) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a50e0) {
"torch.slot"(%356) {name = "final_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_470.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a5060) {
"torch.slot"(%353) {name = "fc2"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_469.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a4fe0) {
"torch.slot"(%350) {name = "fc1"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_468.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a4f60) {
"torch.slot"(%347) {name = "activation_dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_467.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a4ee0) {
"torch.slot"(%346) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_466.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a4e60) {
"torch.slot"(%345) {name = "self_attn_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_465.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a4d90) {
"torch.slot"(%342) {name = "self_attn"} : (!torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a4ca0) {
"torch.slot"(%4) {name = "normalize_before"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a4c20) {
"torch.slot"(%22) {name = "quant_noise_block_size"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a4ba0) {
"torch.slot"(%6) {name = "quant_noise"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a4b20) {
"torch.slot"(%21) {name = "embed_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a4aa0) {
"torch.slot"(%4) {name = "return_fc"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a4a20) {
"torch.slot"(%2) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a49a0) {
"torch.slot"(%4) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5a48b0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5a4520) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a4770) {
"torch.slot"(%2) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a46f0) {
"torch.slot"(%4) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a4670) {
"torch.slot"(%355) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a45f0) {
"torch.slot"(%354) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a4420) {
%355 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a43c0) {
%354 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f466f50) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f467070) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a4360) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a4310) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a42c0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f467100) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f467010) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f466c80) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f466ed0) {
"torch.slot"(%2) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f466e50) {
"torch.slot"(%4) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f466dd0) {
"torch.slot"(%352) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f466d50) {
"torch.slot"(%351) {name = "weight"} : (!torch.tensor<[768,3072],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f466b80) {
%352 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f466b20) {
%351 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>} : () -> !torch.tensor<[768,3072],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5a3190) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f466940) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f466ac0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f466a70) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f466a20) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f4669d0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5a3250) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5a2ec0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a3110) {
"torch.slot"(%2) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a3090) {
"torch.slot"(%4) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a3010) {
"torch.slot"(%349) {name = "bias"} : (!torch.tensor<[3072],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a2f90) {
"torch.slot"(%348) {name = "weight"} : (!torch.tensor<[3072,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a2de0) {
%349 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>} : () -> !torch.tensor<[3072],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a2d80) {
%348 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>} : () -> !torch.tensor<[3072,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5a2b30) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5a2be0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a2d20) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a2cd0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a2c80) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a2c30) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5a2ad0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5a27f0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a2a40) {
"torch.slot"(%4) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a29c0) {
"torch.slot"(%44) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a2940) {
"torch.slot"(%46) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a28c0) {
"torch.slot"(%2) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a2840) {
"torch.slot"(%4) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f532a40) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5a2580) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a2750) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a2700) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a26b0) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a2660) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a25d0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5329e0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f532700) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f532950) {
"torch.slot"(%4) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5328d0) {
"torch.slot"(%44) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f532850) {
"torch.slot"(%11) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5327d0) {
"torch.slot"(%2) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f532750) {
"torch.slot"(%4) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5a1660) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f532410) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f532660) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f532610) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5325c0) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f532570) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5324a0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5a1720) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5a1390) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a15e0) {
"torch.slot"(%2) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a1560) {
"torch.slot"(%4) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a14e0) {
"torch.slot"(%344) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a1460) {
"torch.slot"(%343) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a1290) {
%344 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5a1230) {
%343 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5a02c0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5a1030) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a11d0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a1180) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a1130) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5a10e0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5a0d00) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5a03e0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0fb0) {
"torch.slot"(%341) {name = "out_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_463.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0f30) {
"torch.slot"(%338) {name = "q_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_462.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0eb0) {
"torch.slot"(%335) {name = "v_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_461.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0e30) {
"torch.slot"(%332) {name = "k_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_460.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0d60) {
"torch.slot"(%329) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_459.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0c70) {
"torch.slot"(%4) {name = "skip_embed_dim_check"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0bf0) {
"torch.slot"(%4) {name = "onnx_trace"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0b70) {
"torch.slot"(%4) {name = "add_zero_attn"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0af0) {
"torch.slot"(%2) {name = "bias_v"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0a70) {
"torch.slot"(%2) {name = "bias_k"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a09f0) {
"torch.slot"(%4) {name = "encoder_decoder_attention"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0970) {
"torch.slot"(%1) {name = "self_attention"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a08f0) {
"torch.slot"(%25) {name = "scaling"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0870) {
"torch.slot"(%24) {name = "head_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a07f0) {
"torch.slot"(%10) {name = "num_heads"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0770) {
"torch.slot"(%1) {name = "qkv_same_dim"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a06f0) {
"torch.slot"(%21) {name = "vdim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0670) {
"torch.slot"(%21) {name = "kdim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a05f0) {
"torch.slot"(%21) {name = "embed_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0570) {
"torch.slot"(%328) {name = "_incremental_state_id"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a04f0) {
"torch.slot"(%2) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0470) {
"torch.slot"(%4) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5a0380) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f59fff0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0240) {
"torch.slot"(%2) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a01c0) {
"torch.slot"(%4) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0140) {
"torch.slot"(%340) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a00c0) {
"torch.slot"(%339) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f59fef0) {
%340 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f59fe90) {
%339 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f59fb90) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f59fcb0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59fe30) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59fde0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59fd90) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59fd40) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f59fc50) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f59f8c0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59fb10) {
"torch.slot"(%2) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59fa90) {
"torch.slot"(%4) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59fa10) {
"torch.slot"(%337) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59f990) {
"torch.slot"(%336) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f59f7c0) {
%337 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f59f760) {
%336 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f59f460) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f59f580) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59f700) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59f6b0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59f660) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59f610) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f59f520) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f59f190) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59f3e0) {
"torch.slot"(%2) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59f360) {
"torch.slot"(%4) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59f2e0) {
"torch.slot"(%334) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59f260) {
"torch.slot"(%333) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f59f090) {
%334 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f59f030) {
%333 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f59ed30) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f59ee50) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59efd0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59ef80) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59ef30) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59eee0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f59edf0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f59ea60) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59ecb0) {
"torch.slot"(%2) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59ec30) {
"torch.slot"(%4) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59ebb0) {
"torch.slot"(%331) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59eb30) {
"torch.slot"(%330) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f59e980) {
%331 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f59e920) {
%330 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f59e6d0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f59e780) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59e8c0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59e870) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59e820) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59e7d0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f59c120) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f59e480) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59e650) {
"torch.slot"(%4) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59e5d0) {
"torch.slot"(%26) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59e550) {
"torch.slot"(%11) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59e4d0) {
"torch.slot"(%2) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59c090) {
"torch.slot"(%4) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f59e140) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f59e1f0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59e3e0) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59e390) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59e340) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59e2f0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
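A small type-system detail surfaces here: the _is_full_backward_hook attr is declared as !torch.optional<!torch.bool>, yet the corresponding torch.slot (a few entries above) stores a plain !torch.none. That is well-formed because !torch.none is a subtype of every !torch.optional<T>. A hedged sketch of checking that relation programmatically (torch-mlir exposes a helper along these lines in its Torch dialect headers; the exact signatures of isValidSubtype and the type getters are assumptions):

  #include "torch-mlir/Dialect/Torch/IR/TorchTypes.h"
  using namespace mlir::torch;

  bool noneFitsOptionalBool(mlir::MLIRContext *ctx) {
    auto noneTy = Torch::NoneType::get(ctx);
    auto optBoolTy = Torch::OptionalType::get(Torch::BoolType::get(ctx));
    return Torch::isValidSubtype(noneTy, optBoolTy); // expected: true
  }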
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59e240) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.constant.str'(0x56429f59e0f0) {
%328 = "torch.constant.str"() {value = "1c6c1dc4-850d-4190-b099-fec9c17f71ba"} : () -> !torch.str
} -> success : operation was folded
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5a0570) {
"torch.slot"(%0) {name = "_incremental_state_id"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.constant.str'(0x56429f5a61b0) {
%0 = "torch.constant.str"() {value = "1c6c1dc4-850d-4190-b099-fec9c17f71ba"} : () -> !torch.str
} -> failure : pattern failed to match
//===-------------------------------------------===//
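The lone "success : operation was folded" two entries above is worth flagging: %328 = torch.constant.str duplicated the UUID string already held by %0, so the driver folded it away (a constant's folder returns its value attribute, and the driver's constant materialization uniques the result), while the surviving %0 then simply fails to match any pattern. A hedged sketch of what such a fold hook looks like (accessor names assumed, not torch-mlir's exact code):

  // Constant-like ops fold to their own value attribute; returning a
  // non-null OpFoldResult is what the -debug trace reports as
  // "success : operation was folded".
  OpFoldResult ConstantStrOp::fold(ArrayRef<Attribute> operands) {
    assert(operands.empty() && "torch.constant.str takes no operands");
    return valueAttr();
  }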
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f59c710) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f59c7c0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f59e090) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention.set_incremental_state, isPrivate, name = "set_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f59e040) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention._set_input_buffer, isPrivate, name = "_set_input_buffer"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f59dff0) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention._get_full_incremental_state_key, isPrivate, name = "_get_full_incremental_state_key"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f59dfa0) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention.get_incremental_state, isPrivate, name = "get_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f59df50) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention._get_input_buffer, isPrivate, name = "_get_input_buffer"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f59df00) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention.reorder_incremental_state, isPrivate, name = "reorder_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
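These torch.method entries, together with the torch.attr entries that follow, are declarations inside a torch.class_type: they describe the TorchScript class (here fairseq's MultiheadAttention), while the torch.nn_module / torch.slot blocks elsewhere in the trace are the concrete instances whose slots carry the actual values. Later object-graph passes (torch-mlir's GlobalizeObjectGraph, for instance) pair each declaration with its matching slot. A hedged sketch of walking those declarations (op class names as in the Torch dialect; this is illustrative, not a torch-mlir pass):

  #include "torch-mlir/Dialect/Torch/IR/TorchOps.h"
  #include "llvm/Support/raw_ostream.h"

  // Dump every attr/method declaration of every torch.class_type.
  void listClassDecls(mlir::ModuleOp module) {
    module.walk([](mlir::torch::Torch::ClassTypeOp classType) {
      classType.walk([](mlir::Operation *decl) {
        if (mlir::isa<mlir::torch::Torch::AttrOp,
                      mlir::torch::Torch::MethodOp>(decl))
          llvm::outs() << *decl << "\n";
      });
    });
  }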
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59deb0) {
"torch.attr"() {isPrivate, name = "out_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_463.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59de60) {
"torch.attr"() {isPrivate, name = "q_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_462.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59ce00) {
"torch.attr"() {isPrivate, name = "v_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_461.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59cdb0) {
"torch.attr"() {isPrivate, name = "k_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_460.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59cd60) {
"torch.attr"() {isPrivate, name = "dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_459.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59cd10) {
"torch.attr"() {isPrivate, name = "skip_embed_dim_check", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59ccc0) {
"torch.attr"() {isPrivate, name = "onnx_trace", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59cc70) {
"torch.attr"() {isPrivate, name = "add_zero_attn", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59cc20) {
"torch.attr"() {isPrivate, name = "bias_v", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59cbd0) {
"torch.attr"() {isPrivate, name = "bias_k", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59cb80) {
"torch.attr"() {isPrivate, name = "encoder_decoder_attention", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59cb30) {
"torch.attr"() {isPrivate, name = "self_attention", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59cae0) {
"torch.attr"() {isPrivate, name = "scaling", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59ca90) {
"torch.attr"() {isPrivate, name = "head_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59ca40) {
"torch.attr"() {isPrivate, name = "num_heads", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c9f0) {
"torch.attr"() {isPrivate, name = "qkv_same_dim", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c9a0) {
"torch.attr"() {isPrivate, name = "vdim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c950) {
"torch.attr"() {isPrivate, name = "kdim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c900) {
"torch.attr"() {isPrivate, name = "embed_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c8b0) {
"torch.attr"() {isPrivate, name = "_incremental_state_id", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c860) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c810) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f59b720) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f59c180) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c6c0) {
"torch.attr"() {isPrivate, name = "final_layer_norm", type = !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_470.LayerNorm">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c670) {
"torch.attr"() {isPrivate, name = "fc2", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_469.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c620) {
"torch.attr"() {isPrivate, name = "fc1", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_468.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c5d0) {
"torch.attr"() {isPrivate, name = "activation_dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_467.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c580) {
"torch.attr"() {isPrivate, name = "dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_466.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c530) {
"torch.attr"() {isPrivate, name = "self_attn_layer_norm", type = !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_465.LayerNorm">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c4e0) {
"torch.attr"() {isPrivate, name = "self_attn", type = !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_464.MultiheadAttention">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c490) {
"torch.attr"() {isPrivate, name = "normalize_before", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c440) {
"torch.attr"() {isPrivate, name = "quant_noise_block_size", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c3f0) {
"torch.attr"() {isPrivate, name = "quant_noise", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c3a0) {
"torch.attr"() {isPrivate, name = "embed_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c350) {
"torch.attr"() {isPrivate, name = "return_fc", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c300) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59c230) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f59bc60) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f59b840) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59c010) {
"torch.slot"(%327) {name = "final_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_457.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59bf90) {
"torch.slot"(%324) {name = "fc2"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_456.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59bf10) {
"torch.slot"(%321) {name = "fc1"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_455.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59be90) {
"torch.slot"(%318) {name = "activation_dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_454.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59be10) {
"torch.slot"(%317) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_453.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59bd90) {
"torch.slot"(%316) {name = "self_attn_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_452.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59bcc0) {
"torch.slot"(%313) {name = "self_attn"} : (!torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59bbd0) {
"torch.slot"(%5) {name = "normalize_before"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59bb50) {
"torch.slot"(%23) {name = "quant_noise_block_size"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59bad0) {
"torch.slot"(%7) {name = "quant_noise"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59ba50) {
"torch.slot"(%22) {name = "embed_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59b9d0) {
"torch.slot"(%5) {name = "return_fc"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59b950) {
"torch.slot"(%3) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59b8d0) {
"torch.slot"(%5) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f59b7e0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f59b450) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59b6a0) {
"torch.slot"(%3) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59b620) {
"torch.slot"(%5) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59b5a0) {
"torch.slot"(%326) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f59b520) {
"torch.slot"(%325) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f59b350) {
%326 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f59b2f0) {
%325 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f599fe0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f59b110) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59b290) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59b240) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59b1f0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f59b1a0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f59a0a0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f599d10) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f599f60) {
"torch.slot"(%3) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f599ee0) {
"torch.slot"(%5) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f599e60) {
"torch.slot"(%323) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f599de0) {
"torch.slot"(%322) {name = "weight"} : (!torch.tensor<[768,3072],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f599c10) {
%323 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f599bb0) {
%322 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>} : () -> !torch.tensor<[768,3072],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5998b0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5999d0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f599b50) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f599b00) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f599ab0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f599a60) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f599970) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5995e0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f599830) {
"torch.slot"(%3) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5997b0) {
"torch.slot"(%5) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f599730) {
"torch.slot"(%320) {name = "bias"} : (!torch.tensor<[3072],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5996b0) {
"torch.slot"(%319) {name = "weight"} : (!torch.tensor<[3072,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f599500) {
%320 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>} : () -> !torch.tensor<[3072],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5994a0) {
%319 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>} : () -> !torch.tensor<[3072,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f599250) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f599300) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f599440) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5993f0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5993a0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f599350) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5991f0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f598f10) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f599160) {
"torch.slot"(%5) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5990e0) {
"torch.slot"(%45) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f599060) {
"torch.slot"(%47) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f598fe0) {
"torch.slot"(%3) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f598f60) {
"torch.slot"(%5) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f598bd0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f598c80) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f598e70) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f598e20) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f598dd0) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f598d80) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f598cd0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f598b70) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f598890) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f598ae0) {
"torch.slot"(%5) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f598a60) {
"torch.slot"(%45) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5989e0) {
"torch.slot"(%12) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f598960) {
"torch.slot"(%3) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5988e0) {
"torch.slot"(%5) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f598480) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5985a0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5987f0) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5987a0) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f598750) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f598700) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f598630) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f598540) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5981b0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f598400) {
"torch.slot"(%3) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f598380) {
"torch.slot"(%5) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f598300) {
"torch.slot"(%315) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f598280) {
"torch.slot"(%314) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5980b0) {
%315 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f598050) {
%314 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5970e0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f597e50) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f597ff0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f597fa0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f597f50) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f597f00) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f597b20) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f597200) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597dd0) {
"torch.slot"(%312) {name = "out_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_450.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597d50) {
"torch.slot"(%309) {name = "q_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_449.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597cd0) {
"torch.slot"(%306) {name = "v_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_448.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597c50) {
"torch.slot"(%303) {name = "k_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_447.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597b80) {
"torch.slot"(%300) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_446.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597a90) {
"torch.slot"(%5) {name = "skip_embed_dim_check"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597a10) {
"torch.slot"(%5) {name = "onnx_trace"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597990) {
"torch.slot"(%5) {name = "add_zero_attn"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597910) {
"torch.slot"(%3) {name = "bias_v"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597890) {
"torch.slot"(%3) {name = "bias_k"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597810) {
"torch.slot"(%5) {name = "encoder_decoder_attention"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597790) {
"torch.slot"(%2) {name = "self_attention"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597710) {
"torch.slot"(%26) {name = "scaling"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597690) {
"torch.slot"(%25) {name = "head_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597610) {
"torch.slot"(%11) {name = "num_heads"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597590) {
"torch.slot"(%2) {name = "qkv_same_dim"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597510) {
"torch.slot"(%22) {name = "vdim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597490) {
"torch.slot"(%22) {name = "kdim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597410) {
"torch.slot"(%22) {name = "embed_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597390) {
"torch.slot"(%299) {name = "_incremental_state_id"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597310) {
"torch.slot"(%3) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597290) {
"torch.slot"(%5) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5971a0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f596e10) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597060) {
"torch.slot"(%3) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f596fe0) {
"torch.slot"(%5) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f596f60) {
"torch.slot"(%311) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f596ee0) {
"torch.slot"(%310) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f596d10) {
%311 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f596cb0) {
%310 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5969b0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f596ad0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f596c50) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f596c00) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f596bb0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f596b60) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f596a70) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5966e0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f596930) {
"torch.slot"(%3) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5968b0) {
"torch.slot"(%5) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f596830) {
"torch.slot"(%308) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5967b0) {
"torch.slot"(%307) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5965e0) {
%308 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f596580) {
%307 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f596280) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5963a0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f596520) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5964d0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f596480) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f596430) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f596340) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f595fb0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f596200) {
"torch.slot"(%3) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f596180) {
"torch.slot"(%5) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f596100) {
"torch.slot"(%305) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f596080) {
"torch.slot"(%304) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f595eb0) {
%305 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f595e50) {
%304 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f595b50) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f595c70) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f595df0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f595da0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f595d50) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f595d00) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f595c10) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f595880) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f595ad0) {
"torch.slot"(%3) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f595a50) {
"torch.slot"(%5) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5959d0) {
"torch.slot"(%302) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f595950) {
"torch.slot"(%301) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5957a0) {
%302 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f595740) {
%301 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5954f0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5955a0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5956e0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f595690) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f595640) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5955f0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f591f50) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5952a0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f595470) {
"torch.slot"(%5) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5953f0) {
"torch.slot"(%27) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f595370) {
"torch.slot"(%12) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5952f0) {
"torch.slot"(%3) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f591ec0) {
"torch.slot"(%5) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f594f60) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f595010) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f595200) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5951b0) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f595160) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f595110) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f595060) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.constant.str'(0x56429f594f10) {
%299 = "torch.constant.str"() {value = "5ef25f15-cb4f-4ea7-bae1-3fff9241ed78"} : () -> !torch.str
} -> success : operation was folded
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f597390) {
"torch.slot"(%0) {name = "_incremental_state_id"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.constant.str'(0x56429f59e0f0) {
%0 = "torch.constant.str"() {value = "5ef25f15-cb4f-4ea7-bae1-3fff9241ed78"} : () -> !torch.str
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f593530) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5935e0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f594eb0) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention.set_incremental_state, isPrivate, name = "set_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f594e60) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention._set_input_buffer, isPrivate, name = "_set_input_buffer"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f594e10) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention._get_full_incremental_state_key, isPrivate, name = "_get_full_incremental_state_key"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f594dc0) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention.get_incremental_state, isPrivate, name = "get_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f593d60) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention._get_input_buffer, isPrivate, name = "_get_input_buffer"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f593d10) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention.reorder_incremental_state, isPrivate, name = "reorder_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593cc0) {
"torch.attr"() {isPrivate, name = "out_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_450.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593c70) {
"torch.attr"() {isPrivate, name = "q_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_449.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593c20) {
"torch.attr"() {isPrivate, name = "v_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_448.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593bd0) {
"torch.attr"() {isPrivate, name = "k_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_447.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593b80) {
"torch.attr"() {isPrivate, name = "dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_446.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593b30) {
"torch.attr"() {isPrivate, name = "skip_embed_dim_check", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593ae0) {
"torch.attr"() {isPrivate, name = "onnx_trace", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593a90) {
"torch.attr"() {isPrivate, name = "add_zero_attn", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593a40) {
"torch.attr"() {isPrivate, name = "bias_v", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5939f0) {
"torch.attr"() {isPrivate, name = "bias_k", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5939a0) {
"torch.attr"() {isPrivate, name = "encoder_decoder_attention", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593950) {
"torch.attr"() {isPrivate, name = "self_attention", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593900) {
"torch.attr"() {isPrivate, name = "scaling", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5938b0) {
"torch.attr"() {isPrivate, name = "head_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593860) {
"torch.attr"() {isPrivate, name = "num_heads", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593810) {
"torch.attr"() {isPrivate, name = "qkv_same_dim", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5937c0) {
"torch.attr"() {isPrivate, name = "vdim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593770) {
"torch.attr"() {isPrivate, name = "kdim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593720) {
"torch.attr"() {isPrivate, name = "embed_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5936d0) {
"torch.attr"() {isPrivate, name = "_incremental_state_id", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593680) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593630) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f51bf00) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f591fb0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5934e0) {
"torch.attr"() {isPrivate, name = "final_layer_norm", type = !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_457.LayerNorm">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593490) {
"torch.attr"() {isPrivate, name = "fc2", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_456.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f593440) {
"torch.attr"() {isPrivate, name = "fc1", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_455.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5933f0) {
"torch.attr"() {isPrivate, name = "activation_dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_454.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5933a0) {
"torch.attr"() {isPrivate, name = "dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_453.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f592340) {
"torch.attr"() {isPrivate, name = "self_attn_layer_norm", type = !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_452.LayerNorm">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5922f0) {
"torch.attr"() {isPrivate, name = "self_attn", type = !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_451.MultiheadAttention">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5922a0) {
"torch.attr"() {isPrivate, name = "normalize_before", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f592250) {
"torch.attr"() {isPrivate, name = "quant_noise_block_size", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f592200) {
"torch.attr"() {isPrivate, name = "quant_noise", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5921b0) {
"torch.attr"() {isPrivate, name = "embed_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f592160) {
"torch.attr"() {isPrivate, name = "return_fc", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f592110) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f592040) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f591a90) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f51c020) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f591e40) {
"torch.slot"(%298) {name = "final_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_444.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f591dc0) {
"torch.slot"(%295) {name = "fc2"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_443.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f591d40) {
"torch.slot"(%292) {name = "fc1"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_442.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f591cc0) {
"torch.slot"(%289) {name = "activation_dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_441.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f591c40) {
"torch.slot"(%288) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_440.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f591bc0) {
"torch.slot"(%287) {name = "self_attn_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_439.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f591af0) {
"torch.slot"(%284) {name = "self_attn"} : (!torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f591a00) {
"torch.slot"(%6) {name = "normalize_before"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51c330) {
"torch.slot"(%24) {name = "quant_noise_block_size"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51c2b0) {
"torch.slot"(%8) {name = "quant_noise"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51c230) {
"torch.slot"(%23) {name = "embed_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51c1b0) {
"torch.slot"(%6) {name = "return_fc"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51c130) {
"torch.slot"(%4) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51c0b0) {
"torch.slot"(%6) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f51bfc0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f51bc30) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51be80) {
"torch.slot"(%4) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51be00) {
"torch.slot"(%6) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51bd80) {
"torch.slot"(%297) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51bd00) {
"torch.slot"(%296) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f51bb30) {
%297 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f51bad0) {
%296 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f51b7d0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f51b8f0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51ba70) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51ba20) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51b9d0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51b980) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f51b890) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f51b500) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51b750) {
"torch.slot"(%4) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51b6d0) {
"torch.slot"(%6) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51b650) {
"torch.slot"(%294) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51b5d0) {
"torch.slot"(%293) {name = "weight"} : (!torch.tensor<[768,3072],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f51b440) {
%294 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f51b3e0) {
%293 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>} : () -> !torch.tensor<[768,3072],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f52bbe0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f52bd00) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52be80) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52be30) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52bde0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52bd90) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f52bca0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f52b910) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52bb60) {
"torch.slot"(%4) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52bae0) {
"torch.slot"(%6) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52ba60) {
"torch.slot"(%291) {name = "bias"} : (!torch.tensor<[3072],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52b9e0) {
"torch.slot"(%290) {name = "weight"} : (!torch.tensor<[3072,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f52b830) {
%291 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>} : () -> !torch.tensor<[3072],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f52b7d0) {
%290 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>} : () -> !torch.tensor<[3072,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f52b580) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f52b630) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52b770) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52b720) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52b6d0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52b680) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f52b520) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f52b240) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52b490) {
"torch.slot"(%6) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52b410) {
"torch.slot"(%46) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52b390) {
"torch.slot"(%48) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52b310) {
"torch.slot"(%4) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52b290) {
"torch.slot"(%6) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f462fd0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f463080) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52b1a0) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52b150) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52b100) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52b0b0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f4630d0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f462f70) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f462c90) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f462ee0) {
"torch.slot"(%6) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f462e60) {
"torch.slot"(%46) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f462de0) {
"torch.slot"(%13) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f462d60) {
"torch.slot"(%4) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f462ce0) {
"torch.slot"(%6) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f52af90) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f4629a0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f462bf0) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f462ba0) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f462b50) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f462b00) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f462a30) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f52b050) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f52dcc0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52af10) {
"torch.slot"(%4) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52de90) {
"torch.slot"(%6) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52de10) {
"torch.slot"(%286) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52dd90) {
"torch.slot"(%285) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f52dbc0) {
%286 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f52db60) {
%285 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f583680) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f52d960) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52db00) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52dab0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52da60) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f52da10) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f52d630) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5837a0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52d8e0) {
"torch.slot"(%283) {name = "out_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_437.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52d860) {
"torch.slot"(%280) {name = "q_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_436.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52d7e0) {
"torch.slot"(%277) {name = "v_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_435.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52d760) {
"torch.slot"(%274) {name = "k_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_434.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52d690) {
"torch.slot"(%271) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_433.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52d5a0) {
"torch.slot"(%6) {name = "skip_embed_dim_check"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52d520) {
"torch.slot"(%6) {name = "onnx_trace"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52d4a0) {
"torch.slot"(%6) {name = "add_zero_attn"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52d420) {
"torch.slot"(%4) {name = "bias_v"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52c390) {
"torch.slot"(%4) {name = "bias_k"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52c310) {
"torch.slot"(%6) {name = "encoder_decoder_attention"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52c290) {
"torch.slot"(%3) {name = "self_attention"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52c210) {
"torch.slot"(%27) {name = "scaling"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52c190) {
"torch.slot"(%26) {name = "head_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52c110) {
"torch.slot"(%12) {name = "num_heads"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52c090) {
"torch.slot"(%3) {name = "qkv_same_dim"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52c010) {
"torch.slot"(%23) {name = "vdim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52bf90) {
"torch.slot"(%23) {name = "kdim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f52bf10) {
"torch.slot"(%23) {name = "embed_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f583930) {
"torch.slot"(%270) {name = "_incremental_state_id"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5838b0) {
"torch.slot"(%4) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f583830) {
"torch.slot"(%6) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f583740) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5833b0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f583600) {
"torch.slot"(%4) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f583580) {
"torch.slot"(%6) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f583500) {
"torch.slot"(%282) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f583480) {
"torch.slot"(%281) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5832b0) {
%282 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f583250) {
%281 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f582f50) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f583070) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5831f0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5831a0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f583150) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f583100) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f583010) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f582c80) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f582ed0) {
"torch.slot"(%4) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f582e50) {
"torch.slot"(%6) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f582dd0) {
"torch.slot"(%279) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f582d50) {
"torch.slot"(%278) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f582b80) {
%279 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f582b20) {
%278 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f582820) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f582940) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f582ac0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f582a70) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f582a20) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5829d0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5828e0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f582550) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5827a0) {
"torch.slot"(%4) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f582720) {
"torch.slot"(%6) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5826a0) {
"torch.slot"(%276) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f582620) {
"torch.slot"(%275) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f582450) {
%276 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5823f0) {
%275 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5820f0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f582210) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f582390) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f582340) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5822f0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5822a0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5821b0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f581e20) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f582070) {
"torch.slot"(%4) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f581ff0) {
"torch.slot"(%6) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f581f70) {
"torch.slot"(%273) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f581ef0) {
"torch.slot"(%272) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f581d40) {
%273 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f581ce0) {
%272 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f581a90) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f581b40) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f581c80) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f581c30) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f581be0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f581b90) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f57f4e0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f581840) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f581a10) {
"torch.slot"(%6) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f581990) {
"torch.slot"(%28) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f581910) {
"torch.slot"(%13) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f581890) {
"torch.slot"(%4) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57f450) {
"torch.slot"(%6) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f581500) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5815b0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5817a0) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f581750) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f581700) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5816b0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f581600) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.constant.str'(0x56429f5814b0) {
%270 = "torch.constant.str"() {value = "4607f39e-47c6-4df9-b858-b4c8ec506265"} : () -> !torch.str
} -> success : operation was folded
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f583930) {
"torch.slot"(%0) {name = "_incremental_state_id"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.constant.str'(0x56429f594f10) {
%0 = "torch.constant.str"() {value = "4607f39e-47c6-4df9-b858-b4c8ec506265"} : () -> !torch.str
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f580ae0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f580b90) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f581450) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention.set_incremental_state, isPrivate, name = "set_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f581400) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention._set_input_buffer, isPrivate, name = "_set_input_buffer"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f5813b0) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention._get_full_incremental_state_key, isPrivate, name = "_get_full_incremental_state_key"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f581360) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention.get_incremental_state, isPrivate, name = "get_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f581310) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention._get_input_buffer, isPrivate, name = "_get_input_buffer"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f5812c0) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention.reorder_incremental_state, isPrivate, name = "reorder_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f581270) {
"torch.attr"() {isPrivate, name = "out_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_437.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f581220) {
"torch.attr"() {isPrivate, name = "q_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_436.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5811d0) {
"torch.attr"() {isPrivate, name = "v_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_435.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f581180) {
"torch.attr"() {isPrivate, name = "k_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_434.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f581130) {
"torch.attr"() {isPrivate, name = "dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_433.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5810e0) {
"torch.attr"() {isPrivate, name = "skip_embed_dim_check", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f581090) {
"torch.attr"() {isPrivate, name = "onnx_trace", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f581040) {
"torch.attr"() {isPrivate, name = "add_zero_attn", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580ff0) {
"torch.attr"() {isPrivate, name = "bias_v", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580fa0) {
"torch.attr"() {isPrivate, name = "bias_k", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580f50) {
"torch.attr"() {isPrivate, name = "encoder_decoder_attention", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580f00) {
"torch.attr"() {isPrivate, name = "self_attention", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580eb0) {
"torch.attr"() {isPrivate, name = "scaling", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580e60) {
"torch.attr"() {isPrivate, name = "head_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580e10) {
"torch.attr"() {isPrivate, name = "num_heads", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580dc0) {
"torch.attr"() {isPrivate, name = "qkv_same_dim", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580d70) {
"torch.attr"() {isPrivate, name = "vdim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580d20) {
"torch.attr"() {isPrivate, name = "kdim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580cd0) {
"torch.attr"() {isPrivate, name = "embed_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580c80) {
"torch.attr"() {isPrivate, name = "_incremental_state_id", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580c30) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580be0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f57eae0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f57f540) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580a90) {
"torch.attr"() {isPrivate, name = "final_layer_norm", type = !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_444.LayerNorm">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580a40) {
"torch.attr"() {isPrivate, name = "fc2", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_443.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5809f0) {
"torch.attr"() {isPrivate, name = "fc1", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_442.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5809a0) {
"torch.attr"() {isPrivate, name = "activation_dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_441.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f580950) {
"torch.attr"() {isPrivate, name = "dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_440.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57f8f0) {
"torch.attr"() {isPrivate, name = "self_attn_layer_norm", type = !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_439.LayerNorm">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57f8a0) {
"torch.attr"() {isPrivate, name = "self_attn", type = !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_438.MultiheadAttention">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57f850) {
"torch.attr"() {isPrivate, name = "normalize_before", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57f800) {
"torch.attr"() {isPrivate, name = "quant_noise_block_size", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57f7b0) {
"torch.attr"() {isPrivate, name = "quant_noise", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57f760) {
"torch.attr"() {isPrivate, name = "embed_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57f710) {
"torch.attr"() {isPrivate, name = "return_fc", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57f6c0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57f5f0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f57f020) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f57ec00) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57f3d0) {
"torch.slot"(%269) {name = "final_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_431.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57f350) {
"torch.slot"(%266) {name = "fc2"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_430.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57f2d0) {
"torch.slot"(%263) {name = "fc1"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_429.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57f250) {
"torch.slot"(%260) {name = "activation_dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_428.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57f1d0) {
"torch.slot"(%259) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_427.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57f150) {
"torch.slot"(%258) {name = "self_attn_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_426.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57f080) {
"torch.slot"(%255) {name = "self_attn"} : (!torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57ef90) {
"torch.slot"(%7) {name = "normalize_before"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57ef10) {
"torch.slot"(%25) {name = "quant_noise_block_size"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57ee90) {
"torch.slot"(%9) {name = "quant_noise"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57ee10) {
"torch.slot"(%24) {name = "embed_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57ed90) {
"torch.slot"(%7) {name = "return_fc"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57ed10) {
"torch.slot"(%5) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57ec90) {
"torch.slot"(%7) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f57eba0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f57e810) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57ea60) {
"torch.slot"(%5) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57e9e0) {
"torch.slot"(%7) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57e960) {
"torch.slot"(%268) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57e8e0) {
"torch.slot"(%267) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f57e710) {
%268 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f57e6b0) {
%267 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f57e3b0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f57e4d0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57e650) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57e600) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57e5b0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57e560) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f57e470) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f57e0e0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57e330) {
"torch.slot"(%5) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57e2b0) {
"torch.slot"(%7) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57e230) {
"torch.slot"(%265) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57e1b0) {
"torch.slot"(%264) {name = "weight"} : (!torch.tensor<[768,3072],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f57dfe0) {
%265 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f57df80) {
%264 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>} : () -> !torch.tensor<[768,3072],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f57dc80) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f57dda0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57df20) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57ded0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57de80) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f57de30) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f57dd40) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f57d9b0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57dc00) {
"torch.slot"(%5) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57db80) {
"torch.slot"(%7) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57db00) {
"torch.slot"(%262) {name = "bias"} : (!torch.tensor<[3072],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f57da60) {
"torch.slot"(%261) {name = "weight"} : (!torch.tensor<[3072,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f57d920) {
%262 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>} : () -> !torch.tensor<[3072],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f57d8c0) {
%261 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>} : () -> !torch.tensor<[3072,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f518120) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5181d0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f518310) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5182c0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f518270) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f518220) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5180c0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f517de0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f518030) {
"torch.slot"(%7) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f517fb0) {
"torch.slot"(%47) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f517f30) {
"torch.slot"(%49) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f517eb0) {
"torch.slot"(%5) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f517e30) {
"torch.slot"(%7) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f517aa0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f517b50) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f517d40) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f517cf0) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f517ca0) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f517c50) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f517ba0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f517a40) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f517760) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5179b0) {
"torch.slot"(%7) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f517930) {
"torch.slot"(%47) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5178b0) {
"torch.slot"(%14) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f517830) {
"torch.slot"(%5) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5177b0) {
"torch.slot"(%7) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f520360) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f517470) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5176c0) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f517670) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f517620) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5175d0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f517500) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f517410) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f520090) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5202e0) {
"torch.slot"(%5) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f520260) {
"torch.slot"(%7) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5201e0) {
"torch.slot"(%257) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f520160) {
"torch.slot"(%256) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f51ff90) {
%257 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f51ff30) {
%256 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f526270) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f51fd30) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51fed0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51fe80) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51fe30) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51fde0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f51fa00) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f526390) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51fcb0) {
"torch.slot"(%254) {name = "out_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_424.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51fc30) {
"torch.slot"(%251) {name = "q_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_423.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51fbb0) {
"torch.slot"(%248) {name = "v_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_422.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51fb30) {
"torch.slot"(%245) {name = "k_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_421.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51fa60) {
"torch.slot"(%242) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_420.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51f970) {
"torch.slot"(%7) {name = "skip_embed_dim_check"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51f8f0) {
"torch.slot"(%7) {name = "onnx_trace"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51f870) {
"torch.slot"(%7) {name = "add_zero_attn"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51f7f0) {
"torch.slot"(%5) {name = "bias_v"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51f770) {
"torch.slot"(%5) {name = "bias_k"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51f6f0) {
"torch.slot"(%7) {name = "encoder_decoder_attention"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51f670) {
"torch.slot"(%4) {name = "self_attention"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51f5f0) {
"torch.slot"(%28) {name = "scaling"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51f570) {
"torch.slot"(%27) {name = "head_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51e4e0) {
"torch.slot"(%13) {name = "num_heads"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51e460) {
"torch.slot"(%4) {name = "qkv_same_dim"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51e3e0) {
"torch.slot"(%24) {name = "vdim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f527630) {
"torch.slot"(%24) {name = "kdim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5265a0) {
"torch.slot"(%24) {name = "embed_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f526520) {
"torch.slot"(%241) {name = "_incremental_state_id"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5264a0) {
"torch.slot"(%5) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f526420) {
"torch.slot"(%7) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f526330) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f525fa0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5261f0) {
"torch.slot"(%5) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f526170) {
"torch.slot"(%7) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5260f0) {
"torch.slot"(%253) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f526070) {
"torch.slot"(%252) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f525ea0) {
%253 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f525e40) {
%252 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f525b40) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f525c60) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f525de0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f525d90) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f525d40) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f525cf0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f525c00) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f525870) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f525ac0) {
"torch.slot"(%5) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f525a40) {
"torch.slot"(%7) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5259c0) {
"torch.slot"(%250) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f525940) {
"torch.slot"(%249) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f525770) {
%250 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f525710) {
%249 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5715e0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f571700) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5256b0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f571830) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5717e0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f571790) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5716a0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f51a170) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f571560) {
"torch.slot"(%5) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51a340) {
"torch.slot"(%7) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51a2c0) {
"torch.slot"(%247) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51a240) {
"torch.slot"(%246) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f51a070) {
%247 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f51a010) {
%246 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f519d10) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f519e30) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f519fb0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f519f60) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f519f10) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f519ec0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f519dd0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f519a40) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f519c90) {
"torch.slot"(%5) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f519c10) {
"torch.slot"(%7) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f519b90) {
"torch.slot"(%244) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f519b10) {
"torch.slot"(%243) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f519960) {
%244 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f519900) {
%243 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5186a0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f518750) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5198a0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f519850) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f519800) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5197b0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f51dab0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f518450) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f518620) {
"torch.slot"(%7) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5185a0) {
"torch.slot"(%29) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f518520) {
"torch.slot"(%14) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5184a0) {
"torch.slot"(%5) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51da20) {
"torch.slot"(%7) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f56d2b0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f56d360) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5183b0) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56d500) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56d4b0) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56d460) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56d3b0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.constant.str'(0x56429f56d260) {
%241 = "torch.constant.str"() {value = "00f45d82-b5c3-4f9f-954d-f85f6e01089f"} : () -> !torch.str
} -> success : operation was folded
//===-------------------------------------------===//
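// Note (editor annotation, not tool output): this is the only fold success in
// this span; every other op above and below reports "pattern failed to match".
// Folding a constant op inside the greedy rewrite driver goes through MLIR's
// constant uniquing (OperationFolder), which re-materializes the constant at
// the top of the region. Consistent with that, the duplicate %241 is replaced
// by %0 below, its torch.slot user is updated in place (note the same op
// address 0x56429f526520 reappears), and both ops are re-queued, where no
// further pattern matches. A minimal before/after sketch of the rewrite:
//
//   %241 = "torch.constant.str"() {value = "00f45d82-b5c3-4f9f-954d-f85f6e01089f"} : () -> !torch.str
//   "torch.slot"(%241) {name = "_incremental_state_id"} : (!torch.str) -> ()
//
// becomes
//
//   %0 = "torch.constant.str"() {value = "00f45d82-b5c3-4f9f-954d-f85f6e01089f"} : () -> !torch.str
//   "torch.slot"(%0) {name = "_incremental_state_id"} : (!torch.str) -> ()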
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f526520) {
"torch.slot"(%0) {name = "_incremental_state_id"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.constant.str'(0x56429f5814b0) {
%0 = "torch.constant.str"() {value = "00f45d82-b5c3-4f9f-954d-f85f6e01089f"} : () -> !torch.str
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f51e0a0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f51e150) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f56d200) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention.set_incremental_state, isPrivate, name = "set_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f56d1b0) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention._set_input_buffer, isPrivate, name = "_set_input_buffer"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f56d160) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention._get_full_incremental_state_key, isPrivate, name = "_get_full_incremental_state_key"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f56d110) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention.get_incremental_state, isPrivate, name = "get_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f56d0c0) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention._get_input_buffer, isPrivate, name = "_get_input_buffer"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f56d070) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention.reorder_incremental_state, isPrivate, name = "reorder_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56c010) {
"torch.attr"() {isPrivate, name = "out_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_424.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56bfc0) {
"torch.attr"() {isPrivate, name = "q_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_423.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56bf70) {
"torch.attr"() {isPrivate, name = "v_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_422.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56bf20) {
"torch.attr"() {isPrivate, name = "k_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_421.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56bed0) {
"torch.attr"() {isPrivate, name = "dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_420.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56be80) {
"torch.attr"() {isPrivate, name = "skip_embed_dim_check", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56be30) {
"torch.attr"() {isPrivate, name = "onnx_trace", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56bde0) {
"torch.attr"() {isPrivate, name = "add_zero_attn", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56bd90) {
"torch.attr"() {isPrivate, name = "bias_v", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56bd40) {
"torch.attr"() {isPrivate, name = "bias_k", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56bcf0) {
"torch.attr"() {isPrivate, name = "encoder_decoder_attention", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56bca0) {
"torch.attr"() {isPrivate, name = "self_attention", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56bc50) {
"torch.attr"() {isPrivate, name = "scaling", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56bc00) {
"torch.attr"() {isPrivate, name = "head_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f56bbb0) {
"torch.attr"() {isPrivate, name = "num_heads", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51e380) {
"torch.attr"() {isPrivate, name = "qkv_same_dim", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51e330) {
"torch.attr"() {isPrivate, name = "vdim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51e2e0) {
"torch.attr"() {isPrivate, name = "kdim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51e290) {
"torch.attr"() {isPrivate, name = "embed_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51e240) {
"torch.attr"() {isPrivate, name = "_incremental_state_id", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51e1f0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51e1a0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f567870) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f51db10) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
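[editor's note] The empty braces after torch.class_type here appear to be the debug printer eliding region bodies; the torch.class_type_terminator logged next is that region's implicit terminator. In generic form the pairing looks roughly like this sketch (hypothetical symbol name):

"torch.class_type"() ({
  "torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
  "torch.class_type_terminator"() : () -> ()
}) {sym_name = "__torch__.M"} : () -> ()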
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51e050) {
"torch.attr"() {isPrivate, name = "final_layer_norm", type = !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_431.LayerNorm">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51e000) {
"torch.attr"() {isPrivate, name = "fc2", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_430.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51dfb0) {
"torch.attr"() {isPrivate, name = "fc1", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_429.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51df60) {
"torch.attr"() {isPrivate, name = "activation_dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_428.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51df10) {
"torch.attr"() {isPrivate, name = "dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_427.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51dec0) {
"torch.attr"() {isPrivate, name = "self_attn_layer_norm", type = !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_426.LayerNorm">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51de70) {
"torch.attr"() {isPrivate, name = "self_attn", type = !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_425.MultiheadAttention">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51de20) {
"torch.attr"() {isPrivate, name = "normalize_before", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51ddd0) {
"torch.attr"() {isPrivate, name = "quant_noise_block_size", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51dd80) {
"torch.attr"() {isPrivate, name = "quant_noise", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51dd30) {
"torch.attr"() {isPrivate, name = "embed_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51dce0) {
"torch.attr"() {isPrivate, name = "return_fc", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51dc90) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f51dbc0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f51c5e0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f567990) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51d9a0) {
"torch.slot"(%240) {name = "final_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_418.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51d920) {
"torch.slot"(%237) {name = "fc2"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_417.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51d8a0) {
"torch.slot"(%234) {name = "fc1"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_416.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51d820) {
"torch.slot"(%231) {name = "activation_dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_415.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51d7a0) {
"torch.slot"(%230) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_414.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51d720) {
"torch.slot"(%229) {name = "self_attn_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_413.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51c640) {
"torch.slot"(%226) {name = "self_attn"} : (!torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51c550) {
"torch.slot"(%8) {name = "normalize_before"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51c4d0) {
"torch.slot"(%26) {name = "quant_noise_block_size"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51c450) {
"torch.slot"(%10) {name = "quant_noise"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f51c3d0) {
"torch.slot"(%25) {name = "embed_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f567b20) {
"torch.slot"(%8) {name = "return_fc"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f567aa0) {
"torch.slot"(%6) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f567a20) {
"torch.slot"(%8) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
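[editor's note] The torch.slot ops in this stretch are field initializers living in the body of a torch.nn_module instance (the instance op itself prints with empty braces for the same region-eliding reason). A stripped-down sketch of that pairing in custom assembly, with hypothetical values; each slot name must match a torch.attr on the instance's class type:

%true = torch.constant.bool true
%none = torch.constant.none
%m = torch.nn_module {
  torch.slot "training", %true : !torch.bool
  torch.slot "_is_full_backward_hook", %none : !torch.none
} : !torch.nn.Module<"__torch__.M">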
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f567930) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5675a0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5677f0) {
"torch.slot"(%6) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f567770) {
"torch.slot"(%8) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5676f0) {
"torch.slot"(%239) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f567670) {
"torch.slot"(%238) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5674a0) {
%239 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f567440) {
%238 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
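[editor's note] The opaque<"elided_large_const", "0xDEADBEEF"> payloads are placeholders, not weights: the dense parameter data was elided before this IR was dumped so the file stays a readable size, while the shape and dtype in the result type (!torch.tensor<[768],f32>) remain authoritative. A non-elided literal would look like this hypothetical example:

// 768-element bias vector, here filled with a splat constant.
%bias = torch.tensor.literal(dense<0.0> : tensor<768xf32>) : !torch.tensor<[768],f32>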
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f567140) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f567260) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5673e0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f567390) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f567340) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5672f0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f567200) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f566e70) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5670c0) {
"torch.slot"(%6) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f567040) {
"torch.slot"(%8) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f566fc0) {
"torch.slot"(%236) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f566f40) {
"torch.slot"(%235) {name = "weight"} : (!torch.tensor<[768,3072],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f566d70) {
%236 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f566d10) {
%235 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>} : () -> !torch.tensor<[768,3072],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f566a10) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f566b30) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f566cb0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f566c60) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f566c10) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f566bc0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f566ad0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f566740) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f566990) {
"torch.slot"(%6) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f566910) {
"torch.slot"(%8) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f566890) {
"torch.slot"(%233) {name = "bias"} : (!torch.tensor<[3072],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f566810) {
"torch.slot"(%232) {name = "weight"} : (!torch.tensor<[3072,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f566660) {
%233 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>} : () -> !torch.tensor<[3072],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f566600) {
%232 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>} : () -> !torch.tensor<[3072,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5663b0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f566460) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5665a0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f566550) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f566500) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5664b0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f566350) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f566070) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5662c0) {
"torch.slot"(%8) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f566240) {
"torch.slot"(%48) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5661c0) {
"torch.slot"(%50) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f566140) {
"torch.slot"(%6) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5660c0) {
"torch.slot"(%8) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f565d30) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f565de0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f565fd0) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f565f80) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f565f30) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f565ee0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f565e30) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f565cd0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5659f0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f565c40) {
"torch.slot"(%8) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f565bc0) {
"torch.slot"(%48) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f565b40) {
"torch.slot"(%15) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f565ac0) {
"torch.slot"(%6) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f565a40) {
"torch.slot"(%8) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5655e0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f565700) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f565950) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f565900) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5658b0) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f565860) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f565790) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5656a0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f565310) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f565560) {
"torch.slot"(%6) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5654e0) {
"torch.slot"(%8) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f565460) {
"torch.slot"(%228) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5653e0) {
"torch.slot"(%227) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f565210) {
%228 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5651b0) {
%227 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f564240) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f564fb0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f565150) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f565100) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5650b0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f565060) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f564c80) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f564360) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564f30) {
"torch.slot"(%225) {name = "out_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_411.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564eb0) {
"torch.slot"(%222) {name = "q_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_410.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564e30) {
"torch.slot"(%219) {name = "v_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_409.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564db0) {
"torch.slot"(%216) {name = "k_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_408.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564ce0) {
"torch.slot"(%213) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_407.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564bf0) {
"torch.slot"(%8) {name = "skip_embed_dim_check"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564b70) {
"torch.slot"(%8) {name = "onnx_trace"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564af0) {
"torch.slot"(%8) {name = "add_zero_attn"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564a70) {
"torch.slot"(%6) {name = "bias_v"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5649f0) {
"torch.slot"(%6) {name = "bias_k"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564970) {
"torch.slot"(%8) {name = "encoder_decoder_attention"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5648f0) {
"torch.slot"(%5) {name = "self_attention"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564870) {
"torch.slot"(%29) {name = "scaling"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5647f0) {
"torch.slot"(%28) {name = "head_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564770) {
"torch.slot"(%14) {name = "num_heads"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5646f0) {
"torch.slot"(%5) {name = "qkv_same_dim"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564670) {
"torch.slot"(%25) {name = "vdim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5645f0) {
"torch.slot"(%25) {name = "kdim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564570) {
"torch.slot"(%25) {name = "embed_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5644f0) {
"torch.slot"(%212) {name = "_incremental_state_id"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564470) {
"torch.slot"(%6) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5643f0) {
"torch.slot"(%8) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f564300) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f563f70) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
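[editor's note] For context on where this object graph is headed: the torchscript-module-to-torch-backend-pipeline eventually dissolves the torch.nn_module/torch.slot structure (torch-mlir's GlobalizeObjectGraph pass), promoting each slot of the single module instance to a module-level global. A rough sketch of that target form, assuming this revision's syntax for torch.global_slot (the slot path name is hypothetical):

torch.global_slot "private" @self_attn.training : !torch.bool {
  %true = torch.constant.bool true
  // The initializer region yields the slot's initial value.
  torch.global_slot.init %true : !torch.bool
}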
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5641c0) {
"torch.slot"(%6) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564140) {
"torch.slot"(%8) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5640c0) {
"torch.slot"(%224) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f564040) {
"torch.slot"(%223) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f563e70) {
%224 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f563e10) {
%223 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f563b10) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f563c30) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f563db0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f563d60) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f563d10) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f563cc0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f563bd0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f563840) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f563a90) {
"torch.slot"(%6) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f563a10) {
"torch.slot"(%8) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f563990) {
"torch.slot"(%221) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f563910) {
"torch.slot"(%220) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f563740) {
%221 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5636e0) {
%220 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5633e0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f563500) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f563680) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f563630) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5635e0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f563590) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5634a0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f563110) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f563360) {
"torch.slot"(%6) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5632e0) {
"torch.slot"(%8) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f563260) {
"torch.slot"(%218) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5631e0) {
"torch.slot"(%217) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f563010) {
%218 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f562fb0) {
%217 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f562cb0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f562dd0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f562f50) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f562f00) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f562eb0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f562e60) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f562d70) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5629e0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f562c30) {
"torch.slot"(%6) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f562bb0) {
"torch.slot"(%8) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f562b30) {
"torch.slot"(%215) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f562ab0) {
"torch.slot"(%214) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f562900) {
%215 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5628a0) {
%214 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f562650) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f562700) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f562840) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5627f0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5627a0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f562750) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5610b0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f562400) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5625d0) {
"torch.slot"(%8) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f562550) {
"torch.slot"(%30) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5624d0) {
"torch.slot"(%15) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f562450) {
"torch.slot"(%6) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f561020) {
"torch.slot"(%8) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5620c0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f562170) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f562360) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f562310) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5622c0) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f562270) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5621c0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.constant.str'(0x56429f562070) {
%212 = "torch.constant.str"() {value = "e9741474-b15a-4c0a-95bb-750a08d131c6"} : () -> !torch.str
} -> success : operation was folded
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5644f0) {
"torch.slot"(%0) {name = "_incremental_state_id"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.constant.str'(0x56429f56d260) {
%0 = "torch.constant.str"() {value = "e9741474-b15a-4c0a-95bb-750a08d131c6"} : () -> !torch.str
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5616a0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f561750) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f562010) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention.set_incremental_state, isPrivate, name = "set_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f561fc0) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention._set_input_buffer, isPrivate, name = "_set_input_buffer"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f561f70) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention._get_full_incremental_state_key, isPrivate, name = "_get_full_incremental_state_key"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f561f20) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention.get_incremental_state, isPrivate, name = "get_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f561ed0) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention._get_input_buffer, isPrivate, name = "_get_input_buffer"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f561e80) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention.reorder_incremental_state, isPrivate, name = "reorder_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561e30) {
"torch.attr"() {isPrivate, name = "out_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_411.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561de0) {
"torch.attr"() {isPrivate, name = "q_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_410.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561d90) {
"torch.attr"() {isPrivate, name = "v_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_409.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561d40) {
"torch.attr"() {isPrivate, name = "k_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_408.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561cf0) {
"torch.attr"() {isPrivate, name = "dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_407.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561ca0) {
"torch.attr"() {isPrivate, name = "skip_embed_dim_check", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561c50) {
"torch.attr"() {isPrivate, name = "onnx_trace", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561c00) {
"torch.attr"() {isPrivate, name = "add_zero_attn", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561bb0) {
"torch.attr"() {isPrivate, name = "bias_v", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561b60) {
"torch.attr"() {isPrivate, name = "bias_k", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561b10) {
"torch.attr"() {isPrivate, name = "encoder_decoder_attention", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561ac0) {
"torch.attr"() {isPrivate, name = "self_attention", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561a70) {
"torch.attr"() {isPrivate, name = "scaling", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561a20) {
"torch.attr"() {isPrivate, name = "head_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5619d0) {
"torch.attr"() {isPrivate, name = "num_heads", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561980) {
"torch.attr"() {isPrivate, name = "qkv_same_dim", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561930) {
"torch.attr"() {isPrivate, name = "vdim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5618e0) {
"torch.attr"() {isPrivate, name = "kdim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561890) {
"torch.attr"() {isPrivate, name = "embed_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561840) {
"torch.attr"() {isPrivate, name = "_incremental_state_id", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5617f0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5617a0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f5606b0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f561110) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561650) {
"torch.attr"() {isPrivate, name = "final_layer_norm", type = !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_418.LayerNorm">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561600) {
"torch.attr"() {isPrivate, name = "fc2", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_417.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5615b0) {
"torch.attr"() {isPrivate, name = "fc1", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_416.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561560) {
"torch.attr"() {isPrivate, name = "activation_dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_415.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561510) {
"torch.attr"() {isPrivate, name = "dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_414.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5614c0) {
"torch.attr"() {isPrivate, name = "self_attn_layer_norm", type = !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_413.LayerNorm">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561470) {
"torch.attr"() {isPrivate, name = "self_attn", type = !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_412.MultiheadAttention">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561420) {
"torch.attr"() {isPrivate, name = "normalize_before", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5613d0) {
"torch.attr"() {isPrivate, name = "quant_noise_block_size", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561380) {
"torch.attr"() {isPrivate, name = "quant_noise", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561330) {
"torch.attr"() {isPrivate, name = "embed_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5612e0) {
"torch.attr"() {isPrivate, name = "return_fc", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f561290) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5611c0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f560bf0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5607d0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f560fa0) {
"torch.slot"(%211) {name = "final_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_405.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f560f20) {
"torch.slot"(%208) {name = "fc2"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_404.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f560ea0) {
"torch.slot"(%205) {name = "fc1"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_403.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f560e20) {
"torch.slot"(%202) {name = "activation_dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_402.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f560da0) {
"torch.slot"(%201) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_401.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f560d20) {
"torch.slot"(%200) {name = "self_attn_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_400.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f560c50) {
"torch.slot"(%197) {name = "self_attn"} : (!torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f560b60) {
"torch.slot"(%9) {name = "normalize_before"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f560ae0) {
"torch.slot"(%27) {name = "quant_noise_block_size"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f560a60) {
"torch.slot"(%11) {name = "quant_noise"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5609e0) {
"torch.slot"(%26) {name = "embed_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f560960) {
"torch.slot"(%9) {name = "return_fc"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5608e0) {
"torch.slot"(%7) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f560860) {
"torch.slot"(%9) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f560770) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5603e0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f560630) {
"torch.slot"(%7) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5605b0) {
"torch.slot"(%9) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f560530) {
"torch.slot"(%210) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5604b0) {
"torch.slot"(%209) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5602e0) {
%210 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f560280) {
%209 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f55ff80) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5600a0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f560220) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5601d0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f560180) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f560130) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f560040) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f55fcb0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55ff00) {
"torch.slot"(%7) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55fe80) {
"torch.slot"(%9) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55fe00) {
"torch.slot"(%207) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55fd80) {
"torch.slot"(%206) {name = "weight"} : (!torch.tensor<[768,3072],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f55fbb0) {
%207 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f55fb50) {
%206 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x3072xf32>} : () -> !torch.tensor<[768,3072],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f55f850) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f55f970) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55faf0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55faa0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55fa50) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55fa00) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f55f910) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f53a040) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f53a290) {
"torch.slot"(%7) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f53a210) {
"torch.slot"(%9) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f53a190) {
"torch.slot"(%204) {name = "bias"} : (!torch.tensor<[3072],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f53a110) {
"torch.slot"(%203) {name = "weight"} : (!torch.tensor<[3072,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f539f60) {
%204 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072xf32>} : () -> !torch.tensor<[3072],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f539f00) {
%203 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<3072x768xf32>} : () -> !torch.tensor<[3072,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f539cb0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f539d60) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f539ea0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f539e50) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f539e00) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f539db0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f539c50) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f539970) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f539bc0) {
"torch.slot"(%9) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f539b40) {
"torch.slot"(%49) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f539ac0) {
"torch.slot"(%51) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f539a40) {
"torch.slot"(%7) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5399c0) {
"torch.slot"(%9) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f539630) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f5396e0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5398d0) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f539880) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f539830) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5397e0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f539730) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5395d0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5392f0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f539540) {
"torch.slot"(%9) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5394c0) {
"torch.slot"(%49) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f539440) {
"torch.slot"(%16) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5393c0) {
"torch.slot"(%7) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f539340) {
"torch.slot"(%9) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f538ee0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f539000) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f539250) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f539200) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5391b0) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f539160) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f539090) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f538fa0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f538c10) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f538e60) {
"torch.slot"(%7) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f538de0) {
"torch.slot"(%9) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f538d60) {
"torch.slot"(%199) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f538ce0) {
"torch.slot"(%198) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f538b10) {
%199 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f55bfb0) {
%198 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f55b040) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f55bdb0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55bf50) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55bf00) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55beb0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55be60) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f55ba80) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f55b160) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55bd30) {
"torch.slot"(%196) {name = "out_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_398.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55bcb0) {
"torch.slot"(%193) {name = "q_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_397.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55bc30) {
"torch.slot"(%190) {name = "v_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_396.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55bbb0) {
"torch.slot"(%187) {name = "k_proj"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_395.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55bae0) {
"torch.slot"(%184) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_394.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b9f0) {
"torch.slot"(%9) {name = "skip_embed_dim_check"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b970) {
"torch.slot"(%9) {name = "onnx_trace"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b8f0) {
"torch.slot"(%9) {name = "add_zero_attn"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b870) {
"torch.slot"(%7) {name = "bias_v"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b7f0) {
"torch.slot"(%7) {name = "bias_k"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b770) {
"torch.slot"(%9) {name = "encoder_decoder_attention"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b6f0) {
"torch.slot"(%6) {name = "self_attention"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b670) {
"torch.slot"(%30) {name = "scaling"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b5f0) {
"torch.slot"(%29) {name = "head_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b570) {
"torch.slot"(%15) {name = "num_heads"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b4f0) {
"torch.slot"(%6) {name = "qkv_same_dim"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b470) {
"torch.slot"(%26) {name = "vdim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b3f0) {
"torch.slot"(%26) {name = "kdim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b370) {
"torch.slot"(%26) {name = "embed_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b2f0) {
"torch.slot"(%183) {name = "_incremental_state_id"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b270) {
"torch.slot"(%7) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b1f0) {
"torch.slot"(%9) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f55b100) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f55ad70) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55afc0) {
"torch.slot"(%7) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55af40) {
"torch.slot"(%9) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55aec0) {
"torch.slot"(%195) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55ae40) {
"torch.slot"(%194) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f55ac70) {
%195 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f55ac10) {
%194 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f55a910) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f55aa30) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55abb0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55ab60) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55ab10) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55aac0) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f55a9d0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f55a640) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55a890) {
"torch.slot"(%7) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55a810) {
"torch.slot"(%9) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55a790) {
"torch.slot"(%192) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55a710) {
"torch.slot"(%191) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f55a540) {
%192 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f55a4e0) {
%191 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f55a1e0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f55a300) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55a480) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55a430) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55a3e0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f55a390) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f55a2a0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f559f10) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55a160) {
"torch.slot"(%7) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55a0e0) {
"torch.slot"(%9) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55a060) {
"torch.slot"(%189) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f559fe0) {
"torch.slot"(%188) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f559e10) {
%189 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f559db0) {
%188 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f559ab0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f559bd0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f559d50) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f559d00) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f559cb0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f559c60) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f559b70) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5597e0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f559a30) {
"torch.slot"(%7) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5599b0) {
"torch.slot"(%9) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f559930) {
"torch.slot"(%186) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5598b0) {
"torch.slot"(%185) {name = "weight"} : (!torch.tensor<[768,768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f559700) {
%186 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5596a0) {
%185 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768x768xf32>} : () -> !torch.tensor<[768,768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f559450) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f559500) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f559640) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5595f0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5595a0) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f559550) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f555e80) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f559200) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5593d0) {
"torch.slot"(%9) {name = "apply_during_inference"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f559350) {
"torch.slot"(%31) {name = "module_name"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5592d0) {
"torch.slot"(%16) {name = "p"} : (!torch.float) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f559250) {
"torch.slot"(%7) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555df0) {
"torch.slot"(%9) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f558ec0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f558f70) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f559160) {
"torch.attr"() {isPrivate, name = "apply_during_inference", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f559110) {
"torch.attr"() {isPrivate, name = "module_name", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5590c0) {
"torch.attr"() {isPrivate, name = "p", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f559070) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f558fc0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.constant.str'(0x56429f558e70) {
%183 = "torch.constant.str"() {value = "8e59ad02-eef7-4705-82dd-33e369f53f1e"} : () -> !torch.str
} -> success : operation was folded
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f55b2f0) {
"torch.slot"(%0) {name = "_incremental_state_id"} : (!torch.str) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.constant.str'(0x56429f562070) {
%0 = "torch.constant.str"() {value = "8e59ad02-eef7-4705-82dd-33e369f53f1e"} : () -> !torch.str
} -> failure : pattern failed to match
//===-------------------------------------------===//
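[Note: the one `success` in this stretch is the fold of the duplicate `torch.constant.str` at 0x56429f558e70: the greedy driver folds `%183` away and the `torch.slot` use at 0x56429f55b2f0 is rewritten to the surviving identical constant `%0`. A minimal sketch of the mechanism behind the "operation was folded" message follows; `ConstantStrOp` and its members here are illustrative stand-ins, not torch-mlir's actual definitions.]

```cpp
// Sketch only (assumed op name/members): how a constant-like op's fold hook
// lets MLIR's OperationFolder, driven by the greedy pattern rewriter,
// deduplicate identical constants, matching the log above where the second
// "torch.constant.str" is folded and its user switches to the first one.
#include "mlir/IR/OpDefinition.h"

struct ConstantStrOp /* : mlir::Op<ConstantStrOp, ...> with ConstantLike */ {
  mlir::StringAttr valueAttr(); // the "value" attribute printed in the log

  // Returning the value attribute marks the op as trivially foldable.
  // For ConstantLike ops, OperationFolder uniques constants with equal
  // attributes, so the redundant definition is erased ("operation was
  // folded") and every use is rewritten to the surviving SSA value.
  mlir::OpFoldResult fold(llvm::ArrayRef<mlir::Attribute> operands) {
    return valueAttr();
  }
};
```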
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f556470) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f556520) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f558e10) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention.set_incremental_state, isPrivate, name = "set_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f558dc0) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention._set_input_buffer, isPrivate, name = "_set_input_buffer"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f558d70) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention._get_full_incremental_state_key, isPrivate, name = "_get_full_incremental_state_key"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f558d20) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention.get_incremental_state, isPrivate, name = "get_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f558cd0) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention._get_input_buffer, isPrivate, name = "_get_input_buffer"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.method'(0x56429f558470) {
"torch.method"() {function = @__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention.reorder_incremental_state, isPrivate, name = "reorder_incremental_state"} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f557c10) {
"torch.attr"() {isPrivate, name = "out_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_398.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f557bc0) {
"torch.attr"() {isPrivate, name = "q_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_397.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f557b70) {
"torch.attr"() {isPrivate, name = "v_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_396.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556b10) {
"torch.attr"() {isPrivate, name = "k_proj", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_395.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556ac0) {
"torch.attr"() {isPrivate, name = "dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_394.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556a70) {
"torch.attr"() {isPrivate, name = "skip_embed_dim_check", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556a20) {
"torch.attr"() {isPrivate, name = "onnx_trace", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5569d0) {
"torch.attr"() {isPrivate, name = "add_zero_attn", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556980) {
"torch.attr"() {isPrivate, name = "bias_v", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556930) {
"torch.attr"() {isPrivate, name = "bias_k", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5568e0) {
"torch.attr"() {isPrivate, name = "encoder_decoder_attention", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556890) {
"torch.attr"() {isPrivate, name = "self_attention", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556840) {
"torch.attr"() {isPrivate, name = "scaling", type = !torch.float} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5567f0) {
"torch.attr"() {isPrivate, name = "head_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5567a0) {
"torch.attr"() {isPrivate, name = "num_heads", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556750) {
"torch.attr"() {isPrivate, name = "qkv_same_dim", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556700) {
"torch.attr"() {isPrivate, name = "vdim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5566b0) {
"torch.attr"() {isPrivate, name = "kdim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556660) {
"torch.attr"() {isPrivate, name = "embed_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556610) {
"torch.attr"() {isPrivate, name = "_incremental_state_id", type = !torch.str} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5565c0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556570) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f555480) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f555ee0) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556420) {
"torch.attr"() {isPrivate, name = "final_layer_norm", type = !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_405.LayerNorm">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5563d0) {
"torch.attr"() {isPrivate, name = "fc2", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_404.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556380) {
"torch.attr"() {isPrivate, name = "fc1", type = !torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_403.Linear">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556330) {
"torch.attr"() {isPrivate, name = "activation_dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_402.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5562e0) {
"torch.attr"() {isPrivate, name = "dropout_module", type = !torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_401.FairseqDropout">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556290) {
"torch.attr"() {isPrivate, name = "self_attn_layer_norm", type = !torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_400.LayerNorm">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556240) {
"torch.attr"() {isPrivate, name = "self_attn", type = !torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_399.MultiheadAttention">} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5561f0) {
"torch.attr"() {isPrivate, name = "normalize_before", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5561a0) {
"torch.attr"() {isPrivate, name = "quant_noise_block_size", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556150) {
"torch.attr"() {isPrivate, name = "quant_noise", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556100) {
"torch.attr"() {isPrivate, name = "embed_dim", type = !torch.int} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f5560b0) {
"torch.attr"() {isPrivate, name = "return_fc", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f556060) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.optional<!torch.bool>} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f555f90) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f5559c0) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5555a0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555d70) {
"torch.slot"(%182) {name = "final_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_392.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555cf0) {
"torch.slot"(%179) {name = "fc2"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_391.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555c70) {
"torch.slot"(%176) {name = "fc1"} : (!torch.nn.Module<"__torch__.torch.nn.modules.linear.___torch_mangle_390.Linear">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555bf0) {
"torch.slot"(%173) {name = "activation_dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_389.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555b70) {
"torch.slot"(%172) {name = "dropout_module"} : (!torch.nn.Module<"__torch__.fairseq.modules.fairseq_dropout.___torch_mangle_388.FairseqDropout">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555af0) {
"torch.slot"(%171) {name = "self_attn_layer_norm"} : (!torch.nn.Module<"__torch__.torch.nn.modules.normalization.___torch_mangle_387.LayerNorm">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555a20) {
"torch.slot"(%168) {name = "self_attn"} : (!torch.nn.Module<"__torch__.fairseq.modules.multihead_attention.___torch_mangle_386.MultiheadAttention">) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555930) {
"torch.slot"(%10) {name = "normalize_before"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5558b0) {
"torch.slot"(%28) {name = "quant_noise_block_size"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555830) {
"torch.slot"(%12) {name = "quant_noise"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5557b0) {
"torch.slot"(%27) {name = "embed_dim"} : (!torch.int) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555730) {
"torch.slot"(%10) {name = "return_fc"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f5556b0) {
"torch.slot"(%8) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555630) {
"torch.slot"(%10) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f555540) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f5551b0) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555400) {
"torch.slot"(%8) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555380) {
"torch.slot"(%10) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555300) {
"torch.slot"(%181) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f555280) {
"torch.slot"(%180) {name = "weight"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f5550b0) {
%181 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f555050) {
%180 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type'(0x56429f554d50) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.class_type_terminator'(0x56429f554e70) {
"torch.class_type_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f554ff0) {
"torch.attr"() {isPrivate, name = "_is_full_backward_hook", type = !torch.none} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f554fa0) {
"torch.attr"() {isPrivate, name = "training", type = !torch.bool} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f554f50) {
"torch.attr"() {isPrivate, name = "bias", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.attr'(0x56429f554f00) {
"torch.attr"() {isPrivate, name = "weight", type = !torch.tensor} : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module'(0x56429f554e10) {
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.nn_module_terminator'(0x56429f554a80) {
"torch.nn_module_terminator"() : () -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f554cd0) {
"torch.slot"(%8) {name = "_is_full_backward_hook"} : (!torch.none) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f554c50) {
"torch.slot"(%10) {name = "training"} : (!torch.bool) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f554bd0) {
"torch.slot"(%178) {name = "bias"} : (!torch.tensor<[768],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.slot'(0x56429f554b50) {
"torch.slot"(%177) {name = "weight"} : (!torch.tensor<[768,3072],f32>) -> ()
} -> failure : pattern failed to match
//===-------------------------------------------===//
//===-------------------------------------------===//
Processing operation : 'torch.tensor.literal'(0x56429f554980) {
%178 = "torch.tensor.literal"() {value = opaque<"elided_large_const", "0xDEADBEEF"> : tensor<768xf32>} : () -> !torch.tensor<[768],f32>
} -> failure : pattern failed to match
//===-------------------------------------------===//