The full script can be found at examples/onnx_logsoftmax.py.
import torch
import torch_mlir


class ToyModel(torch.nn.Module):
    """Minimal module that applies log_softmax over dim 1 (channels for NCHW input)."""

    def __init__(self):
        super().__init__()

    def forward(self, input: torch.Tensor) -> torch.Tensor:
        # Returns log-probabilities along the channel dimension.
        return torch.nn.functional.log_softmax(input, dim=1)


model = ToyModel()
# Compile with a (1, 3, 224, 224) example input, targeting the ONNX dialect of MLIR.
module = torch_mlir.compile(model, torch.ones(1, 3, 224, 224), output_type="onnx")
print(module)
// Compiled output: log_softmax maps directly to a single onnx.LogSoftmax op
// with axis = 1, matching dim=1 in the Python source. Input and result shapes
// are both the fixed tensor<1x3x224x224xf32> of the example input.
module {
func.func @main_graph(%arg0: tensor<1x3x224x224xf32>) -> tensor<1x3x224x224xf32> attributes {input_names = ["input.1"], output_names = ["ret"]} {
%0 = "onnx.LogSoftmax"(%arg0) {axis = 1 : si64, onnx_node_name = "LogSoftmax_0"} : (tensor<1x3x224x224xf32>) -> tensor<1x3x224x224xf32>
return %0 : tensor<1x3x224x224xf32>
}
// Marks @main_graph as the module entry point.
"onnx.EntryPoint"() {func = @main_graph} : () -> ()
}
// Alternative output: LogSoftmax decomposed into onnx.Softmax (axis = 1)
// followed by onnx.Log. The intermediate %0 has the unranked type tensor<*xf32>,
// i.e. its shape has not been inferred at this stage.
// NOTE(review): presumably produced with a decomposition option enabled — confirm
// which compile flag yields this form.
module {
func.func @main_graph(%arg0: tensor<1x3x224x224xf32>) -> tensor<1x3x224x224xf32> attributes {input_names = ["input.1"], output_names = ["ret"]} {
%0 = "onnx.Softmax"(%arg0) {axis = 1 : si64} : (tensor<1x3x224x224xf32>) -> tensor<*xf32>
%1 = "onnx.Log"(%0) : (tensor<*xf32>) -> tensor<1x3x224x224xf32>
return %1 : tensor<1x3x224x224xf32>
}
// Marks @main_graph as the module entry point.
"onnx.EntryPoint"() {func = @main_graph} : () -> ()
}