View gist:e3247ff6b8de52f5adf31773dc3a41e2
commit 60a1d2169b3e1efff3a08b5b534a4967863a4dfa
Author: James Reed <jamesreed@fb.com>
Date: Tue Aug 14 18:32:08 2018 -0400
Peephole pass to erase aten::index before ONNX export
diff --git a/test/onnx/test_pytorch_onnx_caffe2.py b/test/onnx/test_pytorch_onnx_caffe2.py
index f0a9ee4eb..93eee7e7a 100644
--- a/test/onnx/test_pytorch_onnx_caffe2.py
+++ b/test/onnx/test_pytorch_onnx_caffe2.py
View gist:229acab660154c715a57a7fb960295c8
diff --git a/torch/onnx/utils.py b/torch/onnx/utils.py
index b770b900c..e940537f1 100644
--- a/torch/onnx/utils.py
+++ b/torch/onnx/utils.py
@@ -245,6 +245,7 @@ def _export(model, args, f, export_params=True, verbose=False, training=False,
training, input_names,
output_names, operator_export_type,
example_outputs, propagate)
+ print(graph)
View gist:4f9936085d8adaaf64ce5d880d72957f
diff --git a/torch/onnx/symbolic.py b/torch/onnx/symbolic.py
index 43fff76a1..ca5702837 100644
--- a/torch/onnx/symbolic.py
+++ b/torch/onnx/symbolic.py
@@ -710,6 +710,12 @@ def type_as(g, self, other):
return g.op("ATen", self, other, operator_s="type_as")
+@parse_args('v', 'is', 'v', 'v', 'f', 'i')
+def layer_norm(g, self, normalized_shape, weight, bias, eps, cudnn_enable):
View gist:3ac9e931c50bf34663ef2927393446be
import torch
# Toy TorchScript function: runs a fixed 100-iteration loop with a
# data-dependent branch, alternating which argument accumulates into the other.
@torch.jit.script
def foo(x, y):
    # NOTE(review): `_to_tensor` is not defined in this snippet — presumably a
    # helper that wraps the Python int loop index in a tensor so torch.fmod
    # can be applied to it; confirm against the full gist.
    for i in range(100):
        # Every iteration where i % 3 == 0, add x into y; otherwise add y
        # into x. Both updates are in-place augmented assignments.
        if torch.fmod(_to_tensor(i), 3) == 0:
            y += x
        else:
            x += y
    # Returns the two mutated tensors as a tuple.
    return x, y
View gist:a960cd47be0ae7cf1a554417ddf345fa
commit 96d6beb5d300da71c2e5eee0eec9a012480af8f0
Author: James Reed <jamesreed@fb.com>
Date: Mon Jul 23 15:47:43 2018 -0700
Bugfix for stateful module export
diff --git a/test/test_jit.py b/test/test_jit.py
index 9363c0954..39deb0f74 100644
--- a/test/test_jit.py
+++ b/test/test_jit.py
View gist:afc084db32116a05a0a90a5f9c51ccb7
commit bd1b4dbe99e6040ded6c4ab2a59798536a2d6a45
Author: James Reed <jamesreed@fb.com>
Date: Mon Jul 23 13:59:34 2018 -0700
Fix zipfile export
diff --git a/test/expect/TestScript.test_script_module_file_export.expect b/test/expect/TestScript.test_script_module_file_export.expect
new file mode 100644
index 000000000..1d7ce966b
--- /dev/null
View gist:f3cddce667b22898113ce0be3a2ec3f1
import torch
class ClampMod(torch.nn.Module):
    """Minimal module whose forward clamps its input with keyword bounds.

    Note the bounds are deliberately inverted (min > max): per the
    documented clamp semantics, every element then collapses to ``max``.
    """

    def forward(self, x):
        # Functional form of x.clamp(max=3.14, min=4.13) — same bounds,
        # same result.
        return torch.clamp(x, min=4.13, max=3.14)
import io
f = io.BytesIO()
View gist:ffd170e4b03517f9866e42d569c73061
======================================================================
ERROR: test_python_call_annotation (__main__.TestScript)
----------------------------------------------------------------------
Traceback (most recent call last):
File "test/test_jit.py", line 1932, in test_python_call_annotation
@torch.jit.script
File "/Users/jamesreed/onnx-fairseq/pytorch/torch/jit/__init__.py", line 373, in script
graph = _jit_script_compile(torch._C.TypedDef(ast, schema), rcb)
TypeError: _jit_script_compile(): incompatible function arguments. The following argument types are supported:
1. (arg0: torch._C.TypedDef, arg1: Callable[[str], function]) -> torch._C.Graph
View gist:62e8fd83bcb23ffcffd4033055fa7a20
import torch
# Repro script: clamp with a NaN lower bound, then backprop through it to
# check that neither the forward nor the backward pass crashes.
x = torch.randn(3, 3, requires_grad=True)
print(x)
# Renamed from `min`/`max` so the builtins are not shadowed.
min_val = float('NaN')
max_val = 0.0
y = torch.clamp(x, min_val, max_val)
print('y', y)
# Sum to a scalar so backward() needs no explicit gradient argument.
y.sum().backward()
View gist:1c3133737d274016e7dc38a5e7a54a95
def test_call_python_fn_from_script_module(self):
def python_fn(x):
return torch.neg(x)
class ScriptMod(torch.jit.ScriptModule):
def __init__(self):
super(ScriptMod, self).__init__()
self.param = torch.nn.Parameter(torch.rand(4, 3))
@torch.jit.script_method