Skip to content

Instantly share code, notes, and snippets.

View jamesr66a's full-sized avatar

James Reed jamesr66a

View GitHub Profile
#ifndef CONV_TBC_OP_H
#define CONV_TBC_OP_H
#include <ATen/ATen.h>
#include <caffe2/core/context.h>
#include <caffe2/core/operator.h>
namespace caffe2 {
using at::Half;
@jamesr66a
jamesr66a / partition_algo.cc
Created November 8, 2017 20:07
Invariant hoisting algorithm
#include "onnx.pb.h"
#include <algorithm>
#include <fstream>
#include <iostream>
#include <numeric>
#include <queue>
#include <tuple>
#include <unordered_map>
#include <unordered_set>
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
+-Module
+-ClassDef(Sequence)
+-FuncDef(__init__)
+-Args
+-Arg(self)
+-Expr
+-Call
+-Attribute
+-Call
+-Name(super)
def test_script_fibb(self):
cu = torch.jit._jit_script_compile('''
def test_while(lim) -> (third):
first = 1
second = 1
i = 1
somenum = 5
dontmutateme = 3
third = 0 # TODO: python lexical scoping
while i < lim:
import torch
import torch.onnx
class MyCastModule(torch.nn.Module):
    """Module that casts its input tensor to int32 and returns it added to itself."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        # Truncating cast to int32 first, then double via element-wise add.
        casted = x.int()
        return torch.add(casted, casted)
10:10:59 ~/onnx-fairseq/pytorch (fixpacktrace2) $ python test/test_jit.py
Traceback (most recent call last):
File "test/test_jit.py", line 1, in <module>
import torch
File "/Users/jamesreed/onnx-fairseq/pytorch/torch/__init__.py", line 78, in <module>
from torch._C import *
ImportError: dlopen(/Users/jamesreed/onnx-fairseq/pytorch/torch/_C.cpython-36m-darwin.so, 9): Library not loaded: @rpath/libATen.dylib
Referenced from: /Users/jamesreed/onnx-fairseq/pytorch/torch/lib/libshm.dylib
Reason: Incompatible library version: libshm.dylib requires version 1.0.0 or later, but libATen.dylib provides version 0.0.0
scan_outputs_0 = ...
hidden_0 = ...
for t in range(timesteps):
scan_outputs_0[t], hidden_0 = LSTM(inputs[t], hidden_0)
scan_outputs_1 = ...
hidden_1 = ...
for t in range(timesteps):
def test_trace_size(self):
    """Tracing a size-dependent view should generalize to inputs of a new shape."""
    def fn(x):
        # The view's target shape is derived from the input's own
        # dimensions, not hard-coded constants.
        return x.view(x.shape[1] * 2, x.size(0), 2)

    trace_input = torch.randn(5, 2, 4, requires_grad=True)
    check_input = torch.randn(4, 8, 4)
    # The traced function must match eager execution on a differently
    # shaped input, proving the sizes were captured symbolically.
    traced_fn = torch.jit.trace(trace_input)(fn)
    self.assertEqual(traced_fn(check_input), fn(check_input))
import torch
class TracedForward(torch.jit.ScriptModule):
@torch.jit.trace(torch.rand(5)):
def forward(self, x):
return torch.neg(x)