import numpy as np
import tvm
from tvm import auto_scheduler, te, topi
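
# Differentiate conv2d_nchw w.r.t. its data and kernel with te.gradient, then time the default schedule on CPU.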
# Workload: the last conv2d layer in ResNet (7x7 feature map, 512 channels, 3x3 kernel).
H, W, CO, CI, KH, KW, strides, padding = 7, 7, 512, 512, 3, 3, (1, 1), (1, 1)


def conv2d_diff(N, H, W, CO, CI, KH, KW, stride, padding):
    """Build conv2d_nchw and its gradients w.r.t. the data and the kernel."""
    data = te.placeholder((N, CI, H, W), name="data")
    kernel = te.placeholder((CO, CI, KH, KW), name="kernel")
    out = topi.nn.conv2d_nchw(data, kernel, stride, padding, dilation=1, out_dtype="float32")
    # dy is the adjoint of the conv2d output (the "head" of the backward pass).
    dy = te.placeholder(out.shape, name="dy")
    dx, dw = te.gradient(out, [data, kernel], dy)
    return [data, kernel, dy, dx, dw]


data, kernel, dy, dx, dw = conv2d_diff(1, H, W, CO, CI, KH, KW, strides, padding)
s = te.create_schedule([dy.op, dx.op, dw.op])
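# Build for CPU with the default (untuned) TE schedule.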
target = "llvm"
ctx = tvm.cpu(0)
func = tvm.build(s, [data, kernel, dy, dx, dw], target)
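# Random inputs for the data, the kernel, and the incoming gradient dy.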
data_np = np.random.uniform(size=[v.value for v in data.shape]).astype(np.float32)
weight_np = np.random.uniform(size=[v.value for v in kernel.shape]).astype(np.float32)
dy_np = np.random.uniform(size=[v.value for v in dy.shape]).astype(np.float32)
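# Upload the inputs and allocate output buffers for dx and dw on the target device.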
data_tvm = tvm.nd.array(data_np, ctx=ctx)
weight_tvm = tvm.nd.array(weight_np, ctx=ctx)
dy_tvm = tvm.nd.array(dy_np, ctx=ctx)
dx_tvm = tvm.nd.empty([v.value for v in dx.shape], ctx=ctx)
dw_tvm = tvm.nd.empty([v.value for v in dw.shape], ctx=ctx)
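# Time the compiled function; each measured repeat runs for at least 500 ms.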
evaluator = func.time_evaluator(func.entry_name, ctx, min_repeat_ms=500)
print(
    "Median execution time: %.3f ms"
    % (np.median(evaluator(data_tvm, weight_tvm, dy_tvm, dx_tvm, dw_tvm).results) * 1000)
)