@ajtulloch, created June 12, 2019 03:30

from tvm import relay
from mxnet.gluon import nn
import mxnet as mx


class TestBlock(nn.HybridBlock):
    """Small conv -> relu -> maxpool -> batchnorm block used to exercise quantization."""

    def __init__(self):
        super(TestBlock, self).__init__()
        self.conv = nn.Conv2D(8, 3, 1, 1, use_bias=False)
        self.a000 = nn.Activation("relu")
        self.a0_0 = nn.MaxPool2D(pool_size=2, strides=2)
        self.b_0 = nn.BatchNorm()

    def hybrid_forward(self, F, x):
        base = self.conv(x)
        base = self.a000(base)
        a0 = self.a0_0(base)
        b0 = self.b_0(a0)
        return b0


def get_test_block():
    # Initialize, hybridize, and run one forward pass so the symbolic graph is built.
    block = TestBlock()
    block.initialize()
    block.hybridize()
    data = mx.nd.zeros((1, 3, 224, 224))
    block(data)
    return block


def load_gluon_cv():
    # Convert the hybridized Gluon block into a Relay function and parameter dict.
    block = get_test_block()
    net, params = relay.frontend.from_mxnet(
        block, shape={"data": (1, 3, 224, 224)})
    return net, params


net, params = load_gluon_cv()
with relay.quantize.qconfig(
        skip_k_conv=0, global_scale=4.0,
        round_for_shift=False, store_lowbit_output=False):
    net = relay.quantize.quantize(net, params)
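
Not part of the original gist: a minimal sketch of how the quantized function might then be compiled and executed with TVM's graph runtime. The "llvm" target, opt_level, and reuse of the "data" input name are assumptions, and the exact build API differs across TVM releases.

import numpy as np
import tvm
from tvm.contrib import graph_runtime

# Compile the quantized Relay function for a generic CPU target (assumed here).
target = "llvm"
with relay.build_config(opt_level=3):
    graph, lib, build_params = relay.build(net, target, params=params)

# Run one forward pass through the graph runtime with a dummy input.
ctx = tvm.cpu()
module = graph_runtime.create(graph, lib, ctx)
module.set_input("data", np.zeros((1, 3, 224, 224), dtype="float32"))
module.set_input(**build_params)
module.run()
out = module.get_output(0).asnumpy()
print(out.shape)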