[BUG][ConvertLayout] Fix qnn.conv2d layout conversion too many values to unpack (#6442)

This patch follows a previous bugfix in #6419. I made a very simple oversight for qnn.conv2d: tinfos contains the qnn parameter tensors in addition to the data and weight tensors, so data_info and weight_info need to be extracted by index rather than by unpacking tinfos as a pair (see the sketch below).

Change-Id: Ib0ad01f427543371380d0bb604a77b5e0ec1103d
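
For context, here is a minimal sketch of the failure mode. It is illustrative only, not code from the patch: the string entries stand in for the TensorType objects that ConvertLayout passes to the callback, one per qnn.conv2d input (data, weight, input_zero_point, kernel_zero_point, input_scale, kernel_scale).

# Sketch only: placeholders for the per-input TensorType entries.
tinfos = ["data", "weight", "in_zp", "kernel_zp", "in_scale", "kernel_scale"]

try:
    data_info, weight_info = tinfos  # six values into two names
except ValueError as e:
    print(e)  # too many values to unpack (expected 2)

# The fix: take the first two entries by index.
data_info, weight_info = tinfos[0], tinfos[1]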
lhutton1 committed Sep 16, 2020
1 parent b8d385c commit bdfefbb
Showing 2 changed files with 48 additions and 1 deletion.
python/tvm/relay/qnn/op/layout_conversions.py (3 changes: 2 additions & 1 deletion)
@@ -63,7 +63,8 @@ def convert_qnn_conv2d(attrs, inputs, tinfos, desired_layouts):
         return relay.qnn.op.conv2d(*inputs, **new_attrs)
     if desired_data_layout == "NHWC":
         # Check for depthwise convolution.
-        data_info, weight_info = tinfos
+        data_info = tinfos[0]
+        weight_info = tinfos[1]
         if is_depthwise_conv2d(
             data_info.shape,
             attrs["data_layout"],
tests/python/relay/test_pass_convert_op_layout.py (46 changes: 46 additions & 0 deletions)
@@ -749,6 +749,51 @@ def expected():
     assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
 
 
+def test_qnn_conv_nhwc_convert_layout():
+    def before():
+        x = relay.var("x", shape=(1, 64, 56, 56), dtype='int8')
+        weight = relay.var('weight', shape=(64, 64, 3, 3), dtype='int8')
+        y = relay.qnn.op.conv2d(x, weight,
+                                relay.const(1, 'int32'),
+                                relay.const(1, 'int32'),
+                                relay.const(1, 'float32'),
+                                relay.const(1, 'float32'),
+                                channels=64,
+                                kernel_size=(3, 3),
+                                padding=(1, 1),
+                                data_layout='NCHW',
+                                kernel_layout='OIHW')
+        y = relay.nn.relu(y)
+        y = relay.Function([x, weight], y)
+        return y
+
+    def expected():
+        x = relay.var("x", shape=(1, 64, 56, 56), dtype='int8')
+        weight = relay.var('weight', shape=(64, 64, 3, 3), dtype='int8')
+        x = relay.layout_transform(x, 'NCHW', 'NHWC')
+        weight = relay.layout_transform(weight, 'OIHW', 'HWIO')
+        y = relay.qnn.op.conv2d(x, weight,
+                                relay.const(1, 'int32'),
+                                relay.const(1, 'int32'),
+                                relay.const(1, 'float32'),
+                                relay.const(1, 'float32'),
+                                channels=64,
+                                kernel_size=(3, 3),
+                                padding=(1, 1),
+                                data_layout="NHWC",
+                                kernel_layout="HWIO")
+        y = relay.nn.relu(y)
+        y = relay.layout_transform(y, 'NHWC', 'NCHW')
+        y = relay.Function(relay.analysis.free_vars(y), y)
+        return y
+
+    a = before()
+    a = run_opt_pass(a, transform.ConvertLayout({'qnn.conv2d': ['NHWC', 'default']}))
+    b = run_opt_pass(expected(), transform.InferType())
+
+    assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
+
+
 def test_conv_convert_kernel_layout():
     """ Check that convolution kernel layout is correctly transformed. """
 
@@ -951,6 +996,7 @@ def expected():
     test_qnn_conv_requantize_convert_layout()
     test_qnn_conv_concat_convert_layout()
     test_qnn_conv_add_convert_layout()
+    test_qnn_conv_nhwc_convert_layout()
     test_conv_convert_kernel_layout()
     test_conv_transpose_convert_layout()
     test_default_keyword()
