add conv2d_transpose alter layout (#6358)
Co-authored-by: honghua.cao <[email protected]>
Beya2019 and honghua.cao authored Sep 7, 2020
1 parent 3004285 commit 4b48d89
Showing 2 changed files with 100 additions and 1 deletion.
43 changes: 43 additions & 0 deletions python/tvm/relay/op/nn/_nn.py
@@ -192,6 +192,49 @@ def legalize_conv2d_transpose(attrs, inputs, types):
"""
return topi.nn.conv2d_transpose_legalize(attrs, inputs, types)

@reg.register_convert_op_layout("nn.conv2d_transpose")
def convert_conv2d_transpose(attrs, inputs, tinfos, desired_layouts):
"""Convert Layout pass registration for conv2d_transpose op.
Parameters
----------
attrs : tvm.ir.Attrs
Attributes of current convolution
inputs : list of tvm.relay.Expr
The args of the Relay expr to be legalized
tinfos : list of types
List of input and output types
desired_layouts : list of layout strings
List of layouts defining our desired
layout for the data and kernel inputs respectively.
Returns
-------
result : tvm.relay.Expr
The transformed expr
"""
# pylint: disable=import-outside-toplevel
from tvm import relay
data, weight = inputs
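# attrs is an immutable attribute node; copy it into a plain dict so the
# layout fields can be overridden below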
new_attrs = dict(attrs)
assert len(desired_layouts) == 2, "A desired layout is expected for both of nn.conv2d_transpose's inputs"
desired_data_layout, desired_kernel_layout = map(str, desired_layouts)
assert desired_data_layout != "default", "Data layout cannot be default"
new_attrs['data_layout'] = desired_data_layout

if desired_kernel_layout != "default":
new_attrs['kernel_layout'] = desired_kernel_layout
return relay.nn.conv2d_transpose(data, weight, **new_attrs)

# Handle default kernel layouts
if desired_data_layout == 'NCHW':
new_attrs['kernel_layout'] = 'OIHW'
return relay.nn.conv2d_transpose(data, weight, **new_attrs)
elif desired_data_layout == 'NHWC':
new_attrs['kernel_layout'] = 'HWIO'
return relay.nn.conv2d_transpose(data, weight, **new_attrs)

raise ValueError("Layout %s is not yet supported." % desired_data_layout)

# conv3d_transpose
reg.register_strategy("nn.conv3d_transpose", strategy.conv3d_transpose_strategy)
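For context, a minimal sketch of how the registration above is driven (the shapes are illustrative assumptions, and the pass pipeline follows the documented ConvertLayout pattern; it mirrors the test added below):

import tvm
from tvm import relay

# A one-op NHWC graph, matching the layouts used in the new test.
x = relay.var("x", shape=(1, 56, 56, 64))
weight = relay.var("weight", shape=(3, 3, 64, 64))
y = relay.nn.conv2d_transpose(x, weight, channels=64, kernel_size=(3, 3),
                              padding=(1, 1), data_layout="NHWC",
                              kernel_layout="HWIO")
mod = tvm.IRModule.from_expr(relay.Function([x, weight], y))

# ConvertLayout dispatches to the convert_conv2d_transpose handler registered
# above and wraps the op in the required layout_transform ops.
seq = tvm.transform.Sequential([
    relay.transform.InferType(),
    relay.transform.ConvertLayout({"nn.conv2d_transpose": ["NCHW", "OIHW"]}),
])
with tvm.transform.PassContext(opt_level=3):
    mod = seq(mod)
print(mod)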
58 changes: 57 additions & 1 deletion tests/python/relay/test_pass_convert_op_layout.py
@@ -90,6 +90,41 @@ def expected():
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)


def test_conv_transpose_convert_layout():
def before():
x = relay.var("x", shape=(1, 56, 56, 64))
weight = relay.var('weight', shape=(3, 3, 64, 64))
y = relay.nn.conv2d_transpose(x, weight,
channels=64,
kernel_size=(3, 3),
padding=(1, 1),
data_layout='NHWC',
kernel_layout='HWIO')
y = relay.nn.relu(y)
y = relay.Function([x, weight], y)
return y

def expected():
x = relay.var("x", shape=(1, 56, 56, 64))
weight = relay.var('weight', shape=(3, 3, 64, 64))
x = relay.layout_transform(x, 'NHWC', 'NCHW')
weight = relay.layout_transform(weight, 'HWIO', 'OIHW')
y = relay.nn.conv2d_transpose(x, weight,
channels=64,
kernel_size=(3, 3),
padding=(1, 1))
y = relay.nn.relu(y)
y = relay.layout_transform(y, 'NCHW', 'NHWC')
y = relay.Function(relay.analysis.free_vars(y), y)
return y

a = before()
a = run_opt_pass(a, transform.ConvertLayout({'nn.conv2d_transpose': ['NCHW', 'OIHW']}))
b = run_opt_pass(expected(), transform.InferType())

assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)


def test_conv_bias_pool_convert_layout():
def before():
x = relay.var("x", shape=(1, 56, 56, 64))
@@ -680,6 +715,7 @@ def before():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1", shape=(64, 3, 3, 64))
weight2 = relay.var("weight2", shape=(64, 3, 3, 64), dtype='int8')
weight3 = relay.var("weight3", shape=(64, 3, 3, 64))
out = relay.nn.conv2d(x, weight1,
channels=64,
kernel_size=(3, 3),
@@ -697,13 +733,21 @@ def before():
padding=(1, 1),
data_layout='NCHW',
kernel_layout='OHWI')
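# the preceding qnn.conv2d yields an integer tensor; cast back to float32
# so the transposed convolution below consumes float data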
out = relay.cast(out, 'float32')
out = relay.nn.conv2d_transpose(out, weight3,
channels=64,
kernel_size=(3, 3),
padding=(1, 1),
data_layout='NCHW',
kernel_layout='OHWI')
out = relay.Function(analysis.free_vars(out), out)
return out

def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1", shape=(64, 3, 3, 64))
weight2 = relay.var("weight2", shape=(64, 3, 3, 64), dtype='int8')
weight3 = relay.var("weight3", shape=(64, 3, 3, 64))
x = relay.layout_transform(x, 'NCHW', 'NHWC')
weight1 = relay.layout_transform(weight1, 'OHWI', 'HWIO')
out = relay.nn.conv2d(x, weight1,
@@ -725,12 +769,23 @@ def expected():
padding=(1, 1),
data_layout='NCHW',
kernel_layout='OIHW')
out = relay.cast(out, 'float32')
out = relay.layout_transform(out, 'NCHW', 'NHWC')
weight3 = relay.layout_transform(weight3, 'OHWI', 'HWIO')
out = relay.nn.conv2d_transpose(out, weight3,
channels=64,
kernel_size=(3, 3),
padding=(1, 1),
data_layout='NHWC',
kernel_layout='HWIO')
out = relay.layout_transform(out, 'NHWC', 'NCHW')
out = relay.Function(analysis.free_vars(out), out)
return out

a = before()
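# each op type is mapped to its own desired layout; ConvertLayout applies
# them independently, inserting layout_transforms where layouts disagree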
desired_layouts = {'nn.conv2d': ['NHWC', 'HWIO'],
'qnn.conv2d': ['NCHW', 'OIHW']}
'qnn.conv2d': ['NCHW', 'OIHW'],
'nn.conv2d_transpose': ['NHWC', 'HWIO'],}
a = run_opt_pass(a, transform.ConvertLayout(desired_layouts))
b = run_opt_pass(expected(), transform.InferType())

@@ -751,5 +806,6 @@ def expected():
test_qnn_conv_concat_convert_layout()
test_qnn_conv_add_convert_layout()
test_conv_convert_kernel_layout()
test_conv_transpose_convert_layout()
test_default_keyword()
test_different_ops_convert_layout()
