[Relay] add conv2d_transpose alter layout #6358

Merged 1 commit on Sep 7, 2020
43 changes: 43 additions & 0 deletions python/tvm/relay/op/nn/_nn.py
@@ -192,6 +192,49 @@ def legalize_conv2d_transpose(attrs, inputs, types):
    """
    return topi.nn.conv2d_transpose_legalize(attrs, inputs, types)

@reg.register_convert_op_layout("nn.conv2d_transpose")
def convert_conv2d_transpose(attrs, inputs, tinfos, desired_layouts):
    """Convert Layout pass registration for conv2d_transpose op.

    Parameters
    ----------
    attrs : tvm.ir.Attrs
        Attributes of the current conv2d_transpose
    inputs : list of tvm.relay.Expr
        The args of the Relay expr to be converted
    tinfos : list of types
        List of input and output types
    desired_layouts : list of layout strings
        List of layouts defining the desired data and kernel layouts,
        respectively.

    Returns
    -------
    result : tvm.relay.Expr
        The transformed expr
    """
    # pylint: disable=import-outside-toplevel
    from tvm import relay
    data, weight = inputs
    new_attrs = dict(attrs)
    assert len(desired_layouts) == 2, "A desired layout is expected for both of nn.conv2d_transpose's inputs"
    desired_data_layout, desired_kernel_layout = map(str, desired_layouts)
    assert desired_data_layout != "default", "Data layout cannot be default"
    new_attrs['data_layout'] = desired_data_layout

    if desired_kernel_layout != "default":
        new_attrs['kernel_layout'] = desired_kernel_layout
        return relay.nn.conv2d_transpose(data, weight, **new_attrs)

    # Handle default kernel layouts
    if desired_data_layout == 'NCHW':
        new_attrs['kernel_layout'] = 'OIHW'
        return relay.nn.conv2d_transpose(data, weight, **new_attrs)
    elif desired_data_layout == 'NHWC':
        new_attrs['kernel_layout'] = 'HWIO'
        return relay.nn.conv2d_transpose(data, weight, **new_attrs)

    raise ValueError("Layout %s is not yet supported." % desired_data_layout)

# conv3d_transpose
reg.register_strategy("nn.conv3d_transpose", strategy.conv3d_transpose_strategy)
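For context, a minimal sketch (not part of this diff) of how the new registration is exercised through the ConvertLayout pass; the module construction, shapes, and variable names here are illustrative:

import tvm
from tvm import relay
from tvm.relay import transform

# Illustrative NHWC conv2d_transpose; shapes are arbitrary.
x = relay.var("x", shape=(1, 56, 56, 64))
w = relay.var("w", shape=(3, 3, 64, 64))
y = relay.nn.conv2d_transpose(x, w, channels=64, kernel_size=(3, 3),
                              padding=(1, 1),
                              data_layout="NHWC", kernel_layout="HWIO")
mod = tvm.IRModule.from_expr(relay.Function([x, w], y))

# Request NCHW data with an explicit OIHW kernel layout; ConvertLayout
# dispatches to convert_conv2d_transpose via the registration above.
seq = tvm.transform.Sequential(
    [transform.InferType(),
     transform.ConvertLayout({"nn.conv2d_transpose": ["NCHW", "OIHW"]})])
with tvm.transform.PassContext(opt_level=3):
    mod = seq(mod)
print(mod)

The pass wraps the converted conv2d_transpose in layout_transform ops, which is exactly what the tests below verify.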
58 changes: 57 additions & 1 deletion tests/python/relay/test_pass_convert_op_layout.py
@@ -90,6 +90,41 @@ def expected():
    assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)


def test_conv_transpose_convert_layout():
    def before():
        x = relay.var("x", shape=(1, 56, 56, 64))
        weight = relay.var('weight', shape=(3, 3, 64, 64))
        y = relay.nn.conv2d_transpose(x, weight,
                                      channels=64,
                                      kernel_size=(3, 3),
                                      padding=(1, 1),
                                      data_layout='NHWC',
                                      kernel_layout='HWIO')
        y = relay.nn.relu(y)
        y = relay.Function([x, weight], y)
        return y

    def expected():
        x = relay.var("x", shape=(1, 56, 56, 64))
        weight = relay.var('weight', shape=(3, 3, 64, 64))
        x = relay.layout_transform(x, 'NHWC', 'NCHW')
        weight = relay.layout_transform(weight, 'HWIO', 'OIHW')
        y = relay.nn.conv2d_transpose(x, weight,
                                      channels=64,
                                      kernel_size=(3, 3),
                                      padding=(1, 1))
        y = relay.nn.relu(y)
        y = relay.layout_transform(y, 'NCHW', 'NHWC')
        y = relay.Function(relay.analysis.free_vars(y), y)
        return y

    a = before()
    a = run_opt_pass(a, transform.ConvertLayout({'nn.conv2d_transpose': ['NCHW', 'OIHW']}))
    b = run_opt_pass(expected(), transform.InferType())

    assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)


def test_conv_bias_pool_convert_layout():
    def before():
        x = relay.var("x", shape=(1, 56, 56, 64))
@@ -680,6 +715,7 @@ def before():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight1 = relay.var("weight1", shape=(64, 3, 3, 64))
        weight2 = relay.var("weight2", shape=(64, 3, 3, 64), dtype='int8')
        weight3 = relay.var("weight3", shape=(64, 3, 3, 64))
        out = relay.nn.conv2d(x, weight1,
                              channels=64,
                              kernel_size=(3, 3),
@@ -697,13 +733,21 @@ def before():
                                  padding=(1, 1),
                                  data_layout='NCHW',
                                  kernel_layout='OHWI')
        out = relay.cast(out, 'float32')
        out = relay.nn.conv2d_transpose(out, weight3,
                                        channels=64,
                                        kernel_size=(3, 3),
                                        padding=(1, 1),
                                        data_layout='NCHW',
                                        kernel_layout='OHWI')
        out = relay.Function(analysis.free_vars(out), out)
        return out

    def expected():
        x = relay.var("x", shape=(1, 64, 56, 56))
        weight1 = relay.var("weight1", shape=(64, 3, 3, 64))
        weight2 = relay.var("weight2", shape=(64, 3, 3, 64), dtype='int8')
        weight3 = relay.var("weight3", shape=(64, 3, 3, 64))
        x = relay.layout_transform(x, 'NCHW', 'NHWC')
        weight1 = relay.layout_transform(weight1, 'OHWI', 'HWIO')
        out = relay.nn.conv2d(x, weight1,
@@ -725,12 +769,23 @@ def expected():
                                  padding=(1, 1),
                                  data_layout='NCHW',
                                  kernel_layout='OIHW')
        out = relay.cast(out, 'float32')
        out = relay.layout_transform(out, 'NCHW', 'NHWC')
        weight3 = relay.layout_transform(weight3, 'OHWI', 'HWIO')
        out = relay.nn.conv2d_transpose(out, weight3,
                                        channels=64,
                                        kernel_size=(3, 3),
                                        padding=(1, 1),
                                        data_layout='NHWC',
                                        kernel_layout='HWIO')
        out = relay.layout_transform(out, 'NHWC', 'NCHW')
        out = relay.Function(analysis.free_vars(out), out)
        return out

    a = before()
    desired_layouts = {'nn.conv2d': ['NHWC', 'HWIO'],
                       'qnn.conv2d': ['NCHW', 'OIHW'],
                       'nn.conv2d_transpose': ['NHWC', 'HWIO']}
    a = run_opt_pass(a, transform.ConvertLayout(desired_layouts))
    b = run_opt_pass(expected(), transform.InferType())

Expand All @@ -751,5 +806,6 @@ def expected():
test_qnn_conv_concat_convert_layout()
test_qnn_conv_add_convert_layout()
test_conv_convert_kernel_layout()
test_conv_transpose_convert_layout()
test_default_keyword()
test_different_ops_convert_layout()
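For completeness, the "default" kernel keyword exercises the fallback branch in convert_conv2d_transpose, which picks HWIO for NHWC data and OIHW for NCHW data. A standalone sketch of that path, under the same illustrative assumptions as above (shapes and names are arbitrary, not taken from the diff):

import tvm
from tvm import relay
from tvm.relay import transform

# NCHW input with an OIHW kernel, matching conv2d_transpose's defaults.
x = relay.var("x", shape=(1, 64, 56, 56))
w = relay.var("w", shape=(64, 64, 3, 3))
y = relay.nn.conv2d_transpose(x, w, channels=64, kernel_size=(3, 3),
                              padding=(1, 1))
mod = tvm.IRModule.from_expr(relay.Function([x, w], y))

# 'default' lets the registration choose the kernel layout itself:
# HWIO here, because the requested data layout is NHWC.
seq = tvm.transform.Sequential(
    [transform.InferType(),
     transform.ConvertLayout({"nn.conv2d_transpose": ["NHWC", "default"]})])
with tvm.transform.PassContext(opt_level=3):
    mod = seq(mod)
print(mod)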