From d29243038b0f6071851e53e945597c069f4b2e98 Mon Sep 17 00:00:00 2001
From: "honghua.cao"
Date: Fri, 28 Aug 2020 23:23:33 +0800
Subject: [PATCH] add conv2d_transpose convert layout

---
 python/tvm/relay/op/nn/_nn.py                | 43 ++++++++++++++
 .../relay/test_pass_convert_op_layout.py     | 58 ++++++++++++++++++-
 2 files changed, 100 insertions(+), 1 deletion(-)

diff --git a/python/tvm/relay/op/nn/_nn.py b/python/tvm/relay/op/nn/_nn.py
index 2f0966c62b65..43fca6d5f80f 100644
--- a/python/tvm/relay/op/nn/_nn.py
+++ b/python/tvm/relay/op/nn/_nn.py
@@ -192,6 +192,49 @@ def legalize_conv2d_transpose(attrs, inputs, types):
     """
     return topi.nn.conv2d_transpose_legalize(attrs, inputs, types)
 
+@reg.register_convert_op_layout("nn.conv2d_transpose")
+def convert_conv2d_transpose(attrs, inputs, tinfos, desired_layouts):
+    """Convert Layout pass registration for conv2d_transpose op.
+
+    Parameters
+    ----------
+    attrs : tvm.ir.Attrs
+        Attributes of current transposed convolution
+    inputs : list of tvm.relay.Expr
+        The args of the Relay expr to be legalized
+    tinfos : list of types
+        List of input and output types
+    desired_layouts : list of layout strings
+        List of layouts defining our desired
+        layout for the data and kernel inputs respectively.
+
+    Returns
+    -------
+    result : tvm.relay.Expr
+        The transformed expr
+    """
+    # pylint: disable=import-outside-toplevel
+    from tvm import relay
+    data, weight = inputs
+    new_attrs = dict(attrs)
+    assert len(desired_layouts) == 2, "A desired layout is expected for both of nn.conv2d_transpose's inputs"
+    desired_data_layout, desired_kernel_layout = map(str, desired_layouts)
+    assert desired_data_layout != "default", "Data layout cannot be default"
+    new_attrs['data_layout'] = desired_data_layout
+
+    if desired_kernel_layout != "default":
+        new_attrs['kernel_layout'] = desired_kernel_layout
+        return relay.nn.conv2d_transpose(data, weight, **new_attrs)
+
+    # Handle default kernel layouts
+    if desired_data_layout == 'NCHW':
+        new_attrs['kernel_layout'] = 'OIHW'
+        return relay.nn.conv2d_transpose(data, weight, **new_attrs)
+    elif desired_data_layout == 'NHWC':
+        new_attrs['kernel_layout'] = 'HWIO'
+        return relay.nn.conv2d_transpose(data, weight, **new_attrs)
+
+    raise ValueError("Layout %s is not yet supported." % desired_data_layout)
 
 # conv3d_transpose
 reg.register_strategy("nn.conv3d_transpose", strategy.conv3d_transpose_strategy)
diff --git a/tests/python/relay/test_pass_convert_op_layout.py b/tests/python/relay/test_pass_convert_op_layout.py
index f3cdbfc86e51..aec758d7b5fe 100644
--- a/tests/python/relay/test_pass_convert_op_layout.py
+++ b/tests/python/relay/test_pass_convert_op_layout.py
@@ -90,6 +90,41 @@ def expected():
     assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
 
 
+def test_conv_transpose_convert_layout():
+    def before():
+        x = relay.var("x", shape=(1, 56, 56, 64))
+        weight = relay.var('weight', shape=(3, 3, 64, 64))
+        y = relay.nn.conv2d_transpose(x, weight,
+                                      channels=64,
+                                      kernel_size=(3, 3),
+                                      padding=(1, 1),
+                                      data_layout='NHWC',
+                                      kernel_layout='HWIO')
+        y = relay.nn.relu(y)
+        y = relay.Function([x, weight], y)
+        return y
+
+    def expected():
+        x = relay.var("x", shape=(1, 56, 56, 64))
+        weight = relay.var('weight', shape=(3, 3, 64, 64))
+        x = relay.layout_transform(x, 'NHWC', 'NCHW')
+        weight = relay.layout_transform(weight, 'HWIO', 'OIHW')
+        y = relay.nn.conv2d_transpose(x, weight,
+                                      channels=64,
+                                      kernel_size=(3, 3),
+                                      padding=(1, 1))
+        y = relay.nn.relu(y)
+        y = relay.layout_transform(y, 'NCHW', 'NHWC')
+        y = relay.Function(relay.analysis.free_vars(y), y)
+        return y
+
+    a = before()
+    a = run_opt_pass(a, transform.ConvertLayout({'nn.conv2d_transpose': ['NCHW', 'OIHW']}))
+    b = run_opt_pass(expected(), transform.InferType())
+
+    assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
+
+
 def test_conv_bias_pool_convert_layout():
     def before():
         x = relay.var("x", shape=(1, 56, 56, 64))
@@ -680,6 +715,7 @@ def before():
         x = relay.var("x", shape=(1, 64, 56, 56))
         weight1 = relay.var("weight1", shape=(64, 3, 3, 64))
         weight2 = relay.var("weight2", shape=(64, 3, 3, 64), dtype='int8')
+        weight3 = relay.var("weight3", shape=(64, 3, 3, 64))
         out = relay.nn.conv2d(x, weight1,
                               channels=64,
                               kernel_size=(3, 3),
@@ -697,6 +733,13 @@
                               padding=(1, 1),
                               data_layout='NCHW',
                               kernel_layout='OHWI')
+        out = relay.cast(out, 'float32')
+        out = relay.nn.conv2d_transpose(out, weight3,
+                                        channels=64,
+                                        kernel_size=(3, 3),
+                                        padding=(1, 1),
+                                        data_layout='NCHW',
+                                        kernel_layout='OHWI')
         out = relay.Function(analysis.free_vars(out), out)
         return out
 
@@ -704,6 +747,7 @@ def expected():
         x = relay.var("x", shape=(1, 64, 56, 56))
         weight1 = relay.var("weight1", shape=(64, 3, 3, 64))
         weight2 = relay.var("weight2", shape=(64, 3, 3, 64), dtype='int8')
+        weight3 = relay.var("weight3", shape=(64, 3, 3, 64))
         x = relay.layout_transform(x, 'NCHW', 'NHWC')
         weight1 = relay.layout_transform(weight1, 'OHWI', 'HWIO')
         out = relay.nn.conv2d(x, weight1,
@@ -725,12 +769,23 @@
                               padding=(1, 1),
                               data_layout='NCHW',
                               kernel_layout='OIHW')
+        out = relay.cast(out, 'float32')
+        out = relay.layout_transform(out, 'NCHW', 'NHWC')
+        weight3 = relay.layout_transform(weight3, 'OHWI', 'HWIO')
+        out = relay.nn.conv2d_transpose(out, weight3,
+                                        channels=64,
+                                        kernel_size=(3, 3),
+                                        padding=(1, 1),
+                                        data_layout='NHWC',
+                                        kernel_layout='HWIO')
+        out = relay.layout_transform(out, 'NHWC', 'NCHW')
         out = relay.Function(analysis.free_vars(out), out)
         return out
 
     a = before()
     desired_layouts = {'nn.conv2d': ['NHWC', 'HWIO'],
-                       'qnn.conv2d': ['NCHW', 'OIHW']}
+                       'qnn.conv2d': ['NCHW', 'OIHW'],
+                       'nn.conv2d_transpose': ['NHWC', 'HWIO']}
     a = run_opt_pass(a, transform.ConvertLayout(desired_layouts))
     b = run_opt_pass(expected(), transform.InferType())
 
@@ -751,5 +806,6 @@
     test_qnn_conv_concat_convert_layout()
     test_qnn_conv_add_convert_layout()
     test_conv_convert_kernel_layout()
+    test_conv_transpose_convert_layout()
     test_default_keyword()
     test_different_ops_convert_layout()
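
Reviewer note: below is a minimal end-to-end sketch of how the new
registration is exercised (the graph, shapes, and layout choices are
illustrative, not taken from the patch). Passing "default" as the kernel
layout drives the default-kernel-layout branch of convert_conv2d_transpose,
which selects OIHW for NCHW data:

    import tvm
    from tvm import relay

    # Small NHWC graph containing the op this patch teaches
    # ConvertLayout to handle; shapes mirror the new unit test.
    x = relay.var("x", shape=(1, 56, 56, 64))
    w = relay.var("weight", shape=(3, 3, 64, 64))
    y = relay.nn.conv2d_transpose(x, w,
                                  channels=64,
                                  kernel_size=(3, 3),
                                  padding=(1, 1),
                                  data_layout="NHWC",
                                  kernel_layout="HWIO")
    mod = tvm.IRModule.from_expr(relay.Function([x, w], y))

    # Request NCHW data; "default" lets the new hook choose the
    # matching kernel layout (OIHW).
    desired = {"nn.conv2d_transpose": ["NCHW", "default"]}
    with tvm.transform.PassContext(opt_level=3):
        mod = relay.transform.ConvertLayout(desired)(mod)

    # The printed module should show layout_transform ops inserted
    # around an NCHW/OIHW conv2d_transpose.
    print(mod)

The new test can also be run on its own with
pytest tests/python/relay/test_pass_convert_op_layout.py::test_conv_transpose_convert_layout.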