address comments and fix lint
Change-Id: Iba3436d14a16106a696400c1329dcfa164631dc7
lhutton1 committed Nov 23, 2021
1 parent 22efcd3 commit 7010ffb
Showing 3 changed files with 88 additions and 13 deletions.
25 changes: 16 additions & 9 deletions python/tvm/relay/backend/contrib/ethosu/codegen.py
@@ -16,8 +16,6 @@
# under the License.
"""Codegen for Arm(R) Ethos(TM)-U NPU"""

from typing import List, Dict, Any

import tvm
from tvm import relay
from tvm.ir.transform import Pass
@@ -64,6 +62,15 @@ class LayoutOptimization(ExprMutator):
"""A pass to optimize the layout of NPU operations. If both the
producer and consumer of a tensor are NPU operators, then the
layout is converted from NHWC to NHCWB16.
Attributes
----------
children : Dict[tvm.relay.expr.Call, List[tvm.relay.expr.Call]]
A map from current call to a list of calls that rely on the current
call. This allows the graph to be traversed backwards, which is useful
for checking whether the output layouts can be rewritten.
optimize_op : Dict[str, Callable]
A map from an NPU op name to the function that creates the corresponding NPU op.
"""

def __init__(self):
@@ -73,6 +80,7 @@ def __init__(self):
"contrib.ethosu.depthwise_conv2d": ethosu_op.ethosu_depthwise_conv2d,
"contrib.ethosu.pooling": ethosu_op.ethosu_pooling,
"contrib.ethosu.binary_elementwise": ethosu_op.ethosu_binary_elementwise,
"contrib.ethosu.unary_elementwise": ethosu_op.ethosu_unary_elementwise,
}

super().__init__()
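For readers skimming the diff: `optimize_op` is a plain name-to-constructor dispatch table, so a matched NPU call can be rebuilt through the right constructor with its layout attributes rewritten (as `alter_ethosu_op_layout` does below with `self.optimize_op[name](*call.args, **new_attrs)`). The following stand-alone sketch illustrates the same pattern with hypothetical `FakeCall`/`make_conv2d` stand-ins rather than the real `tvm.relay` objects:

from typing import Any, Callable, Dict, NamedTuple, Tuple


class FakeCall(NamedTuple):
    """Hypothetical stand-in for tvm.relay.expr.Call, for illustration only."""

    op_name: str
    args: Tuple[Any, ...]
    attrs: Dict[str, Any]


def make_conv2d(*args: Any, **attrs: Any) -> FakeCall:
    """Stand-in constructor, playing the role of ethosu_op.ethosu_conv2d."""
    return FakeCall("contrib.ethosu.conv2d", args, attrs)


# Name-to-constructor dispatch, mirroring LayoutOptimization.optimize_op.
optimize_op: Dict[str, Callable[..., FakeCall]] = {
    "contrib.ethosu.conv2d": make_conv2d,
}


def rebuild_with_layouts(call: FakeCall, **layout_attrs: str) -> FakeCall:
    """Rebuild a call through its registered constructor with updated layouts."""
    new_attrs = dict(call.attrs)
    new_attrs.update(layout_attrs)
    return optimize_op[call.op_name](*call.args, **new_attrs)


call = make_conv2d("ifm", "weights", ifm_layout="NHWC", ofm_layout="NHWC")
new_call = rebuild_with_layouts(call, ofm_layout="NHCWB16")
print(new_call.attrs)  # {'ifm_layout': 'NHWC', 'ofm_layout': 'NHCWB16'}
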
@@ -104,13 +112,13 @@ def alter_ethosu_op_layout(self, call: tvm.relay.expr.Call) -> tvm.relay.expr.Call:
parents = []

# Check if we can rewrite the input layouts
layout_count = 0
input_count = 0
for arg in call.args:
layout_count += 1
input_count += 1
if not isinstance(arg, tvm.relay.expr.Call):
continue
if is_ethosu_op(arg):
layout_string = "ifm_layout" if layout_count <= 1 else f"ifm{layout_count}_layout"
layout_string = "ifm_layout" if input_count <= 1 else f"ifm{input_count}_layout"
new_attrs[layout_string] = "NHCWB16"
parents.append(arg)

@@ -129,7 +137,7 @@ def alter_ethosu_op_layout(self, call: tvm.relay.expr.Call) -> tvm.relay.expr.Call:
)
new_call = self.optimize_op[name](*call.args, **new_attrs)

# Rewriting output layout requires maintaining map of current call to children
# Update map of children
for input_arg in parents:
if input_arg in self.children:
self.children[input_arg].append(new_call)
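The `children` map built here is the inverse of the argument edges (producer to consumers); it is what lets the pass later traverse backwards and only switch a producer's output to NHCWB16 when its consumers are themselves NPU ops. A stand-alone sketch of that bookkeeping, using hypothetical string node names instead of Relay calls:

from collections import defaultdict
from typing import Dict, List

# Hypothetical toy graph: each node lists the nodes whose outputs it consumes.
graph_args: Dict[str, List[str]] = {
    "conv_1": [],
    "conv_2": ["conv_1"],
    "clip": ["conv_2"],  # pretend this one is not an NPU op
    "conv_3": ["conv_2"],
}
npu_ops = {"conv_1", "conv_2", "conv_3"}

# Invert the edges into a producer -> consumers map, as the pass does
# incrementally while visiting each call.
children: Dict[str, List[str]] = defaultdict(list)
for node, args in graph_args.items():
    for arg in args:
        children[arg].append(node)


def can_rewrite_output_layout(node: str) -> bool:
    """The output layout may change only if every consumer is an NPU op."""
    consumers = children.get(node, [])
    return bool(consumers) and all(c in npu_ops for c in consumers)


print(can_rewrite_output_layout("conv_1"))  # True: conv_2 is the only consumer
print(can_rewrite_output_layout("conv_2"))  # False: "clip" is not an NPU op
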
@@ -161,6 +169,8 @@ def visit_call(self, call: tvm.relay.expr.Call) -> tvm.relay.expr.Call:

@relay.transform.function_pass(opt_level=1, name="LayoutOptimizer")
class LayoutOptimizer(Pass):
"""Register LayoutOptimizer as a Relay pass."""

def transform_function(
self, func: tvm.relay.function.Function, mod: tvm.IRModule, _
) -> tvm.IRModule:
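The `function_pass` decorator is what turns this thin wrapper into a registered Relay pass: instantiating the class and calling the instance on an `IRModule` runs `transform_function` over each function, which is how `_compile` applies it below (`mod = LayoutOptimizer()(mod)`). A minimal example of the same registration pattern, using a hypothetical do-nothing `IdentityPass` rather than anything from this change:

import tvm
from tvm import relay


@relay.transform.function_pass(opt_level=1, name="IdentityPass")
class IdentityPass:
    """A do-nothing pass registered the same way as LayoutOptimizer."""

    def transform_function(self, func, mod, ctx):
        # Return the function unchanged; a real pass would rewrite it here.
        return func


x = relay.var("x", shape=(1, 8, 8, 4), dtype="int8")
mod = tvm.IRModule.from_expr(relay.Function([x], relay.abs(x)))
mod = IdentityPass()(mod)  # apply the pass, as _compile does with LayoutOptimizer()
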
@@ -187,11 +197,9 @@ def _compile(ext_func):
-------
cs : str
A hex string of the bytes of the command stream.
encoded_constants : str
A hex string of the bytes that includes the concatenated
encoded weights, encoded biases and scales.
scratch_size : int
The size of the scratch buffer needed.
"""
@@ -200,7 +208,6 @@ def _compile(ext_func):
mod = LegalizeEthosU()(mod)
mod = LayoutOptimizer()(mod)
mod = relay.transform.InferType()(mod)

# We are currently using the copy_constants scheduler. In the long run,
# this should be a single intelligent and composite scheduler
# that can perform scheduling based on user inputs such as
5 changes: 5 additions & 0 deletions python/tvm/relay/backend/contrib/ethosu/op/op_attrs.py
@@ -37,3 +37,8 @@ class EthosuPoolingAttrs(Attrs):
@tvm._ffi.register_object("relay.attrs.EthosuBinaryElementwiseAttrs")
class EthosuBinaryElementwiseAttrs(Attrs):
"""Attributes for contrib.ethosu.binary_elementwise"""


@tvm._ffi.register_object("relay.attrs.EthosuUnaryElementwiseAttrs")
class EthosuUnaryElementwiseAttrs(Attrs):
"""Attributes for contrib.ethosu.unary_elementwise"""
71 changes: 67 additions & 4 deletions tests/python/contrib/test_ethosu/test_layout_optimizer.py
@@ -16,18 +16,19 @@
# under the License.

"""Test the layout optimization pass. This pass is used to
convert subgraphs to the preferred layout of NCHWB16.
convert subgraphs to the preferred layout of NHCWB16.
"""

import pytest

pytest.importorskip("ethosu.vela")

import sys

import pytest
import numpy as np
import tensorflow as tf
import tflite.Model

pytest.importorskip("ethosu.vela")

import tvm
from tvm import relay
from tvm.relay.op.contrib.ethosu import partition_for_ethosu
@@ -151,6 +152,38 @@ def get_graph(get_expected=False):
_assert_structural_equal(a, b)


def test_multiple_depthwise_convolution():
"""Test layout optimization pass on multiple depthwise convolutions.
depthwise_conv_1
|
depthwise_conv_2
|
depthwise_conv_3
"""

def get_graph(get_expected=False):
x = relay.var("x", shape=(1, 8, 8, 4), dtype="int8")
for i in range(3):
ifm_layout = "NHCWB16" if get_expected and i != 0 else "NHWC"
ofm_layout = "NHCWB16" if get_expected and i != 2 else "NHWC"
x = infra.make_ethosu_depthwise_conv2d(
ifm=x,
channels=4,
kernel_shape=(1, 1),
padding=(0, 0),
strides=(1, 1),
dilation=(1, 1),
ifm_layout=ifm_layout,
ofm_layout=ofm_layout,
)
return relay.Function(relay.analysis.free_vars(x), x)

a = _run_pass(get_graph(), LayoutOptimizer())
b = _run_pass(get_graph(get_expected=True), relay.transform.InferType())
_assert_structural_equal(a, b)


def test_ignore_transform_operations():
"""Test layout optimization pass ignores transform operations
such as reshape and strided slice.
@@ -475,6 +508,36 @@ def get_graph(get_expected=False):
_assert_structural_equal(a, b)


def test_multiple_unary_elementwise():
"""Test the layout optimization pass works as expected for multiple
unary elementwise operations.
abs_1
|
abs_2
|
abs_3
"""

def get_graph(get_expected=False):
x = relay.var("x", shape=(1, 8, 8, 4), dtype="int8")
for i in range(3):
ifm_layout = "NHCWB16" if get_expected and i != 0 else "NHWC"
ofm_layout = "NHCWB16" if get_expected and i != 2 else "NHWC"
x = infra.make_ethosu_unary_elementwise(
x,
ofm_channels=4,
operator_type="ABS",
ifm_layout=ifm_layout,
ofm_layout=ofm_layout,
)
return relay.Function(relay.analysis.free_vars(x), x)

a = _run_pass(get_graph(), LayoutOptimizer())
b = _run_pass(get_graph(get_expected=True), relay.transform.InferType())
_assert_structural_equal(a, b)


def test_same_output_multiple_convolutions():
"""Test running the layout optimization pass with multiple convolutions
gives same output as TFLite."""
