
Commit

update micronet
bupt906 committed Jan 13, 2022
1 parent 506fba2 commit 9065116
Showing 2 changed files with 116 additions and 124 deletions.
136 changes: 116 additions & 20 deletions ppocr/modeling/backbones/rec_micronet.py
@@ -1,11 +1,29 @@
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This code is adapted from:
https://github.com/liyunsheng13/micronet/blob/main/backbone/micronet.py
https://github.com/liyunsheng13/micronet/blob/main/backbone/activation.py
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import paddle
import paddle.nn as nn

from ppocr.optimizer.dyshiftmax import DYShiftMax
from ppocr.modeling.backbones.det_mobilenet_v3 import make_divisible

M0_cfgs = [
@@ -61,24 +79,6 @@ def get_micronet_config(mode):
    return eval(mode + '_cfgs')


class h_sigmoid(nn.Layer):
    def __init__(self):
        super(h_sigmoid, self).__init__()
        self.relu = nn.ReLU6()

    def forward(self, x):
        return self.relu(x + 3) / 6


class h_swish(nn.Layer):
    def __init__(self):
        super(h_swish, self).__init__()
        self.sigmoid = h_sigmoid()

    def forward(self, x):
        return x * self.sigmoid(x)


class MaxGroupPooling(nn.Layer):
    def __init__(self, channel_per_group=2):
        super(MaxGroupPooling, self).__init__()
@@ -220,6 +220,94 @@ def forward(self, x):
        return out


class DYShiftMax(nn.Layer):
    def __init__(self,
                 inp,
                 oup,
                 reduction=4,
                 act_max=1.0,
                 act_relu=True,
                 init_a=[0.0, 0.0],
                 init_b=[0.0, 0.0],
                 relu_before_pool=False,
                 g=None,
                 expansion=False):
        super(DYShiftMax, self).__init__()
        self.oup = oup
        self.act_max = act_max * 2
        self.act_relu = act_relu
        self.avg_pool = nn.Sequential(nn.ReLU() if relu_before_pool else
                                      nn.Sequential(), nn.AdaptiveAvgPool2D(1))

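        # act_relu=True predicts four dynamic coefficients (a1, a2, b1, b2)
        # combined with an elementwise max; otherwise only two (a1, b1).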
        self.exp = 4 if act_relu else 2
        self.init_a = init_a
        self.init_b = init_b

        # determine squeeze
        squeeze = make_divisible(inp // reduction, 4)
        if squeeze < 4:
            squeeze = 4

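        # Squeeze-and-excitation style head: inp -> squeeze -> oup * exp
        # coefficients, squashed into [0, 1] by the Hardsigmoid.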
        self.fc = nn.Sequential(
            nn.Linear(inp, squeeze),
            nn.ReLU(), nn.Linear(squeeze, oup * self.exp), nn.Hardsigmoid())

        if g is None:
            g = (1, 1)  # g is indexed as g[1] below, so a scalar default would crash
        self.g = g[1]
        if self.g != 1 and expansion:
            self.g = inp // self.g

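        # Precompute a static channel permutation: view the channels as
        # (g groups x gc channels per group), rotate the groups by one, then
        # rotate the channels within each group by one. Indexing with it
        # yields the "shifted" tensor mixed into the dynamic activation.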
        self.gc = inp // self.g
        index = paddle.arange(inp)
        index = paddle.reshape(index, [1, inp, 1, 1])
        index = paddle.reshape(index, [1, self.g, self.gc, 1, 1])
        indexgs = paddle.split(index, [1, self.g - 1], axis=1)
        indexgs = paddle.concat((indexgs[1], indexgs[0]), axis=1)
        indexs = paddle.split(indexgs, [1, self.gc - 1], axis=2)
        indexs = paddle.concat((indexs[1], indexs[0]), axis=2)
        self.index = paddle.reshape(indexs, [inp])
        self.expansion = expansion

    def forward(self, x):
        x_in = x
        x_out = x

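        # Squeeze: global average pooling plus the FC head produce
        # per-sample, per-channel dynamic coefficients.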
        b, c, _, _ = x_in.shape
        y = self.avg_pool(x_in)
        y = paddle.reshape(y, [b, c])
        y = self.fc(y)
        y = paddle.reshape(y, [b, self.oup * self.exp, 1, 1])
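        # Hardsigmoid output lies in [0, 1]; recenter and scale it to
        # [-act_max, act_max] (self.act_max was doubled in __init__).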
        y = (y - 0.5) * self.act_max

        n2, c2, h2, w2 = x_out.shape
        # index_select keeps the channel gather on-device and differentiable
        # (the original detoured through numpy, which detaches gradients).
        x2 = paddle.index_select(x_out, self.index, axis=1)

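        # Two dynamic affine maps over (x, shifted x); their elementwise max
        # generalizes a dynamic ReLU. With exp == 2, a single affine map is used.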
        if self.exp == 4:
            temp = y.shape
            a1, b1, a2, b2 = paddle.split(y, temp[1] // self.oup, axis=1)

            a1 = a1 + self.init_a[0]
            a2 = a2 + self.init_a[1]

            b1 = b1 + self.init_b[0]
            b2 = b2 + self.init_b[1]

            z1 = x_out * a1 + x2 * b1
            z2 = x_out * a2 + x2 * b2

            out = paddle.maximum(z1, z2)

        elif self.exp == 2:
            temp = y.shape
            a1, b1 = paddle.split(y, temp[1] // self.oup, axis=1)
            a1 = a1 + self.init_a[0]
            b1 = b1 + self.init_b[0]
            out = x_out * a1 + x2 * b1

        return out

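# Illustrative usage of DYShiftMax (not part of this commit; the channel
# count, g tuple, and input shape below are assumed for the sketch):
#   act = DYShiftMax(inp=16, oup=16, reduction=4, act_relu=True,
#                    init_a=[1.0, 0.0], init_b=[0.0, 0.0], g=(0, 4))
#   out = act(paddle.rand([1, 16, 8, 32]))  # same shape as the input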

class DYMicroBlock(nn.Layer):
    def __init__(self,
                 inp,
@@ -347,7 +435,15 @@ def forward(self, x):


class MicroNet(nn.Layer):
    def __init__(self, mode='M0', **kwargs):
"""
the MicroNet backbone network for recognition module.
Args:
mode(str): {'M0', 'M1', 'M2', 'M3'}
Four models are proposed based on four different computational costs (4M, 6M, 12M, 21M MAdds)
Default: 'M3'.
"""

    def __init__(self, mode='M3', **kwargs):
        super(MicroNet, self).__init__()

        self.cfgs = get_micronet_config(mode)
104 changes: 0 additions & 104 deletions ppocr/optimizer/dyshiftmax.py

This file was deleted.
