Add several comments for MobileNetV2
osmr committed Aug 8, 2018
1 parent 95c091f commit abe1e5e
Showing 2 changed files with 130 additions and 4 deletions.
76 changes: 72 additions & 4 deletions gluon/models/mobilenetv2.py
@@ -10,7 +10,9 @@


class ReLU6(nn.HybridBlock):

"""
ReLU6 activation layer.
"""
def __init__(self, **kwargs):
super(ReLU6, self).__init__(**kwargs)
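For context, ReLU6 simply clamps activations to the range [0, 6]. The body of hybrid_forward is collapsed in this diff, so the following one-liner is an assumed Gluon-style sketch rather than the committed code:

def hybrid_forward(self, F, x):
    # Assumed implementation: ReLU6(x) = min(max(x, 0), 6)
    return F.clip(x, 0.0, 6.0)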

@@ -19,7 +21,28 @@ def hybrid_forward(self, F, x):


class MobnetConv(HybridBlock):
"""
MobileNetV2 specific convolution block.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
kernel_size : int or tuple/list of 2 int
Convolution window size.
strides : int or tuple/list of 2 int
Strides of the convolution.
padding : int or tuple/list of 2 int
Padding value for convolution layer.
groups : int
Number of groups.
bn_use_global_stats : bool
Whether global moving statistics are used instead of local batch statistics for BatchNorm layers.
activate : bool
Whether to activate the convolution block.
"""
def __init__(self,
in_channels,
out_channels,
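The body of MobnetConv is collapsed in this diff. As a rough sketch of the composition its docstring implies (a convolution, then batch normalization, then an optional ReLU6), with layer names and defaults assumed rather than taken from this commit:

# Hedged sketch of the presumed __init__ body; names and exact defaults are assumptions.
with self.name_scope():
    self.conv = nn.Conv2D(
        channels=out_channels,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        groups=groups,
        use_bias=False,
        in_channels=in_channels)
    self.bn = nn.BatchNorm(
        in_channels=out_channels,
        use_global_stats=bn_use_global_stats)
    if activate:
        self.activ = ReLU6()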
@@ -60,6 +83,20 @@ def mobnet_conv1x1(in_channels,
out_channels,
bn_use_global_stats,
activate):
"""
1x1 version of the MobileNetV2 specific convolution block.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
bn_use_global_stats : bool
Whether global moving statistics are used instead of local batch statistics for BatchNorm layers.
activate : bool
Whether to activate the convolution block.
"""
return MobnetConv(
in_channels=in_channels,
out_channels=out_channels,
@@ -76,6 +113,22 @@ def mobnet_dwconv3x3(in_channels,
strides,
bn_use_global_stats,
activate):
"""
3x3 depthwise version of the MobileNetV2 specific convolution block.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
strides : int or tuple/list of 2 int
Strides of the convolution.
bn_use_global_stats : bool
Whether global moving statistics are used instead of local batch statistics for BatchNorm layers.
activate : bool
Whether to activate the convolution block.
"""
return MobnetConv(
in_channels=in_channels,
out_channels=out_channels,
@@ -88,13 +141,28 @@ def mobnet_dwconv3x3(in_channels,


class LinearBottleneck(HybridBlock):
"""
So-called 'Linear Bottleneck' layer. It is used as a MobileNetV2 unit.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
strides : int or tuple/list of 2 int
Strides of the second convolution layer.
bn_use_global_stats : bool
Whether global moving statistics are used instead of local batch statistics for BatchNorm layers.
expansion : bool
Whether to expand the number of channels.
"""
def __init__(self,
in_channels,
out_channels,
strides,
expansion,
bn_use_global_stats,
expansion,
**kwargs):
super(LinearBottleneck, self).__init__(**kwargs)
self.residual = (in_channels == out_channels) and (strides == 1)
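The remainder of the unit is collapsed in this diff. As a hedged sketch, the standard MobileNetV2 composition this class presumably follows is: an optional expanding 1x1 convolution, a 3x3 depthwise convolution carrying the stride, and a projecting 1x1 convolution with no activation (hence the 'linear' bottleneck), plus the identity shortcut when self.residual holds. The layer names and the expansion factor of 6 below are assumptions, not taken from this commit:

# Hedged sketch of the rest of __init__; names and the expansion factor are assumed.
mid_channels = in_channels * 6 if expansion else in_channels
with self.name_scope():
    self.conv1 = mobnet_conv1x1(
        in_channels=in_channels,
        out_channels=mid_channels,
        bn_use_global_stats=bn_use_global_stats,
        activate=True)
    self.conv2 = mobnet_dwconv3x3(
        in_channels=mid_channels,
        out_channels=mid_channels,
        strides=strides,
        bn_use_global_stats=bn_use_global_stats,
        activate=True)
    self.conv3 = mobnet_conv1x1(
        in_channels=mid_channels,
        out_channels=out_channels,
        bn_use_global_stats=bn_use_global_stats,
        activate=False)

# Hedged sketch of the matching forward pass.
def hybrid_forward(self, F, x):
    identity = x
    x = self.conv1(x)
    x = self.conv2(x)
    x = self.conv3(x)
    if self.residual:
        x = x + identity
    return x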
@@ -181,8 +249,8 @@ def __init__(self,
in_channels=in_channels,
out_channels=out_channels,
strides=strides,
expansion=expansion,
bn_use_global_stats=bn_use_global_stats))
bn_use_global_stats=bn_use_global_stats,
expansion=expansion))
in_channels = out_channels
self.features.add(stage)
self.features.add(mobnet_conv1x1(
58 changes: 58 additions & 0 deletions pytorch/models/mobilenetv2.py
@@ -10,7 +10,26 @@


class MobnetConv(nn.Module):
"""
MobileNetV2 specific convolution block.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
kernel_size : int or tuple/list of 2 int
Convolution window size.
stride : int or tuple/list of 2 int
Stride of the convolution.
padding : int or tuple/list of 2 int
Padding value for convolution layer.
groups : int
Number of groups.
activate : bool
Whether to activate the convolution block.
"""
def __init__(self,
in_channels,
out_channels,
@@ -45,6 +64,18 @@ def forward(self, x):
def mobnet_conv1x1(in_channels,
out_channels,
activate):
"""
1x1 version of the MobileNetV2 specific convolution block.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
activate : bool
Whether to activate the convolution block.
"""
return MobnetConv(
in_channels=in_channels,
out_channels=out_channels,
@@ -59,6 +90,20 @@ def mobnet_dwconv3x3(in_channels,
out_channels,
stride,
activate):
"""
3x3 depthwise version of the MobileNetV2 specific convolution block.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
stride : int or tuple/list of 2 int
Stride of the convolution.
activate : bool
Whether to activate the convolution block.
"""
return MobnetConv(
in_channels=in_channels,
out_channels=out_channels,
@@ -70,7 +115,20 @@ def mobnet_dwconv3x3(in_channels,


class LinearBottleneck(nn.Module):
"""
So-called 'Linear Bottleneck' layer. It is used as a MobileNetV2 unit.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
stride : int or tuple/list of 2 int
Stride of the second convolution layer.
expansion : bool
Whether to expand the number of channels.
"""
def __init__(self,
in_channels,
out_channels,
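For the PyTorch port, a small usage sketch of the unit documented above; the keyword arguments follow the docstring (the full signature is collapsed here), and the input shape is only an example:

# Hedged usage sketch; the constructor arguments mirror the docstring above.
import torch

block = LinearBottleneck(
    in_channels=32,
    out_channels=32,
    stride=1,
    expansion=True)
x = torch.randn(1, 32, 56, 56)  # example NCHW batch
y = block(x)
print(y.size())  # with stride 1 and equal channels, the spatial size is preserved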
