import inspect
import warnings
from typing import Any, Dict, Optional, Tuple, Union

import torch
import torch.nn as nn
from torch.nn.modules.batchnorm import _BatchNorm
from torch.nn.modules.instancenorm import _InstanceNorm

from ..model.weight_init import constant_init, kaiming_init
from ..utils.registry import MODELS
from .build_functions import SyncBatchNorm

TORCH_VERSION = torch.__version__
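
# Register common norm, conv, and activation layers under short aliases so
# that config dicts can refer to them by name, e.g., dict(type='BN'). Note
# that 'BN', 'IN' and 'Conv' are aliases of the corresponding 2d variants.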
MODELS.register_module('BN', module=nn.BatchNorm2d)
MODELS.register_module('BN1d', module=nn.BatchNorm1d)
MODELS.register_module('BN2d', module=nn.BatchNorm2d)
MODELS.register_module('BN3d', module=nn.BatchNorm3d)
MODELS.register_module('SyncBN', module=SyncBatchNorm)
MODELS.register_module('GN', module=nn.GroupNorm)
MODELS.register_module('LN', module=nn.LayerNorm)
MODELS.register_module('IN', module=nn.InstanceNorm2d)
MODELS.register_module('IN1d', module=nn.InstanceNorm1d)
MODELS.register_module('IN2d', module=nn.InstanceNorm2d)
MODELS.register_module('IN3d', module=nn.InstanceNorm3d)

MODELS.register_module('Conv1d', module=nn.Conv1d)
MODELS.register_module('Conv2d', module=nn.Conv2d)
MODELS.register_module('Conv3d', module=nn.Conv3d)
MODELS.register_module('Conv', module=nn.Conv2d)

MODELS.register_module('GELU', module=nn.GELU)
MODELS.register_module('ReLU', module=nn.ReLU)


def build_activation_layer(cfg: Dict) -> nn.Module:
    """Build activation layer.

    Args:
        cfg (dict): The activation layer config, which should contain:

            - type (str): Layer type.
            - layer args: Args needed to instantiate an activation layer.

    Returns:
        nn.Module: Created activation layer.
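
    Examples:
        >>> # a minimal usage sketch using the 'ReLU' alias registered above
        >>> act = build_activation_layer(dict(type='ReLU', inplace=True))
        >>> isinstance(act, nn.ReLU)
        True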
    """
    return MODELS.build(cfg)


def build_norm_layer(cfg: Dict,
                     num_features: int,
                     postfix: Union[int, str] = '') -> Tuple[str, nn.Module]:
    """Build normalization layer.

    Args:
        cfg (dict): The norm layer config, which should contain:

            - type (str): Layer type.
            - layer args: Args needed to instantiate a norm layer.
            - requires_grad (bool, optional): Whether the parameters of the
              created layer require gradient updates. Defaults to True.
        num_features (int): Number of input channels.
        postfix (int | str): The postfix to be appended to the norm
            abbreviation to create the layer name.

    Returns:
        tuple[str, nn.Module]: The first element is the layer name consisting
        of abbreviation and postfix, e.g., bn1, gn. The second element is the
        created norm layer.
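
    Examples:
        >>> # a minimal usage sketch using the 'BN' alias registered above
        >>> name, layer = build_norm_layer(dict(type='BN'), num_features=64)
        >>> name
        'bn'
        >>> isinstance(layer, nn.BatchNorm2d)
        True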
    """
    if not isinstance(cfg, dict):
        raise TypeError('cfg must be a dict')
    if 'type' not in cfg:
        raise KeyError('the cfg dict must contain the key "type"')
    cfg_ = cfg.copy()

    layer_type = cfg_.pop('type')

    # Look up the layer class in the registry of the current default scope;
    # `registry.get` falls back to parent registries when needed.
    with MODELS.switch_scope_and_registry(None) as registry:
        norm_layer = registry.get(layer_type)
    if norm_layer is None:
        raise KeyError(f'Cannot find {layer_type} in registry under scope '
                       f'name {registry.scope}')
    abbr = infer_abbr(norm_layer)

    assert isinstance(postfix, (int, str))
    name = abbr + str(postfix)

    requires_grad = cfg_.pop('requires_grad', True)
    cfg_.setdefault('eps', 1e-5)
    if layer_type != 'GN':
        layer = norm_layer(num_features, **cfg_)
        if layer_type == 'SyncBN' and hasattr(layer, '_specify_ddp_gpu_num'):
            layer._specify_ddp_gpu_num(1)
    else:
        assert 'num_groups' in cfg_
        layer = norm_layer(num_channels=num_features, **cfg_)

    for param in layer.parameters():
        param.requires_grad = requires_grad

    return name, layer


def infer_abbr(class_type):
    """Infer abbreviation from the class name.

    When we build a norm layer with `build_norm_layer()`, we want to preserve
    the norm type in variable names, e.g., self.bn1, self.gn. This method
    infers the abbreviation to map class types to abbreviations.

    Rule 1: If the class has the property "_abbr_", return the property.
    Rule 2: If the parent class is _BatchNorm, GroupNorm, LayerNorm or
        InstanceNorm, the abbreviation of this layer will be "bn", "gn", "ln"
        and "in" respectively.
    Rule 3: If the class name contains "batch", "group", "layer" or
        "instance", the abbreviation of this layer will be "bn", "gn", "ln"
        and "in" respectively.
    Rule 4: Otherwise, the abbreviation falls back to "norm_layer".

    Args:
        class_type (type): The norm layer type.

    Returns:
        str: The inferred abbreviation.
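
    Examples:
        >>> infer_abbr(nn.BatchNorm2d)
        'bn'
        >>> infer_abbr(nn.GroupNorm)
        'gn'
        >>> infer_abbr(nn.LayerNorm)
        'ln'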
    """
    if not inspect.isclass(class_type):
        raise TypeError(
            f'class_type must be a type, but got {type(class_type)}')
    if hasattr(class_type, '_abbr_'):
        return class_type._abbr_
    if issubclass(class_type, _InstanceNorm):
        return 'in'
    elif issubclass(class_type, _BatchNorm):
        return 'bn'
    elif issubclass(class_type, nn.GroupNorm):
        return 'gn'
    elif issubclass(class_type, nn.LayerNorm):
        return 'ln'
    else:
        class_name = class_type.__name__.lower()
        if 'batch' in class_name:
            return 'bn'
        elif 'group' in class_name:
            return 'gn'
        elif 'layer' in class_name:
            return 'ln'
        elif 'instance' in class_name:
            return 'in'
        else:
            return 'norm_layer'


def build_dropout(cfg: Dict, default_args: Optional[Dict] = None) -> Any:
    """Builder for dropout layers."""
    return MODELS.build(cfg, default_args=default_args)


def build_conv_layer(cfg: Optional[Dict], *args, **kwargs) -> nn.Module:
    """Build convolution layer.

    Args:
        cfg (None or dict): The conv layer config, which should contain:

            - type (str): Layer type.
            - layer args: Args needed to instantiate a conv layer.
        args (argument list): Arguments passed to the `__init__`
            method of the corresponding conv layer.
        kwargs (keyword arguments): Keyword arguments passed to the
            `__init__` method of the corresponding conv layer.

    Returns:
        nn.Module: Created conv layer.
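
    Examples:
        >>> # a minimal usage sketch; cfg=None falls back to nn.Conv2d
        >>> conv = build_conv_layer(None, 3, 8, kernel_size=3, padding=1)
        >>> isinstance(conv, nn.Conv2d)
        True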
    """
    if cfg is None:
        cfg_ = dict(type='Conv2d')
    else:
        if not isinstance(cfg, dict):
            raise TypeError('cfg must be a dict')
        if 'type' not in cfg:
            raise KeyError('the cfg dict must contain the key "type"')
        cfg_ = cfg.copy()

    layer_type = cfg_.pop('type')

    # Look up the layer class in the registry of the current default scope;
    # `registry.get` falls back to parent registries when needed.
    with MODELS.switch_scope_and_registry(None) as registry:
        conv_layer = registry.get(layer_type)
    if conv_layer is None:
        raise KeyError(f'Cannot find {layer_type} in registry under scope '
                       f'name {registry.scope}')
    layer = conv_layer(*args, **kwargs, **cfg_)

    return layer


def build_padding_layer(cfg: Dict, *args, **kwargs) -> nn.Module:
    """Build padding layer.

    Args:
        cfg (dict): The padding layer config, which should contain:

            - type (str): Layer type.
            - layer args: Args needed to instantiate a padding layer.

    Returns:
        nn.Module: Created padding layer.
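
    Examples:
        >>> # a minimal sketch; assumes a reflection padding layer is
        >>> # registered elsewhere in this repo, e.g. under the name
        >>> # 'reflect' with module=nn.ReflectionPad2d
        >>> pad = build_padding_layer(dict(type='reflect'), 1)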
    """
    if not isinstance(cfg, dict):
        raise TypeError('cfg must be a dict')
    if 'type' not in cfg:
        raise KeyError('the cfg dict must contain the key "type"')

    cfg_ = cfg.copy()
    padding_type = cfg_.pop('type')

    # Look up the layer class in the registry of the current default scope;
    # `registry.get` falls back to parent registries when needed.
    with MODELS.switch_scope_and_registry(None) as registry:
        padding_layer = registry.get(padding_type)
    if padding_layer is None:
        raise KeyError(f'Cannot find {padding_type} in registry under scope '
                       f'name {registry.scope}')
    layer = padding_layer(*args, **kwargs, **cfg_)

    return layer


@MODELS.register_module()
class ConvModule(nn.Module):
    """A conv block that bundles conv/norm/activation layers.

    This block simplifies the usage of convolution layers, which are commonly
    used with a norm layer (e.g., BatchNorm) and activation layer (e.g.,
    ReLU). It is based upon three build methods: `build_conv_layer()`,
    `build_norm_layer()` and `build_activation_layer()`.

    Besides, we add some additional features in this module:

    1. Automatically set `bias` of the conv layer.
    2. Spectral norm is supported.
    3. More padding modes are supported. Before PyTorch 1.5, nn.Conv2d only
       supports zero and circular padding, and we add "reflect" padding mode.

    Args:
        in_channels (int): Number of channels in the input feature map.
            Same as that in ``nn._ConvNd``.
        out_channels (int): Number of channels produced by the convolution.
            Same as that in ``nn._ConvNd``.
        kernel_size (int | tuple[int]): Size of the convolving kernel.
            Same as that in ``nn._ConvNd``.
        stride (int | tuple[int]): Stride of the convolution.
            Same as that in ``nn._ConvNd``.
        padding (int | tuple[int]): Zero-padding added to both sides of
            the input. Same as that in ``nn._ConvNd``.
        dilation (int | tuple[int]): Spacing between kernel elements.
            Same as that in ``nn._ConvNd``.
        groups (int): Number of blocked connections from input channels to
            output channels. Same as that in ``nn._ConvNd``.
        bias (bool | str): If specified as `auto`, it will be decided by the
            norm_cfg. Bias will be set as True if `norm_cfg` is None,
            otherwise False. Default: "auto".
        conv_cfg (dict): Config dict for convolution layer. Default: None,
            which means using conv2d.
        norm_cfg (dict): Config dict for normalization layer. Default: None.
        act_cfg (dict): Config dict for activation layer.
            Default: dict(type='ReLU').
        inplace (bool): Whether to use inplace mode for activation.
            Default: True.
        with_spectral_norm (bool): Whether to use spectral norm in the conv
            module. Default: False.
        padding_mode (str): If the `padding_mode` is not supported by the
            current `Conv2d` in PyTorch, we will use our own padding layer
            instead. Currently, we support ['zeros', 'circular'] with the
            official implementation and ['reflect'] with our own
            implementation. Default: 'zeros'.
        order (tuple[str]): The order of conv/norm/activation layers. It is a
            sequence of "conv", "norm" and "act". Common examples are
            ("conv", "norm", "act") and ("act", "conv", "norm").
            Default: ('conv', 'norm', 'act').
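
    Examples:
        >>> # a minimal usage sketch: conv + BN + ReLU in the default order
        >>> block = ConvModule(3, 16, 3, padding=1, norm_cfg=dict(type='BN'))
        >>> x = torch.rand(1, 3, 32, 32)
        >>> block(x).shape
        torch.Size([1, 16, 32, 32])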
    """

    _abbr_ = 'conv_block'

    def __init__(self,
                 in_channels: int,
                 out_channels: int,
                 kernel_size: Union[int, Tuple[int, int]],
                 stride: Union[int, Tuple[int, int]] = 1,
                 padding: Union[int, Tuple[int, int]] = 0,
                 dilation: Union[int, Tuple[int, int]] = 1,
                 groups: int = 1,
                 bias: Union[bool, str] = 'auto',
                 conv_cfg: Optional[Dict] = None,
                 norm_cfg: Optional[Dict] = None,
                 act_cfg: Optional[Dict] = dict(type='ReLU'),
                 inplace: bool = True,
                 with_spectral_norm: bool = False,
                 padding_mode: str = 'zeros',
                 order: tuple = ('conv', 'norm', 'act')):
        super().__init__()
        assert conv_cfg is None or isinstance(conv_cfg, dict)
        assert norm_cfg is None or isinstance(norm_cfg, dict)
        assert act_cfg is None or isinstance(act_cfg, dict)
        official_padding_mode = ['zeros', 'circular']
        self.conv_cfg = conv_cfg
        self.norm_cfg = norm_cfg
        self.act_cfg = act_cfg
        self.inplace = inplace
        self.with_spectral_norm = with_spectral_norm
        self.with_explicit_padding = padding_mode not in official_padding_mode
        self.order = order
        assert isinstance(self.order, tuple) and len(self.order) == 3
        assert set(order) == {'conv', 'norm', 'act'}

        self.with_norm = norm_cfg is not None
        self.with_activation = act_cfg is not None
        # if the conv layer is followed by a norm layer, bias is unnecessary
        if bias == 'auto':
            bias = not self.with_norm
        self.with_bias = bias

        if self.with_explicit_padding:
            pad_cfg = dict(type=padding_mode)
            self.padding_layer = build_padding_layer(pad_cfg, padding)

        # reset padding to 0 when an explicit padding layer is used
        conv_padding = 0 if self.with_explicit_padding else padding
        # build convolution layer
        self.conv = build_conv_layer(
            conv_cfg,
            in_channels,
            out_channels,
            kernel_size,
            stride=stride,
            padding=conv_padding,
            dilation=dilation,
            groups=groups,
            bias=bias)
        # export the attributes of self.conv to a higher level for convenience
        self.in_channels = self.conv.in_channels
        self.out_channels = self.conv.out_channels
        self.kernel_size = self.conv.kernel_size
        self.stride = self.conv.stride
        self.padding = padding
        self.dilation = self.conv.dilation
        self.transposed = self.conv.transposed
        self.output_padding = self.conv.output_padding
        self.groups = self.conv.groups

        if self.with_spectral_norm:
            self.conv = nn.utils.spectral_norm(self.conv)

        # build normalization layer
        if self.with_norm:
            # norm layer is after conv layer
            if order.index('norm') > order.index('conv'):
                norm_channels = out_channels
            else:
                norm_channels = in_channels
            self.norm_name, norm = build_norm_layer(norm_cfg, norm_channels)
            self.add_module(self.norm_name, norm)
            if self.with_bias:
                if isinstance(norm, (_BatchNorm, _InstanceNorm)):
                    warnings.warn(
                        'Unnecessary conv bias before batch/instance norm')
        else:
            self.norm_name = None

        # build activation layer
        if self.with_activation:
            act_cfg_ = act_cfg.copy()
            # some activations do not accept an 'inplace' argument
            if act_cfg_['type'] not in [
                    'Tanh', 'PReLU', 'Sigmoid', 'HSigmoid', 'Swish', 'GELU'
            ]:
                act_cfg_.setdefault('inplace', inplace)
            self.activate = build_activation_layer(act_cfg_)

        self.init_weights()

    @property
    def norm(self):
        if self.norm_name:
            return getattr(self, self.norm_name)
        else:
            return None

    def init_weights(self):
        # Initialize the conv layer only when it does not define its own
        # ``init_weights`` method: custom conv layers with their own
        # initialization manners are expected to initialize themselves.
        # Kaiming init is used by default, with the nonlinearity (and its
        # negative slope) matched to the activation so that the gain is
        # correct for LeakyReLU.
        if not hasattr(self.conv, 'init_weights'):
            if self.with_activation and self.act_cfg['type'] == 'LeakyReLU':
                nonlinearity = 'leaky_relu'
                a = self.act_cfg.get('negative_slope', 0.01)
            else:
                nonlinearity = 'relu'
                a = 0
            kaiming_init(self.conv, a=a, nonlinearity=nonlinearity)
        if self.with_norm:
            constant_init(self.norm, 1, bias=0)

    def forward(self,
                x: torch.Tensor,
                activate: bool = True,
                norm: bool = True) -> torch.Tensor:
        for layer in self.order:
            if layer == 'conv':
                if self.with_explicit_padding:
                    x = self.padding_layer(x)
                x = self.conv(x)
            elif layer == 'norm' and norm and self.with_norm:
                x = self.norm(x)
            elif layer == 'act' and activate and self.with_activation:
                x = self.activate(x)
        return x