From 10a12bf3ba17f2b0c8256553f3987681d05fa048 Mon Sep 17 00:00:00 2001
From: Nicolas Hug
Date: Tue, 30 Nov 2021 15:33:40 +0000
Subject: [PATCH 1/2] Add bias parameter to ConvNormActivation

---
 torchvision/ops/misc.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/torchvision/ops/misc.py b/torchvision/ops/misc.py
index fac9a3570d6..857148e4ab2 100644
--- a/torchvision/ops/misc.py
+++ b/torchvision/ops/misc.py
@@ -116,6 +116,7 @@ class ConvNormActivation(torch.nn.Sequential):
         activation_layer (Callable[..., torch.nn.Module], optinal): Activation function which will be stacked on top of the normalization layer (if not None), otherwise on top of the conv layer. If ``None`` this layer wont be used. Default: ``torch.nn.ReLU``
         dilation (int): Spacing between kernel elements. Default: 1
         inplace (bool): Parameter for the activation layer, which can optionally do the operation in-place. Default ``True``
+        bias (bool): Whether to use bias in the ``norm_layer``. By default, biases are included if ``norm_layer is None``.

     """

@@ -131,9 +132,12 @@ def __init__(
         activation_layer: Optional[Callable[..., torch.nn.Module]] = torch.nn.ReLU,
         dilation: int = 1,
         inplace: bool = True,
+        bias: Optional[bool] = None,
     ) -> None:
         if padding is None:
             padding = (kernel_size - 1) // 2 * dilation
+        if bias is None:
+            bias = norm_layer is None
         layers = [
             torch.nn.Conv2d(
                 in_channels,
@@ -143,7 +147,7 @@ def __init__(
                 padding,
                 dilation=dilation,
                 groups=groups,
-                bias=norm_layer is None,
+                bias=bias,
             )
         ]
         if norm_layer is not None:

From 1622d8849ddd09a2068ab3472a93eb40a9678957 Mon Sep 17 00:00:00 2001
From: Nicolas Hug
Date: Tue, 30 Nov 2021 16:32:18 +0000
Subject: [PATCH 2/2] Update torchvision/ops/misc.py

Co-authored-by: Vasilis Vryniotis
---
 torchvision/ops/misc.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/torchvision/ops/misc.py b/torchvision/ops/misc.py
index 857148e4ab2..392517cb772 100644
--- a/torchvision/ops/misc.py
+++ b/torchvision/ops/misc.py
@@ -116,7 +116,7 @@ class ConvNormActivation(torch.nn.Sequential):
         activation_layer (Callable[..., torch.nn.Module], optinal): Activation function which will be stacked on top of the normalization layer (if not None), otherwise on top of the conv layer. If ``None`` this layer wont be used. Default: ``torch.nn.ReLU``
         dilation (int): Spacing between kernel elements. Default: 1
         inplace (bool): Parameter for the activation layer, which can optionally do the operation in-place. Default ``True``
-        bias (bool): Whether to use bias in the ``norm_layer``. By default, biases are included if ``norm_layer is None``.
+        bias (bool, optional): Whether to use bias in the convolution layer. By default, biases are included if ``norm_layer is None``.

     """
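
A minimal usage sketch of the new parameter, not part of the patch itself; it assumes a torchvision build with both patches applied and that ``ConvNormActivation`` keeps its other defaults (such as ``norm_layer=torch.nn.BatchNorm2d``), which are not shown in the diff:

import torch
from torchvision.ops.misc import ConvNormActivation

# Default behaviour is unchanged: with a norm layer present, bias=None
# resolves to bias=False, so the Conv2d carries no bias term (it would
# be cancelled by the normalization anyway).
block = ConvNormActivation(3, 16, kernel_size=3)
assert block[0].bias is None  # block[0] is the Conv2d

# Without a norm layer, bias=None resolves to bias=True.
block = ConvNormActivation(3, 16, kernel_size=3, norm_layer=None)
assert block[0].bias is not None

# The new parameter overrides the heuristic, e.g. to force a bias even
# though a norm layer follows the convolution.
block = ConvNormActivation(3, 16, kernel_size=3, norm_layer=torch.nn.BatchNorm2d, bias=True)
assert block[0].bias is not None

The ``Optional[bool]`` default of ``None`` keeps the change backward compatible: callers who never pass ``bias`` get exactly the old ``norm_layer is None`` behaviour.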