From ed5961bbe4e1c5c39760c78613d1bd5af1342969 Mon Sep 17 00:00:00 2001
From: Vasilis Vryniotis
Date: Mon, 15 Feb 2021 11:14:47 +0000
Subject: [PATCH 1/2] Avoid freezing bn1 if all layers are trainable.

---
 torchvision/models/detection/backbone_utils.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/torchvision/models/detection/backbone_utils.py b/torchvision/models/detection/backbone_utils.py
index 4781c912d52..d99793b0773 100644
--- a/torchvision/models/detection/backbone_utils.py
+++ b/torchvision/models/detection/backbone_utils.py
@@ -96,6 +96,8 @@ def resnet_fpn_backbone(
     # select layers that wont be frozen
     assert 0 <= trainable_layers <= 5
     layers_to_train = ['layer4', 'layer3', 'layer2', 'layer1', 'conv1'][:trainable_layers]
+    if trainable_layers == 5:
+        layers_to_train.append('bn1')
     # freeze layers only if pretrained backbone is used
     for name, parameter in backbone.named_parameters():
         if all([not name.startswith(layer) for layer in layers_to_train]):

From a2644df658e6ce43d4a98e0dc8e563aa26dfe1d0 Mon Sep 17 00:00:00 2001
From: Vasilis Vryniotis
Date: Mon, 15 Feb 2021 14:22:52 +0000
Subject: [PATCH 2/2] Remove misleading comments.

---
 torchvision/models/detection/backbone_utils.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/torchvision/models/detection/backbone_utils.py b/torchvision/models/detection/backbone_utils.py
index d99793b0773..d662de8078a 100644
--- a/torchvision/models/detection/backbone_utils.py
+++ b/torchvision/models/detection/backbone_utils.py
@@ -98,7 +98,6 @@ def resnet_fpn_backbone(
     layers_to_train = ['layer4', 'layer3', 'layer2', 'layer1', 'conv1'][:trainable_layers]
     if trainable_layers == 5:
         layers_to_train.append('bn1')
-    # freeze layers only if pretrained backbone is used
     for name, parameter in backbone.named_parameters():
         if all([not name.startswith(layer) for layer in layers_to_train]):
             parameter.requires_grad_(False)
@@ -154,7 +153,6 @@ def mobilenet_backbone(
     assert 0 <= trainable_layers <= num_stages
     freeze_before = num_stages if trainable_layers == 0 else stage_indices[num_stages - trainable_layers]
 
-    # freeze layers only if pretrained backbone is used
     for b in backbone[:freeze_before]:
         for parameter in b.parameters():
             parameter.requires_grad_(False)
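
Below is a minimal sketch (not part of the patches above) of how the bn1 change can be observed. It assumes a torchvision build that includes these patches and passes a regular torch.nn.BatchNorm2d as norm_layer, since the default FrozenBatchNorm2d keeps its weights as buffers rather than parameters:

    import torch
    from torchvision.models.detection.backbone_utils import resnet_fpn_backbone

    # Build a ResNet-FPN backbone with every layer trainable. With the patch
    # applied, 'bn1' is appended to layers_to_train, so its parameters are no
    # longer frozen when trainable_layers == 5.
    backbone = resnet_fpn_backbone(
        'resnet18',
        pretrained=False,
        norm_layer=torch.nn.BatchNorm2d,
        trainable_layers=5,
    )

    # The stem parameters (conv1.*, bn1.*) should now report requires_grad=True.
    for name, parameter in backbone.body.named_parameters():
        if name.startswith(('conv1', 'bn1')):
            print(name, parameter.requires_grad)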