From a5d872ddad0a887470060eb528f04fe71ec67dd3 Mon Sep 17 00:00:00 2001 From: Joao Gomes Date: Thu, 21 Oct 2021 11:12:36 +0100 Subject: [PATCH 1/7] adding efficientnet to prototype --- torchvision/prototype/models/__init__.py | 1 + torchvision/prototype/models/efficientnet.py | 268 +++++++++++++++++++ 2 files changed, 269 insertions(+) create mode 100644 torchvision/prototype/models/efficientnet.py diff --git a/torchvision/prototype/models/__init__.py b/torchvision/prototype/models/__init__.py index fcc4d4609fb..00fcb828d55 100644 --- a/torchvision/prototype/models/__init__.py +++ b/torchvision/prototype/models/__init__.py @@ -2,5 +2,6 @@ from .resnet import * from .densenet import * from .vgg import * +from .efficientnet import * from . import detection from . import quantization diff --git a/torchvision/prototype/models/efficientnet.py b/torchvision/prototype/models/efficientnet.py new file mode 100644 index 00000000000..ae5ebc8c0e5 --- /dev/null +++ b/torchvision/prototype/models/efficientnet.py @@ -0,0 +1,268 @@ +import warnings +from functools import partial +from typing import Any, List, Optional + +from torch import nn + +from ...models.efficientnet import EfficientNet, MBConvConfig, _efficientnet_conf +from ..transforms.presets import ImageNetEval +from ._api import Weights, WeightEntry +from ._meta import _IMAGENET_CATEGORIES + + +__all__ = [ + "EfficientNet", + "EfficientNetB0Weights", + "EfficientNetB1Weights", + "EfficientNetB2Weights", + "EfficientNetB3Weights", + "EfficientNetB4Weights", + "EfficientNetB5Weights", + "EfficientNetB6Weights", + "EfficientNetB7Weights", + "efficientnet_b0", + "efficientnet_b1", + "efficientnet_b2", + "efficientnet_b3", + "efficientnet_b4", + "efficientnet_b5", + "efficientnet_b6", + "efficientnet_b7", +] + + +def _efficientnet( + inverted_residual_setting: List[MBConvConfig], + dropout: float, + weights: Optional[Weights], + progress: bool, + **kwargs: Any, +) -> EfficientNet: + if weights is not None: + kwargs["num_classes"] = len(weights.meta["categories"]) + + model = EfficientNet(inverted_residual_setting, dropout, **kwargs) + + if weights is not None: + model.load_state_dict(weights.state_dict(progress=progress)) + + return model + + +_common_meta = { + "size": (224, 224), + "categories": _IMAGENET_CATEGORIES, +} + + +class EfficientNetB0Weights(Weights): + ImageNet1K_RefV1 = WeightEntry( + url="https://download.pytorch.org/models/efficientnet_b0_rwightman-3dd342df.pth", + transforms=partial(ImageNetEval, crop_size=224), + meta={ + **_common_meta, + "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#efficientnet", + "acc@1": 77.692, + "acc@5": 93.532, + }, + ) + + +class EfficientNetB1Weights(Weights): + ImageNet1K_RefV1 = WeightEntry( + url="https://download.pytorch.org/models/efficientnet_b1_rwightman-533bc792.pth", + transforms=partial(ImageNetEval, crop_size=224), + meta={ + **_common_meta, + "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#efficientnet", + "acc@1": 78.642, + "acc@5": 94.186, + }, + ) + + +class EfficientNetB2Weights(Weights): + ImageNet1K_RefV1 = WeightEntry( + url="https://download.pytorch.org/models/efficientnet_b2_rwightman-bcdf34b7.pth", + transforms=partial(ImageNetEval, crop_size=224), + meta={ + **_common_meta, + "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#efficientnet", + "acc@1": 80.608, + "acc@5": 95.310, + }, + ) + + +class EfficientNetB3Weights(Weights): + ImageNet1K_RefV1 = WeightEntry( + 
url="https://download.pytorch.org/models/efficientnet_b3_rwightman-cf984f9c.pth", + transforms=partial(ImageNetEval, crop_size=224), + meta={ + **_common_meta, + "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#efficientnet", + "acc@1": 82.008, + "acc@5": 96.054, + }, + ) + + +class EfficientNetB4Weights(Weights): + ImageNet1K_RefV1 = WeightEntry( + url="https://download.pytorch.org/models/efficientnet_b4_rwightman-7eb33cd5.pth", + transforms=partial(ImageNetEval, crop_size=224), + meta={ + **_common_meta, + "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#efficientnet", + "acc@1": 83.384, + "acc@5": 96.594, + }, + ) + + +class EfficientNetB5Weights(Weights): + ImageNet1K_RefV1 = WeightEntry( + url="https://download.pytorch.org/models/efficientnet_b5_lukemelas-b6417697.pth", + transforms=partial(ImageNetEval, crop_size=224), + meta={ + **_common_meta, + "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#efficientnet", + "acc@1": 83.444, + "acc@5": 96.628, + }, + ) + + +class EfficientNetB6Weights(Weights): + ImageNet1K_RefV1 = WeightEntry( + url="https://download.pytorch.org/models/efficientnet_b6_lukemelas-c76e70fd.pth", + transforms=partial(ImageNetEval, crop_size=224), + meta={ + **_common_meta, + "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#efficientnet", + "acc@1": 84.008, + "acc@5": 96.916, + }, + ) + + +class EfficientNetB7Weights(Weights): + ImageNet1K_RefV1 = WeightEntry( + url="https://download.pytorch.org/models/efficientnet_b7_lukemelas-dcc49843.pth", + transforms=partial(ImageNetEval, crop_size=224), + meta={ + **_common_meta, + "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#efficientnet", + "acc@1": 84.122, + "acc@5": 96.908, + }, + ) + + +def efficientnet_b0( + weights: Optional[EfficientNetB0Weights] = None, progress: bool = True, **kwargs: Any +) -> EfficientNet: + if "pretrained" in kwargs: + warnings.warn("The argument pretrained is deprecated, please use weights instead.") + weights = EfficientNetB0Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None + weights = EfficientNetB0Weights.verify(weights) + inverted_residual_setting = _efficientnet_conf(width_mult=1.0, depth_mult=1.0, **kwargs) + return _efficientnet(inverted_residual_setting, dropout=0.2, weights=weights, progress=progress, **kwargs) + + +def efficientnet_b1( + weights: Optional[EfficientNetB1Weights] = None, progress: bool = True, **kwargs: Any +) -> EfficientNet: + if "pretrained" in kwargs: + warnings.warn("The argument pretrained is deprecated, please use weights instead.") + weights = EfficientNetB1Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None + weights = EfficientNetB1Weights.verify(weights) + inverted_residual_setting = _efficientnet_conf(width_mult=1.0, depth_mult=1.1, **kwargs) + return _efficientnet(inverted_residual_setting, dropout=0.2, weights=weights, progress=progress, **kwargs) + + +def efficientnet_b2( + weights: Optional[EfficientNetB2Weights] = None, progress: bool = True, **kwargs: Any +) -> EfficientNet: + if "pretrained" in kwargs: + warnings.warn("The argument pretrained is deprecated, please use weights instead.") + weights = EfficientNetB2Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None + weights = EfficientNetB2Weights.verify(weights) + inverted_residual_setting = _efficientnet_conf(width_mult=1.1, depth_mult=1.2, **kwargs) + return _efficientnet(inverted_residual_setting, 
dropout=0.3, weights=weights, progress=progress, **kwargs) + + +def efficientnet_b3( + weights: Optional[EfficientNetB3Weights] = None, progress: bool = True, **kwargs: Any +) -> EfficientNet: + if "pretrained" in kwargs: + warnings.warn("The argument pretrained is deprecated, please use weights instead.") + weights = EfficientNetB3Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None + weights = EfficientNetB3Weights.verify(weights) + inverted_residual_setting = _efficientnet_conf(width_mult=1.2, depth_mult=1.4, **kwargs) + return _efficientnet(inverted_residual_setting, dropout=0.3, weights=weights, progress=progress, **kwargs) + + +def efficientnet_b4( + weights: Optional[EfficientNetB4Weights] = None, progress: bool = True, **kwargs: Any +) -> EfficientNet: + if "pretrained" in kwargs: + warnings.warn("The argument pretrained is deprecated, please use weights instead.") + weights = EfficientNetB4Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None + weights = EfficientNetB4Weights.verify(weights) + inverted_residual_setting = _efficientnet_conf(width_mult=1.4, depth_mult=1.8, **kwargs) + return _efficientnet(inverted_residual_setting, dropout=0.4, weights=weights, progress=progress, **kwargs) + + +def efficientnet_b5( + weights: Optional[EfficientNetB5Weights] = None, progress: bool = True, **kwargs: Any +) -> EfficientNet: + if "pretrained" in kwargs: + warnings.warn("The argument pretrained is deprecated, please use weights instead.") + weights = EfficientNetB5Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None + weights = EfficientNetB5Weights.verify(weights) + inverted_residual_setting = _efficientnet_conf(width_mult=1.6, depth_mult=2.2, **kwargs) + return _efficientnet( + inverted_residual_setting, + dropout=0.4, + weights=weights, + progress=progress, + norm_layer=partial(nn.BatchNorm2d, eps=0.001, momentum=0.01), + **kwargs, + ) + + +def efficientnet_b6( + weights: Optional[EfficientNetB6Weights] = None, progress: bool = True, **kwargs: Any +) -> EfficientNet: + if "pretrained" in kwargs: + warnings.warn("The argument pretrained is deprecated, please use weights instead.") + weights = EfficientNetB6Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None + weights = EfficientNetB6Weights.verify(weights) + inverted_residual_setting = _efficientnet_conf(width_mult=1.8, depth_mult=2.6, **kwargs) + return _efficientnet( + inverted_residual_setting, + dropout=0.5, + weights=weights, + progress=progress, + norm_layer=partial(nn.BatchNorm2d, eps=0.001, momentum=0.01), + **kwargs, + ) + + +def efficientnet_b7( + weights: Optional[EfficientNetB7Weights] = None, progress: bool = True, **kwargs: Any +) -> EfficientNet: + if "pretrained" in kwargs: + warnings.warn("The argument pretrained is deprecated, please use weights instead.") + weights = EfficientNetB7Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None + weights = EfficientNetB7Weights.verify(weights) + inverted_residual_setting = _efficientnet_conf(width_mult=2.0, depth_mult=3.1, **kwargs) + return _efficientnet( + inverted_residual_setting, + dropout=0.5, + weights=weights, + progress=progress, + norm_layer=partial(nn.BatchNorm2d, eps=0.001, momentum=0.01), + **kwargs, + ) From fed500061c872a450fcf83a3c56b1ffa13957651 Mon Sep 17 00:00:00 2001 From: Joao Gomes Date: Thu, 21 Oct 2021 15:41:03 +0100 Subject: [PATCH 2/7] Removing _efficientnet_conf from prototype and main efficientnet classification models --- torchvision/models/efficientnet.py | 54 +++++++++----------- 
torchvision/prototype/models/efficientnet.py | 53 ++++++++++++------- 2 files changed, 57 insertions(+), 50 deletions(-) diff --git a/torchvision/models/efficientnet.py b/torchvision/models/efficientnet.py index b9a5913ea77..b50d0f1d5ac 100644 --- a/torchvision/models/efficientnet.py +++ b/torchvision/models/efficientnet.py @@ -263,7 +263,15 @@ def forward(self, x: Tensor) -> Tensor: return self._forward_impl(x) -def _efficientnet_conf(width_mult: float, depth_mult: float, **kwargs: Any) -> List[MBConvConfig]: +def _efficientnet( + arch: str, + width_mult: float, + depth_mult: float, + dropout: float, + pretrained: bool, + progress: bool, + **kwargs: Any, +) -> EfficientNet: bneck_conf = partial(MBConvConfig, width_mult=width_mult, depth_mult=depth_mult) inverted_residual_setting = [ bneck_conf(1, 3, 1, 32, 16, 1), @@ -274,17 +282,6 @@ def _efficientnet_conf(width_mult: float, depth_mult: float, **kwargs: Any) -> L bneck_conf(6, 5, 2, 112, 192, 4), bneck_conf(6, 3, 1, 192, 320, 1), ] - return inverted_residual_setting - - -def _efficientnet_model( - arch: str, - inverted_residual_setting: List[MBConvConfig], - dropout: float, - pretrained: bool, - progress: bool, - **kwargs: Any, -) -> EfficientNet: model = EfficientNet(inverted_residual_setting, dropout, **kwargs) if pretrained: if model_urls.get(arch, None) is None: @@ -303,8 +300,7 @@ def efficientnet_b0(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr """ - inverted_residual_setting = _efficientnet_conf(width_mult=1.0, depth_mult=1.0, **kwargs) - return _efficientnet_model("efficientnet_b0", inverted_residual_setting, 0.2, pretrained, progress, **kwargs) + return _efficientnet("efficientnet_b0", 1.0, 1.0, 0.2, pretrained, progress, **kwargs) def efficientnet_b1(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> EfficientNet: @@ -316,8 +312,7 @@ def efficientnet_b1(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr """ - inverted_residual_setting = _efficientnet_conf(width_mult=1.0, depth_mult=1.1, **kwargs) - return _efficientnet_model("efficientnet_b1", inverted_residual_setting, 0.2, pretrained, progress, **kwargs) + return _efficientnet("efficientnet_b1", 1.0, 1.1, 0.2, pretrained, progress, **kwargs) def efficientnet_b2(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> EfficientNet: @@ -329,8 +324,7 @@ def efficientnet_b2(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr """ - inverted_residual_setting = _efficientnet_conf(width_mult=1.1, depth_mult=1.2, **kwargs) - return _efficientnet_model("efficientnet_b2", inverted_residual_setting, 0.3, pretrained, progress, **kwargs) + return _efficientnet("efficientnet_b2", 1.1, 1.2, 0.3, pretrained, progress, **kwargs) def efficientnet_b3(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> EfficientNet: @@ -342,8 +336,7 @@ def efficientnet_b3(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr """ - inverted_residual_setting = 
_efficientnet_conf(width_mult=1.2, depth_mult=1.4, **kwargs) - return _efficientnet_model("efficientnet_b3", inverted_residual_setting, 0.3, pretrained, progress, **kwargs) + return _efficientnet("efficientnet_b3", 1.2, 1.4, 0.3, pretrained, progress, **kwargs) def efficientnet_b4(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> EfficientNet: @@ -355,8 +348,7 @@ def efficientnet_b4(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr """ - inverted_residual_setting = _efficientnet_conf(width_mult=1.4, depth_mult=1.8, **kwargs) - return _efficientnet_model("efficientnet_b4", inverted_residual_setting, 0.4, pretrained, progress, **kwargs) + return _efficientnet("efficientnet_b4", 1.4, 1.8, 0.4, pretrained, progress, **kwargs) def efficientnet_b5(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> EfficientNet: @@ -368,10 +360,10 @@ def efficientnet_b5(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr """ - inverted_residual_setting = _efficientnet_conf(width_mult=1.6, depth_mult=2.2, **kwargs) - return _efficientnet_model( + return _efficientnet( "efficientnet_b5", - inverted_residual_setting, + 1.6, + 2.2, 0.4, pretrained, progress, @@ -389,10 +381,10 @@ def efficientnet_b6(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr """ - inverted_residual_setting = _efficientnet_conf(width_mult=1.8, depth_mult=2.6, **kwargs) - return _efficientnet_model( + return _efficientnet( "efficientnet_b6", - inverted_residual_setting, + 1.8, + 2.6, 0.5, pretrained, progress, @@ -410,10 +402,10 @@ def efficientnet_b7(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr """ - inverted_residual_setting = _efficientnet_conf(width_mult=2.0, depth_mult=3.1, **kwargs) - return _efficientnet_model( + return _efficientnet( "efficientnet_b7", - inverted_residual_setting, + 2.0, + 3.1, 0.5, pretrained, progress, diff --git a/torchvision/prototype/models/efficientnet.py b/torchvision/prototype/models/efficientnet.py index ae5ebc8c0e5..f5a70a418a8 100644 --- a/torchvision/prototype/models/efficientnet.py +++ b/torchvision/prototype/models/efficientnet.py @@ -1,10 +1,10 @@ import warnings from functools import partial -from typing import Any, List, Optional +from typing import Any, Optional from torch import nn -from ...models.efficientnet import EfficientNet, MBConvConfig, _efficientnet_conf +from ...models.efficientnet import EfficientNet, MBConvConfig from ..transforms.presets import ImageNetEval from ._api import Weights, WeightEntry from ._meta import _IMAGENET_CATEGORIES @@ -32,7 +32,8 @@ def _efficientnet( - inverted_residual_setting: List[MBConvConfig], + width_mult: float, + depth_mult: float, dropout: float, weights: Optional[Weights], progress: bool, @@ -41,6 +42,17 @@ def _efficientnet( if weights is not None: kwargs["num_classes"] = len(weights.meta["categories"]) + bneck_conf = partial(MBConvConfig, width_mult=width_mult, depth_mult=depth_mult) + inverted_residual_setting = 
[ + bneck_conf(1, 3, 1, 32, 16, 1), + bneck_conf(6, 3, 2, 16, 24, 2), + bneck_conf(6, 5, 2, 24, 40, 2), + bneck_conf(6, 3, 2, 40, 80, 3), + bneck_conf(6, 5, 1, 80, 112, 3), + bneck_conf(6, 5, 2, 112, 192, 4), + bneck_conf(6, 3, 1, 192, 320, 1), + ] + model = EfficientNet(inverted_residual_setting, dropout, **kwargs) if weights is not None: @@ -166,8 +178,8 @@ def efficientnet_b0( warnings.warn("The argument pretrained is deprecated, please use weights instead.") weights = EfficientNetB0Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None weights = EfficientNetB0Weights.verify(weights) - inverted_residual_setting = _efficientnet_conf(width_mult=1.0, depth_mult=1.0, **kwargs) - return _efficientnet(inverted_residual_setting, dropout=0.2, weights=weights, progress=progress, **kwargs) + + return _efficientnet(width_mult=1.0, depth_mult=1.0, dropout=0.2, weights=weights, progress=progress, **kwargs) def efficientnet_b1( @@ -177,8 +189,8 @@ def efficientnet_b1( warnings.warn("The argument pretrained is deprecated, please use weights instead.") weights = EfficientNetB1Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None weights = EfficientNetB1Weights.verify(weights) - inverted_residual_setting = _efficientnet_conf(width_mult=1.0, depth_mult=1.1, **kwargs) - return _efficientnet(inverted_residual_setting, dropout=0.2, weights=weights, progress=progress, **kwargs) + + return _efficientnet(width_mult=1.0, depth_mult=1.1, dropout=0.2, weights=weights, progress=progress, **kwargs) def efficientnet_b2( @@ -188,8 +200,8 @@ def efficientnet_b2( warnings.warn("The argument pretrained is deprecated, please use weights instead.") weights = EfficientNetB2Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None weights = EfficientNetB2Weights.verify(weights) - inverted_residual_setting = _efficientnet_conf(width_mult=1.1, depth_mult=1.2, **kwargs) - return _efficientnet(inverted_residual_setting, dropout=0.3, weights=weights, progress=progress, **kwargs) + + return _efficientnet(width_mult=1.1, depth_mult=1.2, dropout=0.3, weights=weights, progress=progress, **kwargs) def efficientnet_b3( @@ -199,8 +211,8 @@ def efficientnet_b3( warnings.warn("The argument pretrained is deprecated, please use weights instead.") weights = EfficientNetB3Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None weights = EfficientNetB3Weights.verify(weights) - inverted_residual_setting = _efficientnet_conf(width_mult=1.2, depth_mult=1.4, **kwargs) - return _efficientnet(inverted_residual_setting, dropout=0.3, weights=weights, progress=progress, **kwargs) + + return _efficientnet(width_mult=1.2, depth_mult=1.4, dropout=0.3, weights=weights, progress=progress, **kwargs) def efficientnet_b4( @@ -210,8 +222,8 @@ def efficientnet_b4( warnings.warn("The argument pretrained is deprecated, please use weights instead.") weights = EfficientNetB4Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None weights = EfficientNetB4Weights.verify(weights) - inverted_residual_setting = _efficientnet_conf(width_mult=1.4, depth_mult=1.8, **kwargs) - return _efficientnet(inverted_residual_setting, dropout=0.4, weights=weights, progress=progress, **kwargs) + + return _efficientnet(width_mult=1.4, depth_mult=1.8, dropout=0.4, weights=weights, progress=progress, **kwargs) def efficientnet_b5( @@ -221,9 +233,10 @@ def efficientnet_b5( warnings.warn("The argument pretrained is deprecated, please use weights instead.") weights = EfficientNetB5Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None weights = 
EfficientNetB5Weights.verify(weights) - inverted_residual_setting = _efficientnet_conf(width_mult=1.6, depth_mult=2.2, **kwargs) + return _efficientnet( - inverted_residual_setting, + width_mult=1.6, + depth_mult=2.2, dropout=0.4, weights=weights, progress=progress, @@ -239,9 +252,10 @@ def efficientnet_b6( warnings.warn("The argument pretrained is deprecated, please use weights instead.") weights = EfficientNetB6Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None weights = EfficientNetB6Weights.verify(weights) - inverted_residual_setting = _efficientnet_conf(width_mult=1.8, depth_mult=2.6, **kwargs) + return _efficientnet( - inverted_residual_setting, + width_mult=1.8, + depth_mult=2.6, dropout=0.5, weights=weights, progress=progress, @@ -257,9 +271,10 @@ def efficientnet_b7( warnings.warn("The argument pretrained is deprecated, please use weights instead.") weights = EfficientNetB7Weights.ImageNet1K_RefV1 if kwargs.pop("pretrained") else None weights = EfficientNetB7Weights.verify(weights) - inverted_residual_setting = _efficientnet_conf(width_mult=2.0, depth_mult=3.1, **kwargs) + return _efficientnet( - inverted_residual_setting, + width_mult=2.0, + depth_mult=3.1, dropout=0.5, weights=weights, progress=progress, From 3dac3fcb3012eedb41581eee3e848d3b4d625932 Mon Sep 17 00:00:00 2001 From: Joao Gomes Date: Fri, 22 Oct 2021 09:56:31 +0100 Subject: [PATCH 3/7] fixing merge conflicts --- torchvision/prototype/models/efficientnet.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/torchvision/prototype/models/efficientnet.py b/torchvision/prototype/models/efficientnet.py index 2bff713001e..7155b702d38 100644 --- a/torchvision/prototype/models/efficientnet.py +++ b/torchvision/prototype/models/efficientnet.py @@ -185,7 +185,7 @@ def efficientnet_b0( weights = EfficientNetB0Weights.ImageNet1K_TimmV1 if kwargs.pop("pretrained") else None weights = EfficientNetB0Weights.verify(weights) return _efficientnet(width_mult=1.0, depth_mult=1.0, dropout=0.2, weights=weights, progress=progress, **kwargs) - + def efficientnet_b1( weights: Optional[EfficientNetB1Weights] = None, progress: bool = True, **kwargs: Any From 82d62ffae09c743d0ba2117c3349b9cd0db48534 Mon Sep 17 00:00:00 2001 From: Joao Gomes Date: Fri, 22 Oct 2021 09:59:47 +0100 Subject: [PATCH 4/7] fixing merge conflicts --- torchvision/models/efficientnet.py | 51 ------------------------------ 1 file changed, 51 deletions(-) diff --git a/torchvision/models/efficientnet.py b/torchvision/models/efficientnet.py index 2260c055812..b50d0f1d5ac 100644 --- a/torchvision/models/efficientnet.py +++ b/torchvision/models/efficientnet.py @@ -282,20 +282,6 @@ def _efficientnet( bneck_conf(6, 5, 2, 112, 192, 4), bneck_conf(6, 3, 1, 192, 320, 1), ] -<<<<<<< HEAD -======= - return inverted_residual_setting - - -def _efficientnet( - arch: str, - inverted_residual_setting: List[MBConvConfig], - dropout: float, - pretrained: bool, - progress: bool, - **kwargs: Any, -) -> EfficientNet: ->>>>>>> main model = EfficientNet(inverted_residual_setting, dropout, **kwargs) if pretrained: if model_urls.get(arch, None) is None: @@ -314,12 +300,7 @@ def efficientnet_b0(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr """ -<<<<<<< HEAD return _efficientnet("efficientnet_b0", 1.0, 1.0, 0.2, pretrained, progress, **kwargs) -======= - inverted_residual_setting = _efficientnet_conf(width_mult=1.0, 
depth_mult=1.0, **kwargs) - return _efficientnet("efficientnet_b0", inverted_residual_setting, 0.2, pretrained, progress, **kwargs) ->>>>>>> main def efficientnet_b1(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> EfficientNet: @@ -331,12 +312,7 @@ def efficientnet_b1(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr """ -<<<<<<< HEAD return _efficientnet("efficientnet_b1", 1.0, 1.1, 0.2, pretrained, progress, **kwargs) -======= - inverted_residual_setting = _efficientnet_conf(width_mult=1.0, depth_mult=1.1, **kwargs) - return _efficientnet("efficientnet_b1", inverted_residual_setting, 0.2, pretrained, progress, **kwargs) ->>>>>>> main def efficientnet_b2(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> EfficientNet: @@ -348,12 +324,7 @@ def efficientnet_b2(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr """ -<<<<<<< HEAD return _efficientnet("efficientnet_b2", 1.1, 1.2, 0.3, pretrained, progress, **kwargs) -======= - inverted_residual_setting = _efficientnet_conf(width_mult=1.1, depth_mult=1.2, **kwargs) - return _efficientnet("efficientnet_b2", inverted_residual_setting, 0.3, pretrained, progress, **kwargs) ->>>>>>> main def efficientnet_b3(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> EfficientNet: @@ -365,12 +336,7 @@ def efficientnet_b3(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr """ -<<<<<<< HEAD return _efficientnet("efficientnet_b3", 1.2, 1.4, 0.3, pretrained, progress, **kwargs) -======= - inverted_residual_setting = _efficientnet_conf(width_mult=1.2, depth_mult=1.4, **kwargs) - return _efficientnet("efficientnet_b3", inverted_residual_setting, 0.3, pretrained, progress, **kwargs) ->>>>>>> main def efficientnet_b4(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> EfficientNet: @@ -382,12 +348,7 @@ def efficientnet_b4(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr """ -<<<<<<< HEAD return _efficientnet("efficientnet_b4", 1.4, 1.8, 0.4, pretrained, progress, **kwargs) -======= - inverted_residual_setting = _efficientnet_conf(width_mult=1.4, depth_mult=1.8, **kwargs) - return _efficientnet("efficientnet_b4", inverted_residual_setting, 0.4, pretrained, progress, **kwargs) ->>>>>>> main def efficientnet_b5(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> EfficientNet: @@ -399,10 +360,6 @@ def efficientnet_b5(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr """ -<<<<<<< HEAD -======= - inverted_residual_setting = _efficientnet_conf(width_mult=1.6, depth_mult=2.2, **kwargs) ->>>>>>> main return _efficientnet( "efficientnet_b5", 1.6, @@ -424,10 +381,6 @@ def efficientnet_b6(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, 
displays a progress bar of the download to stderr """ -<<<<<<< HEAD -======= - inverted_residual_setting = _efficientnet_conf(width_mult=1.8, depth_mult=2.6, **kwargs) ->>>>>>> main return _efficientnet( "efficientnet_b6", 1.8, @@ -449,10 +402,6 @@ def efficientnet_b7(pretrained: bool = False, progress: bool = True, **kwargs: A pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr """ -<<<<<<< HEAD -======= - inverted_residual_setting = _efficientnet_conf(width_mult=2.0, depth_mult=3.1, **kwargs) ->>>>>>> main return _efficientnet( "efficientnet_b7", 2.0, From 8323d2e4a2376eef0717a273e32209f19404fcae Mon Sep 17 00:00:00 2001 From: Joao Gomes Date: Fri, 22 Oct 2021 10:15:50 +0100 Subject: [PATCH 5/7] fixing lint errors --- torchvision/models/efficientnet.py | 2 +- torchvision/prototype/models/efficientnet.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/torchvision/models/efficientnet.py b/torchvision/models/efficientnet.py index b50d0f1d5ac..7d147bd75a6 100644 --- a/torchvision/models/efficientnet.py +++ b/torchvision/models/efficientnet.py @@ -1,7 +1,7 @@ import copy import math from functools import partial -from typing import Any, Callable, List, Optional, Sequence +from typing import Any, Callable, Optional, Sequence import torch from torch import nn, Tensor diff --git a/torchvision/prototype/models/efficientnet.py b/torchvision/prototype/models/efficientnet.py index 7155b702d38..472e390c3ce 100644 --- a/torchvision/prototype/models/efficientnet.py +++ b/torchvision/prototype/models/efficientnet.py @@ -1,6 +1,6 @@ import warnings from functools import partial -from typing import Any, List, Optional +from typing import Any, Optional from torch import nn from torchvision.transforms.functional import InterpolationMode From 7ea8819cab585dde3f41f28d76c4ada70b8c632a Mon Sep 17 00:00:00 2001 From: Joao Gomes Date: Fri, 22 Oct 2021 10:28:09 +0100 Subject: [PATCH 6/7] fixing lint errors --- torchvision/models/efficientnet.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/torchvision/models/efficientnet.py b/torchvision/models/efficientnet.py index 7d147bd75a6..34c4deba5ca 100644 --- a/torchvision/models/efficientnet.py +++ b/torchvision/models/efficientnet.py @@ -1,7 +1,7 @@ import copy import math from functools import partial -from typing import Any, Callable, Optional, Sequence +from typing import Any, Callable, Optional, List import torch from torch import nn, Tensor From 4212a1c63129533e95d762a32716a6775bd2d642 Mon Sep 17 00:00:00 2001 From: Joao Gomes Date: Fri, 22 Oct 2021 10:49:00 +0100 Subject: [PATCH 7/7] fixing lint errors --- torchvision/models/efficientnet.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/torchvision/models/efficientnet.py b/torchvision/models/efficientnet.py index 34c4deba5ca..d8356d83748 100644 --- a/torchvision/models/efficientnet.py +++ b/torchvision/models/efficientnet.py @@ -1,7 +1,7 @@ import copy import math from functools import partial -from typing import Any, Callable, Optional, List +from typing import Any, Callable, Optional, List, Sequence import torch from torch import nn, Tensor
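
A minimal usage sketch of the prototype builders introduced in this series, assuming the torchvision.prototype.models layout added in PATCH 1/7 and the weights-enum API shown in the diffs. The exact enum member name (ImageNet1K_RefV1 in PATCH 1/7, ImageNet1K_TimmV1 as referenced in the PATCH 3/7 context) and the transforms accessor on the entry are assumptions inferred from the diffs, not guarantees of this series.

    # Usage sketch only; names below follow the diffs above and may differ after later renames.
    from torchvision.prototype import models as PM

    # New-style call: pass a weights enum entry instead of pretrained=True.
    weights = PM.EfficientNetB0Weights.ImageNet1K_RefV1  # ImageNet1K_TimmV1 after the rename seen in PATCH 3/7
    model = PM.efficientnet_b0(weights=weights)
    model.eval()

    # The matching eval preprocessing is carried on the entry itself; the accessor is an
    # assumption based on WeightEntry(transforms=partial(ImageNetEval, crop_size=224), ...) above.
    preprocess = weights.transforms()

    # The legacy keyword still works, but the builders warn and map it onto the default entry.
    legacy_model = PM.efficientnet_b0(pretrained=True)

The design choice visible in the diffs is that each builder verifies the supplied entry (EfficientNetB0Weights.verify) and derives num_classes from the entry's categories metadata, so callers no longer pass pretrained flags or class counts separately.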