
Commit

Forward parameters to efficientnet
BloodAxe committed Oct 3, 2019
1 parent e3a9041 commit 00d3d04
Showing 1 changed file with 7 additions and 7 deletions.
14 changes: 7 additions & 7 deletions pytorch_toolbelt/modules/encoders.py
@@ -748,7 +748,7 @@ def __init__(self, layers=[1, 2, 4, 6], **kwargs):
 class EfficientNetB1Encoder(EfficientNetEncoder):
     def __init__(self, layers=[1, 2, 4, 6], **kwargs):
         super().__init__(
-            efficient_net_b1(num_classes=1),
+            efficient_net_b1(num_classes=1, **kwargs),
             [16, 24, 40, 80, 112, 192, 320],
             [2, 4, 8, 16, 16, 32, 32],
             layers,
@@ -758,7 +758,7 @@ def __init__(self, layers=[1, 2, 4, 6], **kwargs):
 class EfficientNetB2Encoder(EfficientNetEncoder):
     def __init__(self, layers=[1, 2, 4, 6], **kwargs):
         super().__init__(
-            efficient_net_b2(num_classes=1),
+            efficient_net_b2(num_classes=1, **kwargs),
             [16, 24, 48, 88, 120, 208, 352],
             [2, 4, 8, 16, 16, 32, 32],
             layers,
@@ -768,7 +768,7 @@ def __init__(self, layers=[1, 2, 4, 6], **kwargs):
 class EfficientNetB3Encoder(EfficientNetEncoder):
     def __init__(self, layers=[1, 2, 4, 6], **kwargs):
         super().__init__(
-            efficient_net_b3(num_classes=1),
+            efficient_net_b3(num_classes=1, **kwargs),
             [24, 32, 48, 96, 136, 232, 384],
             [2, 4, 8, 16, 16, 32, 32],
             layers,
@@ -778,7 +778,7 @@ def __init__(self, layers=[1, 2, 4, 6], **kwargs):
 class EfficientNetB4Encoder(EfficientNetEncoder):
     def __init__(self, layers=[1, 2, 4, 6], **kwargs):
         super().__init__(
-            efficient_net_b4(num_classes=1),
+            efficient_net_b4(num_classes=1, **kwargs),
             [24, 32, 56, 112, 160, 272, 448],
             [2, 4, 8, 16, 16, 32, 32],
             layers,
@@ -788,7 +788,7 @@ def __init__(self, layers=[1, 2, 4, 6], **kwargs):
 class EfficientNetB5Encoder(EfficientNetEncoder):
     def __init__(self, layers=[1, 2, 4, 6], **kwargs):
         super().__init__(
-            efficient_net_b5(num_classes=1),
+            efficient_net_b5(num_classes=1, **kwargs),
             [24, 40, 64, 128, 176, 304, 512],
             [2, 4, 8, 16, 16, 32, 32],
             layers,
@@ -798,7 +798,7 @@ def __init__(self, layers=[1, 2, 4, 6], **kwargs):
 class EfficientNetB6Encoder(EfficientNetEncoder):
     def __init__(self, layers=[1, 2, 4, 6], **kwargs):
         super().__init__(
-            efficient_net_b6(num_classes=1),
+            efficient_net_b6(num_classes=1, **kwargs),
             [32, 40, 72, 144, 200, 344, 576],
             [2, 4, 8, 16, 16, 32, 32],
             layers,
@@ -808,7 +808,7 @@ def __init__(self, layers=[1, 2, 4, 6], **kwargs):
 class EfficientNetB7Encoder(EfficientNetEncoder):
     def __init__(self, layers=[1, 2, 4, 6], **kwargs):
         super().__init__(
-            efficient_net_b7(num_classes=1),
+            efficient_net_b7(num_classes=1, **kwargs),
             [32, 48, 80, 160, 224, 384, 640],
             [2, 4, 8, 16, 16, 32, 32],
             layers,
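For context, the change means keyword arguments passed to an encoder wrapper now reach the underlying efficient_net_b1..b7 factory instead of being silently dropped. Below is a minimal, self-contained sketch of that pattern; make_backbone and drop_rate are illustrative stand-ins, not names taken from the commit or confirmed against the library's factory signatures.

def make_backbone(num_classes=1000, drop_rate=0.0):
    # Stand-in for efficient_net_b1..b7; records the settings it receives.
    return {"num_classes": num_classes, "drop_rate": drop_rate}

class EncoderBefore:
    def __init__(self, layers=(1, 2, 4, 6), **kwargs):
        # Old behaviour: **kwargs accepted but never used, so drop_rate is ignored.
        self.backbone = make_backbone(num_classes=1)

class EncoderAfter:
    def __init__(self, layers=(1, 2, 4, 6), **kwargs):
        # New behaviour: **kwargs forwarded, mirroring efficient_net_bN(num_classes=1, **kwargs).
        self.backbone = make_backbone(num_classes=1, **kwargs)

print(EncoderBefore(drop_rate=0.2).backbone)  # {'num_classes': 1, 'drop_rate': 0.0}
print(EncoderAfter(drop_rate=0.2).backbone)   # {'num_classes': 1, 'drop_rate': 0.2}

The practical effect is that callers can configure the backbone (for example its normalization or dropout settings, whatever the factories actually expose) without modifying the per-variant encoder wrappers.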
