-
Notifications
You must be signed in to change notification settings - Fork 37
/
channel_attention_layer.py
98 lines (82 loc) · 3.51 KB
/
channel_attention_layer.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
import torch.nn as nn
# SE (squeeze-and-excitation) block added to U-Net
def conv3x3(in_planes, out_planes, stride=1, bias=False, group=1):
    """Return a 3x3 ``nn.Conv2d`` with padding 1 (spatial size preserved at stride 1)."""
    opts = {
        "kernel_size": 3,
        "stride": stride,
        "padding": 1,
        "groups": group,
        "bias": bias,
    }
    return nn.Conv2d(in_planes, out_planes, **opts)
class SE_Conv_Block(nn.Module):
    """Squeeze-and-Excitation residual conv block (SE block added to U-Net).

    ``conv1`` maps ``inplanes -> planes``, ``conv2`` doubles to ``planes * 2``,
    a dual channel-attention branch (global average-pool and global max-pool
    squeeze, each passed through the shared ``fc1``/``fc2`` excitation MLP)
    reweights the ``planes * 2`` feature maps, a residual is added, and
    ``conv3`` projects back down to ``planes`` channels.

    ``forward`` returns a tuple ``(out, att_weight)`` where ``att_weight`` is
    the sum of the average- and max-pool attention maps.
    """

    expansion = 4  # kept for API compatibility; not referenced inside the block

    def __init__(self, inplanes, planes, stride=1, downsample=None, drop_out=False):
        """
        Args:
            inplanes: number of input channels.
            planes: base channel count (block output has ``planes`` channels).
            stride: stride of the first 3x3 conv (and of the residual 1x1 conv).
            downsample: stored for API compatibility; not used in ``forward``.
            drop_out: if True, apply 2D dropout (p=0.5) to the block output.
        """
        super(SE_Conv_Block, self).__init__()
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes * 2)
        self.bn2 = nn.BatchNorm2d(planes * 2)
        self.conv3 = conv3x3(planes * 2, planes)
        self.bn3 = nn.BatchNorm2d(planes)
        self.downsample = downsample
        self.stride = stride
        self.dropout = drop_out
        # Registered as a submodule so model.eval() disables it.  (Building
        # nn.Dropout2d inside forward(), as the original did, leaves it
        # permanently in training mode, so dropout also fired at inference.)
        self.drop = nn.Dropout2d(0.5)
        # Full-spatial "squeeze" pooling.  Adaptive pooling generalizes the
        # original hard-coded ISIC2018 window sizes ((224, 300), (112, 150),
        # (56, 75), (28, 37), (14, 18)) to any input resolution -- it is
        # numerically identical when the input matches those sizes -- and
        # fixes the crash for planes > 256, where no pool was ever assigned.
        self.globalAvgPool = nn.AdaptiveAvgPool2d(1)
        self.globalMaxPool = nn.AdaptiveMaxPool2d(1)
        # Excitation MLP, shared between the avg- and max-pool branches.
        self.fc1 = nn.Linear(in_features=planes * 2, out_features=round(planes / 2))
        self.fc2 = nn.Linear(in_features=round(planes / 2), out_features=planes * 2)
        self.sigmoid = nn.Sigmoid()
        self.downchannel = None
        if inplanes != planes:
            # 1x1 projection so the residual matches the planes*2 channels of
            # the attention branch.
            # NOTE(review): when inplanes == planes the residual keeps
            # `inplanes` channels and cannot be added to the planes*2-channel
            # branch output -- presumably callers never hit that case; verify.
            self.downchannel = nn.Sequential(
                nn.Conv2d(inplanes, planes * 2, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * 2),
            )

    def forward(self, x):
        """Apply the block.

        Args:
            x: input feature map of shape ``(N, inplanes, H, W)``.

        Returns:
            Tuple ``(out, att_weight)``: the ``(N, planes, H', W')`` output
            and the ``(N, planes * 2, 1, 1)`` summed channel-attention map.
        """
        residual = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)

        if self.downchannel is not None:
            residual = self.downchannel(x)

        original_out = out
        out1 = out

        # Channel attention via global average pooling.
        out = self.globalAvgPool(out)
        out = out.view(out.size(0), -1)
        out = self.fc1(out)
        out = self.relu(out)
        out = self.fc2(out)
        out = self.sigmoid(out)
        out = out.view(out.size(0), out.size(1), 1, 1)
        avg_att = out
        out = out * original_out

        # Channel attention via global max pooling (same fc1/fc2 weights).
        out1 = self.globalMaxPool(out1)
        out1 = out1.view(out1.size(0), -1)
        out1 = self.fc1(out1)
        out1 = self.relu(out1)
        out1 = self.fc2(out1)
        out1 = self.sigmoid(out1)
        out1 = out1.view(out1.size(0), out1.size(1), 1, 1)
        max_att = out1
        out1 = out1 * original_out

        att_weight = avg_att + max_att
        out += out1
        out += residual
        out = self.relu(out)

        out = self.conv3(out)
        out = self.bn3(out)
        out = self.relu(out)

        if self.dropout:
            out = self.drop(out)

        return out, att_weight