resnet34.py

import torch
import torch.nn as nn
from torch.nn import functional as F


class ResidualBlock(nn.Module):
    # Sub-module: the basic residual block used by ResNet-34
    def __init__(self, in_ch, out_ch, stride=1, shortcut=None):
        super(ResidualBlock, self).__init__()
        self.left = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, 3, stride, padding=1, bias=False),
            nn.BatchNorm2d(out_ch),
            nn.ReLU(inplace=True),  # inplace=True modifies the tensor in place to save memory
            nn.Conv2d(out_ch, out_ch, 3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(out_ch)
        )
        self.right = shortcut

    def forward(self, x):
        out = self.left(x)
        residual = x if self.right is None else self.right(x)
        out += residual
        return F.relu(out)
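
# A quick shape sanity check for the block (an illustrative sketch, not part of
# the original file; shapes assume a 56x56 feature map as in layer1 below):
#
#   blk = ResidualBlock(64, 64)                       # identity shortcut
#   blk(torch.randn(1, 64, 56, 56)).shape             # -> torch.Size([1, 64, 56, 56])
#
#   down = nn.Sequential(nn.Conv2d(64, 128, 1, 2, bias=False), nn.BatchNorm2d(128))
#   blk2 = ResidualBlock(64, 128, stride=2, shortcut=down)
#   blk2(torch.randn(1, 64, 56, 56)).shape            # -> torch.Size([1, 128, 28, 28])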

class ResNet34(nn.Module):  # expects 3x224x224 input
    # Main module: ResNet-34, assembled from ResidualBlocks
    def __init__(self, num_classes=1):
        super(ResNet34, self).__init__()
        self.pre = nn.Sequential(
            nn.Conv2d(3, 64, 7, stride=2, padding=3, bias=False),  # floor((224 + 2*3 - 7)/2) + 1 = 112: spatial size halves -> 112
            nn.BatchNorm2d(64),  # 112x112x64
            nn.ReLU(inplace=True),
            nn.MaxPool2d(3, 2, 1)  # kernel_size=3, stride=2, padding=1
        )  # 56x56x64
        # Repeated layers containing 3, 4, 6, and 3 residual blocks respectively
        self.layer1 = self.make_layer(64, 64, 3)  # 56x56x64; channels are unchanged, so its blocks keep identity shortcuts
        self.layer2 = self.make_layer(64, 128, 4, stride=2)  # first block uses stride=2, the remaining 3 use stride=1; 28x28x128
        self.layer3 = self.make_layer(128, 256, 6, stride=2)  # 14x14x256
        self.layer4 = self.make_layer(256, 512, 3, stride=2)  # 7x7x512
        # Fully connected layer for classification
        self.fc = nn.Linear(512, num_classes)
    def make_layer(self, in_ch, out_ch, block_num, stride=1):
        # When the channel count or spatial size changes, handle the first
        # block's shortcut with option B: a 1x1 convolution that matches the
        # dimensions (stride=2 halves the size; bias omitted for simplicity).
        # Otherwise the identity shortcut is used.
        shortcut = None
        if stride != 1 or in_ch != out_ch:
            shortcut = nn.Sequential(
                nn.Conv2d(in_ch, out_ch, 1, stride, bias=False),
                nn.BatchNorm2d(out_ch)
            )
        layers = []
        layers.append(ResidualBlock(in_ch, out_ch, stride, shortcut))
        for _ in range(1, block_num):
            layers.append(ResidualBlock(out_ch, out_ch))  # the remaining blocks add the identity shortcut directly
        return nn.Sequential(*layers)
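
    # For example (a sketch of what the call above produces):
    # make_layer(64, 128, 4, stride=2) yields one ResidualBlock(64, 128,
    # stride=2, shortcut=1x1 conv) followed by three ResidualBlock(128, 128)
    # with identity shortcuts.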
    def forward(self, x):  # 224x224x3
        x = self.pre(x)  # 56x56x64
        x = self.layer1(x)  # 56x56x64
        x = self.layer2(x)  # 28x28x128
        x = self.layer3(x)  # 14x14x256
        x = self.layer4(x)  # 7x7x512
        x = F.avg_pool2d(x, 7)  # 1x1x512
        x = x.view(x.size(0), -1)  # flatten the output to one row per sample: (batch, 512)
        x = self.fc(x)  # (batch, num_classes)
        # nn.BCELoss is the cross-entropy for binary classification; it expects
        # probabilities, so a Sigmoid must be applied before it (alternatively,
        # return raw logits and use nn.BCEWithLogitsLoss).
        return torch.sigmoid(x)  # squash the result into (0, 1)
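

# Minimal smoke test (an illustrative sketch, not part of the original file):
# builds the model and pushes a random batch through it; the output should have
# shape (2, 1) with values in (0, 1) because of the final sigmoid.
if __name__ == "__main__":
    model = ResNet34(num_classes=1)
    dummy = torch.randn(2, 3, 224, 224)  # a batch of 2 random RGB images
    out = model(dummy)
    print(out.shape)  # torch.Size([2, 1])
    print(out.min().item(), out.max().item())  # both within (0, 1)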