-
Notifications
You must be signed in to change notification settings - Fork 25
/
model.py
57 lines (49 loc) · 2.1 KB
/
model.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
import torch
from torchvision import models
import torch.nn as nn
from torchvision.models.feature_extraction import create_feature_extractor
class _CutPasteNetBase(nn.Module):
    """Encoder backbone + MLP projection head for CutPaste-style training.

    forward outputs: logits

    Args:
        encoder: name of a ``torchvision.models`` constructor (e.g. ``'resnet18'``).
        pretrained: whether to load pretrained backbone weights.
        dims: projection-head widths; ``dims[:-1]`` are the hidden widths
            (each gets Linear -> BatchNorm1d -> ReLU), ``dims[-1]`` is the
            embedding size. The hidden widths are assumed equal to each other
            and to the encoder's feature dimension (512 for resnet18).
        num_class: size of the classification output layer.
    """

    def __init__(self, encoder='resnet18', pretrained=True,
                 dims=(512, 512, 512, 512, 512, 512, 512, 512, 128),
                 num_class=3):
        super().__init__()
        # NOTE(review): `pretrained=` is deprecated in newer torchvision
        # (replaced by `weights=`); kept for compatibility with the
        # torchvision versions this repo targets.
        self.encoder = getattr(models, encoder)(pretrained=pretrained)
        # Replace the encoder's final classification layer (e.g. resnet `fc`)
        # with Identity so the encoder emits raw features.
        last_layer = list(self.encoder.named_modules())[-1][0].split('.')[0]
        setattr(self.encoder, last_layer, nn.Identity())
        # Projection head: one (Linear -> BatchNorm1d -> ReLU) block per
        # hidden width, then a final embedding layer dims[-2] -> dims[-1].
        # The embedding layer only gets a bias when a classifier follows it.
        proj_layers = []
        for d in dims[:-1]:
            proj_layers.append(nn.Linear(d, d, bias=False))
            proj_layers.append(nn.BatchNorm1d(d))
            proj_layers.append(nn.ReLU(inplace=True))
        proj_layers.append(nn.Linear(dims[-2], dims[-1], bias=num_class > 0))
        self.head = nn.Sequential(*proj_layers)
        self.out = nn.Linear(dims[-1], num_class)

    def forward(self, x):
        """Encode `x`, project it, and return classification logits."""
        features = self.encoder(x)
        embeds = self.head(features)
        logits = self.out(embeds)
        return logits

    def freeze(self, layer_name):
        """Freeze encoder parameters that precede `layer_name`.

        Parameters from `layer_name` onward (inclusive) are made trainable;
        every parameter before it is frozen.
        """
        check = False
        for name, param in self.encoder.named_parameters():
            if name == layer_name:
                check = True
            # Bug fix: the original `if not check and param.requires_grad != False`
            # fell through to the `else` branch for already-frozen params before
            # `layer_name`, re-enabling their gradients. The position alone
            # decides trainability, so assign the flag unconditionally.
            param.requires_grad = check

    def create_graph_model(self):
        """Return a feature extractor exposing the `head` and `out` nodes."""
        return create_feature_extractor(model=self, return_nodes=["head", "out"])
class CutPasteNet(_CutPasteNetBase):
    """CutPaste network whose forward pass surfaces the embedding as well.

    forward outputs: (logits, embeds)
    """

    def __init__(self, encoder='resnet18', pretrained=True, dims=[512, 512, 512, 512, 512, 512, 512, 512, 128], num_class=3):
        super().__init__(encoder, pretrained, dims, num_class)

    def forward(self, x):
        # Same pipeline as the base class, but keep the intermediate
        # embedding around and return it next to the logits.
        embeds = self.head(self.encoder(x))
        return self.out(embeds), embeds