
Commit

Remove unnecessary _add function
kuangliu committed Nov 6, 2017
1 parent 3b59fef commit 7cd2c59
Showing 2 changed files with 1 addition and 24 deletions.
23 changes: 0 additions & 23 deletions fpn.py
@@ -67,29 +67,6 @@ def _make_layer(self, block, planes, num_blocks, stride):
             self.in_planes = planes * block.expansion
         return nn.Sequential(*layers)
 
-    def _add(self, x, y):
-        '''Add two feature maps.
-        Args:
-          x: (Variable) upsampled feature map.
-          y: (Variable) lateral feature map.
-        Returns:
-          (Variable) added feature map.
-        Upsampled feature map size is always >= lateral feature map size.
-        The reason why the two feature map sizes may not equal is because when the
-        input size is odd, the upsampled feature map size is always 1 pixel
-        bigger than the original input size.
-        e.g.
-        original input size: [N,_,15,15] ->
-        conv2d feature map size: [N,_,8,8] ->
-        upsampled feature map size: [N,_,16,16]
-        '''
-        _,_,H,W = y.size()
-        return x[:,:,:H,:W] + y
 
     def _upsample_add(self, x, y):
         '''Upsample and add two feature maps.
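
The cropping that _add performed is no longer needed because the surviving _upsample_add (truncated above) can resize the top-down map to the lateral map's exact height and width before adding. A minimal sketch of that idea, assuming a size-targeted bilinear upsample; the function name, tensor shapes, and the use of F.interpolate here are illustrative assumptions, not code copied from the repository:

import torch
import torch.nn.functional as F

def upsample_add(x, y):
    # Upsample x to exactly y's spatial size, then add. Because the target
    # size is given explicitly, the result already matches y, so no cropping
    # (the job of the removed _add) is required.
    # Assumption: the 2017-era code would call F.upsample; F.interpolate is
    # the current equivalent.
    _, _, H, W = y.size()
    return F.interpolate(x, size=(H, W), mode='bilinear', align_corners=False) + y

# Usage sketch: an odd 15x15 input gives an 8x8 conv map; doubling it with a
# scale factor would yield 16x16, one pixel larger than the 15x15 lateral map,
# whereas a size-targeted upsample lands on 15x15 directly.
top = torch.randn(1, 256, 8, 8)        # coarse top-down feature map
lateral = torch.randn(1, 256, 15, 15)  # lateral feature map
out = upsample_add(top, lateral)       # -> torch.Size([1, 256, 15, 15])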
2 changes: 1 addition & 1 deletion train.py
@@ -63,7 +63,7 @@
 def train(epoch):
     print('\nEpoch: %d' % epoch)
     net.train()
-    net.freeze_bn()
+    net.module.freeze_bn()
     train_loss = 0
     for batch_idx, (inputs, loc_targets, cls_targets) in enumerate(trainloader):
         inputs = Variable(inputs.cuda())
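
The switch from net.freeze_bn() to net.module.freeze_bn() above suggests that net is wrapped in torch.nn.DataParallel elsewhere in train.py: DataParallel exposes the wrapped model's parameters and forward pass, but not its custom methods, so those must be reached through .module. A minimal sketch of that pattern, using a stand-in module and a plausible freeze_bn rather than the repository's actual network:

import torch.nn as nn

class TinyDetector(nn.Module):
    # Stand-in for the real network; only freeze_bn matters for this example.
    def __init__(self):
        super(TinyDetector, self).__init__()
        self.conv = nn.Conv2d(3, 8, 3, padding=1)
        self.bn = nn.BatchNorm2d(8)

    def forward(self, x):
        return self.bn(self.conv(x))

    def freeze_bn(self):
        # Keep BatchNorm layers in eval mode so their running stats stay fixed.
        for m in self.modules():
            if isinstance(m, nn.BatchNorm2d):
                m.eval()

net = nn.DataParallel(TinyDetector())

net.train()               # standard nn.Module methods are inherited, so this works
# net.freeze_bn()         # AttributeError: DataParallel has no attribute freeze_bn
net.module.freeze_bn()    # custom methods live on the wrapped model

Whatever the repository's freeze_bn actually does, the relevant point of the diff is the .module indirection introduced by the DataParallel wrapper.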
