import math

import torch.nn as nn


def get_activation_fn(name, leakiness=None, num_channels=None):
    """Returns the activation module matching `name`."""
    if name == 'relu':
        return nn.ReLU()
    elif name == 'prelu':
        # PReLU learns one slope per channel, initialized to `leakiness`.
        assert num_channels is not None, 'prelu requires num_channels'
        assert leakiness is not None, 'prelu requires leakiness'
        return nn.PReLU(num_parameters=num_channels, init=leakiness)
    elif name == 'lrelu':
        assert leakiness is not None, 'lrelu requires leakiness'
        return nn.LeakyReLU(negative_slope=leakiness)
    elif name == 'softmax':
        # Specify dim explicitly; the implicit default is deprecated.
        return nn.Softmax(dim=1)
    elif name == 'tanh':
        return nn.Tanh()
    else:
        raise ValueError('Unknown activation function {}'.format(name))


def get_normalization_layer(name, num_features):
    """Returns the normalization module matching `name`."""
    if name == 'batch':
        return nn.BatchNorm2d(num_features, affine=True)
    elif name == 'instance':
        # Plain instance norm learns no scale/shift of its own.
        return nn.InstanceNorm2d(num_features, affine=False)
    elif name == 'instance-affine':
        return nn.InstanceNorm2d(num_features, affine=True)
    else:
        raise ValueError('Unknown normalization layer {}'.format(name))


def need_bias(use_norm_layers, norm_layer):
    """Whether convolutions need a bias term.

    A bias is redundant when the subsequent normalization layer applies
    its own learned shift (batch norm or affine instance norm).
    """
    if (not use_norm_layers
            or use_norm_layers == 'not-first'
            or norm_layer == 'instance'):
        return True
    elif norm_layer == 'batch' or norm_layer == 'instance-affine':
        return False
    else:
        raise ValueError('Unknown normalization layer {}'.format(norm_layer))


def get_padding_layer(total_padding, mode='zero'):
    """Returns a padding layer distributing `total_padding` over both sides."""
    padding_layers = {
        'zero': nn.ZeroPad2d,
        'reflection': nn.ReflectionPad2d,
        'replication': nn.ReplicationPad2d,
    }
    assert mode in padding_layers, 'Unknown padding mode {}'.format(mode)
    padding_side = total_padding // 2
    if total_padding % 2 == 0:
        # Even total: pad both sides equally.
        padding = padding_side
    else:
        # Odd total: pad one extra pixel on the right and bottom
        # (left, right, top, bottom).
        padding = (padding_side, padding_side + 1,
                   padding_side, padding_side + 1)
    return padding_layers[mode](padding)


def get_same_padding_layer(kernel_size, stride, mode='zero'):
    """Constructs a padding layer for SAME padding.

    Calculates the padding to insert so that a 2d convolution produces an
    output of spatial size ceil(input_size / stride), i.e. the input size
    is preserved for stride 1 and halved for stride 2.
    WARNING: Only valid when the input size is divisible by the stride.
    """
    assert stride == 1 or stride == 2, 'Formula only works for stride 1 or 2'
    # For input sizes divisible by the stride, SAME padding requires a
    # total of kernel_size - stride pixels along each spatial dimension.
    total_padding = max(kernel_size - stride, 0)
    return get_padding_layer(total_padding, mode)
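

# A minimal usage sketch (not part of the original module): it combines the
# helpers above in one plausible order -- pad, conv, norm, activation. The
# channel counts, leakiness, and wiring are illustrative assumptions, not
# necessarily how the parent repository composes these utilities.
if __name__ == '__main__':
    import torch

    kernel_size, stride = 3, 2
    block = nn.Sequential(
        get_same_padding_layer(kernel_size, stride, mode='reflection'),
        nn.Conv2d(3, 16, kernel_size, stride=stride,
                  bias=need_bias(use_norm_layers=True, norm_layer='batch')),
        get_normalization_layer('batch', num_features=16),
        get_activation_fn('lrelu', leakiness=0.2),
    )
    x = torch.randn(1, 3, 64, 64)
    # SAME padding with stride 2 halves each spatial dimension: 64 -> 32.
    print(block(x).shape)  # torch.Size([1, 16, 32, 32])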