mod_batchnorm_layer.F90
module mod_batchnorm_layer

  use mod_layer
  use mod_kinds, only: ik, rk

  implicit none

  ! BatchNorm layer, extending the base layer_type.
  ! Implements batch normalization.
  type, extends(layer_type) :: BatchNorm
    ! small constant added to the variance for numerical stability
    real(rk) :: epsilon
  contains
    procedure, public, pass(self) :: forward => batchnorm_forward
    procedure, public, pass(self) :: backward => batchnorm_backward
  end type BatchNorm

  interface BatchNorm
    module procedure :: constructor
  end interface BatchNorm

contains

  type(BatchNorm) function constructor(this_size) result(layer)
    ! BatchNorm class constructor
    ! this_size: size to allocate for the current layer
    integer(ik), intent(in) :: this_size
    allocate(layer % o(this_size))
    allocate(layer % beta(this_size))
    allocate(layer % gama(this_size))
    allocate(layer % mean(this_size))
    allocate(layer % variance(this_size))
    ! start out of training mode
    layer % training = .false.
    ! epsilon defaults to 0.001
    layer % epsilon = 0.001_rk
  end function constructor

  subroutine batchnorm_forward(self, x)
    ! Forward pass: standardize the input with the stored statistics,
    ! then apply the learned scale (gama) and shift (beta).
    class(BatchNorm), intent(in out) :: self
    real(rk), intent(in) :: x(:)
    if (self % training) then
      ! TODO: compute batch statistics during training; for now the input
      ! is passed through unchanged (see the sketch after the module)
      self % o = x
    else
      ! not training: standardize using the learned values
      self % o = ((x - self % mean) / sqrt(self % variance + self % epsilon)) &
        * self % gama + self % beta
    end if
  end subroutine batchnorm_forward

  subroutine batchnorm_backward(self, g, lr)
    ! Backward pass: g is the incoming gradient, lr the learning rate.
    class(BatchNorm), intent(in out) :: self
    real(rk), intent(in) :: g(:), lr
    ! TODO: implement backward pass (see the sketch after the module)
  end subroutine batchnorm_backward

end module mod_batchnorm_layer
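
The training branch of batchnorm_forward is left as a TODO. A minimal sketch of one way to fill it in, assuming the layer is called one sample at a time and tracks running statistics with exponential moving averages; the momentum constant is hypothetical and is not a component of the type as written, and true batch statistics would require seeing a whole batch at once:

  ! inside batchnorm_forward, replacing the TODO branch
  ! (the parameter declaration belongs in the declaration section)
  real(rk), parameter :: momentum = 0.9_rk  ! hypothetical smoothing constant

  if (self % training) then
    ! update running estimates of the per-feature statistics
    self % mean = momentum * self % mean + (1.0_rk - momentum) * x
    self % variance = momentum * self % variance &
      + (1.0_rk - momentum) * (x - self % mean)**2
    ! normalize with the updated running statistics
    self % o = ((x - self % mean) / sqrt(self % variance + self % epsilon)) &
      * self % gama + self % beta
  end if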
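
The backward pass is also stubbed out. A sketch under a simplifying assumption: the stored mean and variance are treated as constants (as in inference mode), so only the affine parameters gama and beta receive plain SGD updates. The normalized input is recovered from the saved output, which assumes gama is nonzero everywhere:

  subroutine batchnorm_backward(self, g, lr)
    class(BatchNorm), intent(in out) :: self
    real(rk), intent(in) :: g(:), lr
    real(rk), allocatable :: x_hat(:)
    ! recover the normalized input from the saved output:
    ! o = x_hat * gama + beta  =>  x_hat = (o - beta) / gama
    x_hat = (self % o - self % beta) / self % gama
    ! SGD updates: dL/dgama = g * x_hat, dL/dbeta = g
    self % gama = self % gama - lr * g * x_hat
    self % beta = self % beta - lr * g
  end subroutine batchnorm_backward

A full batch-norm backward pass would also propagate gradients through the mean and variance; this sketch omits that step.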
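
Finally, a minimal usage sketch in inference mode. It assumes the parent layer_type supplies the o, beta, gama, mean, variance, and training components used above, and that mod_kinds resolves the ik and rk kinds:

  program demo_batchnorm
    use mod_kinds, only: ik, rk
    use mod_batchnorm_layer
    implicit none
    type(BatchNorm) :: bn
    real(rk) :: x(4)

    bn = BatchNorm(4_ik)

    ! set the learned statistics and affine parameters by hand for the demo
    bn % mean = 0.5_rk
    bn % variance = 2.0_rk
    bn % gama = 1.0_rk
    bn % beta = 0.0_rk

    x = [1.0_rk, 2.0_rk, 3.0_rk, 4.0_rk]
    call bn % forward(x)   ! training is .false., so the inference branch runs
    print *, bn % o        ! standardized, scaled, and shifted input
  end program demo_batchnorm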