fix BatchNorm1d params
TsumiNa committed Jun 1, 2021
1 parent 93a5c02 commit 860adf5
Showing 1 changed file with 2 additions and 2 deletions.
xenonpy/model/sequential.py (2 additions, 2 deletions)
@@ -41,7 +41,7 @@ def __init__(self,
         super().__init__()
         self.linear = nn.Linear(in_features, out_features, bias)
         self.dropout = nn.Dropout(dropout)
-        self.normalizer = None if not normalizer else nn.BatchNorm1d(out_features, normalizer)
+        self.normalizer = None if not normalizer else nn.BatchNorm1d(out_features, momentum=normalizer)
         self.activation = None if not activation_func else activation_func
 
     def forward(self, x):
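
Why the keyword matters: the signature is torch.nn.BatchNorm1d(num_features, eps=1e-05, momentum=0.1, ...), so the second positional argument is eps, not momentum. The old call therefore used `normalizer` as eps and silently left momentum at its default. A minimal sketch of the difference (the layer width of 8 is illustrative):

    import torch.nn as nn

    # Positional: the value lands in the `eps` slot; momentum stays at its 0.1 default.
    bn_before = nn.BatchNorm1d(8, 0.1)
    # Keyword (the fix): eps keeps its 1e-05 default, momentum is actually set.
    bn_after = nn.BatchNorm1d(8, momentum=0.1)

    print(bn_before.eps, bn_before.momentum)  # 0.1 0.1
    print(bn_after.eps, bn_after.momentum)    # 1e-05 0.1
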
@@ -70,7 +70,7 @@ def __init__(
         *,
         h_neurons: Union[Sequence[float], Sequence[int]] = (),
         h_bias: Union[bool, Sequence[bool]] = True,
-        h_dropouts: Union[float, Sequence[float]] = 0.1,
+        h_dropouts: Union[float, Sequence[float]] = 0.0,
         h_normalizers: Union[float, None, Sequence[Optional[float]]] = 0.1,
         h_activation_funcs: Union[Callable, None, Sequence[Optional[Callable]]] = nn.ReLU(),
     ):
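
The second hunk turns hidden-layer dropout off by default (0.1 to 0.0), so callers now opt in explicitly. A hypothetical usage sketch against the signature shown above (the enclosing class is not named in this diff; SequentialLinear and its positional in/out arguments are assumed here):

    from xenonpy.model import SequentialLinear  # assumed import path

    # Hypothetical: two hidden layers sized as fractions of the input width,
    # explicitly restoring the old 0.1 dropout and keeping BatchNorm momentum 0.1.
    model = SequentialLinear(290, 1,
                             h_neurons=(0.8, 0.4),
                             h_dropouts=0.1,
                             h_normalizers=0.1)
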
