Skip to content

Commit

Permalink
fix: apply dropout to embeddings in PyTorch transformer decoder; re-enable pretrained weight loading for TF MASTER
Browse files Browse the repository at this point in the history
  • Loading branch information
charlesmindee committed Jul 5, 2021
1 parent 32ddedd commit b5e3b3e
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 2 deletions.
4 changes: 2 additions & 2 deletions doctr/models/recognition/master/tensorflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -398,8 +398,8 @@ def _master(arch: str, pretrained: bool, input_shape: Tuple[int, int, int] = Non
# Build the model
model = MASTER(cfg=_cfg, **kwargs)
# Load pretrained parameters
# if pretrained:
# load_pretrained_params(model, default_cfgs[arch]['url'])
if pretrained:
load_pretrained_params(model, default_cfgs[arch]['url'])

return model

Expand Down
3 changes: 3 additions & 0 deletions doctr/models/recognition/transformer/pytorch.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,8 @@ def __init__(
) for _ in range(num_layers)
]

self.dropout = nn.Dropout(dropout)

def forward(
self,
x: torch.Tensor,
Expand All @@ -73,6 +75,7 @@ def forward(
x = self.embedding(x) # (batch_size, target_seq_len, d_model)
x *= math.sqrt(self.d_model)
x += self.pos_encoding[:, :seq_len, :]
x = self.dropout(x)

# Batch first = False in decoder
x = x.permute(1, 0, 2)
Expand Down

0 comments on commit b5e3b3e

Please sign in to comment.