Fix docstrings in TransformerEncoder (#84)
王文涛 authored and huzecong committed Jun 29, 2019
1 parent ec583ac commit 77f3b39
Showing 3 changed files with 3 additions and 4 deletions.
4 changes: 2 additions & 2 deletions examples/transformer/model.py
@@ -87,9 +87,9 @@ def forward(  # type: ignore

         # Position embedding (shared b/w source and target)
         src_seq_len = torch.full(
-            (batch_size,), encoder_input.size(1), dtype=torch.int32
+            (batch_size,), encoder_input.size(1), dtype=torch.int32,
+            device=encoder_input.device
         )
-        src_seq_len = src_seq_len.to(device=encoder_input.device)

         src_pos_embeds = self.pos_embedder(sequence_length=src_seq_len)
         src_input_embedding = src_word_embeds + src_pos_embeds
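
The change above folds device placement into the torch.full call itself, replacing the separate src_seq_len.to(device=...) copy with a single allocation on the input's device. A minimal standalone sketch of the pattern, with the input tensor and its shape assumed purely for illustration:

    import torch

    # Stand-in for the model's encoder input; shape is assumed for illustration.
    encoder_input = torch.zeros(4, 16, dtype=torch.long)  # (batch_size, max_time)
    batch_size = encoder_input.size(0)

    # Allocate the length tensor directly on the input's device, avoiding the
    # extra copy a trailing .to(device=...) would perform.
    src_seq_len = torch.full(
        (batch_size,), encoder_input.size(1), dtype=torch.int32,
        device=encoder_input.device,
    )
    assert src_seq_len.device == encoder_input.device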
1 change: 0 additions & 1 deletion texar/modules/encoders/transformer_encoder.py
@@ -220,7 +220,6 @@ def default_hparams():
             'multihead_attention': {
                 'name': 'multihead_attention',
                 'num_units': 512,
-                'output_dim': 512,
                 'num_heads': 8,
                 'dropout_rate': 0.1,
                 'output_dim': 512,
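
The deleted line above removes a duplicated 'output_dim' key. Python dict literals accept repeated keys without complaint, and the last occurrence silently wins, so the earlier entry was dead weight; a minimal sketch of that behavior:

    # Duplicate keys in a dict literal do not raise; the last one wins.
    hparams = {
        'num_units': 512,
        'output_dim': 512,  # silently shadowed by the later duplicate
        'num_heads': 8,
        'dropout_rate': 0.1,
        'output_dim': 512,
    }
    print(len(hparams))           # 4 -- only one 'output_dim' key survives
    print(hparams['output_dim'])  # 512

Here both duplicates carry the same value, so removing one changes nothing at runtime; it only removes a source of confusion.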
2 changes: 1 addition & 1 deletion texar/modules/encoders/transformer_encoder_test.py
@@ -9,7 +9,7 @@


 class TransformerEncoderTest(unittest.TestCase):
-    r"""Tests :class:`~texar.modules.TransformerDecoder`
+    r"""Tests :class:`~texar.modules.TransformerEncoder`
     """

     def setUp(self):
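
A note on the cross-reference syntax being corrected: in Sphinx, a leading ~ inside the :class: role shortens the rendered link text to the final component of the dotted path. A minimal sketch of the fixed header, with the setUp body stubbed out for illustration:

    # :class:`texar.modules.TransformerEncoder`   renders as "texar.modules.TransformerEncoder"
    # :class:`~texar.modules.TransformerEncoder`  renders as "TransformerEncoder"

    import unittest

    class TransformerEncoderTest(unittest.TestCase):
        r"""Tests :class:`~texar.modules.TransformerEncoder`
        """

        def setUp(self):
            pass  # fixture setup stubbed out for illustration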
