Fix bart shape comment (#8423)
sshleifer authored Nov 9, 2020
commit a8339b9 (1 parent: 46509d1)
Showing 2 changed files with 2 additions and 2 deletions.
src/transformers/modeling_bart.py (2 changes: 1 addition & 1 deletion)
@@ -585,7 +585,7 @@ def forward(

x = F.dropout(x, p=self.dropout, training=self.training)

- # Convert to Bart output format: (seq_len, BS, model_dim) -> (BS, seq_len, model_dim)
+ # Convert to Bart output format: (BS, seq_len, model_dim) -> (seq_len, BS, model_dim)
x = x.transpose(0, 1)
encoder_hidden_states = encoder_hidden_states.transpose(0, 1)

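For quick verification of the corrected comment, here is a minimal PyTorch sketch of the same axis swap. The sizes (BS=2, seq_len=5, model_dim=4) are illustrative placeholders, not values taken from the model:

import torch

x = torch.zeros(2, 5, 4)  # (BS, seq_len, model_dim); placeholder sizes
x = x.transpose(0, 1)     # swap axes 0 and 1, as in the diff above
print(tuple(x.shape))     # (5, 2, 4), i.e. (seq_len, BS, model_dim)
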
src/transformers/modeling_tf_bart.py (2 changes: 1 addition & 1 deletion)
@@ -570,7 +570,7 @@ def call(
x = self.layernorm_embedding(x + positions)
x = tf.nn.dropout(x, rate=self.dropout if training else 0)

- # Convert to Bart output format: (seq_len, BS, model_dim) -> (BS, seq_len, model_dim)
+ # Convert to Bart output format: (BS, seq_len, model_dim) -> (seq_len, BS, model_dim)
x = tf.transpose(x, perm=(1, 0, 2))
assert len(shape_list(encoder_hidden_states)) == 3, "encoder_hidden_states must be a 3D tensor"
encoder_hidden_states = tf.transpose(encoder_hidden_states, perm=(1, 0, 2))
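The TensorFlow file performs the identical conversion, but with an explicit perm argument (tf.transpose without perm would reverse all axes, not just swap the first two). Again a minimal sketch with the same placeholder sizes:

import tensorflow as tf

x = tf.zeros((2, 5, 4))              # (BS, seq_len, model_dim); placeholder sizes
x = tf.transpose(x, perm=(1, 0, 2))  # move seq_len to axis 0, BS to axis 1
print(x.shape)                       # (5, 2, 4), i.e. (seq_len, BS, model_dim)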
