
Commit

Fix some styles manually
ydshieh committed Feb 23, 2022
1 parent 5ed4a5a commit 2edb271
Showing 2 changed files with 12 additions and 8 deletions.
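Both files receive the same two style fixes: single-element tuple unpacking of the model output, (logits,) = model(inputs), is replaced by assigning the returned output object to a result variable, and an over-long model(...) call is wrapped across several lines. Below is a minimal sketch of the resulting call style; it is not taken from the commit, and the tiny config values, dummy input ids, and variable names are illustrative assumptions around the TF BERT causal-LM class from transformers.

# Minimal sketch (not from the commit): config values, dummy ids, and variable
# names are illustrative assumptions.
import tensorflow as tf
from transformers import BertConfig, TFBertLMHeadModel

config = BertConfig(
    vocab_size=100,
    hidden_size=32,
    num_hidden_layers=2,
    num_attention_heads=2,
    intermediate_size=64,
    is_decoder=True,
)
model = TFBertLMHeadModel(config)

input_ids = tf.constant([[1, 2, 3, 4]])
attn_mask = tf.ones_like(input_ids)

# Old style unpacked a one-element tuple: (logits,) = model(inputs)
# New style keeps the whole output object and wraps the long call over
# several lines, then reads named fields from it.
result = model(
    input_ids,
    attention_mask=attn_mask,
    output_hidden_states=True,
)
first_hidden_state = result.hidden_states[0]  # embedding-layer output, shape (1, 4, 32)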
@@ -255,7 +255,7 @@ def create_and_check_causal_lm_model_as_decoder(
             "encoder_hidden_states": encoder_hidden_states,
             "encoder_attention_mask": encoder_attention_mask,
         }
-        (logits,) = model(inputs)
+        result = model(inputs)

         inputs = [input_ids, input_mask]
         result = model(inputs, token_type_ids=token_type_ids, encoder_hidden_states=encoder_hidden_states)
@@ -358,9 +358,11 @@ def create_and_check_causal_lm_model_past_with_attn_mask(
             axis=1,
         )

-        output_from_no_past = model(next_input_ids, attention_mask=attn_mask, output_hidden_states=True).hidden_states[
-            0
-        ]
+        output_from_no_past = model(
+            next_input_ids,
+            attention_mask=attn_mask,
+            output_hidden_states=True,
+        ).hidden_states[0]
         output_from_past = model(
             next_tokens, past_key_values=past_key_values, attention_mask=attn_mask, output_hidden_states=True
         ).hidden_states[0]
tests/bert/test_modeling_tf_bert.py: 10 changes (6 additions & 4 deletions)
@@ -267,7 +267,7 @@ def create_and_check_causal_lm_model_as_decoder(
             "encoder_hidden_states": encoder_hidden_states,
             "encoder_attention_mask": encoder_attention_mask,
         }
-        (logits,) = model(inputs)
+        result = model(inputs)

         inputs = [input_ids, input_mask]
         result = model(inputs, token_type_ids=token_type_ids, encoder_hidden_states=encoder_hidden_states)
@@ -370,9 +370,11 @@ def create_and_check_causal_lm_model_past_with_attn_mask(
             axis=1,
         )

-        output_from_no_past = model(next_input_ids, attention_mask=attn_mask, output_hidden_states=True).hidden_states[
-            0
-        ]
+        output_from_no_past = model(
+            next_input_ids,
+            attention_mask=attn_mask,
+            output_hidden_states=True,
+        ).hidden_states[0]
         output_from_past = model(
             next_tokens, past_key_values=past_key_values, attention_mask=attn_mask, output_hidden_states=True
         ).hidden_states[0]
