Skip to content
This repository has been archived by the owner on Jun 30, 2024. It is now read-only.

Commit

Permalink
Update eval_decoding.py
Browse files · Browse the repository at this point in the history
  • Loading branch information
MikeWangWZHL committed Jan 8, 2024
1 parent da6b4ed commit fbaf82d
Showing 1 changed file with 2 additions and 4 deletions.
6 changes: 2 additions & 4 deletions eval_decoding.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,15 +64,13 @@ def eval_model(dataloaders, device, tokenizer, criterion, model, output_all_resu
# early_stopping=True
)
predicted_string=tokenizer.batch_decode(predictions,skip_special_tokens=False)
predicted_string=predicted_string.squeeze()

predictions=tokenizer.encode(predicted_string)
predicted_string=predicted_string[0]
# print('predicted string:',predicted_string)
f.write(f'predicted string: {predicted_string}\n')
f.write(f'################################################\n\n\n')

# convert to int list
predictions = predictions.tolist()
predictions = tokenizer(predicted_string)["input_ids"]
truncated_prediction = []
for t in predictions:
if t != tokenizer.eos_token_id:
Expand Down

0 comments on commit fbaf82d

Please sign in to comment.