Skip to content

Commit

Permalink
Fix bug: pass tensor_parallel_output=False when loading models in create_predictor, and scale effective-token counts by num_train_epochs in calculate_effective_tokens
Browse files Browse the repository at this point in the history
  • Loading branch information
lugimzzz committed May 23, 2024
1 parent 233b894 commit affd27d
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 0 deletions.
3 changes: 3 additions & 0 deletions llm/predictor.py
Original file line number Diff line number Diff line change
Expand Up @@ -1232,6 +1232,7 @@ def create_predictor(
dtype=predictor_args.dtype,
tensor_parallel_degree=tensor_parallel_degree,
tensor_parallel_rank=tensor_parallel_rank,
tensor_parallel_output=False,
)
elif model_args.model_type == "ernie-3.5-se":
sys.path.append("./ernie-3.5-se")
Expand All @@ -1244,6 +1245,7 @@ def create_predictor(
dtype=predictor_args.dtype,
tensor_parallel_degree=tensor_parallel_degree,
tensor_parallel_rank=tensor_parallel_rank,
tensor_parallel_output=False,
)
else:
model = AutoModelForCausalLM.from_pretrained(
Expand All @@ -1252,6 +1254,7 @@ def create_predictor(
use_flash_attention=predictor_args.use_flash_attention,
tensor_parallel_degree=tensor_parallel_degree,
tensor_parallel_rank=tensor_parallel_rank,
tensor_parallel_output=False,
)

predictor = DygraphPredictor(predictor_args, model=model, tokenizer=tokenizer)
Expand Down
2 changes: 2 additions & 0 deletions paddlenlp/trl/trl_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,4 +44,6 @@ def calculate_effective_tokens(training_args, train_dataset, max_seq_len):
for i, data in enumerate(train_dataset):
total_effective_tokens += len(data["input_ids"])
total_tokens = (i + 1) * max_seq_len
total_effective_tokens *= training_args.num_train_epochs
total_tokens *= training_args.num_train_epochs
return total_effective_tokens, total_tokens

Check warning on line 49 in paddlenlp/trl/trl_utils.py

View check run for this annotation

Codecov / codecov/patch

paddlenlp/trl/trl_utils.py#L44-L49

Added lines #L44 - L49 were not covered by tests

0 comments on commit affd27d

Please sign in to comment.