Small typos in functions set_none_gradients_to_zero (#5557)
Change "zero_like" to "zeros_like".
TravelLeraLone committed May 21, 2024
1 parent 5b314f4 commit 975290a
Showing 2 changed files with 2 additions and 2 deletions.
deepspeed/runtime/zero/stage3.py: 2 changes (1 addition, 1 deletion)
@@ -1575,7 +1575,7 @@ def set_none_gradients_to_zero(self, i, partition_id):
         for param_id in self.is_grad_computed[i][partition_id]:
             param = self.param_dict[param_id]
             if param.grad is None:
-                param.grad = torch.zero_like(param)
+                param.grad = torch.zeros_like(param)
 
     ######################Reduction Related Methods##############################
deepspeed/runtime/zero/stage_1_and_2.py: 2 changes (1 addition, 1 deletion)
@@ -1474,7 +1474,7 @@ def set_none_gradients_to_zero(self, i, partition_id):
         for param_id in self.is_grad_computed[i][partition_id]:
             param = self.param_dict[param_id]
             if param.grad is None:
-                param.grad = torch.zero_like(param)
+                param.grad = torch.zeros_like(param)
 
     ######################Reduction Related Methods##############################
     def allreduce_bucket(self, bucket, rank=None, log=None, divide=True, process_group=None):
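
For context, torch.zero_like is not part of the PyTorch API; the correct function is torch.zeros_like, which returns a zero-filled tensor matching the input's shape, dtype, and device. Below is a minimal standalone sketch of the corrected pattern; the toy parameter is an illustration, not an actual DeepSpeed gradient partition.

import torch

# Stand-in for a model parameter; in DeepSpeed this would come from param_dict.
param = torch.nn.Parameter(torch.randn(4, 4))

# torch.zeros_like allocates zeros with the same shape, dtype, and device as
# its argument, which is what set_none_gradients_to_zero relies on.
if param.grad is None:
    param.grad = torch.zeros_like(param)

print(param.grad.shape, param.grad.dtype)  # torch.Size([4, 4]) torch.float32

# The misspelled torch.zero_like would instead raise:
#   AttributeError: module 'torch' has no attribute 'zero_like'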
