
Commit

fix rebase conflicts
Signed-off-by: Masaki Kozuki <mkozuki@nvidia.com>
crcrpar committed May 17, 2024
1 parent 93c499c commit 98b3468
Showing 1 changed file with 3 additions and 9 deletions.
12 changes: 3 additions & 9 deletions thunder/distributed/__init__.py
@@ -551,11 +551,7 @@ def _shard_params(
     process_group: ProcessGroup,
     device: torch.device | None,
     broadcast_from: int | None,
-<<<<<<< HEAD
     allow_padding_for_fsdp: bool = False,
-    dim: int | None = None,
-=======
->>>>>>> b3affecd (remove `dim` from `_shard_params`)
 ) -> None:
     """Shards the parameters on the first dimension."""
     global_rank = tdist.get_rank(group=process_group)
@@ -586,11 +582,9 @@ def _shard_params(
         # Note [FSDP Sharding]
         # All internal code will assume that the parameters are sharded on the first dimension
         for param_name, param in submodule.named_parameters(recurse=False, prefix=module_name):
-<<<<<<< HEAD
-            _shard_param(param, global_rank, world_size, param_name, dim=dim, allow_padding_for_fsdp=allow_padding_for_fsdp)
-=======
-            _shard_param(param, global_rank, world_size, param_name, dim=0)
->>>>>>> b3affecd (remove `dim` from `_shard_params`)
+            _shard_param(
+                param, global_rank, world_size, param_name, dim=0, allow_padding_for_fsdp=allow_padding_for_fsdp
+            )


 def _shard_param(
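The resolution keeps the `allow_padding_for_fsdp` flag from HEAD but drops the configurable `dim`, so parameters are always sharded along dim 0, matching the Note [FSDP Sharding] invariant that the rest of the code assumes. As a rough sketch of what first-dimension sharding with optional padding means (a hypothetical `shard_dim0_example` helper, not Thunder's `_shard_param` implementation, and assuming the padding flag rounds dim 0 up to a multiple of the world size), each rank keeps only its row-wise slice of the parameter:

import torch


def shard_dim0_example(param: torch.Tensor, rank: int, world_size: int, allow_padding: bool = False) -> torch.Tensor:
    """Hypothetical helper: return ``rank``'s slice of ``param`` along dim 0."""
    size0 = param.shape[0]
    if size0 % world_size != 0:
        if not allow_padding:
            raise ValueError(f"dim 0 ({size0}) is not divisible by world_size ({world_size})")
        # Pad dim 0 up to the next multiple of world_size so every rank gets an equal chunk.
        padded_size0 = -(-size0 // world_size) * world_size
        padding = torch.zeros(padded_size0 - size0, *param.shape[1:], dtype=param.dtype, device=param.device)
        param = torch.cat([param, padding], dim=0)
    chunk = param.shape[0] // world_size
    return param[rank * chunk : (rank + 1) * chunk].clone()


# A (10, 4) weight sharded across 4 ranks with padding: each rank holds a (3, 4) slice.
weight = torch.randn(10, 4)
shards = [shard_dim0_example(weight, r, 4, allow_padding=True) for r in range(4)]
assert all(s.shape == (3, 4) for s in shards)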
