
Commit

Indented torch.init_distributed()
Lena-Jurkschat committed Sep 28, 2023
1 parent 52f1300 commit faad056
Showing 1 changed file with 7 additions and 7 deletions.
14 changes: 7 additions & 7 deletions megatron/initialize.py
@@ -192,13 +192,13 @@ def _initialize_distributed():
             else:
                 args.local_rank = device
             torch.cuda.set_device(device)
-    # Call the init process
-    torch.distributed.init_process_group(
-        backend=args.distributed_backend,
-        world_size=args.world_size,
-        rank=args.rank,
-        timeout=timedelta(minutes=args.distributed_timeout_minutes),
-    )
+        # Call the init process
+        torch.distributed.init_process_group(
+            backend=args.distributed_backend,
+            world_size=args.world_size,
+            rank=args.rank,
+            timeout=timedelta(minutes=args.distributed_timeout_minutes),
+        )
 
     # Set the tensor model-parallel, pipeline model-parallel, and
     # data-parallel communicators.
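
For context, a minimal sketch of the control flow this re-indentation produces. This is an illustration only, not the exact megatron/initialize.py source; the already-initialized branch and the device_count guard are assumed from the usual Megatron-LM pattern. With the extra indentation, torch.distributed.init_process_group(...) runs only inside the else branch, i.e. only when torch.distributed has not already been initialized, rather than unconditionally at the end of _initialize_distributed().

    # Sketch of the assumed control flow after this commit (hypothetical
    # context, not the literal Megatron-LM file).
    from datetime import timedelta

    import torch


    def _initialize_distributed(args):
        device_count = torch.cuda.device_count()
        if torch.distributed.is_initialized():
            # Process group already exists (e.g. set up by a launcher):
            # just read back rank information.
            args.rank = torch.distributed.get_rank()
            args.world_size = torch.distributed.get_world_size()
        else:
            # Bind this process to a GPU before creating the process group.
            if device_count > 0:
                device = args.rank % device_count
                args.local_rank = device
                torch.cuda.set_device(device)
            # After the re-indentation, the init call sits inside this else
            # branch, so it only runs when distributed is not yet initialized.
            torch.distributed.init_process_group(
                backend=args.distributed_backend,
                world_size=args.world_size,
                rank=args.rank,
                timeout=timedelta(minutes=args.distributed_timeout_minutes),
            )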
