From faad056997f3755e37989f7931a3c05158dbb6da Mon Sep 17 00:00:00 2001
From: s6690609
Date: Thu, 28 Sep 2023 09:59:40 +0200
Subject: [PATCH] Indented torch.distributed.init_process_group()

---
 megatron/initialize.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/megatron/initialize.py b/megatron/initialize.py
index 367ba85cb2..416426b74a 100644
--- a/megatron/initialize.py
+++ b/megatron/initialize.py
@@ -192,13 +192,13 @@ def _initialize_distributed():
             else:
                 args.local_rank = device
             torch.cuda.set_device(device)
-        # Call the init process
-        torch.distributed.init_process_group(
-            backend=args.distributed_backend,
-            world_size=args.world_size,
-            rank=args.rank,
-            timeout=timedelta(minutes=args.distributed_timeout_minutes),
-        )
+            # Call the init process
+            torch.distributed.init_process_group(
+                backend=args.distributed_backend,
+                world_size=args.world_size,
+                rank=args.rank,
+                timeout=timedelta(minutes=args.distributed_timeout_minutes),
+            )
 
     # Set the tensor model-parallel, pipeline model-parallel, and
     # data-parallel communicators.
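
For context, a minimal sketch of how the surrounding code in megatron/initialize.py reads once this patch is applied. The outer else branch and the `if device_count > 0:` guard are inferred from the hunk context and upstream Megatron-LM; `get_args()` is Megatron's argument helper and is stubbed here with placeholder values so the sketch stands alone.

    from datetime import timedelta
    from types import SimpleNamespace

    import torch


    def get_args():
        # Stand-in for Megatron-LM's get_args(); all values are placeholders.
        return SimpleNamespace(
            rank=0,
            world_size=1,
            local_rank=None,
            distributed_backend="nccl",
            distributed_timeout_minutes=10,
        )


    def _initialize_distributed():
        args = get_args()
        device_count = torch.cuda.device_count()

        if torch.distributed.is_initialized():
            args.rank = torch.distributed.get_rank()
            args.world_size = torch.distributed.get_world_size()
        else:
            if device_count > 0:
                device = args.rank % device_count
                if args.local_rank is not None:
                    assert args.local_rank == device, (
                        "expected local-rank to be the same as rank % device-count."
                    )
                else:
                    args.local_rank = device
                torch.cuda.set_device(device)

                # After this patch, the call is nested one level deeper, inside
                # the `if device_count > 0:` block, so the process group is only
                # created when a CUDA device is visible to this rank.
                torch.distributed.init_process_group(
                    backend=args.distributed_backend,
                    world_size=args.world_size,
                    rank=args.rank,
                    timeout=timedelta(minutes=args.distributed_timeout_minutes),
                )

Note the behavioral effect of the extra indentation: ranks that see no CUDA device (device_count == 0) now skip torch.distributed.init_process_group() entirely, which appears to be the intent of the change.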