Commit b7fc092

made fx public (Lightning-AI#2247)
* made fx public
* made fx public
* made fx public
1 parent 68a1e52 commit b7fc092

3 files changed: +5 -6 lines changed


docs/source/trainer.rst

Lines changed: 1 addition & 1 deletion
@@ -9,7 +9,7 @@ Trainer
     :exclude-members:
         run_pretrain_routine,
         _abc_impl,
-        _Trainer__set_random_port,
+        _Trainer_set_random_port,
         _Trainer__set_root_gpu,
         _Trainer__init_optimizers,
         _Trainer__parse_gpu_ids,
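
The exclude entries above are spelled the way they are because of Python's name mangling: a method whose name starts with two underscores inside a class body is stored on the class as _ClassName__method, so Sphinx has to exclude _Trainer__set_random_port rather than __set_random_port. A minimal sketch of the rule, using a stand-in class rather than the real Trainer:

class Example:
    def __set_random_port(self):   # two leading underscores -> stored as _Example__set_random_port
        pass

print('_Example__set_random_port' in dir(Example()))  # True: the mangled name is the real attribute
print(hasattr(Example(), '__set_random_port'))        # False: the unmangled name is not visible from outside

Once the double underscore is dropped from the method name, no mangling takes place, which is why the documented exclude entry changes together with the rename.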

pytorch_lightning/trainer/distrib_data_parallel.py

Lines changed: 1 addition & 2 deletions
@@ -369,7 +369,7 @@ def set_nvidia_flags(self, is_slurm_managing_tasks, data_parallel_device_ids):
         # don't make this debug... this is good UX
         rank_zero_info(f'CUDA_VISIBLE_DEVICES: [{os.environ["CUDA_VISIBLE_DEVICES"]}]')

-    def __set_random_port(self):
+    def set_random_port(self):
         """
         When running DDP NOT managed by SLURM, the ports might collide
         """
@@ -384,7 +384,6 @@ def __set_random_port(self):
         os.environ['MASTER_PORT'] = str(default_port)

     def spawn_ddp_children(self, model):
-        self.__set_random_port()
         port = os.environ['MASTER_PORT']

         master_address = '127.0.0.1' if 'MASTER_ADDR' not in os.environ else os.environ['MASTER_ADDR']
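
Only the docstring and the final MASTER_PORT assignment of this helper are visible in the hunks above. As a hedged sketch of what a port picker for DDP runs not managed by SLURM can look like (the PID-seeded choice and the port range are assumptions for illustration, not the library's actual logic):

import os
import random

def set_random_port():
    """Pick a MASTER_PORT when none is set, so concurrent DDP jobs on one
    machine are less likely to collide (sketch, not the real implementation)."""
    try:
        default_port = os.environ['MASTER_PORT']
    except KeyError:
        # Seed with the process id so two jobs started at the same time tend
        # to choose different ports.
        default_port = random.Random(os.getpid()).randint(10000, 19999)
    os.environ['MASTER_PORT'] = str(default_port)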

pytorch_lightning/trainer/trainer.py

Lines changed: 3 additions & 3 deletions
@@ -897,18 +897,19 @@ def fit(
             self.ddp_train(task, model)

         elif self.distributed_backend == 'cpu_ddp':
-            self.__set_random_port()
+            self._set_random_port
             self.model = model
             mp.spawn(self.ddp_train, nprocs=self.num_processes, args=(model,))

         elif self.distributed_backend == 'ddp_spawn':
-            self.__set_random_port()
+            self._set_random_port
             model.share_memory()

             # spin up peers
             mp.spawn(self.ddp_train, nprocs=self.num_processes, args=(model, ))

         elif self.distributed_backend == 'ddp':
+            self._set_random_port
             self.spawn_ddp_children(model)

         # 1 gpu or dp option triggers training using DP module
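
The 'cpu_ddp' and 'ddp_spawn' branches share one pattern: settle on a MASTER_PORT in the parent process, then let torch.multiprocessing.spawn start num_processes workers, each of which receives its process index plus the model. A standalone sketch of that pattern (train_worker stands in for Trainer.ddp_train and the port value is a placeholder):

import os
import torch.multiprocessing as mp

def train_worker(process_idx, model):
    # mp.spawn passes the worker index as the first positional argument;
    # every worker sees the MASTER_PORT exported by the parent process.
    print(f'worker {process_idx} using MASTER_PORT={os.environ["MASTER_PORT"]}')

if __name__ == '__main__':
    os.environ.setdefault('MASTER_PORT', '12910')        # stand-in for the set_random_port() step
    mp.spawn(train_worker, nprocs=2, args=(object(),))   # object() is a placeholder for the model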
@@ -1273,7 +1274,6 @@ class _PatchDataLoader(object):
         dataloader: Dataloader object to return when called.

     """
-
     def __init__(self, dataloader: Union[List[DataLoader], DataLoader]):
         self.dataloader = dataloader

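
For context, _PatchDataLoader is a small callable wrapper; the hunk only shows its docstring and __init__, so the __call__ below is an assumption based on the documented behaviour ("Dataloader object to return when called"):

from typing import List, Union
from torch.utils.data import DataLoader

class PatchDataLoaderSketch:
    """Hedged sketch of the callable-wrapper idea, not the library's actual class."""

    def __init__(self, dataloader: Union[List[DataLoader], DataLoader]) -> None:
        self.dataloader = dataloader

    def __call__(self, *args, **kwargs) -> Union[List[DataLoader], DataLoader]:
        # Return the stored dataloader regardless of the call arguments.
        return self.dataloader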
