autopep8 fix
AutoPEP8 authored and actions-user committed May 23, 2020
1 parent 7d36ef9 commit b9d386a
Showing 3 changed files with 3 additions and 6 deletions.
1 change: 0 additions & 1 deletion ignite/distributed/__init__.py
@@ -1,4 +1,3 @@
 from ignite.distributed.auto import *
 from ignite.distributed.launcher import Parallel
 from ignite.distributed.utils import *
-
7 changes: 2 additions & 5 deletions ignite/distributed/auto.py
@@ -4,8 +4,8 @@

 import torch
 from torch.utils.data import DataLoader, Dataset
-from torch.utils.data.sampler import Sampler
 from torch.utils.data.distributed import DistributedSampler
+from torch.utils.data.sampler import Sampler

 from ignite.distributed import utils as idist

@@ -66,10 +66,7 @@ def auto_dataloader(dataset, **kwargs):
         )
         kwargs["pin_memory"] = False

-    dataloader = DataLoader(
-        dataset,
-        **kwargs
-    )
+    dataloader = DataLoader(dataset, **kwargs)

     if idist.has_xla_support:
         try:
1 change: 1 addition & 0 deletions ignite/distributed/launcher.py
@@ -143,6 +143,7 @@ def training(local_rank, config, **kwargs):
         master_port (int, optional): optional argument, master node port for torch native backends
             (`nccl`, `gloo`). Mandatory argument if ``master_addr`` is specified.
     """
+
     def __init__(
         self,
         backend: str = None,
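
The one-line addition in launcher.py (a blank line between the class docstring and __init__) matches pycodestyle's E301 check ("expected 1 blank line, got 0"), which autopep8 fixes by default. Below is a minimal sketch of reproducing that kind of fix through autopep8's documented fix_code API, assuming default options; the Parallel class body in the snippet is a hypothetical stand-in, not the actual ignite source.

import autopep8

# Hypothetical snippet with an E301 violation: no blank line between
# the class docstring and the first method definition.
source = (
    "class Parallel:\n"
    '    """Distributed launcher."""\n'
    "    def __init__(self, backend=None):\n"
    "        self.backend = backend\n"
)

# fix_code() returns the reformatted source as a string; with default
# options it inserts the missing blank line after the docstring,
# mirroring the launcher.py change above.
print(autopep8.fix_code(source))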
