Fix seeding random module in DataLoader (#7886)
* fix seeding random module

* make base seed int

* follow 0.4 idiom

* add a test for random seeding
thuyen authored and soumith committed May 29, 2018
1 parent 65f8465 commit 146b951
Showing 2 changed files with 24 additions and 1 deletion.
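
Background on the bug (a summary inferred from the commit message and diff, not text from the commit itself): in PyTorch 0.4, indexing a one-element tensor with [0] returns a zero-dimensional tensor rather than a Python number, so the base seed handed to worker processes was a tensor. On the Python interpreters current at the time, random.seed() fell back to hash(a) for unsupported argument types (later interpreters deprecate and then reject such seeds), and tensor hashes follow object identity rather than value, so Python's random module was seeded differently on every run even under a fixed torch.manual_seed. A minimal sketch of the mechanism:

import torch

# Two zero-dim tensors holding the same value, drawn under the same seed:
torch.manual_seed(2018)
s1 = torch.LongTensor(1).random_()[0]    # zero-dim tensor in 0.4+, not an int
torch.manual_seed(2018)
s2 = torch.LongTensor(1).random_()[0]

print(torch.equal(s1, s2))    # True: equal values
print(hash(s1) == hash(s2))   # False: tensor hashes follow object identity
# random.seed(s1) therefore seeded from an identity hash that changes on
# every run; .item() extracts a plain int, which seeds by value.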
test/test_utils.py: 23 additions, 0 deletions
@@ -116,6 +116,15 @@ def __len__(self):
         return 10
 
 
+class RandomDatasetMock(object):
+
+    def __getitem__(self, index):
+        return torch.tensor([torch.rand(1).item(), random.uniform(0, 1)])
+
+    def __len__(self):
+        return 1000
+
+
 class TestCheckpoint(TestCase):
 
     # Test whether checkpoint is being triggered or not. For this, we check
@@ -233,6 +242,20 @@ def setUp(self):
         self.dataset = torch.randn(5, 3, 3, 2)
         self.batch_size = 3
 
+    def test_random_seed(self):
+        def run():
+            dataloader = torch.utils.data.DataLoader(RandomDatasetMock(),
+                                                     batch_size=2,
+                                                     num_workers=4,
+                                                     shuffle=True)
+            return next(iter(dataloader))
+
+        torch.manual_seed(2018)
+        x1 = run()
+        torch.manual_seed(2018)
+        x2 = run()
+        self.assertEqual(x1, x2)
+
     def test_single_keep(self):
         dataloader = torch.utils.data.DataLoader(self.dataset,
                                                  batch_size=self.batch_size,
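
Outside the unittest harness, the new test amounts to the following standalone check (a sketch; RandomDatasetMock is copied from the hunk above). Each sample draws once from torch's RNG and once from Python's random module, so the comparison catches a failure to seed either generator reproducibly in the workers:

import random
import torch

class RandomDatasetMock(object):

    def __getitem__(self, index):
        # One draw from each generator, so both seeding paths are exercised.
        return torch.tensor([torch.rand(1).item(), random.uniform(0, 1)])

    def __len__(self):
        return 1000

def run():
    loader = torch.utils.data.DataLoader(RandomDatasetMock(), batch_size=2,
                                         num_workers=4, shuffle=True)
    return next(iter(loader))

if __name__ == "__main__":  # guard needed where workers start via spawn
    torch.manual_seed(2018)
    x1 = run()
    torch.manual_seed(2018)
    x2 = run()
    print(torch.equal(x1, x2))  # True with the fix; flaky before it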
torch/utils/data/dataloader.py: 1 addition, 1 deletion
Expand Up @@ -246,7 +246,7 @@ def __init__(self, loader):

self.sample_iter = iter(self.batch_sampler)

base_seed = torch.LongTensor(1).random_()[0]
base_seed = torch.LongTensor(1).random_().item()

if self.num_workers > 0:
self.worker_init_fn = loader.worker_init_fn
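
The one-line change matters because of how base_seed is consumed downstream: in the 0.4-era worker loop, worker i receives base_seed + i and seeds both generators with it. A paraphrased sketch of that path (names and signature are approximate, assumed from the surrounding code rather than shown in this diff):

import random
import torch

def seed_worker(base_seed, worker_id):
    # Paraphrase of the seeding at the top of the 0.4-era _worker_loop; in the
    # real DataLoader each call happens inside a separate worker process.
    seed = base_seed + worker_id
    random.seed(seed)        # reproducible only when seed is a plain int
    torch.manual_seed(seed)

base_seed = torch.LongTensor(1).random_().item()  # the fixed line: now an int
for worker_id in range(4):                        # e.g. num_workers=4
    seed_worker(base_seed, worker_id)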
