Skip to content

Commit

Permalink
TST: parameter batch_size was not passed into DataLoader
Browse files Browse the repository at this point in the history
This mistake made batch size of every data loader become the
default value: 1. Though it does not affect the correctness of
all test cases, it still needs to be corrected.

However, `batch_size` of a `DataLoader` cannot be modified
after it is initialized. Therefore, we can only determine it
while generating tasks for test, and that's why `batch_size`
and `steps` are moved to the signature of `__init__` of each
`Task`.
  • Loading branch information
NaleRaphael committed Jun 6, 2020
1 parent 227fc53 commit 1c549ec
Showing 1 changed file with 42 additions and 20 deletions.
62 changes: 42 additions & 20 deletions tests/task.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,55 +50,77 @@ def __post_init__(self):


class XORTask(BaseTask):
    """Task wrapping an XOR dataset for LR-finder tests.

    `batch_size` must be supplied at construction time because a
    `DataLoader`'s batch size cannot be changed after it is created;
    it is therefore forwarded explicitly to every `DataLoader` below.

    Args:
        batch_size: samples per batch, passed to each `DataLoader`.
        steps: number of batches; total dataset size is
            `batch_size * steps`.
        validate: when True, split the dataset 90/10 into train and
            validation loaders; otherwise only a train loader is built.
    """

    def __init__(self, batch_size=8, steps=100, validate=False):
        super(XORTask, self).__init__()
        n_total = batch_size * steps
        dataset = XORDataset(n_total)
        if validate:
            # 90/10 train/validation split over the generated samples.
            n_train = int(n_total * 0.9)
            self.train_loader = DataLoader(
                Subset(dataset, range(n_train)),
                batch_size=batch_size
            )
            self.val_loader = DataLoader(
                Subset(dataset, range(n_train, n_total)),
                batch_size=batch_size
            )
        else:
            self.train_loader = DataLoader(dataset, batch_size=batch_size)
            self.val_loader = None

        self.batch_size = batch_size
        self.model = LinearMLP([8, 4, 1])
        self.optimizer = optim.SGD(self.model.parameters(), lr=1e-5)
        self.criterion = nn.MSELoss()
        # NOTE(review): hard-codes CUDA; this test task presumably runs
        # on a GPU machine — confirm against the test harness.
        self.device = torch.device("cuda")


class ExtraXORTask(BaseTask):
    """Task wrapping an XOR dataset that yields extra target dimensions.

    Identical in structure to `XORTask` but backed by `ExtraXORDataset`
    with `extra_dims=2`. `batch_size` is taken at construction time
    because a `DataLoader`'s batch size cannot be modified afterwards,
    and is forwarded to every `DataLoader` created here.

    Args:
        batch_size: samples per batch, passed to each `DataLoader`.
        steps: number of batches; total dataset size is
            `batch_size * steps`.
        validate: when True, split the dataset 90/10 into train and
            validation loaders; otherwise only a train loader is built.
    """

    def __init__(self, batch_size=8, steps=100, validate=False):
        super(ExtraXORTask, self).__init__()
        n_total = batch_size * steps
        dataset = ExtraXORDataset(n_total, extra_dims=2)
        if validate:
            # 90/10 train/validation split over the generated samples.
            n_train = int(n_total * 0.9)
            self.train_loader = DataLoader(
                Subset(dataset, range(n_train)),
                batch_size=batch_size
            )
            self.val_loader = DataLoader(
                Subset(dataset, range(n_train, n_total)),
                batch_size=batch_size
            )
        else:
            self.train_loader = DataLoader(dataset, batch_size=batch_size)
            self.val_loader = None

        self.batch_size = batch_size
        self.model = LinearMLP([8, 4, 1])
        self.optimizer = optim.SGD(self.model.parameters(), lr=1e-5)
        self.criterion = nn.MSELoss()
        # NOTE(review): hard-codes CUDA; this test task presumably runs
        # on a GPU machine — confirm against the test harness.
        self.device = torch.device("cuda")


class DiscriminativeLearningRateTask(BaseTask):
def __init__(self, validate=False):
def __init__(self, batch_size=8, steps=100, validate=False):
super(DiscriminativeLearningRateTask, self).__init__()
bs, steps = 8, 64
dataset = XORDataset(bs * steps)
n_total = batch_size * steps
dataset = XORDataset(n_total)
if validate:
self.train_loader = DataLoader(Subset(dataset, range(steps - bs)))
self.val_loader = DataLoader(Subset(dataset, range(steps - bs, steps)))
n_train = int(n_total * 0.9)
self.train_loader = DataLoader(
Subset(dataset, range(n_train)),
batch_size=batch_size
)
self.val_loader = DataLoader(
Subset(dataset, range(n_train, n_total)),
batch_size=batch_size
)
else:
self.train_loader = DataLoader(dataset)
self.train_loader = DataLoader(dataset, batch_size=batch_size)
self.val_loader = None

dataset = XORDataset(128)
self.batch_size = batch_size
self.model = LinearMLP([8, 4, 1])
self.optimizer = optim.SGD(
[
Expand Down

0 comments on commit 1c549ec

Please sign in to comment.