Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 18 additions & 15 deletions infscale/module/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ def collate_fn(examples):
mmd.trace_inputs = trace_inputs

self.model_group = mmd.model_group
self._batch_list: list[Tensor | None] = []
self._curr_batch: Tensor = None

def configure(
self, micro_batch_size: int, device: torch.device, in_memory: bool, replay: int
Expand All @@ -133,8 +133,9 @@ def _inner_send_b2d(batch):

if not self._in_memory:
self._send_batch_to_device = _inner_send_b2d
batch = next(self.data_iter)
self._batch_list.append(batch)
# set the first batch to _curr_batch so that the end of replay can
# be checked at the same time when the last batch is returned
self._curr_batch = next(self.data_iter)
return

# do nothing in case of in-memory loading
Expand All @@ -147,8 +148,10 @@ def _inner_send_b2d(batch):
self.batches.append(batch)

self.data_iter = iter(self.batches)
batch = next(self.data_iter)
self._batch_list.append(batch)

# set the first batch to _curr_batch so that the end of replay can
# be checked at the same time when the last batch is returned
self._curr_batch = next(self.data_iter)

def _handle_dataset_playback(self) -> Tensor | None:
if self._replay == 0:
Expand All @@ -166,20 +169,20 @@ def _handle_dataset_playback(self) -> Tensor | None:

def next_batch(self) -> tuple[Tensor, bool]:
        """Return next data tensor and bool if last batch."""
# take a batch to return
curr_batch = self._curr_batch
# noop for in-memory case; otherwise, load batch to a correct device
self._send_batch_to_device(curr_batch)

# load a new batch to _curr_batch
try:
batch = next(self.data_iter)
self._batch_list.append(batch)
self._curr_batch = next(self.data_iter)
except StopIteration:
batch = self._handle_dataset_playback()
self._batch_list.append(batch)

batch = self._batch_list.pop(0)
# noop for in-memory case; otherwise, load batch to a correct device
self._send_batch_to_device(batch)
self._curr_batch = self._handle_dataset_playback()

is_last = self._batch_list[0] is None
is_last = self._curr_batch is None

return batch, is_last
return curr_batch, is_last

@staticmethod
def create_image_dataset(
Expand Down