8 changes: 7 additions & 1 deletion pytorch_lightning/trainer/supporters.py
@@ -112,6 +112,12 @@ def _agg_memory(self, how: str):

 @dataclass
 class SharedCycleIteratorState:
+    """A state shared between all CycleIterators in a CombinedLoader.
+
+    With a shared state, the iterators can decide to terminate based on the state of all others.
+    If the mode is *max_size_cycle*, all iterators need to have finished before the combined loading is considered
+    finished, and otherwise any iterator finishing early will lead to all iterators ending early.
+    """

     mode: str = "max_size_cycle"
     dataloaders: List[DataLoader] = field(default_factory=lambda: [])
@@ -126,7 +132,7 @@ def reset(self) -> None:
     @property
     def done(self) -> bool:
         if not self.has_reset:
-            raise MisconfigurationException("Please, call reset once all dataloaders have been added.")
+            raise MisconfigurationException("Please call reset once all dataloaders have been added.")
         if len(self.dataloaders) == 1:
             return False
         decision_fn = all if self.mode == "max_size_cycle" else any
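For context, the `done` logic in this diff reduces to a single `all`/`any` dispatch on the cycling mode. The sketch below is a minimal, hypothetical stand-in: `_SketchState` and its `done_flags` field are invented for illustration, while the real `SharedCycleIteratorState` tracks the actual `DataLoader` objects. Only the termination rule described in the new docstring is reproduced:

```python
from dataclasses import dataclass, field
from typing import List


@dataclass
class _SketchState:
    """Hypothetical stand-in for SharedCycleIteratorState, reduced to the
    termination decision only."""

    mode: str = "max_size_cycle"
    # One exhaustion flag per wrapped iterator (invented for illustration;
    # the real class derives this from its registered dataloaders).
    done_flags: List[bool] = field(default_factory=list)

    @property
    def done(self) -> bool:
        # Mirrors the diff: a single dataloader never terminates via the
        # shared state; it simply raises StopIteration on its own.
        if len(self.done_flags) == 1:
            return False
        # "max_size_cycle": finished only when *all* iterators are exhausted.
        # Any other mode: finished as soon as *any* iterator is exhausted.
        decision_fn = all if self.mode == "max_size_cycle" else any
        return decision_fn(self.done_flags)


print(_SketchState("max_size_cycle", [True, False]).done)  # False: keep cycling
print(_SketchState("min_size", [True, False]).done)        # True: stop early
```

Encoding the mode as a choice between the built-ins `all` and `any` keeps both behaviours in a single code path, which is presumably why the diff assigns `decision_fn` instead of branching on the mode string.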