
Commit bdee1cd

awaelchli and Borda authored

update docs for "overfit_batches" (Lightning-AI#2324)

* update docs
* Apply suggestions from code review

Co-authored-by: Jirka Borovec <Borda@users.noreply.github.com>
1 parent 0f07381 commit bdee1cd

5 files changed, +11 −11 lines changed

docs/source/debugging.rst

+1-1
@@ -61,7 +61,7 @@ argument of :class:`~pytorch_lightning.trainer.trainer.Trainer`)
 
 .. testcode::
 
-    # use only 1% of training data (and use the same training Dataloader (with shuffle off) in val and test)
+    # use only 1% of training data (and use the same training dataloader (with shuffle off) in val and test)
     trainer = Trainer(overfit_batches=0.01)
 
     # or overfit a number of batches
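For reference, `overfit_batches` accepts both forms mentioned in the comment being edited; a minimal sketch, assuming the `Trainer` API of this release (the int value 10 is illustrative):

    from pytorch_lightning import Trainer

    # float: overfit on a fraction of the training data (here 1%)
    trainer = Trainer(overfit_batches=0.01)

    # int: overfit on a fixed number of training batches (here 10)
    trainer = Trainer(overfit_batches=10)

Either way, the same training data (with shuffling turned off) is reused for validation and testing.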

pytorch_lightning/trainer/__init__.py

+3-3
@@ -688,12 +688,12 @@ def on_train_end(self, trainer, pl_module):
 
 .. warning:: .. deprecated:: 0.8.0.
 
-    Use `overfit_batches`. Will remove 1.0.0.
+    Use `overfit_batches`. Will be removed in 0.10.0.
 
 overfit_batches
 ^^^^^^^^^^^^^^^
-Uses this much data of the training set. If will use the same training set for validation and testing.
-If the training Dataloaders(shuffle=True), Lightning will automatically disable it.
+Uses this much data of the training set. If nonzero, will use the same training set for validation and testing.
+If the training dataloaders have `shuffle=True`, Lightning will automatically disable it.
 
 Useful for quickly debugging or trying to overfit on purpose.
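To make the docstring concrete, a hedged sketch of the intended behavior; `MyModel` is a hypothetical `LightningModule` whose `train_dataloader` returns a shuffled `DataLoader`:

    from pytorch_lightning import Trainer

    model = MyModel()  # hypothetical module with DataLoader(..., shuffle=True)

    # validation and test loops will run on the same 10 training batches;
    # Lightning warns about the shuffled loader and disables shuffling
    trainer = Trainer(overfit_batches=10)
    trainer.fit(model)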

pytorch_lightning/trainer/data_loading.py

+3-3
@@ -265,12 +265,12 @@ def _reset_eval_dataloader(
 
             # when overfitting, the dataloader should not have sampler
             if self.overfit_batches > 0:
-                rank_zero_warn('You requested to overfit but enabled training Dataloader shuffling.'
+                rank_zero_warn('You requested to overfit but enabled training dataloader shuffling.'
                               ' We are turning it off for you.')
                dataloaders[loader_i] = self.replace_sampler(loader, SequentialSampler(loader.dataset))
 
            else:
-                rank_zero_warn(f'Your {mode}_dataloader has shuffle=True, it is best practice to turn'
+                rank_zero_warn(f'Your {mode}_dataloader has `shuffle=True`, it is best practice to turn'
                               ' this off for validation and test dataloaders.')
 
        if any([dl is None for dl in dataloaders]):
@@ -374,7 +374,7 @@ def determine_data_use_amount(self, overfit_batches: float) -> None:
        if overfit_batches > 0:
            if isinstance(overfit_batches, float) and overfit_batches > 1:
                raise ValueError('`overfit_batches` when used as a percentage must'
-                                f' be not 0.0 < x < 1.0 but got {overfit_batches:.3f}.')
+                                f' be in range 0.0 < x < 1.0 but got {overfit_batches:.3f}.')
        self.limit_train_batches = overfit_batches
        self.limit_val_batches = overfit_batches
        self.limit_test_batches = overfit_batches
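Outside of Lightning, the sampler swap in the first hunk amounts to rebuilding the loader around a `SequentialSampler`; a standalone sketch using only `torch.utils.data` (`replace_with_sequential_sampler` is an illustrative helper, not Lightning's internal `replace_sampler`):

    import torch
    from torch.utils.data import DataLoader, SequentialSampler, TensorDataset

    def replace_with_sequential_sampler(loader: DataLoader) -> DataLoader:
        # rebuild the loader with a deterministic, in-order sampler;
        # shuffle=True and an explicit sampler are mutually exclusive,
        # so the new loader simply omits shuffle
        return DataLoader(
            loader.dataset,
            batch_size=loader.batch_size,
            sampler=SequentialSampler(loader.dataset),
            num_workers=loader.num_workers,
        )

    dataset = TensorDataset(torch.arange(8).float())
    shuffled = DataLoader(dataset, batch_size=2, shuffle=True)
    ordered = replace_with_sequential_sampler(shuffled)
    print([batch[0].tolist() for batch in ordered])  # batches in dataset order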

pytorch_lightning/trainer/evaluation_loop.py

+2-2
@@ -22,7 +22,7 @@
 Set how much of the validation set to check
 -------------------------------------------
 
-If you don't want to check 100% of the validation set (for debugging or if it's huge), set this flag
+If you don't want to check 100% of the validation set (for debugging or if it's huge), set this flag.
 
 limit_val_batches will be overwritten by overfit_batches if `overfit_batches > 0`
 
@@ -37,7 +37,7 @@
 Set how much of the test set to check
 -------------------------------------
 
-If you don't want to check 100% of the test set (for debugging or if it's huge), set this flag
+If you don't want to check 100% of the test set (for debugging or if it's huge), set this flag.
 
 limit_test_batches will be overwritten by overfit_batches if `overfit_batches > 0`
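As a usage note, a small sketch of these flags and the override described above (the specific values are illustrative):

    from pytorch_lightning import Trainer

    # check only 25% of the validation set and 10 batches of the test set
    trainer = Trainer(limit_val_batches=0.25, limit_test_batches=10)

    # with overfit_batches > 0, both limits are overwritten by its value
    trainer = Trainer(overfit_batches=0.01, limit_val_batches=0.25)  # 0.25 is ignored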

pytorch_lightning/trainer/trainer.py

+2-2
@@ -190,12 +190,12 @@ def __init__(
         progress_bar_refresh_rate: How often to refresh progress bar (in steps). Value ``0`` disables progress bar.
             Ignored when a custom callback is passed to :paramref:`~Trainer.callbacks`.
 
-        overfit_batches: Overfit a percent of training data (float) or a set number of batches (int).
+        overfit_batches: Overfit a percent of training data (float) or a set number of batches (int). Default: 0.0
 
         overfit_pct:
             .. warning:: .. deprecated:: 0.8.0
 
-            Use `overfit_batches` instead. Will remove 0.10.0.
+            Use `overfit_batches` instead. Will be removed in 0.10.0.
 
         track_grad_norm: -1 no tracking. Otherwise tracks that p-norm. May be set to 'inf' infinity-norm.
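For the deprecation above, the migration is a one-line rename; a sketch (float values keep the same meaning):

    from pytorch_lightning import Trainer

    # deprecated since 0.8.0, to be removed in 0.10.0
    trainer = Trainer(overfit_pct=0.01)

    # replacement
    trainer = Trainer(overfit_batches=0.01)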
