From 07fb9c6e67682cd81403c9e8e653002ec1a635af Mon Sep 17 00:00:00 2001 From: Vaibhav Singh Date: Tue, 17 Aug 2021 07:02:41 -0700 Subject: [PATCH] Set batch size to 4 to prevent OOM due to dynamic batch sizing (#3781) Summary: ## What does this PR do? Fixes OOM which happens on TPUs due to dynamic batching exceeding the maximum batch size a single core can work with. Pull Request resolved: https://github.com/pytorch/fairseq/pull/3781 Reviewed By: wnhsu Differential Revision: D30327091 Pulled By: alexeib fbshipit-source-id: 0ebe6b18329fa05d359083fa8ac54aba7b48bc53 --- examples/wav2vec/config/pretraining/wav2vec2_large_librivox.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/wav2vec/config/pretraining/wav2vec2_large_librivox.yaml b/examples/wav2vec/config/pretraining/wav2vec2_large_librivox.yaml index bee41157a9..3192ce4cba 100644 --- a/examples/wav2vec/config/pretraining/wav2vec2_large_librivox.yaml +++ b/examples/wav2vec/config/pretraining/wav2vec2_large_librivox.yaml @@ -18,6 +18,7 @@ task: normalize: true dataset: + batch_size: 4 num_workers: 6 max_tokens: 1200000 skip_invalid_size_inputs_valid_test: true