From 9457df6f8ec0d414ff56d2294c9ba9b220e4e667 Mon Sep 17 00:00:00 2001
From: Matt Watson
Date: Mon, 9 May 2022 13:50:24 -0700
Subject: [PATCH] Add a call to repeat during pretraining

We would like our run to continue looping without exhausting our data.
---
 examples/bert/run_pretraining.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/examples/bert/run_pretraining.py b/examples/bert/run_pretraining.py
index 5bfaf723cf..f9c95a1290 100644
--- a/examples/bert/run_pretraining.py
+++ b/examples/bert/run_pretraining.py
@@ -382,6 +382,7 @@ def main(_):
         num_parallel_calls=tf.data.experimental.AUTOTUNE,
     )
     dataset = dataset.batch(FLAGS.batch_size, drop_remainder=True)
+    dataset = dataset.repeat()
 
     # Create a BERT model the input config.
     model = BertModel(