From 503f53f9b877a8f6c73d92adb72b4e4d8064670a Mon Sep 17 00:00:00 2001
From: Kai Christensen <80430764+mkaic@users.noreply.github.com>
Date: Mon, 17 Oct 2022 14:35:45 -0700
Subject: [PATCH] Honor the --skip_scheduler flag

`src/training/params.py` currently defines a `--skip_scheduler` flag that is
not used anywhere in the OpenCLIP codebase. This patch fixes that by making
`--skip_scheduler` actually disable the learning rate scheduler during
training.
---
 src/training/train.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/training/train.py b/src/training/train.py
index 028fa31d6..c89d7f2a3 100644
--- a/src/training/train.py
+++ b/src/training/train.py
@@ -68,7 +68,9 @@ def train_one_epoch(model, data, epoch, optimizer, scaler, scheduler, args, tb_w
     end = time.time()
     for i, batch in enumerate(dataloader):
         step = num_batches_per_epoch * epoch + i
-        scheduler(step)
+
+        if not args.skip_scheduler:
+            scheduler(step)
 
         images, texts = batch
         images = images.to(device=device, non_blocking=True)
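
Reviewer note: the sketch below is not part of the patch. It is a minimal, self-contained illustration of why guarding the call is sufficient: in OpenCLIP, `scheduler` is a plain callable that takes the global step and rewrites the optimizer's learning rate, so skipping the call simply leaves the LR at its base value. The flag definition and the toy linear-decay scheduler here are assumptions for illustration, not code copied from the repository.

```python
# Minimal sketch (assumed flag definition and toy scheduler, not repo code).
import argparse

import torch

parser = argparse.ArgumentParser()
# Assumed to mirror the boolean switch declared in src/training/params.py.
parser.add_argument("--skip_scheduler", action="store_true", default=False)
args = parser.parse_args(["--skip_scheduler"])

model = torch.nn.Linear(4, 4)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)


def linear_decay_scheduler(step, base_lr=1e-3, total_steps=1_000):
    """Toy step-based scheduler in the same call style as OpenCLIP's."""
    lr = base_lr * max(0.0, 1.0 - step / total_steps)
    for group in optimizer.param_groups:
        group["lr"] = lr


scheduler = linear_decay_scheduler

for step in range(10):
    if not args.skip_scheduler:  # the guard added by this patch
        scheduler(step)
    # ... forward pass, loss, backward, optimizer.step() would go here ...

# With --skip_scheduler set, the learning rate never moves off its base value.
print(optimizer.param_groups[0]["lr"])  # 0.001
```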