From 947d03c5238c68f824212a550adcf315391ffc0b Mon Sep 17 00:00:00 2001
From: Eldar Kurtic
Date: Thu, 6 May 2021 14:53:53 +0200
Subject: [PATCH] Fix unused user-specified optimizer args

---
 integrations/transformers/run_distill_qa.py | 2 +-
 integrations/transformers/run_qa.py         | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/integrations/transformers/run_distill_qa.py b/integrations/transformers/run_distill_qa.py
index 4c9fc69e359..4474ee9e60f 100644
--- a/integrations/transformers/run_distill_qa.py
+++ b/integrations/transformers/run_distill_qa.py
@@ -746,7 +746,7 @@ def prepare_validation_features(examples):
     # Start SparseML Integration
     ####################################################################################
     if training_args.do_train:
-        optim = load_optimizer(student_model, TrainingArguments)
+        optim = load_optimizer(student_model, training_args)
         steps_per_epoch = math.ceil(len(train_dataset) / (training_args.per_device_train_batch_size * training_args._n_gpu))
         manager = ScheduledModifierManager.from_yaml(data_args.nm_prune_config)
         training_args.num_train_epochs = float(manager.modifiers[0].end_epoch)
diff --git a/integrations/transformers/run_qa.py b/integrations/transformers/run_qa.py
index a9ad798ac89..2621c65ab62 100644
--- a/integrations/transformers/run_qa.py
+++ b/integrations/transformers/run_qa.py
@@ -716,7 +716,7 @@ def prepare_validation_features(examples):
     # Start SparseML Integration
     ####################################################################################
     if training_args.do_train:
-        optim = load_optimizer(model, TrainingArguments)
+        optim = load_optimizer(model, training_args)
         steps_per_epoch = math.ceil(len(train_dataset) / (training_args.per_device_train_batch_size * training_args._n_gpu))
         manager = ScheduledModifierManager.from_yaml(data_args.nm_prune_config)
         training_args.num_train_epochs = float(manager.max_epochs)
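
Note on the bug: `load_optimizer` was being handed the `TrainingArguments` *class* instead of the parsed `training_args` *instance*. Because HuggingFace's `TrainingArguments` is a dataclass, field defaults are readable as class attributes, so the buggy call still ran but silently used the defaults (e.g. learning_rate=5e-5) rather than the values the user passed on the command line. The sketch below is illustrative only: the `TrainingArguments` stand-in and the `load_optimizer` body are hypothetical simplifications, assuming the real helper reads hyperparameters off whatever args object it receives.

    from dataclasses import dataclass

    import torch


    @dataclass
    class TrainingArguments:  # minimal stand-in for transformers.TrainingArguments
        learning_rate: float = 5e-5
        weight_decay: float = 0.0


    def load_optimizer(model, args):
        # Hypothetical helper mirroring the call sites in the patch: it reads
        # hyperparameters off whatever object it is handed, class or instance.
        return torch.optim.AdamW(
            model.parameters(),
            lr=args.learning_rate,
            weight_decay=args.weight_decay,
        )


    model = torch.nn.Linear(4, 2)
    training_args = TrainingArguments(learning_rate=3e-4, weight_decay=0.01)

    # Buggy call: class attributes yield the dataclass defaults, so the
    # user-specified 3e-4 is silently ignored.
    buggy = load_optimizer(model, TrainingArguments)
    assert buggy.defaults["lr"] == 5e-5

    # Fixed call: the instance carries the user-specified values.
    fixed = load_optimizer(model, training_args)
    assert fixed.defaults["lr"] == 3e-4

The fix is the same one-token change at both call sites: pass the parsed instance so the optimizer actually sees the user's hyperparameters.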