diff --git a/src/transformers/sparse.py b/src/transformers/sparse.py
index 60f120e44cbb..a2a6c2497d87 100644
--- a/src/transformers/sparse.py
+++ b/src/transformers/sparse.py
@@ -111,17 +111,15 @@ def create_scheduler(self, num_training_steps: int):
             # scheduler already set
             return
 
-        if self.manager.learning_rate_modifiers:
+        if self.manager is not None and self.manager.learning_rate_modifiers:
             # allow SparseML to manage LR and set a dummy scheduler
-            self.lr_scheduler = torch.optim.lr_scheduler.LambdaLR(
-                self.optimizer, lambda _: 1.0, -1
-            )
+            self.lr_scheduler = torch.optim.lr_scheduler.LambdaLR(self.optimizer, lambda _: 1.0, -1)
         else:
             # default scheduler
             super().create_scheduler(num_training_steps)
 
     def qat_active(self, epoch: int):
-        if not self.manager.quantization_modifiers:
+        if self.manager is None or not self.manager.quantization_modifiers:
            return False
 
         qat_start = min([mod.start_epoch for mod in self.manager.quantization_modifiers])
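
A minimal standalone sketch (the class and names below are hypothetical, not part of the patch) of the behavior the guard enables: when no SparseML recipe is supplied, `manager` stays `None`, and the unguarded `self.manager.quantization_modifiers` access would raise `AttributeError` instead of simply reporting that QAT is inactive.

# sketch.py -- hypothetical example, not code from the patch
class SketchTrainer:
    def __init__(self, manager=None):
        # with no SparseML recipe configured, manager is None
        self.manager = manager

    def qat_active(self, epoch: int) -> bool:
        # same guard as the patch: no manager (or no quantization modifiers) means no QAT
        if self.manager is None or not self.manager.quantization_modifiers:
            return False
        qat_start = min(mod.start_epoch for mod in self.manager.quantization_modifiers)
        return epoch >= qat_start

print(SketchTrainer().qat_active(0))  # prints False rather than raising AttributeError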