diff --git a/src/sparseml/pytorch/utils/exporter.py b/src/sparseml/pytorch/utils/exporter.py
index 4d7e44b51ac..d54875a98ea 100644
--- a/src/sparseml/pytorch/utils/exporter.py
+++ b/src/sparseml/pytorch/utils/exporter.py
@@ -460,7 +460,7 @@ def export_onnx(
         https://pytorch.org/docs/stable/onnx.html
     """
     if _PARSED_TORCH_VERSION >= version.parse("1.10.0") and opset < 13 and convert_qat:
-        warnings.warn(
+        raise ValueError(
             "Exporting onnx with QAT and opset < 13 may result in errors. "
             "Please use opset>=13 with QAT. "
             "See https://github.com/pytorch/pytorch/issues/77455 for more info. "
diff --git a/src/sparseml/transformers/export.py b/src/sparseml/transformers/export.py
index 02a8bb69c61..0ef3f82c7d8 100644
--- a/src/sparseml/transformers/export.py
+++ b/src/sparseml/transformers/export.py
@@ -336,8 +336,16 @@ def export_transformer_to_onnx(
         recipe_args=None,
         teacher=None,
     )
-
-    applied = trainer.apply_manager(epoch=math.inf, checkpoint=None)
+    try:
+        applied = trainer.apply_manager(epoch=math.inf, checkpoint=None)
+    except ValueError as e:
+        raise ValueError(
+            f"Failed to apply the recipe to the "
+            f"model with the exception message:\n{e}\n"
+            "It is possible that there are missing modules "
+            "specific to the model that were not properly loaded. "
+            "A possible solution would be setting the --trust_remote_code flag."
+        )
    if not applied:
        _LOGGER.warning(