Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 13 additions & 7 deletions olive/passes/onnx/transformer_optimization.py
Original file line number Diff line number Diff line change
Expand Up @@ -157,13 +157,12 @@ def _run_for_config(

if model.model_attributes:
model_config = model.model_attributes
input_model_type = model_config.get("model_type", "")
_model_type = MODEL_TYPE_MAPPING.get(input_model_type, input_model_type)
run_config["model_type"] = run_config["model_type"] or _model_type
assert run_config["model_type"] in transformers_optimizer.MODEL_TYPES, (
f"Unsupported model type: {run_config['model_type']}, please select one from "
"{transformers_optimizer.MODEL_TYPES} which need to be set under OrtTransformersOptimization.config"
)
input_model_type = model_config.get("model_type")
Comment thread
guotuofeng marked this conversation as resolved.
if input_model_type:
model_type = MODEL_TYPE_MAPPING.get(input_model_type, input_model_type)
else:
model_type = None
run_config["model_type"] = run_config["model_type"] or model_type
if run_config["num_heads"] == 0:
for num_heads_name in NUM_HEADS_NAMES:
if num_heads_name in model_config:
Expand All @@ -175,6 +174,13 @@ def _run_for_config(
run_config["hidden_size"] = model_config[hidden_size_name]
break

if run_config["model_type"] is None or run_config["model_type"] not in transformers_optimizer.MODEL_TYPES:
Comment thread
guotuofeng marked this conversation as resolved.
raise ValueError(
f"Unsupported model type: {run_config['model_type']}, please select one from "
f"[{', '.join(transformers_optimizer.MODEL_TYPES.keys())}] which need to be set under "
"OrtTransformersOptimization.config"
)

output_model_path = ONNXModel.resolve_path(os.path.join(output_model_path, os.path.basename(model.model_path)))

optimization_options = config["optimization_options"]
Expand Down
13 changes: 13 additions & 0 deletions test/unit_test/passes/onnx/test_transformer_optimization.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,3 +77,16 @@ def test_invalid_ep_config(use_gpu, fp16, accelerator_spec, tmp_path):
if not is_pruned:
output_folder = str(tmp_path / "onnx")
p.run(input_model, None, output_folder)


def test_transformer_optimization_invalid_model_type(tmp_path):
    """The pass should raise ValueError when no model_type is configured or derivable.

    The input ONNX model carries no ``model_type`` attribute and the pass config
    explicitly leaves it ``None``, so ``_run_for_config`` cannot resolve a valid
    transformer model type and must reject the run.
    """
    # setup
    input_model = get_onnx_model()
    config = {"model_type": None}
    config = OrtTransformersOptimization.generate_search_space(
        DEFAULT_CPU_ACCELERATOR, config, disable_search=True
    )
    p = OrtTransformersOptimization(DEFAULT_CPU_ACCELERATOR, config, True)
    output_folder = str(tmp_path / "onnx")

    # execute: keep pytest.raises scoped to the one call expected to raise,
    # so a ValueError thrown during setup cannot make the test pass spuriously
    with pytest.raises(ValueError):
        p.run(input_model, None, output_folder)