Weekly test current version_converter with all models from ONNX model Zoo (onnx#4040)

* test upgrade

Signed-off-by: Chun-Wei Chen <jacky82226@gmail.com>

* add config and version_converter test

Signed-off-by: Chun-Wei Chen <jacky82226@gmail.com>

* fix lint

Signed-off-by: Chun-Wei Chen <jacky82226@gmail.com>

* fix mypy

Signed-off-by: Chun-Wei Chen <jacky82226@gmail.com>

Signed-off-by: Chun-Wei Chen <jacky82226@gmail.com>
jcwchen authored and Bjarke Roune committed May 6, 2023
1 parent 0e96343 commit c08a516
Showing 2 changed files with 80 additions and 26 deletions.
37 changes: 21 additions & 16 deletions workflow_scripts/config.py
@@ -1,17 +1,22 @@
# SPDX-License-Identifier: Apache-2.0
# Some checker failures already exist in the weekly CI.
# Skip those models in test_model_zoo.py for now.
# TODO: fix these checker failures
SKIP_CHECKER_MODELS = {
"vision/classification/alexnet/model/bvlcalexnet-3.onnx", # opset1 typeinference function missing
"vision/classification/caffenet/model/caffenet-3.onnx", # opset1 typeinference function missing
"vision/classification/densenet-121/model/densenet-3.onnx", # opset1 typeinference function missing
"vision/classification/inception_and_googlenet/inception_v1/model/inception-v1-3.onnx", # opset1 typeinference function missing
"vision/classification/inception_and_googlenet/inception_v2/model/inception-v2-3.onnx", # opset1 typeinference function missing
"vision/classification/rcnn_ilsvrc13/model/rcnn-ilsvrc13-3.onnx", # opset1 typeinference function missing
"vision/classification/resnet/model/resnet50-caffe2-v1-3.onnx", # opset1 typeinference function missing
"vision/classification/shufflenet/model/shufflenet-3.onnx", # opset1 typeinference function missing
"vision/classification/squeezenet/model/squeezenet1.0-3.onnx", # opset1 typeinference function missing
"vision/classification/vgg/model/vgg19-caffe2-3.onnx", # opset1 typeinference function missing
"vision/classification/zfnet-512/model/zfnet512-3.onnx",
} # opset1 typeinference function missing

# (1) TODO: Fix https://github.com/onnx/onnx/issues/4101
# to solve the version conversion failure from Softmax-12 to Softmax-13:
# version_converter/adapters/softmax_12_13.h:56: adapt_softmax_12_13:
# Assertion `target_shape.size() != 0` failed:
# Version conversion for Softmax failed because input shape is unknown.


SKIP_VERSION_CONVERTER_MODELS = {
"vision/classification/vgg/model/vgg19-bn-7.onnx", # version_converter/adapters/transformers.h:30: operator(): Assertion `node->i(attr) == value` failed: Attribute spatial must have value 1
"vision/classification/vgg/model/vgg16-bn-7.onnx", # version_converter/adapters/transformers.h:30: operator(): Assertion `node->i(attr) == value` failed: Attribute spatial must have value 1
"vision/object_detection_segmentation/ssd/model/ssd-12.onnx", # (1) Softmax 12 to 13 failure
"vision/object_detection_segmentation/mask-rcnn/model/MaskRCNN-12.onnx", # (1) Softmax 12 to 13 failure
"vision/object_detection_segmentation/mask-rcnn/model/MaskRCNN-12-int8.onnx", # unordered_map::at: key not found
"text/machine_comprehension/t5/model/t5-encoder-12.onnx", # (1) Softmax 12 to 13 failure
"text/machine_comprehension/t5/model/t5-decoder-with-lm-head-12.onnx", # (1) Softmax 12 to 13 failure
"text/machine_comprehension/gpt2-bs/model/gpt2-lm-head-bs-12.onnx", # (1) Softmax 12 to 13 failure
"text/machine_comprehension/bert-squad/model/bertsquad-12.onnx", # (1) Softmax 12 to 13 failure
"vision/classification/inception_and_googlenet/inception_v2/model/inception-v2-6.onnx", # the converted opset 7 model cannot pass shape inference:
# [ShapeInferenceError] (op_type:Mul, node name: ): [ShapeInferenceError] Inferred shape and existing shape differ in dimension 0: (64) vs (1)
}
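
For reference, the Softmax-12 to Softmax-13 failure that the entries marked (1) work around can be reproduced outside the CI. The following is a minimal sketch, not part of this commit: it builds a toy Softmax model whose input shape is deliberately left unknown and asks the version converter for a one-step upgrade; whether this raises (and with which message) depends on the installed onnx version.

import onnx
from onnx import TensorProto, helper, version_converter

# Input and output carry no shape information, which is the condition
# described by skip-list comment (1).
x = helper.make_tensor_value_info("x", TensorProto.FLOAT, None)
y = helper.make_tensor_value_info("y", TensorProto.FLOAT, None)
softmax = helper.make_node("Softmax", ["x"], ["y"], axis=1)
graph = helper.make_graph([softmax], "softmax_unknown_shape", [x], [y])
model = helper.make_model(graph, opset_imports=[helper.make_opsetid("", 12)])
onnx.checker.check_model(model)

try:
    # Upgrade Softmax from opset 12 to 13; the adapter needs a known input shape.
    version_converter.convert_version(model, 13)
except Exception as e:
    print(f"version conversion failed: {e}")
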
69 changes: 59 additions & 10 deletions workflow_scripts/test_model_zoo.py
@@ -6,10 +6,12 @@
import sys
import time
from pathlib import Path
from typing import List

import config

import onnx
from onnx import version_converter

cwd_path = Path.cwd()

@@ -45,6 +47,11 @@ def run_lfs_prune():
print(f"LFS prune completed with return code= {result.returncode}")


def skip_model(error_message: str, skip_list: List[str], model_name: str):
print(error_message)
skip_list.append(model_name)


def main():
parser = argparse.ArgumentParser(description="Test settings")
# default: test all models in the repo
@@ -58,7 +65,7 @@ def main():
)
args = parser.parse_args()
parent_dir = []
# if not set, go throught each directory
# if not set, go through each directory
if not args.test_dir:
for file in os.listdir():
if os.path.isdir(file):
Expand All @@ -80,27 +87,69 @@ def main():
# run checker on each model
failed_models = []
failed_messages = []
skip_models = []
skip_models: List[str] = []
for model_path in model_list:
start = time.time()
model_name = model_path.split("/")[-1]
# if the model_path exists in the skip list, simply skip it
if model_path.replace("\\", "/") in config.SKIP_CHECKER_MODELS:
print(f"Skip model: {model_path}")
skip_models.append(model_path)
continue
print(f"-----------------Testing: {model_name}-----------------")
try:
pull_lfs_file(model_path)
model = onnx.load(model_path)
# stricter onnx.checker with onnx.shape_inference
onnx.checker.check_model(model, True)
# 1) Test onnx checker and shape inference
if model.opset_import[0].version < 4:
# Ancient opset version does not have defined shape inference function
onnx.checker.check_model(model)
print(f"[PASS]: {model_name} is checked by onnx checker. ")
else:
# stricter onnx.checker with onnx.shape_inference
onnx.checker.check_model(model, True)
print(
f"[PASS]: {model_name} is checked by onnx checker with shape_inference. "
)

# 2) Test onnx version converter with upgrade functionality
original_version = model.opset_import[0].version
latest_opset_version = onnx.helper.VERSION_TABLE[-1][2]
if original_version < latest_opset_version:
if (
model_path.replace("\\", "/")
in config.SKIP_VERSION_CONVERTER_MODELS
):
skip_model(
f"[SKIP]: model {model_path} is in the skip list for version converter. ",
skip_models,
model_name,
)
elif model_path.endswith("-int8.onnx"):
skip_model(
f"[SKIP]: model {model_path} is a quantized model using non-official ONNX domain. ",
skip_models,
model_name,
)
else:
converted = version_converter.convert_version(
model, original_version + 1
)
onnx.checker.check_model(converted, True)
print(
f"[PASS]: {model_name} can be version converted by original_version+1. "
)
elif original_version == latest_opset_version:
skip_model(
f"[SKIP]: {model_name} is already the latest opset version. ",
skip_models,
model_name,
)
else:
raise RuntimeError(
f"{model_name} has unsupported opset_version {original_version}. "
)

# remove the model to save space in CIs
if os.path.exists(model_path):
os.remove(model_path)
# clean git lfs cache
run_lfs_prune()
print(f"[PASS]: {model_name} is checked by onnx. ")

except Exception as e:
print(f"[FAIL]: {e}")
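The per-model flow added above can also be run by hand on a single downloaded model. Below is a minimal sketch under two assumptions: the onnx package is installed, and the model path (an illustrative Model Zoo-style path, not one this commit singles out) already exists locally.

import onnx
from onnx import version_converter

model_path = "vision/classification/mnist/model/mnist-8.onnx"  # illustrative path
model = onnx.load(model_path)

# 1) Checker with shape inference (the CI script above relaxes this for
# models older than opset 4).
onnx.checker.check_model(model, True)

# 2) One-step opset upgrade, then re-check the converted model.
original_version = model.opset_import[0].version
latest_opset_version = onnx.helper.VERSION_TABLE[-1][2]
if original_version < latest_opset_version:
    converted = version_converter.convert_version(model, original_version + 1)
    onnx.checker.check_model(converted, True)
    print(f"upgraded {model_path} from opset {original_version} to {original_version + 1}")
else:
    print(f"{model_path} is already at the latest default-domain opset")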
