
Commit

Stop throwing exception on python binding when multiple EP available (#17659)

Stop throwing the exception when the provider list is empty but there are multiple available EPs.
Other language bindings throw no exception at all; this change aligns the Python binding with them.

---------

Co-authored-by: Randy Shuai <rashuai@microsoft.com>
RandySheriffH and RandyShuai committed Sep 26, 2023
1 parent 7572e60 commit 1c245e6
Showing 2 changed files with 9 additions and 33 deletions.
12 changes: 2 additions & 10 deletions onnxruntime/python/onnxruntime_inference_collection.py
@@ -438,7 +438,7 @@ def _create_inference_session(self, providers, provider_options, disabled_optimi

# Tensorrt can fall back to CUDA if it's explicitly assigned. All others fall back to CPU.
if "TensorrtExecutionProvider" in available_providers:
- if any(
+ if providers and any(
provider == "CUDAExecutionProvider"
or (isinstance(provider, tuple) and provider[0] == "CUDAExecutionProvider")
for provider in providers
@@ -448,7 +448,7 @@ def _create_inference_session(self, providers, provider_options, disabled_optimi
self._fallback_providers = ["CPUExecutionProvider"]
# MIGraphX can fall back to ROCM if it's explicitly assigned. All others fall back to CPU.
elif "MIGraphXExecutionProvider" in available_providers:
- if any(
+ if providers and any(
provider == "ROCMExecutionProvider"
or (isinstance(provider, tuple) and provider[0] == "ROCMExecutionProvider")
for provider in providers
@@ -463,14 +463,6 @@ def _create_inference_session(self, providers, provider_options, disabled_optimi
providers, provider_options = check_and_normalize_provider_args(
providers, provider_options, available_providers
)
- if not providers and len(available_providers) > 1:
-     self.disable_fallback()
-     raise ValueError(
-         f"This ORT build has {available_providers} enabled. "
-         "Since ORT 1.9, you are required to explicitly set "
-         "the providers parameter when instantiating InferenceSession. For example, "
-         f"onnxruntime.InferenceSession(..., providers={available_providers}, ...)"
-     )

session_options = self._sess_options if self._sess_options else C.get_default_session_options()
if self._model_path:
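In practical terms, a caller on a build that ships several execution providers can now construct a session without passing the providers argument; instead of the ValueError deleted above, ONNX Runtime falls back to its default provider selection. A minimal sketch of the new behavior ("model.onnx" is a placeholder path and the CUDA-enabled build is an assumption, not part of this commit):

import onnxruntime as onnxrt

# Before this commit, omitting `providers` on a multi-EP build raised the ValueError
# shown in the deleted block above. After this commit the call simply succeeds and
# ONNX Runtime picks providers in its default priority order.
sess = onnxrt.InferenceSession("model.onnx")  # placeholder model path
print(sess.get_providers())  # e.g. ['CUDAExecutionProvider', 'CPUExecutionProvider'] on a CUDA build

# Passing providers explicitly is still supported and remains the way to pin behavior.
sess_cpu = onnxrt.InferenceSession("model.onnx", providers=["CPUExecutionProvider"])
print(sess_cpu.get_providers())  # ['CPUExecutionProvider']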
30 changes: 7 additions & 23 deletions onnxruntime/test/python/onnxruntime_test_python.py
@@ -80,11 +80,7 @@ def test_model_serialization(self):
so.log_severity_level = 1
so.logid = "TestModelSerialization"
so.optimized_model_filepath = "./PythonApiTestOptimizedModel.onnx"
- onnxrt.InferenceSession(
-     get_name("mul_1.onnx"),
-     sess_options=so,
-     providers=["CPUExecutionProvider"],
- )
+ onnxrt.InferenceSession(get_name("mul_1.onnx"), sess_options=so)
self.assertTrue(os.path.isfile(so.optimized_model_filepath))
os.remove(so.optimized_model_filepath)
except Fail as onnxruntime_error:
@@ -107,11 +103,7 @@ def test_model_serialization_with_external_initializers(self):
"session.optimized_model_external_initializers_file_name", external_initializers_file
)
so.add_session_config_entry("session.optimized_model_external_initializers_min_size_in_bytes", "100")
- onnxrt.InferenceSession(
-     get_name("mnist.onnx"),
-     sess_options=so,
-     providers=["CPUExecutionProvider"],
- )
+ onnxrt.InferenceSession(get_name("mnist.onnx"), sess_options=so)
self.assertTrue(os.path.isfile(so.optimized_model_filepath))
self.assertTrue(os.path.isfile(external_initializers_file))
os.remove(so.optimized_model_filepath)
@@ -137,7 +129,7 @@ def test_model_serialization_with_external_initializers_to_directory(self):
"session.optimized_model_external_initializers_file_name", external_initializers_file
)
so.add_session_config_entry("session.optimized_model_external_initializers_min_size_in_bytes", "100")
- onnxrt.InferenceSession(get_name("mnist.onnx"), sess_options=so, providers=["CPUExecutionProvider"])
+ onnxrt.InferenceSession(get_name("mnist.onnx"), sess_options=so)
self.assertTrue(os.path.isfile(so.optimized_model_filepath))
self.assertTrue(os.path.isfile(os.path.join(directory, external_initializers_file)))
os.remove(so.optimized_model_filepath)
@@ -163,9 +155,7 @@ def test_model_serialization_with_original_external_initializers_to_directory(se
"session.optimized_model_external_initializers_file_name", external_initializers_file
)
so.add_session_config_entry("session.optimized_model_external_initializers_min_size_in_bytes", "100")
- onnxrt.InferenceSession(
-     get_name("model_with_orig_ext_data.onnx"), sess_options=so, providers=["CPUExecutionProvider"]
- )
+ onnxrt.InferenceSession(get_name("model_with_orig_ext_data.onnx"), sess_options=so)
self.assertTrue(os.path.isfile(so.optimized_model_filepath))
self.assertTrue(os.path.isfile(os.path.join(directory, external_initializers_file)))
os.remove(so.optimized_model_filepath)
@@ -198,9 +188,7 @@ def test_model_serialization_with_original_external_initializers_to_current_dire
# still refers to the original external data file. We shall fix this issue so that the
# optimized model only refers to one external data file.
so.add_session_config_entry("session.optimized_model_external_initializers_min_size_in_bytes", "10")
- session1 = onnxrt.InferenceSession(
-     get_name("model_with_orig_ext_data.onnx"), sess_options=so, providers=["CPUExecutionProvider"]
- )
+ session1 = onnxrt.InferenceSession(get_name("model_with_orig_ext_data.onnx"), sess_options=so)
del session1
self.assertTrue(os.path.isfile(optimized_model_filepath))
self.assertTrue(os.path.isfile(external_initializers_file))
@@ -216,9 +204,7 @@ def test_model_serialization_with_original_external_initializers_to_current_dire

# verify that we can load the optimized model with external data in current directory and save
# optimized model with external data to current directory.
- session2 = onnxrt.InferenceSession(
-     optimized_model_filepath, sess_options=so2, providers=["CPUExecutionProvider"]
- )
+ session2 = onnxrt.InferenceSession(optimized_model_filepath, sess_options=so2)
del session2
self.assertTrue(os.path.isfile(optimized_model_filepath_2))
self.assertTrue(os.path.isfile(external_initializers_file_2))
@@ -227,9 +213,7 @@ def test_model_serialization_with_original_external_initializers_to_current_dire
os.remove(optimized_model_filepath)
os.remove(external_initializers_file)

- session3 = onnxrt.InferenceSession(
-     optimized_model_filepath_2, sess_options=onnxrt.SessionOptions(), providers=["CPUExecutionProvider"]
- )
+ session3 = onnxrt.InferenceSession(optimized_model_filepath_2, sess_options=onnxrt.SessionOptions())
del session3

os.remove(optimized_model_filepath_2)
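As a stand-alone illustration of the simplified test calls above, a hedged sketch (not part of the commit; "mul_1.onnx" is assumed to sit next to the script, whereas the real tests resolve it via get_name):

import os
import unittest

import onnxruntime as onnxrt


class TestSessionWithoutExplicitProviders(unittest.TestCase):
    def test_serialize_without_providers(self):
        so = onnxrt.SessionOptions()
        so.optimized_model_filepath = "./SketchOptimizedModel.onnx"
        # No providers argument: with this change the constructor no longer raises,
        # even when the build exposes more than one execution provider.
        onnxrt.InferenceSession("mul_1.onnx", sess_options=so)
        self.assertTrue(os.path.isfile(so.optimized_model_filepath))
        os.remove(so.optimized_model_filepath)


if __name__ == "__main__":
    unittest.main()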
