Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ONNX Runtime error for onnxruntime>=1.16.0 #3

Open
mnicstruwig opened this issue Oct 2, 2023 · 1 comment
Open

ONNX Runtime error for onnxruntime>=1.16.0 #3

mnicstruwig opened this issue Oct 2, 2023 · 1 comment

Comments

@mnicstruwig
Copy link

mnicstruwig commented Oct 2, 2023

Hi there,

For onnxruntime>=1.16.0, executing the example code yields the following error.

This is related to the following upstream onnxruntime issue (linked below).

As I see it, there are two possible workarounds:

  1. Pin onnxruntime<1.16.0 for the time being (for anyone else coming across this issue, this is a quick workaround)
  2. Explicitly pass the CPUExecutionProvider when creating the inference session (probably the better permanent solution).

I can also open a PR to do this, if this is something you'd prefer.

Thanks!

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
Cell In[1], line 6
      4 nlp = spacy.load("en_core_web_sm")
      5 nlp.add_pipe("sentencizer")
----> 6 nlp.add_pipe("sentimental_onix", after="sentencizer")
      8 sentences = [
      9     (sent.text, sent._.sentiment)
     10     for doc in nlp.pipe(
   (...)
     18     for sent in doc.sents
     19 ]
     21 assert sentences == [
     22     ("i hate pasta on tuesdays", "Negative"),
     23     ("i like movies on wednesdays", "Positive"),
     24     ("i find your argument ridiculous", "Negative"),
     25     ("soda with straws are my favorite", "Positive"),
     26 ]

File ~/mambaforge/envs/ds/lib/python3.10/site-packages/spacy/language.py:821, in Language.add_pipe(self, factory_name, name, before, after, first, last, source, config, raw_config, validate)
    817     pipe_component, factory_name = self.create_pipe_from_source(
    818         factory_name, source, name=name
    819     )
    820 else:
--> 821     pipe_component = self.create_pipe(
    822         factory_name,
    823         name=name,
    824         config=config,
    825         raw_config=raw_config,
    826         validate=validate,
    827     )
    828 pipe_index = self._get_pipe_index(before, after, first, last)
    829 self._pipe_meta[name] = self.get_factory_meta(factory_name)

File ~/mambaforge/envs/ds/lib/python3.10/site-packages/spacy/language.py:709, in Language.create_pipe(self, factory_name, name, config, raw_config, validate)
    706 cfg = {factory_name: config}
    707 # We're calling the internal _fill here to avoid constructing the
    708 # registered functions twice
--> 709 resolved = registry.resolve(cfg, validate=validate)
    710 filled = registry.fill({"cfg": cfg[factory_name]}, validate=validate)["cfg"]
    711 filled = Config(filled)

File ~/mambaforge/envs/ds/lib/python3.10/site-packages/confection/__init__.py:756, in registry.resolve(cls, config, schema, overrides, validate)
    747 @classmethod
    748 def resolve(
    749     cls,
   (...)
    754     validate: bool = True,
    755 ) -> Dict[str, Any]:
--> 756     resolved, _ = cls._make(
    757         config, schema=schema, overrides=overrides, validate=validate, resolve=True
    758     )
    759     return resolved

File ~/mambaforge/envs/ds/lib/python3.10/site-packages/confection/__init__.py:805, in registry._make(cls, config, schema, overrides, resolve, validate)
    803 if not is_interpolated:
    804     config = Config(orig_config).interpolate()
--> 805 filled, _, resolved = cls._fill(
    806     config, schema, validate=validate, overrides=overrides, resolve=resolve
    807 )
    808 filled = Config(filled, section_order=section_order)
    809 # Check that overrides didn't include invalid properties not in config

File ~/mambaforge/envs/ds/lib/python3.10/site-packages/confection/__init__.py:877, in registry._fill(cls, config, schema, validate, resolve, parent, overrides)
    874     getter = cls.get(reg_name, func_name)
    875     # We don't want to try/except this and raise our own error
    876     # here, because we want the traceback if the function fails.
--> 877     getter_result = getter(*args, **kwargs)
    878 else:
    879     # We're not resolving and calling the function, so replace
    880     # the getter_result with a Promise class
    881     getter_result = Promise(
    882         registry=reg_name, name=func_name, args=args, kwargs=kwargs
    883     )

File ~/mambaforge/envs/ds/lib/python3.10/site-packages/sentimental_onix/pipeline.py:16, in __sentimental_onix(nlp, name, lang, threshold)
      9 @Language.factory(
     10     "sentimental_onix",
     11     assigns=["span._.sentiment"],
   (...)
     14 )
     15 def __sentimental_onix(nlp, name: str, lang: str, threshold):
---> 16     return SentimentalOnix(nlp, name, lang, threshold)

File ~/mambaforge/envs/ds/lib/python3.10/site-packages/sentimental_onix/pipeline.py:31, in SentimentalOnix.__init__(self, nlp, name, lang, threshold)
     28 self.threshold = threshold
     30 if lang == "en":
---> 31     self.infer = sentimental_onix.inference.en.create_infererence_function(
     32         threshold=threshold
     33     )
     34 else:
     35     raise NotImplementedError(
     36         f"sentimental_onix has no support for language: {lang}"
     37     )

File ~/mambaforge/envs/ds/lib/python3.10/site-packages/sentimental_onix/inference/en/__init__.py:24, in create_infererence_function(threshold, **kwargs)
     20     tokenizer = util.tokenizer_from_json(handle.read())
     22 onnx_model = onnx.load(_onnx_model_path)
---> 24 onnx_session = InferenceSession(onnx_model.SerializeToString())
     26 def infer(texts):
     27     tokenized = tokenizer.texts_to_sequences(texts)

File ~/mambaforge/envs/ds/lib/python3.10/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py:432, in InferenceSession.__init__(self, path_or_bytes, sess_options, providers, provider_options, **kwargs)
    430         raise fallback_error from e
    431 # Fallback is disabled. Raise the original error.
--> 432 raise e

File ~/mambaforge/envs/ds/lib/python3.10/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py:419, in InferenceSession.__init__(self, path_or_bytes, sess_options, providers, provider_options, **kwargs)
    416 disabled_optimizers = kwargs["disabled_optimizers"] if "disabled_optimizers" in kwargs else None
    418 try:
--> 419     self._create_inference_session(providers, provider_options, disabled_optimizers)
    420 except (ValueError, RuntimeError) as e:
    421     if self._enable_fallback:

File ~/mambaforge/envs/ds/lib/python3.10/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py:451, in InferenceSession._create_inference_session(self, providers, provider_options, disabled_optimizers)
    449 if not providers and len(available_providers) > 1:
    450     self.disable_fallback()
--> 451     raise ValueError(
    452         f"This ORT build has {available_providers} enabled. "
    453         "Since ORT 1.9, you are required to explicitly set "
    454         "the providers parameter when instantiating InferenceSession. For example, "
    455         f"onnxruntime.InferenceSession(..., providers={available_providers}, ...)"
    456     )
    458 session_options = self._sess_options if self._sess_options else C.get_default_session_options()
    459 if self._model_path:

ValueError: This ORT build has ['AzureExecutionProvider', 'CPUExecutionProvider'] enabled. Since ORT 1.9, you are required to explicitly set the providers parameter when instantiating InferenceSession. For example, onnxruntime.InferenceSession(..., providers=['AzureExecutionProvider', 'CPUExecutionProvider'], ...)
@sloev
Copy link
Owner

sloev commented Oct 9, 2023

Hi @mnicstruwig — if you are up for creating a PR, I will happily review it urgently and, if it looks good, push and release it on PyPI.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Projects
None yet
Development

No branches or pull requests

2 participants