We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
When running openllm start opt for the first time, the process fails after downloading ..._config.json. I'm on a MacBook Pro M2. Here's the output:
openllm start opt
..._config.json
(openllm) ➜ OpenLLM git:(main) openllm start opt ╭─────────────────────────────── Traceback (most recent call last) ────────────────────────────────╮ │ /opt/homebrew/lib/python3.11/site-packages/openllm/cli.py:1248 in download_models │ │ │ │ 1245 │ ).for_model(model_name, model_id=model_id, llm_config=config) │ │ 1246 │ │ │ 1247 │ try: │ │ ❱ 1248 │ │ _ref = bentoml.transformers.get(model.tag) │ │ 1249 │ │ if output == "pretty": │ │ 1250 │ │ │ _echo(f"{model_name} is already setup for framework '{envvar}': {str(_ref.ta │ │ 1251 │ │ elif output == "json": │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/bentoml/_internal/frameworks/transformers.py:292 in │ │ get │ │ │ │ 289 │ # target model must be from the BentoML model store │ │ 290 │ model = bentoml.transformers.get("my_pipeline:latest") │ │ 291 │ """ │ │ ❱ 292 │ model = bentoml.models.get(tag_like) │ │ 293 │ if model.info.module not in (MODULE_NAME, __name__): │ │ 294 │ │ raise NotFound( │ │ 295 │ │ │ f"Model {model.tag} was saved with module {model.info.module}, not loading w │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/simple_di/__init__.py:139 in _ │ │ │ │ 136 │ │ bind = sig.bind_partial(*filtered_args, **filtered_kwargs) │ │ 137 │ │ bind.apply_defaults() │ │ 138 │ │ │ │ ❱ 139 │ │ return func(*_inject_args(bind.args), **_inject_kwargs(bind.kwargs)) │ │ 140 │ │ │ 141 │ setattr(_, "_is_injected", True) │ │ 142 │ return cast(WrappedCallable, _) │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/bentoml/models.py:42 in get │ │ │ │ 39 │ *, │ │ 40 │ _model_store: "ModelStore" = Provide[BentoMLContainer.model_store], │ │ 41 ) -> "Model": │ │ ❱ 42 │ return _model_store.get(tag) │ │ 43 │ │ 44 │ │ 45 @inject │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/bentoml/_internal/store.py:146 in get │ │ │ │ 143 │ │ matches = self._fs.glob(f"{path}*/") │ │ 144 │ │ counts = matches.count().directories │ │ 145 │ │ if counts == 0: │ │ ❱ 146 │ │ │ raise NotFound( │ │ 147 │ │ │ │ f"{self._item_type.get_typename()} '{tag}' 
is not found in BentoML store │ │ 148 │ │ │ ) │ │ 149 │ │ elif counts == 1: │ ╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ NotFound: Model 'pt-facebook-opt-1-3b:8c7b10754972749675d22364c25c428b29face51' is not found in BentoML store <osfs '/Users/matthewberman/bentoml/models'> During handling of the above exception, another exception occurred: ╭─────────────────────────────── Traceback (most recent call last) ────────────────────────────────╮ │ /opt/homebrew/lib/python3.11/site-packages/transformers/utils/import_utils.py:1172 in │ │ _get_module │ │ │ │ 1169 │ │ │ 1170 │ def _get_module(self, module_name: str): │ │ 1171 │ │ try: │ │ ❱ 1172 │ │ │ return importlib.import_module("." + module_name, self.__name__) │ │ 1173 │ │ except Exception as e: │ │ 1174 │ │ │ raise RuntimeError( │ │ 1175 │ │ │ │ f"Failed to import {self.__name__}.{module_name} because of the followin │ │ │ │ /opt/homebrew/Cellar/python@3.11/3.11.3/Frameworks/Python.framework/Versions/3.11/lib/python3.11 │ │ /importlib/__init__.py:126 in import_module │ │ │ │ 123 │ │ │ if character != '.': │ │ 124 │ │ │ │ break │ │ 125 │ │ │ level += 1 │ │ ❱ 126 │ return _bootstrap._gcd_import(name[level:], package, level) │ │ 127 │ │ 128 │ │ 129 _RELOADING = {} │ │ in _gcd_import:1206 │ │ in _find_and_load:1178 │ │ in _find_and_load_unlocked:1149 │ │ in _load_unlocked:690 │ │ in exec_module:940 │ │ in _call_with_frames_removed:241 │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/transformers/modeling_tf_utils.py:70 in <module> │ │ │ │ 67 │ │ 68 if parse(tf.__version__) >= parse("2.11.0"): │ │ 69 │ from keras import backend as K │ │ ❱ 70 │ from keras.engine import data_adapter │ │ 71 │ from keras.engine.keras_tensor import KerasTensor │ │ 72 │ from keras.saving.legacy import hdf5_format │ │ 73 else: │ ╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ ModuleNotFoundError: No module named 'keras.engine' The 
above exception was the direct cause of the following exception: ╭─────────────────────────────── Traceback (most recent call last) ────────────────────────────────╮ │ in _run_module_as_main:198 │ │ in _run_code:88 │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/openllm/__main__.py:26 in <module> │ │ │ │ 23 if __name__ == "__main__": │ │ 24 │ from openllm.cli import cli │ │ 25 │ │ │ ❱ 26 │ cli() │ │ 27 │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/click/core.py:1130 in __call__ │ │ │ │ 1127 │ │ │ 1128 │ def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: │ │ 1129 │ │ """Alias for :meth:`main`.""" │ │ ❱ 1130 │ │ return self.main(*args, **kwargs) │ │ 1131 │ │ 1132 │ │ 1133 class Command(BaseCommand): │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/click/core.py:1055 in main │ │ │ │ 1052 │ │ try: │ │ 1053 │ │ │ try: │ │ 1054 │ │ │ │ with self.make_context(prog_name, args, **extra) as ctx: │ │ ❱ 1055 │ │ │ │ │ rv = self.invoke(ctx) │ │ 1056 │ │ │ │ │ if not standalone_mode: │ │ 1057 │ │ │ │ │ │ return rv │ │ 1058 │ │ │ │ │ # it's not safe to `ctx.exit(rv)` here! │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/click/core.py:1657 in invoke │ │ │ │ 1654 │ │ │ │ super().invoke(ctx) │ │ 1655 │ │ │ │ sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) │ │ 1656 │ │ │ │ with sub_ctx: │ │ ❱ 1657 │ │ │ │ │ return _process_result(sub_ctx.command.invoke(sub_ctx)) │ │ 1658 │ │ │ │ 1659 │ │ # In chain mode we create the contexts step by step, but after the │ │ 1660 │ │ # base command has been invoked. 
Because at that point we do not │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/click/core.py:1404 in invoke │ │ │ │ 1401 │ │ │ echo(style(message, fg="red"), err=True) │ │ 1402 │ │ │ │ 1403 │ │ if self.callback is not None: │ │ ❱ 1404 │ │ │ return ctx.invoke(self.callback, **ctx.params) │ │ 1405 │ │ │ 1406 │ def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: │ │ 1407 │ │ """Return a list of completions for the incomplete value. Looks │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/click/core.py:760 in invoke │ │ │ │ 757 │ │ │ │ 758 │ │ with augment_usage_errors(__self): │ │ 759 │ │ │ with ctx: │ │ ❱ 760 │ │ │ │ return __callback(*args, **kwargs) │ │ 761 │ │ │ 762 │ def forward( │ │ 763 │ │ __self, __cmd: "Command", *args: t.Any, **kwargs: t.Any # noqa: B902 │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/openllm/cli.py:342 in wrapper │ │ │ │ 339 │ │ @functools.wraps(func) │ │ 340 │ │ def wrapper(*args: P.args, **attrs: P.kwargs) -> t.Any: │ │ 341 │ │ │ try: │ │ ❱ 342 │ │ │ │ return func(*args, **attrs) │ │ 343 │ │ │ except OpenLLMException as err: │ │ 344 │ │ │ │ raise click.ClickException( │ │ 345 │ │ │ │ │ click.style(f"[{group.name}] '{command_name}' failed: " + err.messag │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/openllm/cli.py:315 in wrapper │ │ │ │ 312 │ │ │ │ assert group.name is not None, "group.name should not be None" │ │ 313 │ │ │ │ event = analytics.OpenllmCliEvent(cmd_group=group.name, cmd_name=command │ │ 314 │ │ │ │ try: │ │ ❱ 315 │ │ │ │ │ return_value = func(*args, **attrs) │ │ 316 │ │ │ │ │ duration_in_ms = (time.time_ns() - start_time) / 1e6 │ │ 317 │ │ │ │ │ event.duration_in_ms = duration_in_ms │ │ 318 │ │ │ │ │ analytics.track(event) │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/openllm/cli.py:290 in wrapper │ │ │ │ 287 │ │ │ │ │ 288 │ │ │ configure_logging() │ │ 289 │ │ │ │ │ ❱ 290 │ │ │ return f(*args, **attrs) │ │ 291 │ │ │ │ 292 │ │ return t.cast("ClickFunctionWrapper[..., t.Any]", 
wrapper) │ │ 293 │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/openllm/cli.py:1276 in download_models │ │ │ │ 1273 │ │ │ ) │ │ 1274 │ │ │ │ 1275 │ │ (model_args, model_attrs), tokenizer_attrs = model.llm_parameters │ │ ❱ 1276 │ │ _ref = model.import_model( │ │ 1277 │ │ │ model.model_id, │ │ 1278 │ │ │ model.tag, │ │ 1279 │ │ │ *model_args, │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/openllm/models/opt/modeling_opt.py:74 in import_model │ │ │ │ 71 │ │ model: transformers.OPTForCausalLM = transformers.AutoModelForCausalLM.from_pret │ │ 72 │ │ │ model_id, torch_dtype=torch_dtype, trust_remote_code=trust_remote_code, **at │ │ 73 │ │ ) │ │ ❱ 74 │ │ return bentoml.transformers.save_model(tag, model, custom_objects={"tokenizer": │ │ 75 │ │ │ 76 │ def load_model(self, tag: bentoml.Tag, *args: t.Any, **attrs: t.Any) -> transformers │ │ 77 │ │ torch_dtype = attrs.pop("torch_dtype", self.dtype) │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/bentoml/_internal/frameworks/transformers.py:829 in │ │ save_model │ │ │ │ 826 │ │ │ pretrained, │ │ 827 │ │ │ ( │ │ 828 │ │ │ │ transformers.PreTrainedModel, │ │ ❱ 829 │ │ │ │ transformers.TFPreTrainedModel, │ │ 830 │ │ │ │ transformers.FlaxPreTrainedModel, │ │ 831 │ │ │ ), │ │ 832 │ │ ): │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/transformers/utils/import_utils.py:1162 in │ │ __getattr__ │ │ │ │ 1159 │ │ if name in self._modules: │ │ 1160 │ │ │ value = self._get_module(name) │ │ 1161 │ │ elif name in self._class_to_module.keys(): │ │ ❱ 1162 │ │ │ module = self._get_module(self._class_to_module[name]) │ │ 1163 │ │ │ value = getattr(module, name) │ │ 1164 │ │ else: │ │ 1165 │ │ │ raise AttributeError(f"module {self.__name__} has no attribute {name}") │ │ │ │ /opt/homebrew/lib/python3.11/site-packages/transformers/utils/import_utils.py:1174 in │ │ _get_module │ │ │ │ 1171 │ │ try: │ │ 1172 │ │ │ return importlib.import_module("." 
+ module_name, self.__name__) │ │ 1173 │ │ except Exception as e: │ │ ❱ 1174 │ │ │ raise RuntimeError( │ │ 1175 │ │ │ │ f"Failed to import {self.__name__}.{module_name} because of the followin │ │ 1176 │ │ │ │ f" traceback):\n{e}" │ │ 1177 │ │ │ ) from e │ ╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ RuntimeError: Failed to import transformers.modeling_tf_utils because of the following error (look up to see its traceback): No module named 'keras.engine' Traceback (most recent call last): File "/opt/homebrew/bin/openllm", line 8, in <module> sys.exit(cli()) ^^^^^ File "/opt/homebrew/lib/python3.11/site-packages/click/core.py", line 1130, in __call__ return self.main(*args, **kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/opt/homebrew/lib/python3.11/site-packages/click/core.py", line 1055, in main rv = self.invoke(ctx) ^^^^^^^^^^^^^^^^ File "/opt/homebrew/lib/python3.11/site-packages/click/core.py", line 1657, in invoke return _process_result(sub_ctx.command.invoke(sub_ctx)) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/opt/homebrew/lib/python3.11/site-packages/click/core.py", line 1657, in invoke return _process_result(sub_ctx.command.invoke(sub_ctx)) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/opt/homebrew/lib/python3.11/site-packages/click/core.py", line 1404, in invoke return ctx.invoke(self.callback, **ctx.params) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/opt/homebrew/lib/python3.11/site-packages/click/core.py", line 760, in invoke return __callback(*args, **kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/opt/homebrew/lib/python3.11/site-packages/openllm/cli.py", line 342, in wrapper return func(*args, **attrs) ^^^^^^^^^^^^^^^^^^^^ File "/opt/homebrew/lib/python3.11/site-packages/openllm/cli.py", line 315, in wrapper return_value = func(*args, **attrs) ^^^^^^^^^^^^^^^^^^^^ File "/opt/homebrew/lib/python3.11/site-packages/openllm/cli.py", line 290, in wrapper return f(*args, **attrs) ^^^^^^^^^^^^^^^^^ File 
"/opt/homebrew/lib/python3.11/site-packages/click/decorators.py", line 26, in new_func return f(get_current_context(), *args, **kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/opt/homebrew/lib/python3.11/site-packages/openllm/cli.py", line 701, in model_start llm = t.cast( ^^^^^^^ File "/opt/homebrew/lib/python3.11/site-packages/openllm/models/auto/factory.py", line 127, in for_model llm.ensure_model_id_exists() File "/opt/homebrew/lib/python3.11/site-packages/openllm/_llm.py", line 688, in ensure_model_id_exists output = subprocess.check_output( ^^^^^^^^^^^^^^^^^^^^^^^^ File "/opt/homebrew/Cellar/python@3.11/3.11.3/Frameworks/Python.framework/Versions/3.11/lib/python3.11/subprocess.py", line 466, in check_output return run(*popenargs, stdout=PIPE, timeout=timeout, check=True, ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/opt/homebrew/Cellar/python@3.11/3.11.3/Frameworks/Python.framework/Versions/3.11/lib/python3.11/subprocess.py", line 571, in run raise CalledProcessError(retcode, process.args, subprocess.CalledProcessError: Command '['/opt/homebrew/opt/python@3.11/bin/python3.11', '-m', 'openllm', 'download', 'opt', '--model-id', 'facebook/opt-1.3b', '--output', 'porcelain']' returned non-zero exit status 1.
No response
MacBook Pro M2 Conda Python 3.11.3
The text was updated successfully, but these errors were encountered:
Can you send the output of `pip list | grep tensorflow`?
pip list | grep tensorflow
Sorry, something went wrong.
(openllm) ➜ OpenLLM git:(main) pip list | grep tensorflow tensorflow 2.13.0rc1 tensorflow-estimator 2.13.0rc0 tensorflow-macos 2.13.0rc1
Hmm, do you have Keras installed?
No branches or pull requests
Describe the bug
When running
openllm start opt
for the first time, the process fails after downloading ..._config.json
. I'm on a MacBook Pro M2. Here's the output:

To reproduce
openllm start opt
Logs
No response
Environment
MacBook Pro M2
Conda
Python 3.11.3
The text was updated successfully, but these errors were encountered: