refactor: assign runner name (#2139)
* refactor: runner name

* fix

* fix for transformers
bojiang committed Dec 24, 2021
1 parent 30edb7b commit 5ab95a6
Showing 25 changed files with 262 additions and 125 deletions.
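Every changed framework follows the same pattern: load_runner normalizes its tag argument with Tag.from_taglike, defaults the new keyword-only name argument to the tag's name, and passes that name (rather than str(tag) or the tag itself) to the Runner base class, while the runner keeps the resolved Tag for model lookup. A minimal, self-contained sketch of that pattern follows; the simplified Tag and _ExampleRunner below are illustrative stand-ins, not BentoML's actual classes.

    from __future__ import annotations

    import typing as t
    from dataclasses import dataclass


    @dataclass(frozen=True)
    class Tag:
        # Simplified stand-in for BentoML's Tag ("name:version").
        name: str
        version: t.Optional[str] = None

        @classmethod
        def from_taglike(cls, taglike: t.Union[str, Tag]) -> Tag:
            if isinstance(taglike, Tag):
                return taglike
            name, _, version = taglike.partition(":")
            return cls(name, version or None)


    class _ExampleRunner:
        # Illustrative stand-in for a framework Runner subclass.
        def __init__(self, tag: Tag, name: str):
            self.name = name   # the runner is now keyed by this explicit name
            self._tag = tag    # the resolved Tag is kept for model lookup


    def load_runner(
        tag: t.Union[str, Tag],
        *,
        name: t.Optional[str] = None,
    ) -> _ExampleRunner:
        # Normalize the user-supplied tag, then default the runner name
        # to the tag's name portion ("my_model" from "my_model:latest").
        tag = Tag.from_taglike(tag)
        if name is None:
            name = tag.name
        return _ExampleRunner(tag=tag, name=name)


    print(load_runner("my_model:latest").name)               # my_model
    print(load_runner("my_model:latest", name="alt").name)   # alt

Run as a script, the sketch prints my_model and then alt, mirroring the default-versus-explicit naming behavior introduced by this commit.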
16 changes: 11 additions & 5 deletions bentoml/_internal/frameworks/catboost.py
@@ -43,7 +43,7 @@


def _get_model_info(
tag: t.Union[str, Tag],
tag: Tag,
model_params: t.Optional[t.Dict[str, t.Union[str, int]]],
model_store: "ModelStore",
) -> t.Tuple["Model", str, t.Dict[str, t.Any]]:
@@ -235,17 +235,18 @@ class _CatBoostRunner(Runner):
@inject
def __init__(
self,
tag: t.Union[str, Tag],
tag: Tag,
predict_fn_name: str,
model_params: t.Optional[t.Dict[str, t.Union[str, int]]],
name: str,
resource_quota: t.Optional[t.Dict[str, t.Any]],
batch_options: t.Optional[t.Dict[str, t.Any]],
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
):
model_info, model_file, _model_params = _get_model_info(
tag, model_params, model_store
)
super().__init__(model_info.tag.name, resource_quota, batch_options)
super().__init__(name, resource_quota, batch_options)
self._model_info = model_info
self._model_file = model_file
self._predict_fn_name = predict_fn_name
@@ -283,9 +284,10 @@ def load_runner(
predict_fn_name: str = "predict",
*,
model_params: t.Union[None, t.Dict[str, t.Union[str, int]]] = None,
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
name: t.Optional[str] = None,
resource_quota: t.Union[None, t.Dict[str, t.Any]] = None,
batch_options: t.Union[None, t.Dict[str, t.Any]] = None,
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
) -> "_CatBoostRunner":
"""
Runner represents a unit of serving logic that can be scaled horizontally to
@@ -320,11 +322,15 @@ def load_runner(
runner = bentoml.catboost.load_runner("my_model:latest")
runner.run(cbt.Pool(input_data))
""" # noqa
tag = Tag.from_taglike(tag)
if name is None:
name = tag.name
return _CatBoostRunner(
tag=tag,
predict_fn_name=predict_fn_name,
model_params=model_params,
model_store=model_store,
name=name,
resource_quota=resource_quota,
batch_options=batch_options,
model_store=model_store,
)
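For callers, the net effect is a new keyword-only name argument on load_runner; when it is omitted, the runner name falls back to the tag's name. A short usage sketch against the updated catboost signature (the explicit name value is illustrative, and a model registered as my_model:latest is assumed):

    import bentoml

    # The runner name defaults to the tag's name portion ("my_model"):
    runner = bentoml.catboost.load_runner("my_model:latest")

    # Or assign an explicit runner name (illustrative value):
    named_runner = bentoml.catboost.load_runner(
        "my_model:latest", name="my_catboost_runner"
    )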
16 changes: 11 additions & 5 deletions bentoml/_internal/frameworks/detectron.py
@@ -48,7 +48,7 @@

@inject
def load(
tag: t.Union[str, Tag],
tag: Tag,
device: str = "cpu",
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
) -> "torch.nn.Module":
@@ -201,19 +201,20 @@ class _DetectronRunner(Runner):
# TODO add partial_kwargs @larme
def __init__(
self,
tag: t.Union[str, Tag],
tag: Tag,
predict_fn_name: str,
name: str,
resource_quota: t.Optional[t.Dict[str, t.Any]],
batch_options: t.Optional[t.Dict[str, t.Any]],
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
):
super().__init__(tag, resource_quota, batch_options)
super().__init__(name, resource_quota, batch_options)
self._tag = tag
self._predict_fn_name = predict_fn_name
self._model_store = model_store

@property
def required_models(self) -> t.List[str]:
def required_models(self) -> t.List[Tag]:
return [self._tag]

@property
@@ -263,6 +264,7 @@ def load_runner(
tag: t.Union[str, Tag],
predict_fn_name: str = "__call__",
*,
name: t.Optional[str] = None,
resource_quota: t.Union[None, t.Dict[str, t.Any]] = None,
batch_options: t.Union[None, t.Dict[str, t.Any]] = None,
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
@@ -290,10 +292,14 @@ def load_runner(
Examples:
TODO
""" # noqa
tag = Tag.from_taglike(tag)
if name is None:
name = tag.name
return _DetectronRunner(
tag=tag,
predict_fn_name=predict_fn_name,
model_store=model_store,
name=name,
resource_quota=resource_quota,
batch_options=batch_options,
model_store=model_store,
)
14 changes: 10 additions & 4 deletions bentoml/_internal/frameworks/easyocr.py
@@ -182,15 +182,16 @@ class _EasyOCRRunner(Runner):
@inject
def __init__(
self,
tag: t.Union[str, Tag],
tag: Tag,
predict_fn_name: str,
name: str,
predict_params: t.Optional[t.Dict[str, t.Any]],
resource_quota: t.Optional[t.Dict[str, t.Any]],
batch_options: t.Optional[t.Dict[str, t.Any]],
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
):
super().__init__(str(tag), resource_quota, batch_options)
self._tag = Tag.from_taglike(tag)
super().__init__(name, resource_quota, batch_options)
self._tag = tag
self._predict_fn_name = predict_fn_name
self._predict_params = predict_params
self._model_store = model_store
@@ -227,6 +228,7 @@ def load_runner(
tag: t.Union[str, Tag],
predict_fn_name: str = "readtext_batched",
*,
name: t.Optional[str] = None,
predict_params: t.Union[None, t.Dict[str, t.Union[str, t.Any]]] = None,
resource_quota: t.Union[None, t.Dict[str, t.Any]] = None,
batch_options: t.Union[None, t.Dict[str, t.Any]] = None,
@@ -263,11 +265,15 @@ def load_runner(
input_data = pd.from_csv("/path/to/csv")
runner = bentoml.xgboost.load_runner("my_model:20201012_DE43A2")
runner.run(xgb.DMatrix(input_data))
""" # noqa
"""
tag = Tag.from_taglike(tag)
if name is None:
name = tag.name
return _EasyOCRRunner(
tag=tag,
predict_fn_name=predict_fn_name,
predict_params=predict_params,
name=name,
resource_quota=resource_quota,
batch_options=batch_options,
model_store=model_store,
13 changes: 9 additions & 4 deletions bentoml/_internal/frameworks/gluon.py
@@ -38,7 +38,7 @@

@inject
def load(
tag: t.Union[str, Tag],
tag: Tag,
mxnet_ctx: t.Optional[mxnet.context.Context] = None,
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
) -> gluon.Block:
@@ -128,14 +128,15 @@ class _GluonRunner(Runner):
@inject
def __init__(
self,
tag: t.Union[str, Tag],
tag: Tag,
predict_fn_name: str,
name: str,
resource_quota: t.Optional[t.Dict[str, t.Any]],
batch_options: t.Optional[t.Dict[str, t.Any]],
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
):
super().__init__(str(tag), resource_quota, batch_options)
self._tag = Tag.from_taglike(tag)
super().__init__(name, resource_quota, batch_options)
self._tag = tag
self._predict_fn_name = predict_fn_name
self._model_store = model_store
self._ctx = None
@@ -190,6 +191,7 @@ def load_runner(
predict_fn_name: str = "__call__",
*,
resource_quota: t.Union[None, t.Dict[str, t.Any]] = None,
name: t.Optional[str] = None,
batch_options: t.Union[None, t.Dict[str, t.Any]] = None,
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
) -> _GluonRunner:
@@ -216,6 +218,9 @@
Examples:
TODO
""" # noqa
tag = Tag.from_taglike(tag)
if name is None:
name = tag.name
return _GluonRunner(
tag=tag,
predict_fn_name=predict_fn_name,
12 changes: 9 additions & 3 deletions bentoml/_internal/frameworks/h2o.py
@@ -39,7 +39,7 @@

@inject
def load(
tag: t.Union[str, Tag],
tag: Tag,
init_params: t.Optional[t.Dict[str, t.Any]] = None,
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
) -> h2o.model.model_base.ModelBase:
@@ -133,14 +133,15 @@ class _H2ORunner(Runner):
@inject
def __init__(
self,
tag: t.Union[str, Tag],
tag: Tag,
predict_fn_name: str,
init_params: t.Optional[t.Dict[str, t.Union[str, t.Any]]],
name: str,
resource_quota: t.Optional[t.Dict[str, t.Any]],
batch_options: t.Optional[t.Dict[str, t.Any]],
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
):
super().__init__(str(tag), resource_quota, batch_options)
super().__init__(name, resource_quota, batch_options)

self._tag = Tag.from_taglike(tag)
self._predict_fn_name = predict_fn_name
@@ -189,6 +190,7 @@ def load_runner(
predict_fn_name: str = "predict",
*,
init_params: t.Optional[t.Dict[str, t.Union[str, t.Any]]],
name: t.Optional[str] = None,
resource_quota: t.Optional[t.Dict[str, t.Any]] = None,
batch_options: t.Optional[t.Dict[str, t.Any]] = None,
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
@@ -220,10 +222,14 @@
TODO
""" # noqa
tag = Tag.from_taglike(tag)
if name is None:
name = tag.name
return _H2ORunner(
tag=tag,
predict_fn_name=predict_fn_name,
init_params=init_params,
name=name,
resource_quota=resource_quota,
batch_options=batch_options,
model_store=model_store,
11 changes: 9 additions & 2 deletions bentoml/_internal/frameworks/keras.py
@@ -79,7 +79,7 @@ def get_session() -> "BaseSession":

@inject
def load(
tag: t.Union[str, Tag],
tag: Tag,
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
) -> "keras.Model":
"""
@@ -198,10 +198,11 @@ class _KerasRunner(_TensorflowRunner):
@inject
def __init__(
self,
tag: t.Union[str, Tag],
tag: Tag,
predict_fn_name: str,
device_id: str,
predict_kwargs: t.Optional[t.Dict[str, t.Any]],
name: str,
resource_quota: t.Optional[t.Dict[str, t.Any]],
batch_options: t.Optional[t.Dict[str, t.Any]],
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
@@ -210,6 +211,7 @@ def __init__(
tag=tag,
predict_fn_name=predict_fn_name,
device_id=device_id,
name=name,
partial_kwargs=predict_kwargs,
resource_quota=resource_quota,
batch_options=batch_options,
@@ -254,6 +256,7 @@ def load_runner(
predict_fn_name: str = "predict",
device_id: str = "CPU:0",
predict_kwargs: t.Optional[t.Dict[str, t.Any]] = None,
name: t.Optional[str] = None,
resource_quota: t.Union[None, t.Dict[str, t.Any]] = None,
batch_options: t.Union[None, t.Dict[str, t.Any]] = None,
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
@@ -284,10 +287,14 @@ def load_runner(
Examples::
""" # noqa: LN001
tag = Tag.from_taglike(tag)
if name is None:
name = tag.name
return _KerasRunner(
tag=tag,
predict_fn_name=predict_fn_name,
device_id=device_id,
name=name,
predict_kwargs=predict_kwargs,
resource_quota=resource_quota,
batch_options=batch_options,
12 changes: 9 additions & 3 deletions bentoml/_internal/frameworks/lightgbm.py
@@ -73,7 +73,7 @@ def _get_model_info(

@inject
def load(
tag: t.Union[str, Tag],
tag: Tag,
booster_params: t.Optional[t.Dict[str, t.Union[str, int]]] = None,
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
) -> t.Union["lgb.basic.Booster", _LightGBMModelType]:
@@ -208,14 +208,15 @@ class _LightGBMRunner(Runner):
@inject
def __init__(
self,
tag: t.Union[str, Tag],
tag: Tag,
infer_api_callback: str,
booster_params: t.Optional[t.Dict[str, t.Union[str, int]]],
name: str,
resource_quota: t.Optional[t.Dict[str, t.Any]],
batch_options: t.Optional[t.Dict[str, t.Any]],
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
):
super().__init__(str(tag), resource_quota, batch_options)
super().__init__(name, resource_quota, batch_options)
model_info, model_file, booster_params = _get_model_info(
tag, booster_params, model_store
)
@@ -269,6 +270,7 @@ def load_runner(
infer_api_callback: str = "predict",
*,
booster_params: t.Optional[t.Dict[str, t.Union[str, int]]] = None,
name: t.Optional[str] = None,
resource_quota: t.Union[None, t.Dict[str, t.Any]] = None,
batch_options: t.Union[None, t.Dict[str, t.Any]] = None,
model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
@@ -303,9 +305,13 @@
runner = bentoml.lightgbm.load_runner("my_lightgbm_model:latest")
runner.run_batch(X_test, num_iteration=gbm.best_iteration)
""" # noqa
tag = Tag.from_taglike(tag)
if name is None:
name = tag.name
return _LightGBMRunner(
tag=tag,
infer_api_callback=infer_api_callback,
name=name,
booster_params=booster_params,
resource_quota=resource_quota,
batch_options=batch_options,
(Diffs for the remaining changed files in this commit are not shown.)