Skip to content

Commit

Permalink
Improvements
Browse files Browse the repository at this point in the history
  • Loading branch information
javierdlrm committed Jan 27, 2022
1 parent f8cc7d2 commit f94ef03
Show file tree
Hide file tree
Showing 10 changed files with 75 additions and 51 deletions.
18 changes: 4 additions & 14 deletions python/hsml/component_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,25 +36,15 @@ def __init__(
inference_logger: Optional[Union[InferenceLoggerConfig, dict]] = None,
inference_batcher: Optional[Union[InferenceBatcherConfig, dict]] = None,
):
# check for dict params
inference_logger = util.get_obj_from_json(
InferenceLoggerConfig, inference_logger
)
inference_batcher = util.get_obj_from_json(
InferenceBatcherConfig, inference_batcher
)

self._script_file = script_file
self._resources_config = resources_config
self._inference_logger = (
inference_logger
if inference_logger is not None
else InferenceLoggerConfig() # default
util.get_obj_from_json(InferenceLoggerConfig, inference_logger)
or InferenceLoggerConfig()
)
self._inference_batcher = (
inference_batcher
if inference_batcher is not None
else InferenceBatcherConfig() # default
util.get_obj_from_json(InferenceBatcherConfig, inference_batcher)
or InferenceBatcherConfig()
)

@abstractclassmethod
Expand Down
10 changes: 9 additions & 1 deletion python/hsml/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,16 @@ class RESOURCES:
GPUS = 0


class KAFKA_TOPIC_CONFIG:
    """Default settings for the Kafka topic backing inference logging."""

    # A minimal topic: one replica, one partition.
    NUM_REPLICAS = 1
    NUM_PARTITIONS = 1


class INFERENCE_LOGGER:
    """Valid inference logging modes (post-commit state of this diff hunk)."""

    MODE_NONE = "NONE"  # logging disabled
    MODE_ALL = "ALL"  # log both inputs and predictions
    MODE_MODEL_INPUTS = "MODEL_INPUTS"
    MODE_PREDICTIONS = "PREDICTIONS"


class INFERENCE_BATCHER:
Expand Down
4 changes: 1 addition & 3 deletions python/hsml/inference_batcher_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,7 @@ class InferenceBatcherConfig:
"""Configuration for an inference batcher."""

def __init__(self, enabled: Optional[bool] = None):
    """Create an inference batcher configuration.

    :param enabled: whether batching is enabled; ``None`` selects the
        default from ``INFERENCE_BATCHER.ENABLED``.
    """
    # `enabled or INFERENCE_BATCHER.ENABLED` would silently replace an
    # explicit False with the default; only None may fall back.
    self._enabled = INFERENCE_BATCHER.ENABLED if enabled is None else enabled

def describe(self):
    """Print a description of this configuration via util.pretty_print."""
    util.pretty_print(self)
Expand Down
22 changes: 17 additions & 5 deletions python/hsml/inference_logger_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,15 +31,27 @@ def __init__(
kafka_topic: Optional[Union[KafkaTopicConfig, dict]] = None,
mode: Optional[str] = None,
):
# check for dict params
kafka_topic = util.get_obj_from_json(KafkaTopicConfig, kafka_topic)

self._kafka_topic = kafka_topic
self._mode = mode if mode is not None else INFERENCE_LOGGER.MODE
self._kafka_topic = util.get_obj_from_json(KafkaTopicConfig, kafka_topic)
self._mode = self._validate_mode(mode) or (
INFERENCE_LOGGER.MODE_ALL
if self._kafka_topic is not None
else INFERENCE_LOGGER.MODE_NONE
)

def describe(self):
    """Print a description of this configuration via util.pretty_print."""
    util.pretty_print(self)

def _validate_mode(self, mode):
    """Validate an inference logging mode, returning it unchanged if valid.

    :param mode: logging mode to validate, or ``None`` to skip validation.
    :raises ValueError: if ``mode`` is not one of the ``INFERENCE_LOGGER``
        members.
    """
    if mode is not None:
        # get_members yields lazily; materialize so the sequence survives
        # both the membership test and the error-message join.
        modes = list(util.get_members(INFERENCE_LOGGER))
        if mode not in modes:
            raise ValueError(
                "Inference logging mode {} is not valid. Possible values are {}".format(
                    mode, ", ".join(modes)  # was modes.join(", ") — lists have no .join
                )
            )
    return mode

@classmethod
def from_response_json(cls, json_dict):
json_decamelized = humps.decamelize(json_dict)
Expand Down
7 changes: 5 additions & 2 deletions python/hsml/kafka_topic_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
from typing import Optional

from hsml import util
from hsml.constants import KAFKA_TOPIC_CONFIG


class KafkaTopicConfig:
Expand All @@ -30,8 +31,10 @@ def __init__(
topic_num_partitions: Optional[int] = None,
):
self._topic_name = topic_name
self._topic_num_replicas = topic_num_replicas
self._topic_num_partitions = topic_num_partitions
self._topic_num_replicas = topic_num_replicas or KAFKA_TOPIC_CONFIG.NUM_REPLICAS
self._topic_num_partitions = (
topic_num_partitions or KAFKA_TOPIC_CONFIG.NUM_PARTITIONS
)

def describe(self):
    """Print a description of this configuration via util.pretty_print."""
    util.pretty_print(self)
Expand Down
25 changes: 15 additions & 10 deletions python/hsml/predictor_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,25 +38,30 @@ def __init__(
inference_batcher: Optional[Union[InferenceBatcherConfig, dict]] = None,
):
resources_config = (
resources_config
if resources_config is not None
else PredictorResourcesConfig()
) # default
util.get_obj_from_json(PredictorResourcesConfig, resources_config)
or PredictorResourcesConfig()
)

super().__init__(
script_file, resources_config, inference_logger, inference_batcher
)

self._model_server = model_server
self._serving_tool = (
serving_tool
if serving_tool is not None
else PREDICTOR.SERVING_TOOL_KFSERVING
)
self._model_server = self._validate_model_server(model_server)
self._serving_tool = serving_tool or PREDICTOR.SERVING_TOOL_KFSERVING

def describe(self):
    """Print a description of this configuration via util.pretty_print."""
    util.pretty_print(self)

def _validate_model_server(self, model_server):
    """Validate a model server name, returning it unchanged if valid.

    :raises ValueError: if ``model_server`` is not among the accepted
        ``PREDICTOR`` members.
    """
    # NOTE(review): members are gathered with prefix "SERVING_TOOL", yet this
    # validates a *model server* — presumably a model-server prefix was
    # intended; confirm against hsml.constants.PREDICTOR.
    # Materialize the generator: it is consumed by both the membership test
    # and the error-message join.
    model_servers = list(util.get_members(PREDICTOR, prefix="SERVING_TOOL"))
    if model_server not in model_servers:
        raise ValueError(
            "Model server {} is not valid. Possible values are {}".format(
                model_server, ", ".join(model_servers)  # was .join on the sequence
            )
        )
    return model_server

@classmethod
def for_model(cls, model):
    """Return a predictor configuration appropriate for the given model."""
    # Delegate to the shared util helper, which picks defaults per model.
    predictor_config = util.get_predictor_config_for_model(model)
    return predictor_config
Expand Down
2 changes: 1 addition & 1 deletion python/hsml/predictor_state.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def __init__(
self._external_ip = external_ip
self._external_port = external_port
self._revision = revision
self._deployed = deployed if deployed is not None else False
self._deployed = deployed or False
self._conditions = conditions
self._status = status

Expand Down
12 changes: 4 additions & 8 deletions python/hsml/resources_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,14 +33,10 @@ def __init__(
memory: Optional[int] = None,
gpus: Optional[int] = None,
):
self._num_instances = (
num_instances
if num_instances is not None
else RESOURCES.NUM_INSTANCES # default
)
self._cores = cores if cores is not None else RESOURCES.CORES # default
self._memory = memory if memory is not None else RESOURCES.MEMORY # default
self._gpus = gpus if gpus is not None else RESOURCES.GPUS # default
self._num_instances = num_instances or RESOURCES.NUM_INSTANCES
self._cores = cores or RESOURCES.CORES
self._memory = memory or RESOURCES.MEMORY
self._gpus = gpus or RESOURCES.GPUS

def describe(self):
    """Print a description of this configuration via util.pretty_print."""
    util.pretty_print(self)
Expand Down
6 changes: 1 addition & 5 deletions python/hsml/transformer_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,11 +34,7 @@ def __init__(
inference_logger: Optional[InferenceLoggerConfig] = None,
inference_batcher: Optional[InferenceBatcherConfig] = None,
):
resources_config = (
resources_config
if resources_config is not None
else TransformerResourcesConfig()
) # default
resources_config = resources_config or TransformerResourcesConfig()

super().__init__(
script_file, resources_config, inference_logger, inference_batcher
Expand Down
20 changes: 18 additions & 2 deletions python/hsml/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

import shutil
import datetime
import inspect
import humps

import numpy as np
Expand Down Expand Up @@ -226,6 +227,21 @@ def pretty_print(obj):


def get_obj_from_json(cls, obj):
    """Coerce ``obj`` into an instance of ``cls``.

    Returns ``obj`` unchanged when it is ``None`` or already a ``cls``
    instance, builds a ``cls`` from a dict via ``cls.from_json``, and raises
    otherwise.

    :raises ValueError: if ``obj`` is neither ``None``, a ``cls`` instance,
        nor a dict.
    """
    if obj is not None:
        # Arguments were swapped: isinstance(cls, obj) asks whether the class
        # object is an instance of the value (a TypeError for non-type obj).
        if isinstance(obj, cls):
            return obj
        if isinstance(obj, dict):
            return cls.from_json(obj)
        raise ValueError(
            "Object of type {} cannot be converted to class {}".format(type(obj), cls)
        )
    return obj


def get_members(cls, prefix=None):
    """Yield the values of the constant (non-routine) members of ``cls``.

    :param cls: class holding constant members (e.g. ``INFERENCE_LOGGER``).
    :param prefix: when given, only members whose *name* starts with the
        prefix are yielded; otherwise all non-dunder members are yielded.
    """
    for name, value in inspect.getmembers(cls, lambda m: not inspect.isroutine(m)):
        if (prefix is not None and name.startswith(prefix)) or (
            prefix is None and not (name.startswith("__") and name.endswith("__"))
        ):
            # Yield the member VALUE (e.g. "NONE"), not its name ("MODE_NONE"):
            # call sites validate user-supplied values against this sequence,
            # so yielding names would reject every valid input.
            yield value

0 comments on commit f94ef03

Please sign in to comment.