Skip to content

ODSC-29065/md_opctl_doc_new #177

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 16 commits into from
May 4, 2023
Merged
2 changes: 1 addition & 1 deletion ads/opctl/backend/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ class Backend:

def __init__(self, config: Dict) -> None:
self.config = config
self.auth_type = config["execution"].get("auth")
self.auth_type = config["execution"].get("auth", "api_key")
self.profile = config["execution"].get("oci_profile", None)
self.oci_config = config["execution"].get("oci_config", None)

Expand Down
99 changes: 48 additions & 51 deletions ads/opctl/backend/local.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@
)
from ads.pipeline.ads_pipeline import Pipeline, PipelineStep
from ads.common.oci_client import OCIClientFactory
from ads.config import NO_CONTAINER

class CondaPackNotFound(Exception): # pragma: no cover
pass
Expand Down Expand Up @@ -218,6 +219,7 @@ def _run_with_conda_pack(
)
if os.path.exists(os.path.join(conda_pack_path, "spark-defaults.conf")):
env_vars["SPARK_CONF_DIR"] = os.path.join(DEFAULT_IMAGE_CONDA_DIR, slug)
logger.info(f"Running with conda pack in a container with command {command}")
return self._activate_conda_env_and_run(
image, slug, command, bind_volumes, env_vars
)
Expand Down Expand Up @@ -679,9 +681,11 @@ def predict(self) -> None:
None
Nothing.
"""

# model artifact in artifact directory
artifact_directory = self.config["execution"].get("artifact_directory")
ocid = self.config["execution"].get("ocid")
data = self.config["execution"].get("payload")

model_folder = os.path.expanduser(
self.config["execution"].get("model_save_folder", DEFAULT_MODEL_FOLDER)
)
Expand All @@ -698,79 +702,72 @@ def predict(self) -> None:
)

_download_model(
oci_auth=self.oci_auth,
auth=self.auth_type,
profile=self.profile,
ocid=ocid,
artifact_directory=artifact_directory,
region=region,
bucket_uri=bucket_uri,
timeout=timeout,
force_overwrite=True,
)
conda_slug, conda_path = None, None
if ocid:

# conda
conda_slug, conda_path = self.config["execution"].get("conda_slug"), self.config["execution"].get("conda_path")
if not conda_slug and not conda_path and ocid:
conda_slug, conda_path = self._get_conda_info_from_custom_metadata(ocid)
if not conda_path:
if (
not os.path.exists(artifact_directory)
or len(os.listdir(artifact_directory)) == 0
):
raise ValueError(
f"`artifact_directory` {artifact_directory} does not exist or is empty."
)
if not conda_slug and not conda_path:
conda_slug, conda_path = self._get_conda_info_from_runtime(
artifact_dir=artifact_directory
)
if not conda_path or not conda_slug:
raise ValueError("Conda information cannot be detected.")
compartment_id = self.config["execution"].get(
"compartment_id", self.config["infrastructure"].get("compartment_id")
)
project_id = self.config["execution"].get(
"project_id", self.config["infrastructure"].get("project_id")
)
if not compartment_id or not project_id:
raise ValueError("`compartment_id` and `project_id` must be provided.")
extra_cmd = (
DEFAULT_MODEL_DEPLOYMENT_FOLDER
+ " "
+ data
+ " "
+ compartment_id
+ " "
+ project_id
)
if 'conda_slug' not in self.config["execution"]:
self.config["execution"]["conda_slug"] = conda_path.split("/")[-1] if conda_path else conda_slug

self.config["execution"]["image"] = ML_JOB_IMAGE

# bind_volumes
bind_volumes = {}
SCRIPT = "script.py"
dir_path = os.path.dirname(os.path.realpath(__file__))
if not is_in_notebook_session():
bind_volumes = {
os.path.expanduser(
os.path.dirname(self.config["execution"]["oci_config"])
): {"bind": os.path.join(DEFAULT_IMAGE_HOME_DIR, ".oci")}
}
dir_path = os.path.dirname(os.path.realpath(__file__))
script = "script.py"

self.config["execution"]["source_folder"] = os.path.abspath(
os.path.join(dir_path, "..")
)
self.config["execution"]["entrypoint"] = script
self.config["execution"]["entrypoint"] = SCRIPT
bind_volumes[artifact_directory] = {"bind": DEFAULT_MODEL_DEPLOYMENT_FOLDER}
if self.config["execution"].get("conda_slug", conda_slug):
self.config["execution"]["image"] = ML_JOB_IMAGE
if not self.config["execution"].get("conda_slug"):
self.config["execution"]["conda_slug"] = conda_slug
self.config["execution"]["slug"] = conda_slug
self.config["execution"]["conda_path"] = conda_path
exit_code = self._run_with_conda_pack(
bind_volumes, extra_cmd, install=True, conda_uri=conda_path
)

# extra cmd
data = self.config["execution"].get("payload")
extra_cmd = f"--payload '{data}' " + f"--auth {self.auth_type} "
if self.auth_type != "resource_principal":
extra_cmd += f"--profile {self.profile}"

if is_in_notebook_session() or NO_CONTAINER:
# _run_with_conda_pack has code to handle notebook session case,
# however, it activates the conda pack and then runs the script.
# For the deployment, we just take the current conda env and run it.
# Hence we just handle the notebook case directly here.
script_path = os.path.join(os.path.join(dir_path, ".."), SCRIPT)
cmd = f"python {script_path} " + f"--artifact-directory {artifact_directory} " + extra_cmd
logger.info(f"Running in a notebook or NO_CONTAINER with command {cmd}")
run_command(cmd=cmd, shell=True)
else:
raise ValueError("Either conda pack info or image should be specified.")

if exit_code != 0:
raise RuntimeError(
f"`predict` did not complete successfully. Exit code: {exit_code}. "
f"Run with the --debug argument to view container logs."
)

extra_cmd = f"--artifact-directory {DEFAULT_MODEL_DEPLOYMENT_FOLDER} "+ extra_cmd
exit_code = self._run_with_conda_pack(
bind_volumes, extra_cmd, install=True, conda_uri=conda_path
)
if exit_code != 0:
raise RuntimeError(
f"`predict` did not complete successfully. Exit code: {exit_code}. "
f"Run with the --debug argument to view container logs."
)

def _get_conda_info_from_custom_metadata(self, ocid):
"""
Get conda env info from custom metadata from model catalog.
Expand Down
32 changes: 24 additions & 8 deletions ads/opctl/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
import ads.opctl.model.cli
import ads.opctl.spark.cli
from ads.common import auth as authutil
from ads.common.auth import AuthType
from ads.opctl.cmds import activate as activate_cmd
from ads.opctl.cmds import cancel as cancel_cmd
from ads.opctl.cmds import configure as configure_cmd
Expand All @@ -29,12 +30,8 @@
from ads.opctl.cmds import run_diagnostics as run_diagnostics_cmd
from ads.opctl.cmds import watch as watch_cmd
from ads.opctl.config.merger import ConfigMerger
from ads.opctl.constants import (
BACKEND_NAME,
DEFAULT_MODEL_FOLDER,
RESOURCE_TYPE,
RUNTIME_TYPE,
)
from ads.opctl.constants import (BACKEND_NAME, DEFAULT_MODEL_FOLDER,
RESOURCE_TYPE, RUNTIME_TYPE)
from ads.opctl.utils import build_image as build_image_cmd
from ads.opctl.utils import publish_image as publish_image_cmd
from ads.opctl.utils import suppress_traceback
Expand Down Expand Up @@ -544,6 +541,7 @@ def init(debug: bool, **kwargs: Dict[str, Any]) -> None:
suppress_traceback(debug)(init_cmd)(**kwargs)


@commands.command()
@click.option(
"--ocid",
nargs=1,
Expand Down Expand Up @@ -597,7 +595,13 @@ def init(debug: bool, **kwargs: Dict[str, Any]) -> None:
"--conda-slug",
nargs=1,
required=False,
help="The conda env used to load the model and conduct the prediction. This is only used when model id is passed to `ocid` and a local predict is conducted. It should match the inference conda env specified in the runtime.yaml file which is the conda pack being used when conducting real model deployment.",
help="The conda slug used to load the model and conduct the prediction. This is only used when model id is passed to `ocid` and a local predict is conducted. It should match the inference conda env specified in the runtime.yaml file which is the conda pack being used when conducting real model deployment.",
)
@click.option(
"--conda-path",
nargs=1,
required=False,
help="The conda path used to load the model and conduct the prediction. This is only used when model id is passed to `ocid` and a local predict is conducted. It should match the inference conda env specified in the runtime.yaml file which is the conda pack being used when conducting real model deployment.",
)
@click.option(
"--model-version",
Expand All @@ -611,10 +615,22 @@ def init(debug: bool, **kwargs: Dict[str, Any]) -> None:
required=False,
help="When the `inference_server='triton'`, the name of the model to invoke. This can only be used when model deployment id is passed in. For the other cases, it will be ignored.",
)
@click.option(
"--auth",
"-a",
help="authentication method",
type=click.Choice(AuthType.values()),
default=None,
)
@click.option(
"--oci-profile",
help="oci profile",
default=None,
)
@click.option("--debug", "-d", help="set debug mode", is_flag=True, default=False)
def predict(**kwargs):
"""
Deactivates a data science service.
Make prediction using the model with the payload.
"""
suppress_traceback(kwargs["debug"])(predict_cmd)(**kwargs)

Expand Down
42 changes: 19 additions & 23 deletions ads/opctl/model/cmds.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
import os
import shutil

from ads.common.auth import create_signer
from ads.common.auth import AuthContext
from ads.model.datascience_model import DataScienceModel
from ads.opctl import logger
from ads.opctl.constants import DEFAULT_MODEL_FOLDER
from ads.opctl.config.base import ConfigProcessor
from ads.opctl.config.merger import ConfigMerger
from ads.opctl.constants import DEFAULT_MODEL_FOLDER


def download_model(**kwargs):
Expand All @@ -15,12 +15,6 @@ def download_model(**kwargs):

auth_type = p.config["execution"].get("auth")
profile = p.config["execution"].get("oci_profile", None)
oci_config = p.config["execution"].get("oci_config", None)
oci_auth = create_signer(
auth_type,
oci_config,
profile,
)
model_folder = os.path.expanduser(
p.config["execution"].get("model_save_folder", DEFAULT_MODEL_FOLDER)
)
Expand All @@ -44,7 +38,8 @@ def download_model(**kwargs):
bucket_uri=bucket_uri,
timeout=timeout,
force_overwrite=force_overwrite,
oci_auth=oci_auth,
auth=auth_type,
profile=profile
)
else:
logger.error(f"Model already exists. Set `force_overwrite=True` to overwrite.")
Expand All @@ -54,23 +49,24 @@ def download_model(**kwargs):


def _download_model(
ocid, artifact_directory, oci_auth, region, bucket_uri, timeout, force_overwrite
ocid, artifact_directory, region, bucket_uri, timeout, force_overwrite, auth, profile=None
):
os.makedirs(artifact_directory, exist_ok=True)
os.chmod(artifact_directory, 777)

kwargs = {"auth": auth}
if profile:
kwargs["profile"] = profile
try:
dsc_model = DataScienceModel.from_id(ocid)
dsc_model.download_artifact(
target_dir=artifact_directory,
force_overwrite=force_overwrite,
overwrite_existing_artifact=True,
remove_existing_artifact=True,
auth=oci_auth,
region=region,
timeout=timeout,
bucket_uri=bucket_uri,
)
with AuthContext(**kwargs):
dsc_model = DataScienceModel.from_id(ocid)
dsc_model.download_artifact(
target_dir=artifact_directory,
force_overwrite=force_overwrite,
overwrite_existing_artifact=True,
remove_existing_artifact=True,
region=region,
timeout=timeout,
bucket_uri=bucket_uri,
)
except Exception as e:
print(type(e))
shutil.rmtree(artifact_directory, ignore_errors=True)
Expand Down
26 changes: 16 additions & 10 deletions ads/opctl/script.py
Original file line number Diff line number Diff line change
@@ -1,31 +1,37 @@
import argparse
import json
import sys
import tempfile

from ads.common.auth import AuthContext
from ads.model.generic_model import GenericModel


def verify(artifact_dir, data, compartment_id, project_id): # pragma: no cover
with tempfile.TemporaryDirectory() as td:
def verify(artifact_dir, payload, auth, profile): # pragma: no cover
kwargs = {"auth": auth}
if profile != 'None':
kwargs["profile"] = profile
with AuthContext(**kwargs):
model = GenericModel.from_model_artifact(
uri=artifact_dir,
artifact_dir=artifact_dir,
force_overwrite=True,
compartment_id=compartment_id,
project_id=project_id,
)

try:
data = json.loads(data)
payload = json.loads(payload)
except:
pass
print(model.verify(data, auto_serialize_data=False))
print(model.verify(payload, auto_serialize_data=False))


def main(): # pragma: no cover
args = sys.argv[1:]
parser = argparse.ArgumentParser()
parser.add_argument("--payload", type=str, required=True)
parser.add_argument("--artifact-directory", type=str, required=True)
parser.add_argument("--auth", type=str, required=True)
parser.add_argument("--profile", type=str,required=False)
args = parser.parse_args()
verify(
artifact_dir=args[0], data=args[1], compartment_id=args[2], project_id=args[3]
artifact_dir=args.artifact_directory, payload=args.payload, auth=args.auth, profile=args.profile
)
return 0

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,3 +27,4 @@ Setup up your workstation for development and testing your code locally before y
localdev/jobs
localdev/local_jobs
localdev/local_pipelines
localdev/local_deployment
Loading