diff --git a/ads/jobs/serializer.py b/ads/jobs/serializer.py index 4c475fdb9..f0493b280 100644 --- a/ads/jobs/serializer.py +++ b/ads/jobs/serializer.py @@ -12,11 +12,9 @@ import yaml from ads.common.auth import default_signer +# Special type to represent the current enclosed class. +# This type is used by factory class method or when a method returns ``self``. Self = TypeVar("Self", bound="Serializable") -"""Special type to represent the current enclosed class. - -This type is used by factory class method or when a method returns ``self``. -""" class Serializable(ABC): @@ -72,6 +70,14 @@ def _write_to_file(s: str, uri: str, **kwargs) -> None: "if you wish to overwrite." ) + # Add default signer if the uri is an object storage uri, and + # the user does not specify config or signer. + if ( + uri.startswith("oci://") + and "config" not in kwargs + and "signer" not in kwargs + ): + kwargs.update(default_signer()) with fsspec.open(uri, "w", **kwargs) as f: f.write(s) diff --git a/ads/opctl/config/merger.py b/ads/opctl/config/merger.py index 3226efe9b..e7063665a 100644 --- a/ads/opctl/config/merger.py +++ b/ads/opctl/config/merger.py @@ -117,7 +117,7 @@ def _fill_config_with_defaults(self, ads_config_path: str) -> None: else: self.config["execution"]["auth"] = AuthType.API_KEY # determine profile - if self.config["execution"]["auth"] == AuthType.RESOURCE_PRINCIPAL: + if self.config["execution"]["auth"] != AuthType.API_KEY: profile = self.config["execution"]["auth"].upper() exec_config.pop("oci_profile", None) self.config["execution"]["oci_profile"] = None @@ -202,12 +202,15 @@ def _get_service_config(self, oci_profile: str, ads_config_folder: str) -> Dict: def _config_flex_shape_details(self): infrastructure = self.config["infrastructure"] backend = self.config["execution"].get("backend", None) - if backend == BACKEND_NAME.JOB.value or backend == BACKEND_NAME.MODEL_DEPLOYMENT.value: + if ( + backend == BACKEND_NAME.JOB.value + or backend == BACKEND_NAME.MODEL_DEPLOYMENT.value + ): shape_name = infrastructure.get("shape_name", "") if shape_name.endswith(".Flex"): if ( - "ocpus" not in infrastructure or - "memory_in_gbs" not in infrastructure + "ocpus" not in infrastructure + or "memory_in_gbs" not in infrastructure ): raise ValueError( "Parameters `ocpus` and `memory_in_gbs` must be provided for using flex shape. " @@ -215,7 +218,7 @@ def _config_flex_shape_details(self): ) infrastructure["shape_config_details"] = { "ocpus": infrastructure.pop("ocpus"), - "memory_in_gbs": infrastructure.pop("memory_in_gbs") + "memory_in_gbs": infrastructure.pop("memory_in_gbs"), } elif backend == BACKEND_NAME.DATAFLOW.value: executor_shape = infrastructure.get("executor_shape", "") @@ -224,7 +227,7 @@ def _config_flex_shape_details(self): "driver_shape_memory_in_gbs", "driver_shape_ocpus", "executor_shape_memory_in_gbs", - "executor_shape_ocpus" + "executor_shape_ocpus", ] # executor_shape and driver_shape must be the same shape family if executor_shape.endswith(".Flex") or driver_shape.endswith(".Flex"): @@ -236,9 +239,9 @@ def _config_flex_shape_details(self): ) infrastructure["driver_shape_config"] = { "ocpus": infrastructure.pop("driver_shape_ocpus"), - "memory_in_gbs": infrastructure.pop("driver_shape_memory_in_gbs") + "memory_in_gbs": infrastructure.pop("driver_shape_memory_in_gbs"), } infrastructure["executor_shape_config"] = { "ocpus": infrastructure.pop("executor_shape_ocpus"), - "memory_in_gbs": infrastructure.pop("executor_shape_memory_in_gbs") + "memory_in_gbs": infrastructure.pop("executor_shape_memory_in_gbs"), } diff --git a/ads/opctl/utils.py b/ads/opctl/utils.py index 9ca13bc4c..1ae64a6e1 100644 --- a/ads/opctl/utils.py +++ b/ads/opctl/utils.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8; -*- -# Copyright (c) 2022 Oracle and/or its affiliates. +# Copyright (c) 2022, 2023 Oracle and/or its affiliates. # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ @@ -88,9 +88,8 @@ def get_namespace(auth: dict) -> str: def get_region_key(auth: dict) -> str: - if len(auth["config"]) > 0: - tenancy = auth["config"]["tenancy"] - else: + tenancy = auth["config"].get("tenancy") + if not tenancy: tenancy = auth["signer"].tenancy_id client = OCIClientFactory(**auth).identity return client.get_tenancy(tenancy).data.home_region_key diff --git a/tests/integration/jobs/test_dsc_job.py b/tests/integration/jobs/test_dsc_job.py index e317de67e..2cbd616ee 100644 --- a/tests/integration/jobs/test_dsc_job.py +++ b/tests/integration/jobs/test_dsc_job.py @@ -222,11 +222,10 @@ def assert_job_creation(self, job, expected_infra_spec, expected_runtime_spec): random.seed(threading.get_ident() + os.getpid()) random_suffix = "".join(random.choices(string.ascii_uppercase, k=6)) yaml_uri = f"oci://{self.BUCKET}@{self.NAMESPACE}/tests/{timestamp}/example_job_{random_suffix}.yaml" - config_path = "~/.oci/config" - job.to_yaml(uri=yaml_uri, config=config_path) + job.to_yaml(uri=yaml_uri) print(f"Job YAML saved to {yaml_uri}") try: - job = Job.from_yaml(uri=yaml_uri, config=config_path) + job = Job.from_yaml(uri=yaml_uri) except Exception: self.fail(f"Failed to load job from YAML\n{traceback.format_exc()}") diff --git a/tests/integration/jobs/test_jobs_cli.py b/tests/integration/jobs/test_jobs_cli.py index e5aee2136..bbf16efc5 100644 --- a/tests/integration/jobs/test_jobs_cli.py +++ b/tests/integration/jobs/test_jobs_cli.py @@ -7,38 +7,88 @@ from click.testing import CliRunner +from ads.common.auth import AuthType from ads.jobs.cli import run, watch, delete class TestJobsCLI: + # TeamCity will use Instance Principal, when running locally - set OCI_IAM_TYPE to security_token + auth = os.environ.get("OCI_IAM_TYPE", AuthType.INSTANCE_PRINCIPAL) + def test_create_watch_delete_job(self): curr_dir = os.path.dirname(os.path.abspath(__file__)) runner = CliRunner() res = runner.invoke( - run, args=["-f", os.path.join(curr_dir, "../yamls", "sample_job.yaml")] + run, + args=[ + "-f", + os.path.join(curr_dir, "../yamls", "sample_job.yaml"), + "--auth", + self.auth, + ], ) assert res.exit_code == 0, res.output run_id = res.output.split("\n")[1] - res2 = runner.invoke(watch, args=[run_id]) + res2 = runner.invoke( + watch, + args=[ + run_id, + "--auth", + self.auth, + ], + ) assert res2.exit_code == 0, res2.output - res3 = runner.invoke(delete, args=[run_id]) + res3 = runner.invoke( + delete, + args=[ + run_id, + "--auth", + self.auth, + ], + ) assert res3.exit_code == 0, res3.output def test_create_watch_delete_dataflow(self): curr_dir = os.path.dirname(os.path.abspath(__file__)) runner = CliRunner() res = runner.invoke( - run, args=["-f", os.path.join(curr_dir, "../yamls", "sample_dataflow.yaml")] + run, + args=[ + "-f", + os.path.join(curr_dir, "../yamls", "sample_dataflow.yaml"), + "--auth", + self.auth, + ], ) assert res.exit_code == 0, res.output run_id = res.output.split("\n")[1] - res2 = runner.invoke(watch, args=[run_id]) + res2 = runner.invoke( + watch, + args=[ + run_id, + "--auth", + self.auth, + ], + ) assert res2.exit_code == 0, res2.output res3 = runner.invoke( - run, args=["-f", os.path.join(curr_dir, "../yamls", "sample_dataflow.yaml")] + run, + args=[ + "-f", + os.path.join(curr_dir, "../yamls", "sample_dataflow.yaml"), + "--auth", + self.auth, + ], ) run_id2 = res3.output.split("\n")[1] - res4 = runner.invoke(delete, args=[run_id2]) + res4 = runner.invoke( + delete, + args=[ + run_id2, + "--auth", + self.auth, + ], + ) assert res4.exit_code == 0, res4.output diff --git a/tests/integration/jobs/test_jobs_notebook.py b/tests/integration/jobs/test_jobs_notebook.py index 8664394c8..9a56df4a3 100644 --- a/tests/integration/jobs/test_jobs_notebook.py +++ b/tests/integration/jobs/test_jobs_notebook.py @@ -8,6 +8,7 @@ import tempfile import fsspec +from ads.common.auth import default_signer, AuthType from ads.jobs.builders.infrastructure.dsc_job_runtime import ( NotebookRuntimeHandler, ) @@ -64,9 +65,7 @@ def run_notebook( # Clear the files in output URI try: # Ignore the error for unit tests. - fs = fsspec.filesystem( - "oci", config=os.path.expanduser("~/.oci/config") - ) + fs = fsspec.filesystem("oci", **default_signer()) if fs.find(output_uri): fs.rm(output_uri, recursive=True) except: diff --git a/tests/integration/jobs/test_jobs_notebook_runtime.py b/tests/integration/jobs/test_jobs_notebook_runtime.py index b9bc0c927..a98916c31 100644 --- a/tests/integration/jobs/test_jobs_notebook_runtime.py +++ b/tests/integration/jobs/test_jobs_notebook_runtime.py @@ -4,12 +4,13 @@ # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ import json +import pytest import os import tempfile from zipfile import ZipFile import fsspec - +from ads.common.auth import default_signer from tests.integration.config import secrets from tests.integration.jobs.test_dsc_job import DSCJobTestCaseWithCleanUp from tests.integration.jobs.test_jobs_notebook import NotebookDriverRunTest @@ -19,7 +20,9 @@ class NotebookRuntimeTest(DSCJobTestCaseWithCleanUp): - NOTEBOOK_PATH = os.path.join(os.path.dirname(__file__), "../fixtures/ads_check.ipynb") + NOTEBOOK_PATH = os.path.join( + os.path.dirname(__file__), "../fixtures/ads_check.ipynb" + ) NOTEBOOK_PATH_EXCLUDE = os.path.join( os.path.dirname(__file__), "../fixtures/exclude_check.ipynb" ) @@ -86,10 +89,15 @@ def test_create_job_with_notebook(self): class NotebookDriverIntegrationTest(NotebookDriverRunTest): + @pytest.mark.skip( + reason="api_keys not an option anymore, this test is candidate to be removed" + ) def test_notebook_driver_with_outputs(self): """Tests run the notebook driver with a notebook plotting and saving data.""" # Notebook to be executed - notebook_path = os.path.join(os.path.dirname(__file__), "../fixtures/plot.ipynb") + notebook_path = os.path.join( + os.path.dirname(__file__), "../fixtures/plot.ipynb" + ) # Object storage output location output_uri = f"oci://{secrets.jobs.BUCKET_B}@{secrets.common.NAMESPACE}/notebook_driver_int_test/plot/" # Run the notebook with driver and check the logs @@ -100,7 +108,7 @@ def test_notebook_driver_with_outputs(self): # Check the notebook saved to object storage. with fsspec.open( os.path.join(output_uri, os.path.basename(notebook_path)), - config=os.path.expanduser("~/.oci/config"), + **default_signer(), ) as f: outputs = [cell.get("outputs") for cell in json.load(f).get("cells")] # There should be 7 cells in the notebook @@ -113,7 +121,7 @@ def test_notebook_driver_with_outputs(self): # Check the JSON output file from the notebook with fsspec.open( os.path.join(output_uri, "data.json"), - config=os.path.expanduser("~/.oci/config"), + **default_signer(), ) as f: data = json.load(f) # There should be 10 data points diff --git a/tests/integration/jobs/test_jobs_runs.py b/tests/integration/jobs/test_jobs_runs.py index 8aea62900..35d7a3dd2 100644 --- a/tests/integration/jobs/test_jobs_runs.py +++ b/tests/integration/jobs/test_jobs_runs.py @@ -117,7 +117,7 @@ def job_run_test_infra(self): @staticmethod def list_objects(uri: str) -> list: """Lists objects on OCI object storage.""" - oci_os = fsspec.filesystem("oci", config=oci.config.from_file()) + oci_os = fsspec.filesystem("oci", **default_signer()) if uri.startswith("oci://"): uri = uri[len("oci://") :] items = oci_os.ls(uri, detail=False, refresh=True) @@ -126,7 +126,7 @@ def list_objects(uri: str) -> list: @staticmethod def remove_objects(uri: str): """Removes objects from OCI object storage.""" - oci_os = fsspec.filesystem("oci", config=oci.config.from_file()) + oci_os = fsspec.filesystem("oci", **default_signer()) try: oci_os.rm(uri, recursive=True) except FileNotFoundError: diff --git a/tests/integration/opctl/test_opctl_cli.py b/tests/integration/opctl/test_opctl_cli.py index a432621e3..2e99e0bcd 100644 --- a/tests/integration/opctl/test_opctl_cli.py +++ b/tests/integration/opctl/test_opctl_cli.py @@ -15,6 +15,13 @@ ) ADS_CONFIG_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..") +if "TEAMCITY_VERSION" in os.environ: + # When running in TeamCity we specify dir, which is CHECKOUT_DIR="%teamcity.build.checkoutDir%" + WORK_DIR = os.getenv("CHECKOUT_DIR", "~") + CONDA_PACK_FOLDER = f"{WORK_DIR}/conda" +else: + CONDA_PACK_FOLDER = "~/conda" + def _assert_run_command(cmd_str, expected_outputs: list = None): runner = CliRunner() @@ -48,7 +55,7 @@ class TestLocalRunsWithConda: # For tests, we can always run the command in debug mode (-d) # By default, pytest only print the logs if the test is failed, # in which case we would like to see the debug logs. - CMD_OPTIONS = "-d -b local " + CMD_OPTIONS = f"-d -b local --conda-pack-folder {CONDA_PACK_FOLDER} " def test_hello_world(self): test_folder = os.path.join(TESTS_FILES_DIR, "hello_world_test") @@ -79,6 +86,9 @@ def test_linear_reg_test(self): ] _assert_run_command(cmd, expected_outputs) + @pytest.mark.skip( + reason="spark do not support instance principal - this test candidate to remove" + ) def test_spark_run(self): test_folder = os.path.join(TESTS_FILES_DIR, "spark_test") cmd = ( diff --git a/tests/integration/opctl/test_opctl_conda.py b/tests/integration/opctl/test_opctl_conda.py index a0eed7779..488c3acd8 100644 --- a/tests/integration/opctl/test_opctl_conda.py +++ b/tests/integration/opctl/test_opctl_conda.py @@ -4,12 +4,12 @@ # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ import os +from ads.common.auth import AuthType from ads.opctl.conda.cmds import create, install, publish from ads.opctl.conda.cli import create as cli_create from ads.opctl.conda.cli import install as cli_install from ads.opctl.conda.cli import publish as cli_publish from tests.integration.config import secrets -import shutil import tempfile import yaml @@ -17,8 +17,15 @@ from click.testing import CliRunner ADS_CONFIG_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..") -# When running in TeamCity we specify dir, which is CHECKOUT_DIR="%teamcity.build.checkoutDir%" -WORK_DIR = os.getenv("CHECKOUT_DIR", None) + +if "TEAMCITY_VERSION" in os.environ: + # When running in TeamCity we specify dir, which is CHECKOUT_DIR="%teamcity.build.checkoutDir%" + WORK_DIR = os.getenv("CHECKOUT_DIR", "~") + CONDA_PACK_FOLDER = f"{WORK_DIR}/conda" + AUTH = AuthType.INSTANCE_PRINCIPAL +else: + CONDA_PACK_FOLDER = "~/conda" + AUTH = AuthType.SECURITY_TOKEN class TestCondaRun: @@ -33,6 +40,10 @@ def test_conda_install_service_pack_path(self): "oci://service_conda_packs@ociodscdev/service_pack", "--ads-config", ADS_CONFIG_DIR, + "--conda-pack-folder", + CONDA_PACK_FOLDER, + "--auth", + AUTH, ], ) assert res.exit_code == 0, res.output @@ -67,6 +78,7 @@ def test_conda_create_publish_setup(self): overwrite=True, conda_pack_folder=os.path.join(td_name, "conda"), ads_config=ADS_CONFIG_DIR, + auth=AUTH, ) td = tempfile.TemporaryDirectory(dir=WORK_DIR) @@ -76,6 +88,7 @@ def test_conda_create_publish_setup(self): conda_pack_folder=os.path.join(td_name, "conda"), ads_config=ADS_CONFIG_DIR, overwrite=True, + auth=AUTH, ) assert os.path.exists( @@ -132,6 +145,8 @@ def test_conda_cli(self): os.path.join(td_name, "conda"), "--ads-config", ADS_CONFIG_DIR, + "--auth", + AUTH, ], ) assert res.exit_code == 0, res.output @@ -147,6 +162,8 @@ def test_conda_cli(self): os.path.join(td_name, "conda"), "--ads-config", ADS_CONFIG_DIR, + "--auth", + AUTH, ], input="test2\no\n", ) @@ -161,6 +178,8 @@ def test_conda_cli(self): os.path.join(td_name, "conda"), "--ads-config", ADS_CONFIG_DIR, + "--auth", + AUTH, ], ) assert res.exit_code == 0, res.output diff --git a/tests/integration/opctl/test_opctl_utils.py b/tests/integration/opctl/test_opctl_utils.py index b612c98ac..1013f5336 100644 --- a/tests/integration/opctl/test_opctl_utils.py +++ b/tests/integration/opctl/test_opctl_utils.py @@ -6,7 +6,6 @@ import tempfile import os -from ads.opctl.constants import DEFAULT_OCI_CONFIG_FILE, DEFAULT_PROFILE from ads.opctl.utils import ( get_region_key, get_namespace, @@ -18,13 +17,19 @@ import pytest import fsspec +if "TEAMCITY_VERSION" in os.environ: + AUTH = AuthType.INSTANCE_PRINCIPAL +else: + AUTH = AuthType.SECURITY_TOKEN + class TestOpctlUtils: @pytest.fixture(scope="class") def oci_auth(self): - return create_signer(AuthType.API_KEY, DEFAULT_OCI_CONFIG_FILE, DEFAULT_PROFILE) + return create_signer(AUTH) def test_get_regional_key(self, oci_auth): + # Using "ads_teamcity_test_policy" in ociodscdev(root) compartment assert get_region_key(oci_auth) == "IAD" def test_get_namespace(self, oci_auth): diff --git a/tests/integration/other/test_artifact_saving_data.py b/tests/integration/other/test_artifact_saving_data.py index ecdd97e58..58c9a9462 100644 --- a/tests/integration/other/test_artifact_saving_data.py +++ b/tests/integration/other/test_artifact_saving_data.py @@ -38,19 +38,12 @@ def setup_class(cls): cls.project_id = secrets.common.PROJECT_OCID cls.authorization = auth.default_signer() - cls.AUTH = "api_key" cls.BUCKET_NAME = "ads_test" cls.NAMESPACE = secrets.common.NAMESPACE cls.OS_PREFIX = "unit_test" - profile = "DEFAULT" cls.oci_path = f"oci://{cls.BUCKET_NAME}@{cls.NAMESPACE}/{cls.OS_PREFIX}" - config_path = os.path.expanduser(os.path.join("~/.oci", "config")) - if os.path.exists(config_path): - cls.config = oci.config.from_file(config_path, profile) - cls.oci_client = ObjectStorageClient(cls.config) - else: - raise Exception(f"OCI keys not found at {config_path}") + cls.oci_client = ObjectStorageClient(**cls.authorization) datafiles = cls.oci_client.list_objects( namespace_name=cls.NAMESPACE, @@ -70,14 +63,13 @@ def setup_class(cls): @pytest.mark.parametrize("data", [array, df, l]) def test_modelartifact_save_data_from_memory(self, data): - storage_options = {"config": self.config} self.model_artifact._save_data_from_memory( prefix=f"oci://{self.BUCKET_NAME}@{self.NAMESPACE}/{self.OS_PREFIX}", train_data=data, train_data_name=f"{type(data)}_train.csv", validation_data=data, validation_data_name=f"{type(data)}_validation.csv", - storage_options=storage_options, + storage_options=self.authorization, ) datafiles = self.oci_client.list_objects( namespace_name=self.NAMESPACE, @@ -111,11 +103,10 @@ def test_modelartifact_save_data_from_memory(self, data): ) def test_modelartifact_save_data_from_files(self): - storage_options = {"config": self.config} self.model_artifact._save_data_from_file( f"oci://{self.BUCKET_NAME}@{self.NAMESPACE}/{self.OS_PREFIX}", train_data_path=os.path.join(self.model_dir, self.file_name), - storage_options=storage_options, + storage_options=self.authorization, ) datafiles = self.oci_client.list_objects( namespace_name=self.NAMESPACE, @@ -160,10 +151,8 @@ def test_modelartifact_save_with_tags(self): auth=self.authorization, training_id=None, freeform_tags={"freeform_key": "freeform_val"}, - defined_tags={"teamcity-test": {"CreatedBy": "test_user"}}, ) assert mc_model.freeform_tags == {"freeform_key": "freeform_val"} - assert mc_model.defined_tags["teamcity-test"]["CreatedBy"] == "test_user" def teardown_class(cls): if os.path.exists(cls.model_dir): diff --git a/tests/integration/other/test_common_model_artifact_save.py b/tests/integration/other/test_common_model_artifact_save.py index d5ea58aec..86beb92ad 100644 --- a/tests/integration/other/test_common_model_artifact_save.py +++ b/tests/integration/other/test_common_model_artifact_save.py @@ -39,19 +39,12 @@ def setup_class(cls): cls.authorization = auth.default_signer() def setup_method(self): - self.AUTH = "api_key" self.BUCKET_NAME = "ads_test" self.NAMESPACE = secrets.common.NAMESPACE self.OS_PREFIX = "unit_test" - profile = "DEFAULT" self.oci_path = f"oci://{self.BUCKET_NAME}@{self.NAMESPACE}/{self.OS_PREFIX}" - config_path = os.path.expanduser(os.path.join("~/.oci", "config")) - if os.path.exists(config_path): - self.config = oci.config.from_file(config_path, profile) - self.oci_client = ObjectStorageClient(self.config) - else: - raise Exception(f"OCI keys not found at {config_path}") + self.oci_client = ObjectStorageClient(**self.authorization) @patch.object(ModelIntrospect, "_reset") def test_modelartifact_save_with_introspection(self, mock_reset): diff --git a/tests/integration/other/test_dataflow.py b/tests/integration/other/test_dataflow.py index 7bd5d12c4..024dede2e 100644 --- a/tests/integration/other/test_dataflow.py +++ b/tests/integration/other/test_dataflow.py @@ -156,6 +156,10 @@ def test_runs(self, df): assert dfr2.id in ids assert dfr3.id in ids + @pytest.mark.skip( + reason="Error observed - 'Rest call to get region from metadata service failed' " + "after tests moved to run from TeamCity Runner Instance, might be policies required." + ) def test_create_from_id(self, df): df2 = DataFlow.from_id(df.id) dfr = df2.run(args=["run-4", "-v", "-l", "5"], wait=True) diff --git a/tests/integration/other/test_dataset_factory_open.py b/tests/integration/other/test_dataset_factory_open.py index 5d42ad123..84a363ef2 100644 --- a/tests/integration/other/test_dataset_factory_open.py +++ b/tests/integration/other/test_dataset_factory_open.py @@ -3,6 +3,7 @@ # Copyright (c) 2021, 2023 Oracle and/or its affiliates. # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ +from ads.common import auth from ads.dataset.factory import DatasetFactory from sklearn.datasets import make_classification from tests.integration.config import secrets @@ -22,7 +23,7 @@ def setup_class(cls): X_small, y_small = make_classification(n_samples=600, n_features=200) cls.df_small = pd.concat([pd.DataFrame(X_small), pd.DataFrame(y_small)], axis=0) - cls.storage_options = {"config": "~/.oci/config", "profile": "DEFAULT"} + cls.storage_options = auth.default_signer() def test_small_data(self): ds = DatasetFactory.open(self.df_small) diff --git a/tests/integration/other/test_export_generic.py b/tests/integration/other/test_export_generic.py index c5d3d83cd..e0142ca99 100644 --- a/tests/integration/other/test_export_generic.py +++ b/tests/integration/other/test_export_generic.py @@ -27,11 +27,9 @@ class TestPrepare: train_X = [[1, 2], [2, 3], [3, 4], [4, 3]] train_y = [19, 26, 33, 30] gamma_reg_model = linear_model.GammaRegressor() - AUTH = "api_key" BUCKET_NAME = secrets.other.BUCKET_3 NAMESPACE = secrets.common.NAMESPACE OS_PREFIX = "unit_test" - profile = "DEFAULT" oci_path = f"oci://{BUCKET_NAME}@{NAMESPACE}/{OS_PREFIX}" config_path = os.path.expanduser(os.path.join("~/.oci", "config")) @@ -39,13 +37,6 @@ class TestPrepare: project_id = secrets.common.PROJECT_OCID authorization = auth.default_signer() - if os.path.exists(config_path): - config = oci.config.from_file(config_path, profile) - # oci_client = ObjectStorageClient(config) - else: - raise Exception(f"OCI keys not found at {config_path}") - storage_options = {"config": config} - def setup_method(self): if not os.path.exists(self.tmp_model_dir): os.mkdir(self.tmp_model_dir) @@ -80,7 +71,7 @@ def test_metadata_integration(self): [pd.DataFrame(self.train_X), pd.DataFrame(self.train_y)], axis=1 ), train_data_name="training_data.csv", - storage_options=self.storage_options, + storage_options=self.authorization, ) print(os.listdir(self.tmp_model_dir)) assert len(os.listdir(self.tmp_model_dir)) > 0, "No files created" @@ -115,7 +106,7 @@ def test_metadata_integration(self): display_name="advanced-ds-test", description="A sample gamma regression classifier", ignore_pending_changes=True, - auth=auth.default_signer(), + auth=self.authorization, training_id=None, ) diff --git a/tests/integration/other/test_hpo_tuner_artifact.py b/tests/integration/other/test_hpo_tuner_artifact.py index b4058f528..aa1c417dc 100644 --- a/tests/integration/other/test_hpo_tuner_artifact.py +++ b/tests/integration/other/test_hpo_tuner_artifact.py @@ -21,7 +21,7 @@ from sklearn.model_selection import train_test_split from xgboost import XGBClassifier import sys, mock, pytest -from ads.common import auth as authutil +from ads.common import auth from ads.common import oci_client as oc from tests.integration.config import secrets @@ -36,9 +36,7 @@ class TestADSTunerTunerArtifact: n_samples=10000, n_features=10, n_informative=2, random_state=42 ) - auth = authutil.api_keys( - oci_config="~/.oci/config", client_kwargs={"timeout": 6000} - ) + auth = auth.default_signer(client_kwargs={"timeout": 6000}) client = oc.OCIClientFactory(**auth).object_storage bucket_name = secrets.other.BUCKET_3 name_space = secrets.common.NAMESPACE diff --git a/tests/integration/other/test_prepare_artifact.py b/tests/integration/other/test_prepare_artifact.py index 97a1cfeed..99a584ae2 100644 --- a/tests/integration/other/test_prepare_artifact.py +++ b/tests/integration/other/test_prepare_artifact.py @@ -15,6 +15,7 @@ import pandas as pd import pytest from ads.catalog.model import ModelCatalog +from ads.common import auth from ads.common.model import ADSModel from ads.common.model_export_util import prepare_generic_model from ads.model.model_metadata import Framework, UseCaseType @@ -220,14 +221,11 @@ class TestModelArtifactPrepareGenericArtifact(TestCase): def setUp(self) -> None: """Sets up the test case.""" - AUTH = "api_key" BUCKET_NAME = secrets.other.BUCKET_3 NAMESPACE = secrets.common.NAMESPACE OS_PREFIX = "unit_test" - profile = "DEFAULT" self.oci_path = f"oci://{BUCKET_NAME}@{NAMESPACE}/{OS_PREFIX}" - config_path = os.path.expanduser(os.path.join("~/.oci", "config")) - self.storage_options = {"config": oci.config.from_file(config_path)} + self.storage_options = auth.default_signer() file_name = "vor_house_price.csv" self.file_path = os.path.join(