diff --git a/mlrun/runtimes/sparkjob.py b/mlrun/runtimes/sparkjob.py
index 8dbc4f4888b..08afaeffb75 100644
--- a/mlrun/runtimes/sparkjob.py
+++ b/mlrun/runtimes/sparkjob.py
@@ -130,7 +130,6 @@ def __init__(
         hadoop_conf=None,
         node_selector=None,
         use_default_image=False,
-        priority_class_name=None,
     ):

         super().__init__(
@@ -152,7 +151,7 @@ def __init__(
             workdir=workdir,
             build=build,
             node_selector=node_selector,
-            priority_class_name=priority_class_name,
+            priority_class_name=None,
         )

         self.driver_resources = driver_resources or {}
@@ -195,6 +194,11 @@ def deploy_default_image(cls, with_gpu=False):
         sj.deploy()
         get_run_db().delete_function(name=sj.metadata.name)

+    def with_priority_class(
+        self, name: str = config.default_function_priority_class_name
+    ):
+        raise NotImplementedError("Not supported in spark 2 operator")
+
     def _is_using_gpu(self):
         _, driver_gpu = self._get_gpu_type_and_quantity(
             resources=self.spec.driver_resources["requests"]