From 73aa905170fab2c55dde05a069d8b0a38e5693b9 Mon Sep 17 00:00:00 2001
From: Aliaksandr Ivanou
Date: Wed, 17 Nov 2021 10:27:09 -0800
Subject: [PATCH] Remove ray scheduler (#350)

Summary:
Pull Request resolved: https://github.com/pytorch/torchx/pull/350

Remove ray scheduler from registry for now since it is not ready

Reviewed By: d4l3k

Differential Revision: D32473942

fbshipit-source-id: 50771faf91294de245392955e81e1a06e4e9c77a
---
 torchx/schedulers/__init__.py                |  4 ----
 torchx/schedulers/test/ray_scheduler_test.py | 22 ++++++++++----------
 2 files changed, 11 insertions(+), 15 deletions(-)

diff --git a/torchx/schedulers/__init__.py b/torchx/schedulers/__init__.py
index 785380307..b1f0aba70 100644
--- a/torchx/schedulers/__init__.py
+++ b/torchx/schedulers/__init__.py
@@ -10,7 +10,6 @@
 import torchx.schedulers.docker_scheduler as docker_scheduler
 import torchx.schedulers.kubernetes_scheduler as kubernetes_scheduler
 import torchx.schedulers.local_scheduler as local_scheduler
-import torchx.schedulers.ray_scheduler as ray_scheduler
 import torchx.schedulers.slurm_scheduler as slurm_scheduler
 from torchx.schedulers.api import Scheduler
 from torchx.specs.api import SchedulerBackend
@@ -37,9 +36,6 @@ def get_scheduler_factories() -> Dict[str, SchedulerFactory]:
         "kubernetes": kubernetes_scheduler.create_scheduler,
     }
 
-    if ray_scheduler.has_ray():
-        default_schedulers["ray"] = ray_scheduler.create_scheduler
-
     return load_group(
         "torchx.schedulers",
         default=default_schedulers,
diff --git a/torchx/schedulers/test/ray_scheduler_test.py b/torchx/schedulers/test/ray_scheduler_test.py
index 00ecba016..f79e91fea 100644
--- a/torchx/schedulers/test/ray_scheduler_test.py
+++ b/torchx/schedulers/test/ray_scheduler_test.py
@@ -6,31 +6,31 @@
 
 from contextlib import contextmanager
 from dataclasses import dataclass
-from typing import Any, Dict, Iterator, Type, cast
+from typing import Any, Dict, Iterator, Type
 from unittest import TestCase
 from unittest.mock import patch
 
-from torchx.schedulers import get_schedulers
 from torchx.schedulers.ray_scheduler import RayScheduler, _logger, has_ray
 from torchx.specs import AppDef, CfgVal, Resource, Role, runopts
 
 
 if has_ray():
 
-    class RaySchedulerRegistryTest(TestCase):
-        def test_get_schedulers_returns_ray_scheduler(self) -> None:
-            schedulers = get_schedulers("test_session")
+    # TODO(aivanou): enable after 0.1.1 release
+    # class RaySchedulerRegistryTest(TestCase):
+    #     def test_get_schedulers_returns_ray_scheduler(self) -> None:
+    #         schedulers = get_schedulers("test_session")
 
-            self.assertIn("ray", schedulers)
+    #         self.assertIn("ray", schedulers)
 
-            scheduler = schedulers["ray"]
+    #         scheduler = schedulers["ray"]
 
-            self.assertIsInstance(scheduler, RayScheduler)
+    #         self.assertIsInstance(scheduler, RayScheduler)
 
-            ray_scheduler = cast(RayScheduler, scheduler)
+    #         ray_scheduler = cast(RayScheduler, scheduler)
 
-            self.assertEqual(ray_scheduler.backend, "ray")
-            self.assertEqual(ray_scheduler.session_name, "test_session")
+    #         self.assertEqual(ray_scheduler.backend, "ray")
+    #         self.assertEqual(ray_scheduler.session_name, "test_session")
 
     class RaySchedulerTest(TestCase):
         def setUp(self) -> None:
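
Aside, not part of the patch itself: the change only drops the default registration
inside get_scheduler_factories(); torchx/schedulers/ray_scheduler.py and its
create_scheduler/has_ray helpers stay in the tree, as the test file's imports show.
Below is a minimal sketch of constructing the scheduler directly while it is out of
the registry, mirroring the commented-out registry test above. It assumes ray is
installed and that the factory takes the session name as its first argument, the
way the other factories registered in get_scheduler_factories() are invoked:

    from torchx.schedulers.ray_scheduler import create_scheduler, has_ray

    if has_ray():
        # Bypass the registry and build the scheduler straight from its module.
        scheduler = create_scheduler("test_session")
        print(scheduler.backend)       # expected: "ray"
        print(scheduler.session_name)  # expected: "test_session"

Because get_scheduler_factories() still returns load_group("torchx.schedulers",
default=default_schedulers, ...), a third-party package could also re-register a
"ray" entry under that entry-point group without waiting for the default to return.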