diff --git a/gradient/api_sdk/clients/__init__.py b/gradient/api_sdk/clients/__init__.py index d299e2ca..2501dee2 100644 --- a/gradient/api_sdk/clients/__init__.py +++ b/gradient/api_sdk/clients/__init__.py @@ -4,5 +4,6 @@ from .job_client import JobsClient from .machines_client import MachinesClient from .model_client import ModelsClient +from .notebook_client import NotebooksClient from .project_client import ProjectsClient from .sdk_client import SdkClient diff --git a/gradient/api_sdk/clients/notebook_client.py b/gradient/api_sdk/clients/notebook_client.py new file mode 100644 index 00000000..dbf696fb --- /dev/null +++ b/gradient/api_sdk/clients/notebook_client.py @@ -0,0 +1,81 @@ +from .base_client import BaseClient +from .. import repositories, models + + +class NotebooksClient(BaseClient): + def create( + self, + vm_type_id, + container_id, + cluster_id, + container_name=None, + name=None, + registry_username=None, + registry_password=None, + default_entrypoint=None, + container_user=None, + shutdown_timeout=None, + is_preemptible=None, + ): + """Create new notebook + + :param int vm_type_id: + :param int container_id: + :param int cluster_id: + :param str container_name: + :param str name: + :param str registry_username: + :param str registry_password: + :param str default_entrypoint: + :param str container_user: + :param int|float shutdown_timeout: + :param bool is_preemptible: + + :return: Notebook ID + :rtype str: + """ + + notebook = models.Notebook( + vm_type_id=vm_type_id, + container_id=container_id, + cluster_id=cluster_id, + container_name=container_name, + name=name, + registry_username=registry_username, + registry_password=registry_password, + default_entrypoint=default_entrypoint, + container_user=container_user, + shutdown_timeout=shutdown_timeout, + is_preemptible=is_preemptible, + ) + + repository = repositories.CreateNotebook(api_key=self.api_key, logger=self.logger) + handle = repository.create(notebook) + return handle + + def get(self, id): + """Get Notebook + + :param str id: Notebook ID + :rtype: models.Notebook + """ + repository = repositories.GetNotebook(api_key=self.api_key, logger=self.logger) + notebook = repository.get(id=id) + return notebook + + def delete(self, id): + """Delete existing notebook + + :param str id: Notebook ID + """ + repository = repositories.DeleteNotebook(api_key=self.api_key, logger=self.logger) + repository.delete(id) + + def list(self): + """Get list of Notebooks + + :rtype: list[models.Notebook] + """ + repository = repositories.ListNotebooks(api_key=self.api_key, logger=self.logger) + notebooks = repository.list() + return notebooks diff --git a/gradient/api_sdk/clients/sdk_client.py b/gradient/api_sdk/clients/sdk_client.py index 0aa88881..7c47c546 100644 --- a/gradient/api_sdk/clients/sdk_client.py +++ b/gradient/api_sdk/clients/sdk_client.py @@ -1,4 +1,5 @@ -from . import DeploymentsClient, ExperimentsClient, HyperparameterJobsClient, ModelsClient, ProjectsClient +from . import DeploymentsClient, ExperimentsClient, HyperparameterJobsClient, ModelsClient, ProjectsClient, \ + MachinesClient, NotebooksClient from .job_client import JobsClient from .. 
import logger as sdk_logger @@ -15,3 +16,5 @@ def __init__(self, api_key, logger=sdk_logger.MuteLogger()): self.models = ModelsClient(api_key=api_key, logger=logger) self.jobs = JobsClient(api_key=api_key, logger=logger) self.projects = ProjectsClient(api_key=api_key, logger=logger) + self.machines = MachinesClient(api_key=api_key, logger=logger) + self.notebooks = NotebooksClient(api_key=api_key, logger=logger) diff --git a/gradient/api_sdk/models/__init__.py b/gradient/api_sdk/models/__init__.py index c4c8f321..5088b40b 100644 --- a/gradient/api_sdk/models/__init__.py +++ b/gradient/api_sdk/models/__init__.py @@ -5,4 +5,5 @@ from .log import LogRow from .machine import Machine, MachineEvent, MachineUtilization from .model import Model +from .notebook import Notebook from .project import Project diff --git a/gradient/api_sdk/models/notebook.py b/gradient/api_sdk/models/notebook.py new file mode 100644 index 00000000..da4efe7e --- /dev/null +++ b/gradient/api_sdk/models/notebook.py @@ -0,0 +1,21 @@ +import attr + + +@attr.s +class Notebook(object): + id = attr.ib(type=str, default=None) + vm_type_id = attr.ib(type=int, default=None) + container_id = attr.ib(type=int, default=None) + container_name = attr.ib(type=str, default=None) + name = attr.ib(type=str, default=None) + cluster_id = attr.ib(type=int, default=None) + registry_username = attr.ib(type=str, default=None) + registry_password = attr.ib(type=str, default=None) + default_entrypoint = attr.ib(type=str, default=None) + container_user = attr.ib(type=str, default=None) + shutdown_timeout = attr.ib(type=int, default=None) + is_preemptible = attr.ib(type=bool, default=None) + project_id = attr.ib(type=bool, default=None) + state = attr.ib(type=bool, default=None) + vm_type = attr.ib(type=bool, default=None) + fqdn = attr.ib(type=bool, default=None) diff --git a/gradient/api_sdk/repositories/__init__.py b/gradient/api_sdk/repositories/__init__.py index 8a735121..20230e30 100644 --- a/gradient/api_sdk/repositories/__init__.py +++ b/gradient/api_sdk/repositories/__init__.py @@ -7,4 +7,5 @@ from .machines import CheckMachineAvailability, CreateMachine, CreateResource, StartMachine, StopMachine, \ RestartMachine, GetMachine, UpdateMachine, GetMachineUtilization from .models import ListModels +from .notebooks import CreateNotebook, DeleteNotebook, GetNotebook, ListNotebooks from .projects import CreateProject, ListProjects diff --git a/gradient/api_sdk/repositories/jobs.py b/gradient/api_sdk/repositories/jobs.py index 6474b405..f6c1f665 100644 --- a/gradient/api_sdk/repositories/jobs.py +++ b/gradient/api_sdk/repositories/jobs.py @@ -9,20 +9,6 @@ def _get_api_url(self, **_): return config.config.CONFIG_HOST -class ParseJobDictMixin(object): - @staticmethod - def _parse_object(job_dict, **kwargs): - """ - - :param job_dict: - :param kwargs: - :return: - :rtype: Job - """ - job = JobSchema().get_instance(job_dict) - return job - - class ListJobs(GetBaseJobApiUrlMixin, ListResources): def get_request_url(self, **kwargs): diff --git a/gradient/api_sdk/repositories/notebooks.py b/gradient/api_sdk/repositories/notebooks.py new file mode 100644 index 00000000..35baedc3 --- /dev/null +++ b/gradient/api_sdk/repositories/notebooks.py @@ -0,0 +1,81 @@ +from gradient import config +from .common import CreateResource, DeleteResource, ListResources, GetResource +from .. 
import serializers + + +class GetNotebookApiUrlMixin(object): + def _get_api_url(self, use_vpc=False): + return config.config.CONFIG_HOST + + +class CreateNotebook(GetNotebookApiUrlMixin, CreateResource): + SERIALIZER_CLS = serializers.NotebookSchema + + def get_request_url(self, **kwargs): + return "notebooks/createNotebook" + + def _process_instance_dict(self, instance_dict): + # the API requires this field but marshmallow does not create it if it's value is None + instance_dict.setdefault("containerId") + return instance_dict + + +class DeleteNotebook(GetNotebookApiUrlMixin, DeleteResource): + def get_request_url(self, **kwargs): + return "notebooks/v2/deleteNotebook" + + def _get_request_json(self, kwargs): + notebook_id = kwargs["id"] + d = {"notebookId": notebook_id} + return d + + def _send_request(self, client, url, json_data=None): + response = client.post(url, json=json_data) + return response + + +class GetNotebook(GetNotebookApiUrlMixin, GetResource): + def get_request_url(self, **kwargs): + notebook_id = kwargs["id"] + url = "notebooks/{}/getNotebook".format(notebook_id) + return url + + def _parse_object(self, data, **kwargs): + # this ugly hack is here because marshmallow disallows reading value into `id` field + # if JSON's field was named differently (despite using load_from in schema definition) + data["id"] = data["handle"] + + serializer = serializers.NotebookSchema() + notebooks = serializer.get_instance(data) + return notebooks + + +class ListNotebooks(GetNotebookApiUrlMixin, ListResources): + def get_request_url(self, **kwargs): + return "notebooks/getNotebooks" + + def _parse_objects(self, data, **kwargs): + notebook_dicts = data["notebookList"] + # this ugly hack is here because marshmallow disallows reading value into `id` field + # if JSON's field was named differently (despite using load_from in schema definition) + for d in notebook_dicts: + d["id"] = d["handle"] + + serializer = serializers.NotebookSchema() + notebooks = serializer.get_instance(notebook_dicts, many=True) + return notebooks + + def _get_request_json(self, kwargs): + json_ = { + "filter": { + "filter": { + "limit": 11, + "offset": 0, + "where": { + "dtDeleted": None, + }, + "order": "jobId desc", + }, + }, + } + return json_ diff --git a/gradient/api_sdk/serializers/__init__.py b/gradient/api_sdk/serializers/__init__.py index cb879153..8531e124 100644 --- a/gradient/api_sdk/serializers/__init__.py +++ b/gradient/api_sdk/serializers/__init__.py @@ -6,4 +6,5 @@ from .log import LogRowSchema from .machine import MachineSchema, MachineSchemaForListing, MachineEventSchema from .model import Model +from .notebook import NotebookSchema from .project import Project diff --git a/gradient/api_sdk/serializers/notebook.py b/gradient/api_sdk/serializers/notebook.py new file mode 100644 index 00000000..6382e637 --- /dev/null +++ b/gradient/api_sdk/serializers/notebook.py @@ -0,0 +1,25 @@ +import marshmallow + +from . import BaseSchema +from .. 
import models + + +class NotebookSchema(BaseSchema): + MODEL = models.Notebook + + id = marshmallow.fields.Str() + vm_type_id = marshmallow.fields.Int(load_from="vmTypeId", dump_to="vmTypeId") + container_id = marshmallow.fields.Int(load_from="containerId", dump_to="containerId", allow_none=True) + container_name = marshmallow.fields.Str(load_from="containerName", dump_to="containerName", allow_none=True) + name = marshmallow.fields.Str() + cluster_id = marshmallow.fields.Int(load_from="clusterId", dump_to="clusterId") + registry_username = marshmallow.fields.Str(load_from="registryUsername", dump_to="registryUsername") + registry_password = marshmallow.fields.Str(load_from="registryPassword", dump_to="registryPassword") + default_entrypoint = marshmallow.fields.Str(load_from="defaultEntrypoint", dump_to="defaultEntrypoint") + container_user = marshmallow.fields.Str(load_from="containerUser", dump_to="containerUser") + shutdown_timeout = marshmallow.fields.Int(load_from="shutdownTimeout", dump_to="shutdownTimeout") + is_preemptible = marshmallow.fields.Bool(load_from="isPreemptible", dump_to="isPreemptible") + project_id = marshmallow.fields.Str(load_from="projectHandle", dump_to="projectHandle") + state = marshmallow.fields.Str() + vm_type = marshmallow.fields.Str(load_from="vmType", dump_to="vmType") + fqdn = marshmallow.fields.Str() diff --git a/gradient/cli/__init__.py b/gradient/cli/__init__.py index 89f5ad0e..915201a6 100644 --- a/gradient/cli/__init__.py +++ b/gradient/cli/__init__.py @@ -9,6 +9,7 @@ import gradient.cli.jobs import gradient.cli.machines import gradient.cli.models +import gradient.cli.notebooks import gradient.cli.projects import gradient.cli.run diff --git a/gradient/cli/hyperparameters.py b/gradient/cli/hyperparameters.py index 8641d03c..f7aab8da 100644 --- a/gradient/cli/hyperparameters.py +++ b/gradient/cli/hyperparameters.py @@ -10,13 +10,6 @@ from gradient.commands import hyperparameters as hyperparameters_commands -def add_use_docker_file_flag_if_used(ctx, param, value): - if value: - ctx.params["useDockerFile"] = True - - return value - - @cli.group("hyperparameters", help="Manage hyperparameters", cls=ClickGroup) def hyperparameters_group(): pass diff --git a/gradient/cli/notebooks.py b/gradient/cli/notebooks.py new file mode 100644 index 00000000..1458df96 --- /dev/null +++ b/gradient/cli/notebooks.py @@ -0,0 +1,129 @@ +import click + +from gradient.cli import common +from gradient.cli.cli import cli +from gradient.commands import notebooks + + +@cli.group("notebooks", help="Manage notebooks", cls=common.ClickGroup) +def notebooks_group(): + pass + + +@notebooks_group.command("create", help="Create new notebook") +@click.option( + "--vmTypeId", + "vm_type_id", + type=int, + required=True, + help="Type of Virtual Machine", + cls=common.OptionReadValueFromConfigFile, +) +@click.option( + "--containerId", + "container_id", + type=int, + required=True, + help="Container ID", + cls=common.OptionReadValueFromConfigFile, +) +@click.option( + "--clusterId", + "cluster_id", + type=int, + required=True, + help="Cluster ID", + cls=common.OptionReadValueFromConfigFile, +) +@click.option( + "--containerName", + "container_name", + help="Container name", + cls=common.OptionReadValueFromConfigFile, +) +@click.option( + "--name", + "name", + help="Notebook name", + cls=common.OptionReadValueFromConfigFile, +) +@click.option( + "--registryUsername", + "registry_username", + help="Registry username", + cls=common.OptionReadValueFromConfigFile, +) +@click.option( + 
"--registryPassword", + "registry_password", + help="Registry password", + cls=common.OptionReadValueFromConfigFile, +) +@click.option( + "--defaultEntrypoint", + "default_entrypoint", + help="Default entrypoint", + cls=common.OptionReadValueFromConfigFile, +) +@click.option( + "--containerUser", + "container_user", + help="Container user", + cls=common.OptionReadValueFromConfigFile, +) +@click.option( + "--shutdownTimeout", + "shutdown_timeout", + help="Shutdown timeout in hours", + type=float, + cls=common.OptionReadValueFromConfigFile, +) +@click.option( + "--isPreemptible", + "is_preemptible", + help="Is preemptible", + is_flag=True, + type=bool, + cls=common.OptionReadValueFromConfigFile, +) +@common.api_key_option +@common.options_file +def create_notebook(api_key, options_file, **notebook): + command = notebooks.CreateNotebookCommand(api_key=api_key) + command.execute(**notebook) + + +@notebooks_group.command("delete", help="Delete existing notebook") +@click.option( + "--id", + "id_", + help="Notebook ID", + cls=common.OptionReadValueFromConfigFile, +) +@common.api_key_option +@common.options_file +def delete_notebook(id_, api_key, options_file): + command = notebooks.DeleteNotebookCommand(api_key=api_key) + command.execute(id_=id_) + + +@notebooks_group.command("list", help="List notebooks") +@common.api_key_option +@common.options_file +def list_notebooks(api_key, options_file): + command = notebooks.ListNotebooksCommand(api_key=api_key) + command.execute() + + +@notebooks_group.command("show", help="Show notebook details", hidden=True) +@click.option( + "--id", + "id", + help="Notebook ID", + cls=common.OptionReadValueFromConfigFile, +) +@common.api_key_option +@common.options_file +def show_notebook(id, api_key, options_file): + command = notebooks.ShowNotebookDetailsCommand(api_key=api_key) + command.execute(id) diff --git a/gradient/commands/common.py b/gradient/commands/common.py index 787cf357..0927fcaa 100644 --- a/gradient/commands/common.py +++ b/gradient/commands/common.py @@ -39,7 +39,7 @@ def _get_instances(self, kwargs): pass @abc.abstractmethod - def _get_table_data(self, experiments): + def _get_table_data(self, objects): pass def _log_objects_list(self, objects): @@ -59,69 +59,3 @@ def _make_table(table_data): ascii_table = terminaltables.AsciiTable(table_data) table_string = ascii_table.table return table_string - - -class CommandBase(object): - def __init__(self, api=None, logger_=Logger()): - self.api = api - self.logger = logger_ - - -class ListCommand(CommandBase): - WAITING_FOR_RESPONSE_MESSAGE = "Waiting for data..." 
- - @property - def request_url(self): - raise NotImplementedError() - - def execute(self, **kwargs): - with halo.Halo(text=self.WAITING_FOR_RESPONSE_MESSAGE, spinner="dots"): - response = self._get_response(kwargs) - - try: - if not response.ok: - self.logger.log_error_response(response.json()) - return - - objects = self._get_objects(response, kwargs) - except (ValueError, KeyError) as e: - self.logger.error("Error while parsing response data: {}".format(e)) - else: - self._log_objects_list(objects) - - def _log_objects_list(self, objects): - if not objects: - self.logger.warning("No data found") - return - - table_data = self._get_table_data(objects) - table_str = self._make_table(table_data) - if len(table_str.splitlines()) > get_terminal_lines(): - pydoc.pager(table_str) - else: - self.logger.log(table_str) - - def _get_objects(self, response, kwargs): - data = response.json() - return data - - def _get_response(self, kwargs): - json_ = self._get_request_json(kwargs) - params = self._get_request_params(kwargs) - response = self.api.get(self.request_url, json=json_, params=params) - return response - - def _get_table_data(self, objects): - raise NotImplementedError() - - @staticmethod - def _make_table(table_data): - ascii_table = terminaltables.AsciiTable(table_data) - table_string = ascii_table.table - return table_string - - def _get_request_json(self, kwargs): - return None - - def _get_request_params(self, kwargs): - return None diff --git a/gradient/commands/deployments.py b/gradient/commands/deployments.py index 517b5caf..1eb905e6 100644 --- a/gradient/commands/deployments.py +++ b/gradient/commands/deployments.py @@ -7,7 +7,6 @@ from gradient import version, logger as gradient_logger, api_sdk, exceptions from gradient.api_sdk.clients import http_client -from gradient.commands import common from gradient.config import config from gradient.utils import get_terminal_lines @@ -17,25 +16,6 @@ deployments_api = http_client.API(config.CONFIG_HOST, headers=default_headers) -class _DeploymentCommandBase(common.CommandBase): - def _log_message(self, response, success_msg_template, error_msg): - if response.ok: - try: - j = response.json() - handle = j["deployment"] - except (ValueError, KeyError): - self.logger.error(success_msg_template) - else: - msg = success_msg_template.format(**handle) - self.logger.log(msg) - else: - try: - data = response.json() - self.logger.log_error_response(data) - except ValueError: - self.logger.error(error_msg) - - @six.add_metaclass(abc.ABCMeta) class _DeploymentCommand(object): def __init__(self, deployment_client, logger_=gradient_logger.Logger()): diff --git a/gradient/commands/hyperparameters.py b/gradient/commands/hyperparameters.py index c545852c..7329a168 100644 --- a/gradient/commands/hyperparameters.py +++ b/gradient/commands/hyperparameters.py @@ -6,30 +6,11 @@ import terminaltables from gradient import api_sdk, exceptions -from gradient.commands import common from gradient.commands.common import BaseCommand, ListCommandMixin from gradient.commands.experiments import BaseCreateExperimentCommandMixin from gradient.utils import get_terminal_lines -class HyperparametersCommandBase(common.CommandBase): - def _log_message(self, response, success_msg_template, error_msg): - if response.ok: - try: - json_ = response.json() - except (ValueError, KeyError): - self.logger.log(success_msg_template) - else: - msg = success_msg_template.format(**json_) - self.logger.log(msg) - else: - try: - data = response.json() - self.logger.log_error_response(data) - 
except ValueError: - self.logger.error(error_msg) - - @six.add_metaclass(abc.ABCMeta) class BaseHyperparameterCommand(BaseCommand): def _get_client(self, api_key, logger): diff --git a/gradient/commands/jobs.py b/gradient/commands/jobs.py index 1a7c85e2..8ce9d039 100644 --- a/gradient/commands/jobs.py +++ b/gradient/commands/jobs.py @@ -22,22 +22,6 @@ def _get_client(self, api_key, logger_): client = api_sdk.clients.JobsClient(api_key=api_key, logger=logger_) return client - def _log_message(self, response_data, is_response_ok, success_msg_template, error_msg): - if is_response_ok: - try: - handle = response_data - except (ValueError, KeyError): - self.logger.log(success_msg_template) - else: - msg = success_msg_template.format(**handle) - self.logger.log(msg) - else: - try: - data = response_data - self.logger.log_error_response(data) - except ValueError: - self.logger.error(error_msg) - @six.add_metaclass(abc.ABCMeta) class BaseCreateJobCommandMixin(object): diff --git a/gradient/commands/login.py b/gradient/commands/login.py index 05cc75d4..60965317 100644 --- a/gradient/commands/login.py +++ b/gradient/commands/login.py @@ -1,9 +1,15 @@ +from gradient import logger from gradient.login import login, logout -from gradient.commands.common import CommandBase from gradient.login import set_apikey from gradient.version import version +class CommandBase(object): + def __init__(self, api=None, logger_=logger.Logger()): + self.api = api + self.logger = logger_ + + class LogInCommand(CommandBase): def execute(self, email, password, api_token_name=None): login(email, password, api_token_name) diff --git a/gradient/commands/notebooks.py b/gradient/commands/notebooks.py new file mode 100644 index 00000000..8be4f567 --- /dev/null +++ b/gradient/commands/notebooks.py @@ -0,0 +1,99 @@ +import abc +import pydoc + +import halo +import six +import terminaltables + +from gradient import api_sdk +from gradient.commands.common import BaseCommand, ListCommandMixin +from gradient.utils import get_terminal_lines + + +@six.add_metaclass(abc.ABCMeta) +class BaseNotebookCommand(BaseCommand): + def _get_client(self, api_key, logger): + client = api_sdk.clients.NotebooksClient(api_key=api_key, logger=logger) + return client + + +class CreateNotebookCommand(BaseNotebookCommand): + SPINNER_MESSAGE = "Creating new notebook" + + def execute(self, **kwargs): + with halo.Halo(text=self.SPINNER_MESSAGE, spinner="dots"): + notebook_id = self.client.create(**kwargs) + + self.logger.log("Created new notebook with id: {}".format(notebook_id)) + + +class DeleteNotebookCommand(BaseNotebookCommand): + WAITING_FOR_RESPONSE_MESSAGE = "Deleting notebook" + + def execute(self, id_): + with halo.Halo(text=self.WAITING_FOR_RESPONSE_MESSAGE, spinner="dots"): + self.client.delete(id_) + + self.logger.log("Notebook deleted") + + +class ListNotebooksCommand(ListCommandMixin, BaseNotebookCommand): + SPINNER_MESSAGE = "Waiting for data" + + def _get_instances(self, kwargs): + notebooks = self.client.list() + return notebooks + + def _get_table_data(self, notebooks): + data = [("Name", "ID")] + for obj in notebooks: + data.append((obj.name, obj.id)) + return data + + +class ShowNotebookDetailsCommand(BaseNotebookCommand): + WAITING_FOR_RESPONSE_MESSAGE = "Waiting for data" + + def execute(self, id_): + with halo.Halo(text=self.WAITING_FOR_RESPONSE_MESSAGE, spinner="dots"): + instance = self._get_instance(id_) + + self._log_object(instance) + + def _get_instance(self, id_): + """ + :rtype: api_sdk.Notebook + """ + instance = 
self.client.get(id_) + return instance + + def _log_object(self, instance): + + table_str = self._make_table(instance) + if len(table_str.splitlines()) > get_terminal_lines(): + pydoc.pager(table_str) + else: + self.logger.log(table_str) + + def _make_table(self, instance): + """ + :param api_sdk.Notebook: + """ + data = self._get_table_data(instance) + ascii_table = terminaltables.AsciiTable(data) + table_string = ascii_table.table + return table_string + + @staticmethod + def _get_table_data(instance): + """ + :param api_sdk.Notebook instance: + """ + data = ( + ("Name", instance.name), + ("ID", instance.id), + ("VM Type", instance.vm_type), + ("State", instance.state), + ("FQDN", instance.fqdn), + ) + return data diff --git a/gradient/logger.py b/gradient/logger.py index fcfe98e0..9d240813 100644 --- a/gradient/logger.py +++ b/gradient/logger.py @@ -1,6 +1,5 @@ from click import secho -from gradient.api_sdk.utils import MessageExtractor from .config import config @@ -21,33 +20,6 @@ def warning(self, message): color = "yellow" if config.USE_CONSOLE_COLORS else None self._log(message, color=color) - def log_error_response(self, data): - msg = MessageExtractor().get_message_from_response_data(data) - if not msg: - raise ValueError("No error messages found") - - self.error(msg) - def debug(self, message): if config.DEBUG: self._log("DEBUG: {}".format(message)) - - def log_response(self, response, success_msg, error_msg): - """ - :type response: requests.Response|http_client.GradientResponse - :type success_msg: str - :type error_msg: str - """ - if response.ok: - self._log(success_msg) - else: - try: - data = response.json() - self.log_error_response(data) - except ValueError: - self.error(error_msg) - except AttributeError: - if response.data: - self.log_error_response(response.data) - else: - self.error(response) diff --git a/tests/config_files/notebooks_create.yaml b/tests/config_files/notebooks_create.yaml new file mode 100644 index 00000000..005b2b15 --- /dev/null +++ b/tests/config_files/notebooks_create.yaml @@ -0,0 +1,12 @@ +apiKey: some_key +clusterId: 321 +containerId: 123 +containerName: null +containerUser: some_container_user +defaultEntrypoint: some_entrypoint +isPreemptible: true +name: some_notebook_name +registryPassword: some_password +registryUsername: some_username +shutdownTimeout: 8.0 +vmTypeId: 25 diff --git a/tests/config_files/notebooks_delete.yaml b/tests/config_files/notebooks_delete.yaml new file mode 100644 index 00000000..b52f2e44 --- /dev/null +++ b/tests/config_files/notebooks_delete.yaml @@ -0,0 +1,2 @@ +apiKey: some_key +id: some_id diff --git a/tests/config_files/notebooks_list.yaml b/tests/config_files/notebooks_list.yaml new file mode 100644 index 00000000..8cadda1f --- /dev/null +++ b/tests/config_files/notebooks_list.yaml @@ -0,0 +1 @@ +apiKey: some_key diff --git a/tests/config_files/notebooks_show.yaml b/tests/config_files/notebooks_show.yaml new file mode 100644 index 00000000..b52f2e44 --- /dev/null +++ b/tests/config_files/notebooks_show.yaml @@ -0,0 +1,2 @@ +apiKey: some_key +id: some_id diff --git a/tests/conftest.py b/tests/conftest.py index 369a1ea6..21d6a2e4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -256,3 +256,31 @@ def run_config_path(): p = Path(__file__) fixture_dir = p.parent / "config_files" / "run.yaml" return str(fixture_dir.resolve()) + + +@pytest.fixture +def notebooks_create_config_path(): + p = Path(__file__) + fixture_dir = p.parent / "config_files" / "notebooks_create.yaml" + return str(fixture_dir.resolve()) + + 
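Illustrative aside (not part of this patch): a minimal sketch of driving the new "notebooks create" command through the notebooks_create.yaml options file shown above; run as-is it would hit the live API unless requests.post is mocked, as the functional tests below do.

from click.testing import CliRunner

from gradient.cli import cli

runner = CliRunner()
result = runner.invoke(cli.cli, [
    "notebooks", "create",
    "--optionsFile", "tests/config_files/notebooks_create.yaml",  # fixture file defined above
])
print(result.output)  # on success: "Created new notebook with id: <handle>"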
+@pytest.fixture +def notebooks_delete_config_path(): + p = Path(__file__) + fixture_dir = p.parent / "config_files" / "notebooks_delete.yaml" + return str(fixture_dir.resolve()) + + +@pytest.fixture +def notebooks_show_config_path(): + p = Path(__file__) + fixture_dir = p.parent / "config_files" / "notebooks_show.yaml" + return str(fixture_dir.resolve()) + + +@pytest.fixture +def notebooks_list_config_path(): + p = Path(__file__) + fixture_dir = p.parent / "config_files" / "notebooks_list.yaml" + return str(fixture_dir.resolve()) diff --git a/tests/example_responses.py b/tests/example_responses.py index 32ea72f0..84520e65 100644 --- a/tests/example_responses.py +++ b/tests/example_responses.py @@ -3627,7 +3627,7 @@ "jobId": "jsy2ibsz1l026y", "line": 48, "timestamp": "2019-07-08T12:40:59.863Z", - "message": "I0708 12:40:59.862842 140451862599424 estimator.py:201] Using config: {'_master': '', '_num_worker_replicas': 1, '_num_ps_replicas': 0, '_save_checkpoints_secs': None, '_log_step_count_steps': 100, '_train_distribute': \u003ctensorflow.contrib.distribute.python.one_device_strategy.OneDeviceStrategy object at 0x7fbcb5adecc0\u003e, '_protocol': None, '_session_config': allow_soft_placement: true" + "message": "I0708 12:40:59.862842 140451862599424 estimator.py:201] Using config: {'_master': '', '_num_worker_replicas': 1, '_num_ps_replicas': 0, '_save_checkpoints_secs': None, '_log_step_count_steps': 100, '_train_distribute': \u003ctensorflow.contrib.distribute.python.one_device_strategy.OneDeviceStrategy object at 0x7fbcb5adecc0\u003e, '_protocol': None, '_session_config': allow_soft_placement: True" }, { "jobId": "jsy2ibsz1l026y", @@ -4529,3 +4529,804 @@ }, "message": "success" } + +NOTEBOOK_GET_RESPONSE = { + "name": "some_name", + "handle": "ngw7piq9", + "jobHandle": "jzhmk7fpluqje", + "project": "Notebook-some_name", + "projectHandle": "prg284tu2", + "state": "Running", + "token": "dc2c331ce694ebe5a615f6e5885f6a933fcb1be49cb0d17f", + "container": "Paperspace + Fast.AI 1.0 (V3)", + "containerUrl": "paperspace/fastai:1.0-CUDA9.2-base-3.0-v1.0.6", + "baseContainer": "Paperspace + Fast.AI 1.0 (V3)", + "baseContainerUrl": "paperspace/fastai:1.0-CUDA9.2-base-3.0-v1.0.6", + "vmType": "K80", + "cluster": "PS Notebooks on GCP", + "clusterId": "clmtkpnm2", + "fqdn": "ngw7piq9.dgradient.paperspace.com", + "startedByUser": "first last", + "startedByUserId": "ukgvw4i8", + "namespace": "username", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-09-03T11:06:18.154Z", + "dtModified": "2019-09-03T11:06:18.154Z", + "dtProvisioningStarted": "2019-09-03T11:08:36.286Z", + "dtProvisioningFinished": "2019-09-03T11:10:36.471Z", + "dtStarted": "2019-09-03T11:10:36.471Z", + "dtFinished": None, + "dtTeardownStarted": None, + "dtTeardownFinished": None, + "dtDeleted": None, + "shutdownTimeout": 6, + "jobId": 20206, + "isPublic": False, + "isPreemptible": False, + "cpuHostname": "gradient-host-1567508793", + "cpuCount": 2, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat md_clear arch_capabilities", + "cpuMem": "12297212 kB", + "gpuName": "Tesla K80", + 
"gpuSerial": "0320617088427", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "418.67", + "gpuCount": 1, + "gpuMem": "11441 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "id": 1823 +} + +NOTEBOOKS_LIST_RESPONSE_JSON = { + "notebookList": [ + { + "name": "job 1", + "handle": "n1vmfj6x", + "jobHandle": "jsh0692p80dphg", + "project": "Notebook-undefined", + "projectHandle": "pr231zktg", + "state": "Running", + "token": "80426c989ef8d42b4dff6806c9592b1111a47c63c0f4a36f", + "container": "Paperspace + Fast.AI 1.0 (V3)", + "containerUrl": "paperspace/fastai:1.0-CUDA9.2-base-3.0-v1.0.6", + "baseContainer": "Paperspace + Fast.AI 1.0 (V3)", + "baseContainerUrl": "paperspace/fastai:1.0-CUDA9.2-base-3.0-v1.0.6", + "vmType": "K80", + "cluster": "PS Notebooks on GCP", + "clusterId": "clmtkpnm2", + "fqdn": "n1vmfj6x.dgradient.paperspace.com", + "startedByUser": "first last", + "startedByUserId": "ukgvw4i8", + "namespace": "username", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-09-04T11:06:12.667Z", + "dtModified": "2019-09-04T11:06:12.667Z", + "dtProvisioningStarted": "2019-09-04T11:08:28.305Z", + "dtProvisioningFinished": "2019-09-04T11:10:30.628Z", + "dtStarted": "2019-09-04T11:10:30.628Z", + "dtFinished": None, + "dtTeardownStarted": None, + "dtTeardownFinished": None, + "dtDeleted": None, + "shutdownTimeout": None, + "jobId": 20221, + "isPublic": False, + "isPreemptible": False, + "cpuHostname": "gradient-host-1567595186", + "cpuCount": 2, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat md_clear arch_capabilities", + "cpuMem": "12297212 kB", + "gpuName": "Tesla K80", + "gpuSerial": "0320617028675", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "418.67", + "gpuCount": 1, + "gpuMem": "11441 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "id": 1832 + }, + { + "name": "job 1", + "handle": "nhdf8zf3", + "jobHandle": "jsyvcxoxch3jgu", + "project": "Notebook-undefined", + "projectHandle": "pr2u2sfja", + "state": "Running", + "token": "1d763ce770a195c98ea3d30588f3ad007c2b8403608ab091", + "container": "Paperspace + Fast.AI 1.0 (V3)", + "containerUrl": "paperspace/fastai:1.0-CUDA9.2-base-3.0-v1.0.6", + "baseContainer": "Paperspace + Fast.AI 1.0 (V3)", + "baseContainerUrl": "paperspace/fastai:1.0-CUDA9.2-base-3.0-v1.0.6", + "vmType": "K80", + "cluster": "PS Notebooks on GCP", + "clusterId": "clmtkpnm2", + "fqdn": "nhdf8zf3.dgradient.paperspace.com", + "startedByUser": "first last", + "startedByUserId": "ukgvw4i8", + "namespace": "username", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-09-04T10:23:04.762Z", + "dtModified": "2019-09-04T10:23:04.762Z", + "dtProvisioningStarted": "2019-09-04T10:26:05.190Z", + "dtProvisioningFinished": "2019-09-04T10:28:13.609Z", + "dtStarted": "2019-09-04T10:28:13.609Z", + "dtFinished": None, + "dtTeardownStarted": None, + "dtTeardownFinished": None, + "dtDeleted": None, + "shutdownTimeout": None, + "jobId": 20219, + "isPublic": 
False, + "isPreemptible": False, + "cpuHostname": "gradient-host-1567592650-d5337953", + "cpuCount": 2, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat md_clear arch_capabilities", + "cpuMem": "12297204 kB", + "gpuName": "Tesla K80", + "gpuSerial": "0320617086962", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "418.67", + "gpuCount": 1, + "gpuMem": "11441 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "id": 1831 + }, + { + "name": "My Notebook 123", + "handle": "nslk5r03", + "jobHandle": "jskm7amsly7mmj", + "project": "Notebook-My Notebook 123", + "projectHandle": "pr3qq8qlg", + "state": "Stopped", + "token": "7751a516535bf3d52c164315d6187c1c9a04f15c15cf1c15", + "container": "nslk5r03", + "containerUrl": "us.gcr.io/ps-development-229517/paperspace/pr3qq8qlg:nslk5r03", + "baseContainer": "Paperspace + Fast.AI 1.0 (V3)", + "baseContainerUrl": "paperspace/fastai:1.0-CUDA9.2-base-3.0-v1.0.6", + "vmType": "K80", + "cluster": "PS Notebooks on GCP", + "clusterId": "clmtkpnm2", + "fqdn": "nslk5r03.dgradient.paperspace.com", + "startedByUser": "first last", + "startedByUserId": "ukgvw4i8", + "namespace": "username", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-09-04T10:22:43.248Z", + "dtModified": "2019-09-04T10:22:43.248Z", + "dtProvisioningStarted": "2019-09-04T10:25:26.545Z", + "dtProvisioningFinished": "2019-09-04T10:27:24.319Z", + "dtStarted": "2019-09-04T10:27:24.319Z", + "dtFinished": "2019-09-04T16:27:52.044Z", + "dtTeardownStarted": "2019-09-04T16:27:56.915Z", + "dtTeardownFinished": "2019-09-04T16:28:57.796Z", + "dtDeleted": None, + "shutdownTimeout": 6, + "jobId": 20218, + "isPublic": False, + "isPreemptible": False, + "cpuHostname": "gradient-host-1567592577", + "cpuCount": 2, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat md_clear arch_capabilities", + "cpuMem": "12297212 kB", + "gpuName": "Tesla K80", + "gpuSerial": "0320617029024", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "418.67", + "gpuCount": 1, + "gpuMem": "11441 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "id": 1830 + }, + { + "name": "My Notebook 123", + "handle": "ng9a3tp4", + "jobHandle": "jg5vkj6d799z8", + "project": "Notebook-My Notebook 123", + "projectHandle": "pr5ngrxr9", + "state": "Stopped", + "token": "d9ba60e2bf7abd8ebd0c9988507ce203dd9baa7bc2b77284", + "container": "ng9a3tp4", + "containerUrl": "us.gcr.io/ps-development-229517/paperspace/pr5ngrxr9:ng9a3tp4", + "baseContainer": "Paperspace + Fast.AI 1.0 (V3)", + "baseContainerUrl": 
"paperspace/fastai:1.0-CUDA9.2-base-3.0-v1.0.6", + "vmType": "K80", + "cluster": "PS Notebooks on GCP", + "clusterId": "clmtkpnm2", + "fqdn": "ng9a3tp4.dgradient.paperspace.com", + "startedByUser": "first last", + "startedByUserId": "ukgvw4i8", + "namespace": "username", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-09-04T10:16:22.362Z", + "dtModified": "2019-09-04T10:16:22.362Z", + "dtProvisioningStarted": "2019-09-04T10:18:46.309Z", + "dtProvisioningFinished": "2019-09-04T10:20:45.879Z", + "dtStarted": "2019-09-04T10:20:45.879Z", + "dtFinished": "2019-09-04T16:20:51.718Z", + "dtTeardownStarted": "2019-09-04T16:20:56.745Z", + "dtTeardownFinished": "2019-09-04T16:22:03.922Z", + "dtDeleted": None, + "shutdownTimeout": 6, + "jobId": 20217, + "isPublic": False, + "isPreemptible": False, + "cpuHostname": "gradient-host-1567592197", + "cpuCount": 2, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat md_clear arch_capabilities", + "cpuMem": "12297212 kB", + "gpuName": "Tesla K80", + "gpuSerial": "0320617086541", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "418.67", + "gpuCount": 1, + "gpuMem": "11441 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "id": 1829 + }, + { + "name": "some_name", + "handle": "ngw7piq9", + "jobHandle": "jzhmk7fpluqje", + "project": "Notebook-some_name", + "projectHandle": "prg284tu2", + "state": "Stopped", + "token": "dc2c331ce694ebe5a615f6e5885f6a933fcb1be49cb0d17f", + "container": "ngw7piq9", + "containerUrl": "us.gcr.io/ps-development-229517/paperspace/prg284tu2:ngw7piq9", + "baseContainer": "Paperspace + Fast.AI 1.0 (V3)", + "baseContainerUrl": "paperspace/fastai:1.0-CUDA9.2-base-3.0-v1.0.6", + "vmType": "K80", + "cluster": "PS Notebooks on GCP", + "clusterId": "clmtkpnm2", + "fqdn": "ngw7piq9.dgradient.paperspace.com", + "startedByUser": "first last", + "startedByUserId": "ukgvw4i8", + "namespace": "username", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-09-03T11:06:18.154Z", + "dtModified": "2019-09-03T11:06:18.154Z", + "dtProvisioningStarted": "2019-09-03T11:08:36.286Z", + "dtProvisioningFinished": "2019-09-03T11:10:36.471Z", + "dtStarted": "2019-09-03T11:10:36.471Z", + "dtFinished": "2019-09-03T17:10:53.440Z", + "dtTeardownStarted": "2019-09-03T17:10:54.455Z", + "dtTeardownFinished": "2019-09-03T17:12:01.889Z", + "dtDeleted": None, + "shutdownTimeout": 6, + "jobId": 20206, + "isPublic": False, + "isPreemptible": False, + "cpuHostname": "gradient-host-1567508793", + "cpuCount": 2, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat md_clear arch_capabilities", + "cpuMem": 
"12297212 kB", + "gpuName": "Tesla K80", + "gpuSerial": "0320617088427", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "418.67", + "gpuCount": 1, + "gpuMem": "11441 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "id": 1823 + }, + { + "name": "some_notebook_name", + "handle": "n8h0d5lf", + "jobHandle": "js63sf787xc3mx", + "project": "Notebook-some_notebook_name", + "projectHandle": "prupasg3e", + "state": "Error", + "token": None, + "container": "some_name", + "containerUrl": "some_name", + "baseContainer": "some_name", + "baseContainerUrl": "some_name", + "vmType": "G1", + "cluster": "PS Notebooks on GCP", + "clusterId": "clmtkpnm2", + "fqdn": "n8h0d5lf.dgradient.paperspace.com", + "startedByUser": "first last", + "startedByUserId": "ukgvw4i8", + "namespace": "username", + "parentJobId": None, + "jobError": "Error pulling container during provisioning for job js63sf787xc3mx: Error pulling image 'some_name': Error response from daemon: Get https://registry-1.docker.io/v2/library/some_name/manifests/latest: unauthorized: incorrect username or password", + "dtCreated": "2019-08-30T12:31:43.392Z", + "dtModified": "2019-08-30T12:31:43.392Z", + "dtProvisioningStarted": "2019-08-30T12:33:34.650Z", + "dtProvisioningFinished": None, + "dtStarted": None, + "dtFinished": "2019-08-30T12:33:35.479Z", + "dtTeardownStarted": None, + "dtTeardownFinished": None, + "dtDeleted": None, + "shutdownTimeout": 8, + "jobId": 20163, + "isPublic": False, + "isPreemptible": True, + "cpuHostname": "gradient-host-1567168315", + "cpuCount": 1, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat md_clear arch_capabilities", + "cpuMem": "1783380 kB", + "gpuName": None, + "gpuSerial": None, + "gpuDevice": None, + "gpuDriver": None, + "gpuCount": None, + "gpuMem": None, + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "id": 1811 + }, + { + "name": "some_notebook_name", + "handle": "nl0b6cn0", + "jobHandle": "jss28gdrarcbrw", + "project": "Notebook-some_notebook_name", + "projectHandle": "pr43jj028", + "state": "Error", + "token": None, + "container": "some_name", + "containerUrl": "some_name", + "baseContainer": "some_name", + "baseContainerUrl": "some_name", + "vmType": "G1", + "cluster": "PS Notebooks on GCP", + "clusterId": "clmtkpnm2", + "fqdn": "nl0b6cn0.dgradient.paperspace.com", + "startedByUser": "first last", + "startedByUserId": "ukgvw4i8", + "namespace": "username", + "parentJobId": None, + "jobError": "Error pulling container during provisioning for job jss28gdrarcbrw: Error pulling image 'some_name': Error response from daemon: Get https://registry-1.docker.io/v2/library/some_name/manifests/latest: unauthorized: incorrect username or password", + "dtCreated": "2019-08-30T12:16:11.944Z", + "dtModified": "2019-08-30T12:16:11.944Z", + "dtProvisioningStarted": "2019-08-30T12:16:19.646Z", + "dtProvisioningFinished": None, + "dtStarted": None, + "dtFinished": "2019-08-30T12:16:20.382Z", + "dtTeardownStarted": 
None, + "dtTeardownFinished": None, + "dtDeleted": None, + "shutdownTimeout": 8, + "jobId": 20162, + "isPublic": False, + "isPreemptible": True, + "cpuHostname": "gradient-host-1567167274", + "cpuCount": 1, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat md_clear arch_capabilities", + "cpuMem": "1783380 kB", + "gpuName": None, + "gpuSerial": None, + "gpuDevice": None, + "gpuDriver": None, + "gpuCount": None, + "gpuMem": None, + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "id": 1810 + }, + { + "name": "some_notebook_name", + "handle": "njmq1zju", + "jobHandle": "jd35vd65dkqch", + "project": "Notebook-some_notebook_name", + "projectHandle": "prflq2sy0", + "state": "Error", + "token": None, + "container": "None", + "containerUrl": "None", + "baseContainer": "None", + "baseContainerUrl": "None", + "vmType": "G1", + "cluster": "PS Notebooks on GCP", + "clusterId": "clmtkpnm2", + "fqdn": "njmq1zju.dgradient.paperspace.com", + "startedByUser": "first last", + "startedByUserId": "ukgvw4i8", + "namespace": "username", + "parentJobId": None, + "jobError": "Error pulling container during provisioning for job jd35vd65dkqch: Error pulling image 'None': Error response from daemon: Get https://registry-1.docker.io/v2/library/None/manifests/latest: unauthorized: incorrect username or password", + "dtCreated": "2019-08-30T12:14:32.296Z", + "dtModified": "2019-08-30T12:14:32.296Z", + "dtProvisioningStarted": "2019-08-30T12:16:08.347Z", + "dtProvisioningFinished": None, + "dtStarted": None, + "dtFinished": "2019-08-30T12:16:09.132Z", + "dtTeardownStarted": None, + "dtTeardownFinished": None, + "dtDeleted": None, + "shutdownTimeout": 8, + "jobId": 20161, + "isPublic": False, + "isPreemptible": True, + "cpuHostname": "gradient-host-1567167274", + "cpuCount": 1, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat md_clear arch_capabilities", + "cpuMem": "1783380 kB", + "gpuName": None, + "gpuSerial": None, + "gpuDevice": None, + "gpuDriver": None, + "gpuCount": None, + "gpuMem": None, + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "id": 1809 + }, + { + "name": "some_notebook_name", + "handle": "nfcuwqu5", + "jobHandle": "je2cmiigxwhy0", + "project": "Notebook-some_notebook_name", + "projectHandle": "prl9nu5p7", + "state": "Error", + "token": None, + "container": "some_name", + "containerUrl": "some_name", + "baseContainer": "some_name", + "baseContainerUrl": "some_name", + "vmType": "G1", + "cluster": "PS Notebooks on GCP", + "clusterId": "clmtkpnm2", + "fqdn": 
"nfcuwqu5.dgradient.paperspace.com", + "startedByUser": "first last", + "startedByUserId": "ukgvw4i8", + "namespace": "username", + "parentJobId": None, + "jobError": "Error pulling container during provisioning for job je2cmiigxwhy0: Error pulling image 'some_name': Error response from daemon: Get https://registry-1.docker.io/v2/library/some_name/manifests/latest: unauthorized: incorrect username or password", + "dtCreated": "2019-08-30T12:13:30.657Z", + "dtModified": "2019-08-30T12:13:30.657Z", + "dtProvisioningStarted": "2019-08-30T12:15:11.388Z", + "dtProvisioningFinished": None, + "dtStarted": None, + "dtFinished": "2019-08-30T12:15:12.207Z", + "dtTeardownStarted": None, + "dtTeardownFinished": None, + "dtDeleted": None, + "shutdownTimeout": 8, + "jobId": 20160, + "isPublic": False, + "isPreemptible": True, + "cpuHostname": "gradient-host-1567167224", + "cpuCount": 1, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid tsc_known_freq pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat md_clear arch_capabilities", + "cpuMem": "1783380 kB", + "gpuName": None, + "gpuSerial": None, + "gpuDevice": None, + "gpuDriver": None, + "gpuCount": None, + "gpuMem": None, + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "id": 1808 + } + ], + "availableMachines": [ + { + "vmTypeId": 5, + "clusterId": 1, + "isAvailable": False, + "isPreemptible": False, + "showDisabled": False, + "numActiveNodes": "0", + "numAvailableNodes": "0", + "id": 11, + "cluster": { + "name": "PS Notebooks", + "type": 1, + "regionId": 1, + "showDisabled": False, + "dtCreated": "2017-11-03T19:09:41.077Z", + "dtModified": "2019-05-29T15:56:16.039Z", + "dtDeleted": None, + "handle": "cls28l0qm", + "cloud": None, + "isDeleted": False, + "isPrivate": False, + "isDefault": False, + "fqdn": None, + "id": 1 + } + }, + { + "vmTypeId": 7, + "clusterId": 1, + "isAvailable": True, + "isPreemptible": False, + "showDisabled": False, + "numActiveNodes": "1", + "numAvailableNodes": "1", + "id": 15, + "cluster": { + "name": "PS Notebooks", + "type": 1, + "regionId": 1, + "showDisabled": False, + "dtCreated": "2017-11-03T19:09:41.077Z", + "dtModified": "2019-05-29T15:56:16.039Z", + "dtDeleted": None, + "handle": "cls28l0qm", + "cloud": None, + "isDeleted": False, + "isPrivate": False, + "isDefault": False, + "fqdn": None, + "id": 1 + } + }, + { + "vmTypeId": 20, + "clusterId": 3, + "isAvailable": True, + "isPreemptible": False, + "showDisabled": False, + "numActiveNodes": "2", + "numAvailableNodes": "0", + "id": 7, + "cluster": { + "name": "PS Notebooks on GCP", + "type": 1, + "regionId": 4, + "showDisabled": False, + "dtCreated": "2018-03-02T18:27:16.323Z", + "dtModified": "2019-05-29T15:56:16.039Z", + "dtDeleted": None, + "handle": "clmtkpnm2", + "cloud": "gcp", + "isDeleted": False, + "isPrivate": False, + "isDefault": False, + "fqdn": None, + "id": 3 + } + }, + { + "vmTypeId": 20, + "clusterId": 3, + "isAvailable": True, + "isPreemptible": True, + "showDisabled": False, + "numActiveNodes": "2", + "numAvailableNodes": "0", + "id": 6, + "cluster": { + "name": "PS Notebooks on GCP", + "type": 1, + 
"regionId": 4, + "showDisabled": False, + "dtCreated": "2018-03-02T18:27:16.323Z", + "dtModified": "2019-05-29T15:56:16.039Z", + "dtDeleted": None, + "handle": "clmtkpnm2", + "cloud": "gcp", + "isDeleted": False, + "isPrivate": False, + "isDefault": False, + "fqdn": None, + "id": 3 + } + }, + { + "vmTypeId": 21, + "clusterId": 3, + "isAvailable": True, + "isPreemptible": True, + "showDisabled": False, + "numActiveNodes": "0", + "numAvailableNodes": "0", + "id": 22, + "cluster": { + "name": "PS Notebooks on GCP", + "type": 1, + "regionId": 4, + "showDisabled": False, + "dtCreated": "2018-03-02T18:27:16.323Z", + "dtModified": "2019-05-29T15:56:16.039Z", + "dtDeleted": None, + "handle": "clmtkpnm2", + "cloud": "gcp", + "isDeleted": False, + "isPrivate": False, + "isDefault": False, + "fqdn": None, + "id": 3 + } + }, + { + "vmTypeId": 21, + "clusterId": 3, + "isAvailable": True, + "isPreemptible": False, + "showDisabled": False, + "numActiveNodes": "0", + "numAvailableNodes": "0", + "id": 23, + "cluster": { + "name": "PS Notebooks on GCP", + "type": 1, + "regionId": 4, + "showDisabled": False, + "dtCreated": "2018-03-02T18:27:16.323Z", + "dtModified": "2019-05-29T15:56:16.039Z", + "dtDeleted": None, + "handle": "clmtkpnm2", + "cloud": "gcp", + "isDeleted": False, + "isPrivate": False, + "isDefault": False, + "fqdn": None, + "id": 3 + } + }, + { + "vmTypeId": 25, + "clusterId": 3, + "isAvailable": True, + "isPreemptible": False, + "showDisabled": False, + "numActiveNodes": "1", + "numAvailableNodes": "0", + "id": 27, + "cluster": { + "name": "PS Notebooks on GCP", + "type": 1, + "regionId": 4, + "showDisabled": False, + "dtCreated": "2018-03-02T18:27:16.323Z", + "dtModified": "2019-05-29T15:56:16.039Z", + "dtDeleted": None, + "handle": "clmtkpnm2", + "cloud": "gcp", + "isDeleted": False, + "isPrivate": False, + "isDefault": False, + "fqdn": None, + "id": 3 + } + }, + { + "vmTypeId": 25, + "clusterId": 3, + "isAvailable": True, + "isPreemptible": True, + "showDisabled": False, + "numActiveNodes": "1", + "numAvailableNodes": "0", + "id": 26, + "cluster": { + "name": "PS Notebooks on GCP", + "type": 1, + "regionId": 4, + "showDisabled": False, + "dtCreated": "2018-03-02T18:27:16.323Z", + "dtModified": "2019-05-29T15:56:16.039Z", + "dtDeleted": None, + "handle": "clmtkpnm2", + "cloud": "gcp", + "isDeleted": False, + "isPrivate": False, + "isDefault": False, + "fqdn": None, + "id": 3 + } + }, + { + "vmTypeId": 31, + "clusterId": 90, + "isAvailable": True, + "isPreemptible": False, + "showDisabled": False, + "numActiveNodes": "1", + "numAvailableNodes": "1", + "id": 1, + "cluster": { + "name": "Free Public Notebooks", + "type": 1, + "regionId": 2, + "showDisabled": False, + "dtCreated": "2019-06-18T23:33:58.997Z", + "dtModified": "2019-08-12T17:06:19.492Z", + "dtDeleted": None, + "handle": "cltwhzxx6", + "cloud": None, + "isDeleted": False, + "isPrivate": False, + "isDefault": False, + "fqdn": None, + "id": 90 + } + }, + { + "vmTypeId": 32, + "clusterId": 90, + "isAvailable": True, + "isPreemptible": False, + "showDisabled": False, + "numActiveNodes": "1", + "numAvailableNodes": "1", + "id": 12, + "cluster": { + "name": "Free Public Notebooks", + "type": 1, + "regionId": 2, + "showDisabled": False, + "dtCreated": "2019-06-18T23:33:58.997Z", + "dtModified": "2019-08-12T17:06:19.492Z", + "dtDeleted": None, + "handle": "cltwhzxx6", + "cloud": None, + "isDeleted": False, + "isPrivate": False, + "isDefault": False, + "fqdn": None, + "id": 90 + } + } + ], + "total": 9, + "runningTotal": 2, + 
"displayTotal": 9 +} \ No newline at end of file diff --git a/tests/functional/test_notebooks.py b/tests/functional/test_notebooks.py new file mode 100644 index 00000000..4650bf9c --- /dev/null +++ b/tests/functional/test_notebooks.py @@ -0,0 +1,490 @@ +import mock +from click.testing import CliRunner + +import gradient.api_sdk.clients.http_client +from gradient.cli import cli +from tests import MockResponse, example_responses + + +class TestNotebooksCreate(object): + URL = "https://api.paperspace.io/notebooks/createNotebook" + COMMAND = [ + "notebooks", + "create", + "--vmTypeId", "25", + "--containerId", "123", + "--clusterId", "321" + ] + EXPECTED_REQUEST_JSON = {"vmTypeId": 25, "containerId": 123, "clusterId": 321} + EXPECTED_RESPONSE_JSON = { + "handle": "some_id", + "notebookToken": None, + "jobId": 20163, + "isPublic": False, + "id": 1811, + "containerId": 123, + } + EXPECTED_STDOUT = """Created new notebook with id: some_id\n""" + + COMMAND_WITH_API_KEY_USED = [ + "notebooks", + "create", + "--vmTypeId", "25", + "--containerId", "123", + "--clusterId", "321", + "--apiKey", "some_key", + ] + + COMMAND_WITH_ALL_OPTIONS = [ + "notebooks", + "create", + "--vmTypeId", "25", + "--containerId", "123", + "--clusterId", "321", + "--name", "some_notebook_name", + "--registryUsername", "some_username", + "--registryPassword", "some_password", + "--defaultEntrypoint", "some_entrypoint", + "--containerUser", "some_container_user", + "--shutdownTimeout", "8", + "--isPreemptible", + ] + EXPECTED_REQUEST_JSON_WITH_ALL_OPTIONS = { + "vmTypeId": 25, + "containerId": 123, + "clusterId": 321, + "name": "some_notebook_name", + "registryUsername": "some_username", + "registryPassword": "some_password", + "defaultEntrypoint": "some_entrypoint", + "containerUser": "some_container_user", + "shutdownTimeout": 8, + "isPreemptible": True, + } + COMMAND_WITH_OPTIONS_FILE_USED = ["notebooks", "create", "--optionsFile", ] # path added in test + + EXPECTED_HEADERS = gradient.api_sdk.clients.http_client.default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY = gradient.api_sdk.clients.http_client.default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY["X-API-Key"] = "some_key" + + RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"} + EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Failed to create resource: Invalid API token\n" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.post") + def test_should_send_post_request_and_print_notebook_id(self, post_patched): + post_patched.return_value = MockResponse(self.EXPECTED_RESPONSE_JSON) + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND) + + assert result.output == self.EXPECTED_STDOUT, result.exc_info + post_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.EXPECTED_REQUEST_JSON, + data=None, + files=None, + params=None) + assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.post") + def test_should_send_changed_headers_when_api_key_option_was_used(self, post_patched): + post_patched.return_value = MockResponse(self.EXPECTED_RESPONSE_JSON) + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND_WITH_API_KEY_USED) + + assert result.output == self.EXPECTED_STDOUT, result.exc_info + post_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=self.EXPECTED_REQUEST_JSON, + data=None, + files=None, + params=None) + assert 
self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.post") + def test_should_send_post_request_and_print_notebook_id_when_all_options_were_used(self, post_patched): + post_patched.return_value = MockResponse(self.EXPECTED_RESPONSE_JSON) + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND_WITH_ALL_OPTIONS) + + assert result.output == self.EXPECTED_STDOUT, result.exc_info + post_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.EXPECTED_REQUEST_JSON_WITH_ALL_OPTIONS, + data=None, + files=None, + params=None) + assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.post") + def test_should_read_option_from_yaml_file(self, post_patched, notebooks_create_config_path): + post_patched.return_value = MockResponse(self.EXPECTED_RESPONSE_JSON) + command = self.COMMAND_WITH_OPTIONS_FILE_USED[:] + [notebooks_create_config_path] + + runner = CliRunner() + result = runner.invoke(cli.cli, command) + + assert result.output == self.EXPECTED_STDOUT, result.exc_info + post_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=self.EXPECTED_REQUEST_JSON_WITH_ALL_OPTIONS, + data=None, + files=None, + params=None) + assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.post") + def test_should_print_valid_error_message_when_command_was_used_with_invalid_api_token(self, + get_patched): + get_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, 400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.COMMAND) + + assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN, result.exc_info + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.EXPECTED_REQUEST_JSON, + data=None, + files=None, + params=None) + assert result.exit_code == 0 + + @mock.patch("gradient.api_sdk.clients.http_client.requests.post") + def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, + get_patched): + get_patched.return_value = MockResponse(status_code=400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.COMMAND) + + assert result.output == "Failed to create resource\n", result.exc_info + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.EXPECTED_REQUEST_JSON, + data=None, + files=None, + params=None) + assert result.exit_code == 0 + + +class TestNotebooksDelete(object): + URL = "https://api.paperspace.io/notebooks/v2/deleteNotebook" + COMMAND = [ + "notebooks", + "delete", + "--id", "some_id", + ] + EXPECTED_REQUEST_JSON = {"notebookId": "some_id"} + EXPECTED_STDOUT = "Notebook deleted\n" + + COMMAND_WITH_API_KEY_USED = [ + "notebooks", + "delete", + "--id", "some_id", + "--apiKey", "some_key", + ] + + COMMAND_WITH_OPTIONS_FILE_USED = ["notebooks", "delete", "--optionsFile", ] # path added in test + + EXPECTED_HEADERS = gradient.api_sdk.clients.http_client.default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY = gradient.api_sdk.clients.http_client.default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY["X-API-Key"] = "some_key" + + RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"} + EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Failed to delete resource: Invalid API token\n" + + 
@mock.patch("gradient.api_sdk.clients.http_client.requests.post") + def test_should_send_post_request_and_print_notebook_id(self, post_patched): + post_patched.return_value = MockResponse(status_code=204) + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND) + + assert result.output == self.EXPECTED_STDOUT, result.exc_info + post_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.EXPECTED_REQUEST_JSON, + data=None, + files=None, + params=None) + assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.post") + def test_should_send_changed_headers_when_api_key_option_was_used(self, post_patched): + post_patched.return_value = MockResponse(status_code=204) + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND_WITH_API_KEY_USED) + + assert result.output == self.EXPECTED_STDOUT, result.exc_info + post_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=self.EXPECTED_REQUEST_JSON, + data=None, + files=None, + params=None) + assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.post") + def test_should_read_option_from_yaml_file(self, post_patched, notebooks_delete_config_path): + post_patched.return_value = MockResponse(status_code=204) + command = self.COMMAND_WITH_OPTIONS_FILE_USED[:] + [notebooks_delete_config_path] + + runner = CliRunner() + result = runner.invoke(cli.cli, command) + + assert result.output == self.EXPECTED_STDOUT, result.exc_info + post_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=self.EXPECTED_REQUEST_JSON, + data=None, + files=None, + params=None) + assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.post") + def test_should_print_valid_error_message_when_command_was_used_with_invalid_api_token(self, get_patched): + get_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, 400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.COMMAND) + + assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN, result.exc_info + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.EXPECTED_REQUEST_JSON, + data=None, + files=None, + params=None) + assert result.exit_code == 0 + + @mock.patch("gradient.api_sdk.clients.http_client.requests.post") + def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, get_patched): + get_patched.return_value = MockResponse(status_code=400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.COMMAND) + + assert result.output == "Failed to delete resource\n", result.exc_info + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.EXPECTED_REQUEST_JSON, + data=None, + files=None, + params=None) + assert result.exit_code == 0 + + +class TestNotebooksShow(object): + URL = "https://api.paperspace.io/notebooks/some_id/getNotebook" + COMMAND = ["notebooks", "show", "--id", "some_id"] + EXPECTED_STDOUT = """+---------+-----------------------------------+ +| Name | some_name | ++---------+-----------------------------------+ +| ID | ngw7piq9 | +| VM Type | K80 | +| State | Running | +| FQDN | ngw7piq9.dgradient.paperspace.com | ++---------+-----------------------------------+ +""" + RESPONSE_JSON = 
example_responses.NOTEBOOK_GET_RESPONSE + + COMMAND_WITH_API_KEY_USED = ["notebooks", "show", "--id", "some_id", "--apiKey", "some_key"] + + COMMAND_WITH_OPTIONS_FILE_USED = ["notebooks", "show", "--optionsFile", ] # path added in test + + EXPECTED_HEADERS = gradient.api_sdk.clients.http_client.default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY = gradient.api_sdk.clients.http_client.default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY["X-API-Key"] = "some_key" + + RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"} + EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Failed to fetch data: Invalid API token\n" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.get") + def test_should_send_post_request_and_print_notebook_details(self, post_patched): + post_patched.return_value = MockResponse(self.RESPONSE_JSON) + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND) + + assert result.output == self.EXPECTED_STDOUT, result.exc_info + post_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=None, + params=None) + assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.get") + def test_should_send_changed_headers_when_api_key_option_was_used(self, post_patched): + post_patched.return_value = MockResponse(self.RESPONSE_JSON) + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND_WITH_API_KEY_USED) + + assert result.output == self.EXPECTED_STDOUT, result.exc_info + post_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=None, + params=None) + assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.get") + def test_should_read_option_from_yaml_file(self, post_patched, notebooks_show_config_path): + post_patched.return_value = MockResponse(self.RESPONSE_JSON) + command = self.COMMAND_WITH_OPTIONS_FILE_USED[:] + [notebooks_show_config_path] + + runner = CliRunner() + result = runner.invoke(cli.cli, command) + + assert result.output == self.EXPECTED_STDOUT, result.exc_info + post_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=None, + params=None) + assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.get") + def test_should_print_valid_error_message_when_command_was_used_with_invalid_api_token(self, get_patched): + get_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, 400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.COMMAND) + + assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN, result.exc_info + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=None, + params=None) + assert result.exit_code == 0 + + @mock.patch("gradient.api_sdk.clients.http_client.requests.get") + def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, get_patched): + get_patched.return_value = MockResponse(status_code=400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.COMMAND) + + assert result.output == "Failed to fetch data\n", result.exc_info + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=None, + params=None) + assert result.exit_code == 0 + + +class TestNotebooksList(object): + URL = 
"https://api.paperspace.io/notebooks/getNotebooks" + COMMAND = ["notebooks", "list"] + EXPECTED_STDOUT = """+--------------------+----------+ +| Name | ID | ++--------------------+----------+ +| job 1 | n1vmfj6x | +| job 1 | nhdf8zf3 | +| My Notebook 123 | nslk5r03 | +| My Notebook 123 | ng9a3tp4 | +| some_name | ngw7piq9 | +| some_notebook_name | n8h0d5lf | +| some_notebook_name | nl0b6cn0 | +| some_notebook_name | njmq1zju | +| some_notebook_name | nfcuwqu5 | ++--------------------+----------+ +""" + RESPONSE_JSON = example_responses.NOTEBOOKS_LIST_RESPONSE_JSON + + COMMAND_WITH_API_KEY_USED = ["notebooks", "list", "--apiKey", "some_key"] + + COMMAND_WITH_OPTIONS_FILE_USED = ["notebooks", "list", "--optionsFile", ] # path added in test + + EXPERCTED_REQUEST_JSON = { + "filter": { + "filter": { + "where": { + "dtDeleted": None, + }, + "limit": 11, + "order": "jobId desc", + "offset": 0, + }, + }, + } + + EXPECTED_HEADERS = gradient.api_sdk.clients.http_client.default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY = gradient.api_sdk.clients.http_client.default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY["X-API-Key"] = "some_key" + + RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"} + EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Failed to fetch data: Invalid API token\n" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.get") + def test_should_send_post_request_and_print_notebook_details(self, post_patched): + post_patched.return_value = MockResponse(self.RESPONSE_JSON) + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND) + + assert result.output == self.EXPECTED_STDOUT, result.exc_info + post_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.EXPERCTED_REQUEST_JSON, + params=None) + assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.get") + def test_should_send_changed_headers_when_api_key_option_was_used(self, post_patched): + post_patched.return_value = MockResponse(self.RESPONSE_JSON) + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND_WITH_API_KEY_USED) + + assert result.output == self.EXPECTED_STDOUT, result.exc_info + post_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=self.EXPERCTED_REQUEST_JSON, + params=None) + assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.get") + def test_should_read_option_from_yaml_file(self, post_patched, notebooks_list_config_path): + post_patched.return_value = MockResponse(self.RESPONSE_JSON) + command = self.COMMAND_WITH_OPTIONS_FILE_USED[:] + [notebooks_list_config_path] + + runner = CliRunner() + result = runner.invoke(cli.cli, command) + + assert result.output == self.EXPECTED_STDOUT, result.exc_info + post_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=self.EXPERCTED_REQUEST_JSON, + params=None) + assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + + @mock.patch("gradient.api_sdk.clients.http_client.requests.get") + def test_should_print_valid_error_message_when_command_was_used_with_invalid_api_token(self, get_patched): + get_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, 400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.COMMAND) + + assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN, 
result.exc_info + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.EXPECTED_REQUEST_JSON, + params=None) + assert result.exit_code == 0 + + @mock.patch("gradient.api_sdk.clients.http_client.requests.get") + def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, get_patched): + get_patched.return_value = MockResponse(status_code=400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.COMMAND) + + assert result.output == "Failed to fetch data\n", result.exc_info + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.EXPECTED_REQUEST_JSON, + params=None) + assert result.exit_code == 0
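
For context, below is a minimal usage sketch, illustrative only and not part of the patch, showing how the notebooks support exercised by these tests could be driven through the Python SDK layer rather than the CLI. It assumes the NotebooksClient methods introduced in this diff (create, get, list, delete) and a valid API key; the key and the numeric IDs are placeholder values borrowed from the test fixtures above.

# Illustrative sketch (not part of the patch): driving the new notebooks
# support through the SDK client instead of the CLI. The API key and the
# numeric IDs below are placeholders borrowed from the tests above.
from gradient.api_sdk.clients import SdkClient

client = SdkClient(api_key="some_key")

# Equivalent of the "notebooks create --vmTypeId 25 --containerId 123 --clusterId 321" command tested above.
notebook_id = client.notebooks.create(vm_type_id=25, container_id=123, cluster_id=321)
print("Created new notebook with id: {}".format(notebook_id))

# Equivalent of the "notebooks show --id <id>" and "notebooks list" commands.
notebook = client.notebooks.get(id=notebook_id)
all_notebooks = client.notebooks.list()
print(notebook.name, notebook.state, len(all_notebooks))

# Equivalent of the "notebooks delete --id <id>" command.
client.notebooks.delete(id=notebook_id)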