From 30eb0360efdab086b5ea6f7d41ba1f143f970a8d Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Thu, 18 Apr 2019 18:44:22 +0200 Subject: [PATCH 01/42] Make login and logout be run by click --- paperspace/cli.py | 35 +++++++++++++++++++++++++++++- paperspace/commands/__init__.py | 7 ++++++ paperspace/commands/deployments.py | 7 ++---- paperspace/commands/login.py | 14 ++++++++++++ paperspace/commands/machines.py | 30 +++++++++++-------------- paperspace/main.py | 33 ++-------------------------- 6 files changed, 72 insertions(+), 54 deletions(-) create mode 100644 paperspace/commands/login.py diff --git a/paperspace/cli.py b/paperspace/cli.py index f13af92..7d3fb2f 100644 --- a/paperspace/cli.py +++ b/paperspace/cli.py @@ -7,7 +7,7 @@ from paperspace import constants, client, config from paperspace.commands import experiments as experiments_commands, deployments as deployments_commands, \ - machines as machines_commands + machines as machines_commands, login as login_commands class ChoiceType(click.Choice): @@ -1071,3 +1071,36 @@ def wait_for_machine_state(machine_id, state, api_key): machines_api = client.API(config.CONFIG_HOST, api_key=api_key) command = machines_commands.WaitForMachineStateCommand(api=machines_api) command.execute(machine_id, state) + + +@cli.command("login", help="Log in with email and password") +@click.option( + "--email", + "email", + required=True, + callback=validate_email, + help="Email used to create Paperspace account", +) +@click.option( + "--password", + "password", + prompt=True, + hide_input=True, + help="Password used to create Paperspace account", +) +@click.option( + "--apiTokenName", + "api_token_name", + help="Name of api token used to log in", +) +def login(email, password, api_token_name): + machines_api = client.API(config.CONFIG_HOST) + command = login_commands.LogInCommand(api=machines_api) + command.execute(email, password, api_token_name) + + +@cli.command("logout", help="Log out / remove apiKey from config file") +def logout(): + machines_api = client.API(config.CONFIG_HOST) + command = login_commands.LogOutCommand(api=machines_api) + command.execute() diff --git a/paperspace/commands/__init__.py b/paperspace/commands/__init__.py index e69de29..aa0b1b4 100644 --- a/paperspace/commands/__init__.py +++ b/paperspace/commands/__init__.py @@ -0,0 +1,7 @@ +from paperspace import logger + + +class CommandBase(object): + def __init__(self, api=None, logger_=logger): + self.api = api + self.logger = logger_ diff --git a/paperspace/commands/deployments.py b/paperspace/commands/deployments.py index 7c6789a..d923777 100644 --- a/paperspace/commands/deployments.py +++ b/paperspace/commands/deployments.py @@ -3,6 +3,7 @@ import terminaltables from paperspace import config, version, client, logger +from paperspace.commands import CommandBase from paperspace.utils import get_terminal_lines default_headers = {"X-API-Key": config.PAPERSPACE_API_KEY, @@ -11,11 +12,7 @@ deployments_api = client.API(config.CONFIG_HOST, headers=default_headers) -class _DeploymentCommandBase(object): - def __init__(self, api=deployments_api, logger_=logger): - self.api = api - self.logger = logger_ - +class _DeploymentCommandBase(CommandBase): def _log_message(self, response, success_msg_template, error_msg): if response.ok: try: diff --git a/paperspace/commands/login.py b/paperspace/commands/login.py new file mode 100644 index 0000000..544cc97 --- /dev/null +++ b/paperspace/commands/login.py @@ -0,0 +1,14 @@ +from paperspace import login, logout +from paperspace.commands import 
CommandBase + + +class LogInCommand(CommandBase): + def execute(self, email, password, api_token_name=None): + login(email, password, api_token_name) + + +class LogOutCommand(CommandBase): + def execute(self): + logout() + + diff --git a/paperspace/commands/machines.py b/paperspace/commands/machines.py index 461f6cb..8c4d6b0 100644 --- a/paperspace/commands/machines.py +++ b/paperspace/commands/machines.py @@ -3,16 +3,12 @@ import terminaltables -from paperspace import logger +from paperspace.commands import CommandBase from paperspace.exceptions import BadResponse from paperspace.utils import get_terminal_lines -class CommandBase(object): - def __init__(self, api=None, logger_=logger): - self.api = api - self.logger = logger_ - +class _MachinesCommandBase(CommandBase): def _log_message(self, response, success_msg_template, error_msg): if response.ok: try: @@ -30,7 +26,7 @@ def _log_message(self, response, success_msg_template, error_msg): self.logger.log(error_msg) -class CheckAvailabilityCommand(CommandBase): +class CheckAvailabilityCommand(_MachinesCommandBase): def execute(self, region, machine_type): params = {"region": region, "machineType": machine_type} @@ -40,7 +36,7 @@ def execute(self, region, machine_type): "Unknown error while checking machine availability") -class CreateMachineCommand(CommandBase): +class CreateMachineCommand(_MachinesCommandBase): def execute(self, kwargs): response = self.api.post("/machines/createSingleMachinePublic/", json=kwargs) self._log_message(response, @@ -48,7 +44,7 @@ def execute(self, kwargs): "Unknown error while creating machine") -class UpdateMachineCommand(CommandBase): +class UpdateMachineCommand(_MachinesCommandBase): def execute(self, machine_id, kwargs): url = "/machines/{}/updateMachinePublic/".format(machine_id) response = self.api.post(url, json=kwargs) @@ -57,7 +53,7 @@ def execute(self, machine_id, kwargs): "Unknown error while updating machine") -class StartMachineCommand(CommandBase): +class StartMachineCommand(_MachinesCommandBase): def execute(self, machine_id): url = "/machines/{}/start/".format(machine_id) response = self.api.post(url) @@ -66,7 +62,7 @@ def execute(self, machine_id): "Unknown error while starting the machine") -class StopMachineCommand(CommandBase): +class StopMachineCommand(_MachinesCommandBase): def execute(self, machine_id): url = "/machines/{}/stop/".format(machine_id) response = self.api.post(url) @@ -75,7 +71,7 @@ def execute(self, machine_id): "Unknown error while stopping the machine") -class RestartMachineCommand(CommandBase): +class RestartMachineCommand(_MachinesCommandBase): def execute(self, machine_id): url = "/machines/{}/restart/".format(machine_id) response = self.api.post(url) @@ -84,7 +80,7 @@ def execute(self, machine_id): "Unknown error while restarting the machine") -class ShowMachineCommand(CommandBase): +class ShowMachineCommand(_MachinesCommandBase): def execute(self, machine_id): params = {"machineId": machine_id} response = self.api.get("/machines/getMachinePublic/", params=params) @@ -142,7 +138,7 @@ def make_details_table(machine): return table_string -class ListMachinesCommand(CommandBase): +class ListMachinesCommand(_MachinesCommandBase): def execute(self, kwargs): json_ = {"params": kwargs} if kwargs else None response = self.api.get("/machines/getMachines/", json=json_) @@ -186,7 +182,7 @@ def _make_machines_list_table(machines): return table_string -class DestroyMachineCommand(CommandBase): +class DestroyMachineCommand(_MachinesCommandBase): def execute(self, machine_id, 
release_public_ip): json_ = {"releasePublicIp": release_public_ip} if release_public_ip else None url = "/machines/{}/destroyMachine/".format(machine_id) @@ -196,7 +192,7 @@ def execute(self, machine_id, release_public_ip): "Unknown error while destroying the machine") -class ShowMachineUtilisationCommand(CommandBase): +class ShowMachineUtilisationCommand(_MachinesCommandBase): def execute(self, machine_id, billing_month): params = {"machineId": machine_id, "billingMonth": billing_month} @@ -227,7 +223,7 @@ def make_details_table(machine): return table_string -class WaitForMachineStateCommand(CommandBase): +class WaitForMachineStateCommand(_MachinesCommandBase): def execute(self, machine_id, state, interval=5): while True: try: diff --git a/paperspace/main.py b/paperspace/main.py index ec0f484..0deb156 100644 --- a/paperspace/main.py +++ b/paperspace/main.py @@ -3,12 +3,12 @@ from .cli import cli from .jobs import run, print_json_pretty -from .login import login, logout, set_apikey +from .login import set_apikey from .version import version def main(): - if len(sys.argv) >= 2 and sys.argv[1] in ('experiments', 'deployments', 'machines'): + if len(sys.argv) >= 2 and sys.argv[1] in ('experiments', 'deployments', 'machines', 'login', 'logout'): cli(sys.argv[1:]) args = sys.argv[:] @@ -30,35 +30,6 @@ def main(): vers(prog) sys.exit(0) - if cmd == 'login': - email = None - password = None - apiToken = None - while args: - opt = args.pop(0) - if opt in help_opts: - print('usage: %s' % login_usage(prog)) - sys.exit(0) - elif opt == '--email': - email = args.pop(0) if args else None - elif opt == '--password': - password = args.pop(0) if args else None - elif opt == '--apiToken': - apiToken = args.pop(0) if args else None - elif not email: - email = opt - elif not password: - password = opt - elif not apiToken: - apiToken = opt - return not login(email, password, apiToken) - - if cmd == 'logout': - if args: - print('usage: %s logout' % prog) - sys.exit(not (args[0] in help_opts)) - return not logout() - if cmd == 'apikey' or cmd == 'apiKey': if not args or args[0] in help_opts: print('usage: %s' % apikey_usage(prog)) From 25ea835a1ca5216f4baa84b8717cf6d7f79b7bf2 Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Fri, 19 Apr 2019 10:44:54 +0200 Subject: [PATCH 02/42] Refactor version command --- paperspace/cli.py | 9 +++++++-- paperspace/commands/login.py | 4 ++++ paperspace/main.py | 6 +----- 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/paperspace/cli.py b/paperspace/cli.py index 7d3fb2f..448b3f2 100644 --- a/paperspace/cli.py +++ b/paperspace/cli.py @@ -1101,6 +1101,11 @@ def login(email, password, api_token_name): @cli.command("logout", help="Log out / remove apiKey from config file") def logout(): - machines_api = client.API(config.CONFIG_HOST) - command = login_commands.LogOutCommand(api=machines_api) + command = login_commands.LogOutCommand() + command.execute() + + +@cli.command("version", help="Show the version and exit") +def version(): + command = login_commands.ShowVersionCommand() command.execute() diff --git a/paperspace/commands/login.py b/paperspace/commands/login.py index 544cc97..7ee72eb 100644 --- a/paperspace/commands/login.py +++ b/paperspace/commands/login.py @@ -1,5 +1,6 @@ from paperspace import login, logout from paperspace.commands import CommandBase +from paperspace.version import version class LogInCommand(CommandBase): @@ -12,3 +13,6 @@ def execute(self): logout() +class ShowVersionCommand(CommandBase): + def execute(self): + self.logger.log(version) 
diff --git a/paperspace/main.py b/paperspace/main.py index 0deb156..a4ce33d 100644 --- a/paperspace/main.py +++ b/paperspace/main.py @@ -8,7 +8,7 @@ def main(): - if len(sys.argv) >= 2 and sys.argv[1] in ('experiments', 'deployments', 'machines', 'login', 'logout'): + if len(sys.argv) >= 2 and sys.argv[1] in ('experiments', 'deployments', 'machines', 'login', 'logout', 'version'): cli(sys.argv[1:]) args = sys.argv[:] @@ -26,10 +26,6 @@ def main(): usage(prog) sys.exit(0) - if cmd in ['version', '--version', '-v']: - vers(prog) - sys.exit(0) - if cmd == 'apikey' or cmd == 'apiKey': if not args or args[0] in help_opts: print('usage: %s' % apikey_usage(prog)) From b2717c795f54c3c55991f540b416189ebf5b7908 Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Wed, 24 Apr 2019 18:13:02 +0200 Subject: [PATCH 03/42] Add jobs support starting with job delete --- paperspace/cli/__init__.py | 0 paperspace/{ => cli}/cli.py | 4 + paperspace/cli/jobs/__init__.py | 0 paperspace/cli/jobs/commands.py | 21 +++++ paperspace/commands/jobs.py | 28 ++++++ paperspace/main.py | 2 +- tests/functional/test_deployments.py | 34 ++++---- tests/functional/test_experiments.py | 33 +++---- tests/functional/test_machines.py | 124 +++++++++++++-------------- tests/test_click_commands.py | 23 ++--- 10 files changed, 162 insertions(+), 107 deletions(-) create mode 100644 paperspace/cli/__init__.py rename paperspace/{ => cli}/cli.py (99%) create mode 100644 paperspace/cli/jobs/__init__.py create mode 100644 paperspace/cli/jobs/commands.py create mode 100644 paperspace/commands/jobs.py diff --git a/paperspace/cli/__init__.py b/paperspace/cli/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/paperspace/cli.py b/paperspace/cli/cli.py similarity index 99% rename from paperspace/cli.py rename to paperspace/cli/cli.py index 448b3f2..189d5ef 100644 --- a/paperspace/cli.py +++ b/paperspace/cli/cli.py @@ -6,6 +6,7 @@ import click from paperspace import constants, client, config +from paperspace.cli.jobs.commands import jobs_group from paperspace.commands import experiments as experiments_commands, deployments as deployments_commands, \ machines as machines_commands, login as login_commands @@ -1109,3 +1110,6 @@ def logout(): def version(): command = login_commands.ShowVersionCommand() command.execute() + + +cli.add_command(jobs_group) diff --git a/paperspace/cli/jobs/__init__.py b/paperspace/cli/jobs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/paperspace/cli/jobs/commands.py b/paperspace/cli/jobs/commands.py new file mode 100644 index 0000000..9bbb517 --- /dev/null +++ b/paperspace/cli/jobs/commands.py @@ -0,0 +1,21 @@ +import click + +from paperspace import client, config +from paperspace.commands import jobs as jobs_commands + + +@click.group("jobs", help="Manage gradient jobs") +def jobs_group(): + pass + + +@jobs_group.command("delete", help="Delete job") +@click.option( + "--jobId", + "job_id", + required=True, +) +def delete_job(job_id, api_key=None): + jobs_api = client.API(config.CONFIG_HOST, api_key=api_key) + command = jobs_commands.DeleteJobCommand(api=jobs_api) + command.execute(job_id) diff --git a/paperspace/commands/jobs.py b/paperspace/commands/jobs.py new file mode 100644 index 0000000..06a222a --- /dev/null +++ b/paperspace/commands/jobs.py @@ -0,0 +1,28 @@ +from paperspace.commands import CommandBase + + +class _JobsCommandBase(CommandBase): + def _log_message(self, response, success_msg_template, error_msg): + if response.ok: + try: + handle = response.json() + except 
(ValueError, KeyError): + self.logger.log(success_msg_template) + else: + msg = success_msg_template.format(**handle) + self.logger.log(msg) + else: + try: + data = response.json() + self.logger.log_error_response(data) + except ValueError: + self.logger.log(error_msg) + + +class DeleteJobCommand(_JobsCommandBase): + def execute(self, job_id): + url = "/jobs/{}/destroy/".format(job_id) + response = self.api.post(url) + self._log_message(response, + "Job deleted", + "Unknown error while deleting job") diff --git a/paperspace/main.py b/paperspace/main.py index a4ce33d..a013709 100644 --- a/paperspace/main.py +++ b/paperspace/main.py @@ -1,7 +1,7 @@ import os import sys -from .cli import cli +from paperspace.cli.cli import cli from .jobs import run, print_json_pretty from .login import set_apikey from .version import version diff --git a/tests/functional/test_deployments.py b/tests/functional/test_deployments.py index 371cba6..3201582 100644 --- a/tests/functional/test_deployments.py +++ b/tests/functional/test_deployments.py @@ -2,7 +2,7 @@ from click.testing import CliRunner import paperspace.client -from paperspace import cli +from paperspace.cli import cli from paperspace.commands import deployments as deployments_commands from tests import example_responses, MockResponse @@ -47,7 +47,7 @@ class TestDeploymentsCreate(object): RESPONSE_CONTENT_404_MODEL_NOT_FOUND = b'{"error":{"name":"Error","status":404,"message":"Unable to find model"}}\n' EXPECTED_STDOUT_MODEL_NOT_FOUND = "Unable to find model\n" - @mock.patch("paperspace.cli.deployments_commands.client.requests.post") + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.post") def test_should_send_proper_data_and_print_message_when_create_deployment_with_basic_options(self, post_patched): post_patched.return_value = MockResponse(self.RESPONSE_JSON_200, 200, "fake content") @@ -61,7 +61,7 @@ def test_should_send_proper_data_and_print_message_when_create_deployment_with_b assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.deployments_commands.client.requests.post") + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.post") def test_should_send_different_api_key_when_api_key_parameter_was_used(self, post_patched): post_patched.return_value = MockResponse(self.RESPONSE_JSON_200, 200, "fake content") @@ -75,7 +75,7 @@ def test_should_send_different_api_key_when_api_key_parameter_was_used(self, pos assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.deployments_commands.client.requests.post") + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.post") def test_should_send_proper_data_and_print_message_when_create_wrong_model_id_was_given(self, post_patched): post_patched.return_value = MockResponse(self.RESPONSE_JSON_404_MODEL_NOT_FOUND, 404, self.RESPONSE_CONTENT_404_MODEL_NOT_FOUND) @@ -116,7 +116,7 @@ class TestDeploymentsList(object): +-----------+-----------------+----------------------------------------------------------------------------------+---------------+---------------------------+ """ - @mock.patch("paperspace.cli.deployments_commands.client.requests.get") + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.get") def test_should_send_get_request_and_print_list_of_deployments(self, get_patched): get_patched.return_value = MockResponse(self.LIST_JSON, 200, "fake content") @@ -129,7 +129,7 @@ def 
test_should_send_get_request_and_print_list_of_deployments(self, get_patched params=None) assert result.output == self.DETAILS_STDOUT - @mock.patch("paperspace.cli.deployments_commands.client.requests.get") + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.get") def test_should_send_get_request_with_custom_api_key_when_api_key_parameter_was_provided(self, get_patched): get_patched.return_value = MockResponse(self.LIST_JSON, 200, "fake content") @@ -142,8 +142,8 @@ def test_should_send_get_request_with_custom_api_key_when_api_key_parameter_was_ params=None) assert result.output == self.DETAILS_STDOUT - @mock.patch("paperspace.cli.deployments_commands.pydoc") - @mock.patch("paperspace.cli.deployments_commands.client.requests.get") + @mock.patch("paperspace.cli.cli.deployments_commands.pydoc") + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.get") def test_should_send_get_request_and_paginate_list_when_output_table_len_is_gt_lines_in_terminal(self, get_patched, pydoc_patched): list_json = {"deploymentList": self.LIST_JSON["deploymentList"] * 40} @@ -159,7 +159,7 @@ def test_should_send_get_request_and_paginate_list_when_output_table_len_is_gt_l pydoc_patched.pager.assert_called_once() assert result.exit_code == 0 - @mock.patch("paperspace.cli.deployments_commands.client.requests.get") + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.get") def test_should_send_get_request_and_print_list_of_deployments_filtered_by_state(self, get_patched): get_patched.return_value = MockResponse(self.LIST_JSON, 200, "fake content") @@ -172,7 +172,7 @@ def test_should_send_get_request_and_print_list_of_deployments_filtered_by_state params=None) assert result.output == self.DETAILS_STDOUT - @mock.patch("paperspace.cli.deployments_commands.client.requests.get") + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.get") def test_should_send_get_request_and_print_list_of_deployments_filtered_with_state_but_none_found( self, get_patched): get_patched.return_value = MockResponse(self.LIST_WITH_FILTER_RESPONSE_JSON_WHEN_NO_DEPLOYMENTS_FOUND, 200, @@ -212,7 +212,7 @@ class TestDeploymentsUpdate(object): RESPONSE_JSON_400 = {"error": {"name": "Error", "status": 400, "message": "Unable to access deployment"}} EXPECTED_STDOUT_WITH_WRONG_ID = "Unable to access deployment\n" - @mock.patch("paperspace.cli.deployments_commands.client.requests.post") + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.post") def test_should_send_proper_data_and_print_message_when_update_deployment(self, post_patched): post_patched.return_value = MockResponse(self.RESPONSE_JSON_200, 200, "fake content") @@ -226,7 +226,7 @@ def test_should_send_proper_data_and_print_message_when_update_deployment(self, assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.deployments_commands.client.requests.post") + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.post") def test_should_send_proper_data_with_custom_api_key_when_api_key_parameter_was_provided(self, post_patched): post_patched.return_value = MockResponse(self.RESPONSE_JSON_200, 200, "fake content") @@ -240,7 +240,7 @@ def test_should_send_proper_data_with_custom_api_key_when_api_key_parameter_was_ assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.deployments_commands.client.requests.post") + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.post") def 
test_should_send_proper_data_and_print_message_when_update_deployment_used_with_wrong_id(self, post_patched): post_patched.return_value = MockResponse(self.RESPONSE_JSON_400, 400, "fake content") @@ -262,7 +262,7 @@ class TestStartDeployment(object): REQUEST_JSON = {"isRunning": True, "id": u"some_id"} EXPECTED_STDOUT = "Deployment started\n" - @mock.patch("paperspace.cli.deployments_commands.client.requests.post") + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.post") def test_should_send_proper_data_and_print_message_when_deployments_start_was_used(self, post_patched): post_patched.return_value = MockResponse(None, 204, "fake content") @@ -295,7 +295,7 @@ class TestDeleteDeployment(object): RESPONSE_JSON_400 = {"error": {"name": "Error", "status": 400, "message": "Unable to access deployment"}} EXPECTED_STDOUT_WITH_WRONG_ID = "Unable to access deployment\n" - @mock.patch("paperspace.cli.deployments_commands.client.requests.post") + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.post") def test_should_send_proper_data_and_print_message_when_deployments_delete_was_used(self, post_patched): post_patched.return_value = MockResponse(None, 204, "fake content") @@ -309,7 +309,7 @@ def test_should_send_proper_data_and_print_message_when_deployments_delete_was_u assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.deployments_commands.client.requests.post") + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.post") def test_should_send_proper_data_with_custom_api_key_when_api_key_parameter_was_provided(self, post_patched): post_patched.return_value = MockResponse(None, 204, "fake content") @@ -323,7 +323,7 @@ def test_should_send_proper_data_with_custom_api_key_when_api_key_parameter_was_ assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.deployments_commands.client.requests.post") + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.post") def test_should_send_proper_data_and_print_message_when_deployments_delete_used_with_wrong_id(self, post_patched): post_patched.return_value = MockResponse(self.RESPONSE_JSON_400, 400, "fake content") diff --git a/tests/functional/test_experiments.py b/tests/functional/test_experiments.py index 76e0173..357b8b5 100644 --- a/tests/functional/test_experiments.py +++ b/tests/functional/test_experiments.py @@ -2,7 +2,8 @@ from click.testing import CliRunner import paperspace.client -from paperspace import cli, constants +from paperspace import constants +from paperspace.cli import cli from tests import example_responses, MockResponse @@ -76,7 +77,7 @@ class TestExperimentsCreateSingleNode(object): RESPONSE_CONTENT_404_PROJECT_NOT_FOUND = b'{"details":{"handle":"wrong_handle"},"error":"Project not found"}\n' EXPECTED_STDOUT_PROJECT_NOT_FOUND = "Project not found\nhandle: wrong_handle\n" - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_proper_data_and_print_message_when_create_experiment_was_run_with_basic_options(self, post_patched): post_patched.return_value = MockResponse(self.RESPONSE_JSON_200, 200, self.RESPONSE_CONTENT_200) @@ -91,7 +92,7 @@ def test_should_send_proper_data_and_print_message_when_create_experiment_was_ru assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + 
@mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_proper_data_and_print_message_when_create_experiment_was_run_with_full_options(self, post_patched): post_patched.return_value = MockResponse(self.RESPONSE_JSON_200, 200, self.RESPONSE_CONTENT_200) @@ -107,7 +108,7 @@ def test_should_send_proper_data_and_print_message_when_create_experiment_was_ru assert result.exit_code == 0 assert self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY["X-API-Key"] == "some_key" - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_proper_data_and_print_message_when_create_wrong_project_handle_was_given(self, post_patched): post_patched.return_value = MockResponse(self.RESPONSE_JSON_404_PROJECT_NOT_FOUND, 404, self.RESPONSE_CONTENT_404_PROJECT_NOT_FOUND) @@ -219,7 +220,7 @@ class TestExperimentsCreateMultiNode(object): RESPONSE_CONTENT_200 = b'{"handle":"sadkfhlskdjh","message":"success"}\n' EXPECTED_STDOUT = "New experiment created with handle: sadkfhlskdjh\n" - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_proper_data_and_print_message_when_create_experiment_was_run_with_basic_options(self, post_patched): post_patched.return_value = MockResponse(self.RESPONSE_JSON_200, 200, self.RESPONSE_CONTENT_200) @@ -234,7 +235,7 @@ def test_should_send_proper_data_and_print_message_when_create_experiment_was_ru assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_proper_data_and_print_message_when_create_experiment_was_run_with_full_options(self, post_patched): post_patched.return_value = MockResponse(self.RESPONSE_JSON_200, 200, self.RESPONSE_CONTENT_200) @@ -506,7 +507,7 @@ class TestExperimentDetail(object): +---------------------+----------------+ """ - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_get_request_and_print_single_node_experiment_details_in_a_table(self, get_patched): get_patched.return_value = MockResponse(self.SINGLE_NODE_RESPONSE_JSON, 200, "fake content") @@ -522,7 +523,7 @@ def test_should_send_get_request_and_print_single_node_experiment_details_in_a_t assert result.exit_code == 0 assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_get_request_and_print_multi_node_experiment_details_in_a_table(self, get_patched): get_patched.return_value = MockResponse(self.MULTI_NODE_DETAILS_JSON, 200, "fake content") @@ -537,7 +538,7 @@ def test_should_send_get_request_and_print_multi_node_experiment_details_in_a_ta assert result.output == self.MULTI_NODE_DETAILS_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_get_request_and_print_request_content_when_response_data_was_malformed(self, get_patched): get_patched.return_value = MockResponse({}, 200, "fake content") g = """Error parsing response data @@ -736,7 +737,7 @@ class TestExperimentList(object): +---------------+---------------+---------+ """ - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def 
test_should_send_get_request_and_print_list_of_experiments(self, get_patched): get_patched.return_value = MockResponse(self.LIST_JSON, 200, "fake content") @@ -751,8 +752,8 @@ def test_should_send_get_request_and_print_list_of_experiments(self, get_patched assert result.output == self.DETAILS_STDOUT assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" - @mock.patch("paperspace.cli.experiments_commands.pydoc") - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.experiments_commands.pydoc") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_get_request_and_paginate_list_when_output_table_len_is_gt_lines_in_terminal(self, get_patched, pydoc_patched): list_json = {"data": self.LIST_JSON["data"] * 40} @@ -769,7 +770,7 @@ def test_should_send_get_request_and_paginate_list_when_output_table_len_is_gt_l pydoc_patched.pager.assert_called_once() assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_get_request_and_print_list_of_experiments_filtered_with_two_projects(self, get_patched): get_patched.return_value = MockResponse(example_responses.LIST_OF_EXPERIMENTS_FILTERED_WITH_TWO_PROJECTS, 200, "fake content") @@ -786,7 +787,7 @@ def test_should_send_get_request_and_print_list_of_experiments_filtered_with_two assert result.output == example_responses.LIST_OF_EXPERIMENTS_FILTERED_WITH_TWO_PROJECTS_STDOUT - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_get_request_and_print_list_of_experiments_filtered_with_two_projects_but_none_found( self, get_patched): get_patched.return_value = MockResponse(example_responses.LIST_OF_EXPERIMENTS_FILTERED_BUT_NONE_FOUND, 200, @@ -815,7 +816,7 @@ class TestStartExperiment(object): RESPONSE_JSON = {"message": "success"} START_STDOUT = "Experiment started\n" - @mock.patch("paperspace.cli.client.requests.put") + @mock.patch("paperspace.cli.cli.client.requests.put") def test_should_send_put_request_and_print_confirmation(self, put_patched): put_patched.return_value = MockResponse(self.RESPONSE_JSON, 200, "fake content") expected_headers = paperspace.client.default_headers.copy() @@ -830,7 +831,7 @@ def test_should_send_put_request_and_print_confirmation(self, put_patched): assert result.output == self.START_STDOUT - @mock.patch("paperspace.cli.client.requests.put") + @mock.patch("paperspace.cli.cli.client.requests.put") def test_should_send_put_request_with_changed_api_key_when_api_key_option_was_provided(self, put_patched): put_patched.return_value = MockResponse(self.RESPONSE_JSON, 200, "fake content") diff --git a/tests/functional/test_machines.py b/tests/functional/test_machines.py index bc566e1..10faf9b 100644 --- a/tests/functional/test_machines.py +++ b/tests/functional/test_machines.py @@ -2,7 +2,7 @@ from click.testing import CliRunner import paperspace.client -from paperspace import cli +from paperspace.cli import cli from tests import MockResponse, example_responses @@ -30,7 +30,7 @@ class TestMachineAvailability(object): RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"} EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Invalid API token\n" - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_get_request_and_print_valid_message_when_availability_command_was_used(self, get_patched): get_patched.return_value = 
MockResponse(self.RESPONSE_JSON, 200) @@ -44,7 +44,7 @@ def test_should_send_get_request_and_print_valid_message_when_availability_comma assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_changed_headers_when_api_key_option_was_used(self, get_patched): get_patched.return_value = MockResponse(self.RESPONSE_JSON, 200) @@ -58,7 +58,7 @@ def test_should_send_changed_headers_when_api_key_option_was_used(self, get_patc assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_print_valid_error_message_when_availability_command_was_used_with_invalid_api_token(self, get_patched): get_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, 400) @@ -73,7 +73,7 @@ def test_should_print_valid_error_message_when_availability_command_was_used_wit assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, get_patched): get_patched.return_value = MockResponse(status_code=400) @@ -178,7 +178,7 @@ class TestCreateMachine(object): "--email", "some@email.com", ] - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_valid_post_request_when_machine_create_was_used_with_requested_options(self, get_patched): get_patched.return_value = MockResponse(example_responses.CREATE_MACHINE_RESPONSE, 200) @@ -192,7 +192,7 @@ def test_should_send_valid_post_request_when_machine_create_was_used_with_reques assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_valid_post_request_when_machine_create_was_used_with_all_options(self, get_patched): get_patched.return_value = MockResponse(example_responses.CREATE_MACHINE_RESPONSE, 200) @@ -206,7 +206,7 @@ def test_should_send_valid_post_request_when_machine_create_was_used_with_all_op assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_changed_headers_when_api_key_option_was_used(self, get_patched): get_patched.return_value = MockResponse(example_responses.CREATE_MACHINE_RESPONSE, 200) @@ -220,7 +220,7 @@ def test_should_send_changed_headers_when_api_key_option_was_used(self, get_patc assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_error_message_when_wrong_api_key_was_used(self, get_patched): get_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, 400) @@ -234,7 +234,7 @@ def test_should_print_error_message_when_wrong_api_key_was_used(self, get_patche assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def 
test_should_print_error_message_when_wrong_template_id_was_used(self, get_patched): get_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_TEMPLATE_ID, 400) @@ -248,7 +248,7 @@ def test_should_print_error_message_when_wrong_template_id_was_used(self, get_pa assert result.output == "templateId not found\n" assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_error_message_when_no_content_was_received_in_response(self, get_patched): get_patched.return_value = MockResponse(status_code=400) @@ -262,7 +262,7 @@ def test_should_print_error_message_when_no_content_was_received_in_response(sel assert result.output == "Unknown error while creating machine\n" assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_error_message_when_mutually_exclusive_options_were_used(self, get_patched): cli_runner = CliRunner() result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND_WITH_MUTUALLY_EXCLUSIVE_OPTIONS_USED) @@ -309,7 +309,7 @@ class TestDestroyMachine(object): } EXPECTED_STDOUT_WHEN_MACHINE_WAS_NOT_FOUND = "Not found. Please contact support@paperspace.com for help.\n" - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_valid_post_request_when_machines_destroy_was_used(self, post_patched): post_patched.return_value = MockResponse(status_code=200) @@ -323,7 +323,7 @@ def test_should_send_valid_post_request_when_machines_destroy_was_used(self, pos assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_valid_post_request_when_machines_destroy_was_used_with_all_options(self, post_patched): post_patched.return_value = MockResponse(status_code=200) @@ -337,7 +337,7 @@ def test_should_send_valid_post_request_when_machines_destroy_was_used_with_all_ assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_valid_post_request_when_machines_destroy_was_used_with_api_key_option(self, post_patched): post_patched.return_value = MockResponse(status_code=200) @@ -351,7 +351,7 @@ def test_should_send_valid_post_request_when_machines_destroy_was_used_with_api_ assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_valid_post_request_when_machines_destroy_was_used_with_wrong_api_key(self, post_patched): post_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, status_code=400) @@ -365,7 +365,7 @@ def test_should_send_valid_post_request_when_machines_destroy_was_used_with_wron assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_error_message_when_machine_with_given_id_was_not_found(self, post_patched): post_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WITH_404_MACHINE_NOT_FOUND, status_code=400) @@ -380,7 +380,7 @@ def 
test_should_print_error_message_when_machine_with_given_id_was_not_found(sel assert result.output == self.EXPECTED_STDOUT_WHEN_MACHINE_WAS_NOT_FOUND assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_error_message_when_error_status_code_received_but_no_content_was_provided(self, post_patched): post_patched.return_value = MockResponse(status_code=400) @@ -489,7 +489,7 @@ class TestListMachines(object): "--name", "some_name", ] - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_valid_post_request_and_print_table_when_machines_list_was_used(self, get_patched): get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) @@ -503,7 +503,7 @@ def test_should_send_valid_post_request_and_print_table_when_machines_list_was_u assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_valid_post_request_when_all_options_were_used(self, get_patched): get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) @@ -517,7 +517,7 @@ def test_should_send_valid_post_request_when_all_options_were_used(self, get_pat assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_valid_post_request_when_params_option_was_used(self, get_patched): get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) @@ -531,7 +531,7 @@ def test_should_send_valid_post_request_when_params_option_was_used(self, get_pa assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_valid_post_request_when_machines_list_was_used_with_api_key_option(self, get_patched): get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) @@ -545,7 +545,7 @@ def test_should_send_valid_post_request_when_machines_list_was_used_with_api_key assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_valid_post_request_when_machines_list_was_used_with_wrong_api_key(self, get_patched): get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, status_code=400) @@ -559,7 +559,7 @@ def test_should_send_valid_post_request_when_machines_list_was_used_with_wrong_a assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_print_error_message_when_no_machine_was_not_found(self, get_patched): get_patched.return_value = MockResponse(json_data=[], status_code=200) @@ -573,7 +573,7 @@ def test_should_print_error_message_when_no_machine_was_not_found(self, get_patc assert result.output == self.EXPECTED_STDOUT_WHEN_NO_MACHINES_WERE_FOUND assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def 
test_should_print_error_message_when_error_status_code_received_but_no_content_was_provided(self, get_patched): get_patched.return_value = MockResponse(status_code=400) @@ -587,7 +587,7 @@ def test_should_print_error_message_when_error_status_code_received_but_no_conte assert result.output == "Error while parsing response data: No JSON\n" assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_print_error_message_when_params_option_was_used_with_mutually_exclusive_option(self, get_patched): cli_runner = CliRunner() result = cli_runner.invoke(cli.cli, self.COMMAND_WITH_MUTUALLY_EXCLUSIVE_OPTIONS) @@ -626,7 +626,7 @@ class TestRestartMachine(object): } EXPECTED_STDOUT_WHEN_MACHINE_WAS_NOT_FOUND = "Not found. Please contact support@paperspace.com for help.\n" - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_get_request_and_print_valid_message_when_restart_command_was_used(self, post_patched): post_patched.return_value = MockResponse(status_code=200) @@ -640,7 +640,7 @@ def test_should_send_get_request_and_print_valid_message_when_restart_command_wa assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_changed_headers_when_api_key_option_was_used(self, post_patched): post_patched.return_value = MockResponse(status_code=200) @@ -654,7 +654,7 @@ def test_should_send_changed_headers_when_api_key_option_was_used(self, post_pat assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_valid_error_message_when_start_command_was_used_with_invalid_api_token(self, post_patched): post_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, status_code=400) @@ -668,7 +668,7 @@ def test_should_print_valid_error_message_when_start_command_was_used_with_inval assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, post_patched): post_patched.return_value = MockResponse(status_code=400) @@ -682,7 +682,7 @@ def test_should_print_valid_error_message_when_no_content_was_received_in_respon assert result.output == "Unknown error while restarting the machine\n" assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_error_message_when_machine_with_given_id_was_not_found(self, post_patched): post_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WITH_404_MACHINE_NOT_FOUND, status_code=400) @@ -757,7 +757,7 @@ class TestShowMachine(object): } EXPECTED_STDOUT_WHEN_MACHINE_WAS_NOT_FOUND = "Machine not found\n" - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_valid_post_request_and_print_table_when_machines_list_was_used(self, get_patched): get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) @@ -771,7 +771,7 @@ def 
test_should_send_valid_post_request_and_print_table_when_machines_list_was_u assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_valid_post_request_when_machines_list_was_used_with_api_key_option(self, get_patched): get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) @@ -785,7 +785,7 @@ def test_should_send_valid_post_request_when_machines_list_was_used_with_api_key assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_valid_post_request_when_machines_list_was_used_with_wrong_api_key(self, get_patched): get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, status_code=400) @@ -799,7 +799,7 @@ def test_should_send_valid_post_request_when_machines_list_was_used_with_wrong_a assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_print_error_message_when_machine_was_not_found(self, get_patched): get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WHEN_MACHINE_WAS_NOT_FOUND, status_code=404) @@ -814,7 +814,7 @@ def test_should_print_error_message_when_machine_was_not_found(self, get_patched assert result.output == self.EXPECTED_STDOUT_WHEN_MACHINE_WAS_NOT_FOUND assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_print_error_message_when_error_status_code_received_but_no_content_was_provided(self, get_patched): get_patched.return_value = MockResponse(status_code=400) @@ -858,7 +858,7 @@ class TestStartMachine(object): } EXPECTED_STDOUT_WHEN_MACHINE_WAS_NOT_FOUND = "Not found. 
Please contact support@paperspace.com for help.\n" - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_get_request_and_print_valid_message_when_start_command_was_used(self, post_patched): post_patched.return_value = MockResponse(status_code=200) @@ -872,7 +872,7 @@ def test_should_send_get_request_and_print_valid_message_when_start_command_was_ assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_changed_headers_when_api_key_option_was_used(self, post_patched): post_patched.return_value = MockResponse(status_code=200) @@ -886,7 +886,7 @@ def test_should_send_changed_headers_when_api_key_option_was_used(self, post_pat assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_valid_error_message_when_start_command_was_used_with_invalid_api_token(self, post_patched): post_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, status_code=400) @@ -900,7 +900,7 @@ def test_should_print_valid_error_message_when_start_command_was_used_with_inval assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, post_patched): post_patched.return_value = MockResponse(status_code=400) @@ -914,7 +914,7 @@ def test_should_print_valid_error_message_when_no_content_was_received_in_respon assert result.output == "Unknown error while starting the machine\n" assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_error_message_when_machine_with_given_id_was_not_found(self, post_patched): post_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WITH_404_MACHINE_NOT_FOUND, status_code=400) @@ -959,7 +959,7 @@ class TestStopMachine(object): } EXPECTED_STDOUT_WHEN_MACHINE_WAS_NOT_FOUND = "Not found. 
Please contact support@paperspace.com for help.\n" - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_get_request_and_print_valid_message_when_stop_command_was_used(self, post_patched): post_patched.return_value = MockResponse(status_code=200) @@ -973,7 +973,7 @@ def test_should_send_get_request_and_print_valid_message_when_stop_command_was_u assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_changed_headers_when_api_key_option_was_used(self, post_patched): post_patched.return_value = MockResponse(status_code=200) @@ -987,7 +987,7 @@ def test_should_send_changed_headers_when_api_key_option_was_used(self, post_pat assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_valid_error_message_when_stop_command_was_used_with_invalid_api_token(self, post_patched): post_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, status_code=400) @@ -1001,7 +1001,7 @@ def test_should_print_valid_error_message_when_stop_command_was_used_with_invali assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, post_patched): post_patched.return_value = MockResponse(status_code=400) @@ -1015,7 +1015,7 @@ def test_should_print_valid_error_message_when_no_content_was_received_in_respon assert result.output == "Unknown error while stopping the machine\n" assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_error_message_when_machine_with_given_id_was_not_found(self, post_patched): post_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WITH_404_MACHINE_NOT_FOUND, status_code=400) @@ -1077,7 +1077,7 @@ class TestUpdateMachine(object): RESPONSE_JSON_WITH_WRONG_MACHINE_ID = {"error": {"name": "Error", "status": 404, "message": "Not found"}} - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_valid_post_request_when_machine_create_was_used_with_requested_options(self, get_patched): get_patched.return_value = MockResponse({}, 200) @@ -1091,7 +1091,7 @@ def test_should_send_valid_post_request_when_machine_create_was_used_with_reques assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_valid_post_request_when_machine_create_was_used_with_all_options(self, get_patched): get_patched.return_value = MockResponse({}, 200) @@ -1105,7 +1105,7 @@ def test_should_send_valid_post_request_when_machine_create_was_used_with_all_op assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_send_changed_headers_when_api_key_option_was_used(self, get_patched): get_patched.return_value = 
MockResponse(example_responses.CREATE_MACHINE_RESPONSE, 200) @@ -1119,7 +1119,7 @@ def test_should_send_changed_headers_when_api_key_option_was_used(self, get_patc assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_error_message_when_wrong_api_key_was_used(self, get_patched): get_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, 400) @@ -1133,7 +1133,7 @@ def test_should_print_error_message_when_wrong_api_key_was_used(self, get_patche assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_error_message_when_wrong_machine_id_was_used(self, get_patched): get_patched.return_value = MockResponse(self.RESPONSE_JSON_WITH_WRONG_MACHINE_ID, 400) @@ -1147,7 +1147,7 @@ def test_should_print_error_message_when_wrong_machine_id_was_used(self, get_pat assert result.output == "Not found\n" assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.post") + @mock.patch("paperspace.cli.cli.client.requests.post") def test_should_print_error_message_when_no_content_was_received_in_response(self, get_patched): get_patched.return_value = MockResponse(status_code=400) @@ -1203,7 +1203,7 @@ class TestShowMachineUtilization(object): } EXPECTED_STDOUT_WHEN_MACHINE_WAS_NOT_FOUND = "Machine not found\n" - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_valid_post_request_and_print_table_when_machines_utilizaation_was_used(self, get_patched): get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) @@ -1217,7 +1217,7 @@ def test_should_send_valid_post_request_and_print_table_when_machines_utilizaati assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_valid_post_request_when_machines_utilization_was_used_with_api_key_option(self, get_patched): get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) @@ -1231,7 +1231,7 @@ def test_should_send_valid_post_request_when_machines_utilization_was_used_with_ assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_valid_post_request_when_machines_utilization_was_used_with_wrong_api_key(self, get_patched): get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, status_code=400) @@ -1245,7 +1245,7 @@ def test_should_send_valid_post_request_when_machines_utilization_was_used_with_ assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_print_error_message_when_machine_was_not_found(self, get_patched): get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WHEN_MACHINE_WAS_NOT_FOUND, status_code=404) @@ -1260,7 +1260,7 @@ def test_should_print_error_message_when_machine_was_not_found(self, get_patched assert result.output == 
self.EXPECTED_STDOUT_WHEN_MACHINE_WAS_NOT_FOUND assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_print_error_message_when_error_status_code_received_but_no_content_was_provided(self, get_patched): get_patched.return_value = MockResponse(status_code=400) @@ -1308,7 +1308,7 @@ class TestWaitForMachine(object): } EXPECTED_STDOUT_WHEN_MACHINE_WAS_NOT_FOUND = "Machine not found\nError while reading machine state\n" - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_valid_post_request_and_print_table_when_machines_waitfor_was_used(self, get_patched): get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) @@ -1322,7 +1322,7 @@ def test_should_send_valid_post_request_and_print_table_when_machines_waitfor_wa assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_valid_post_request_when_machines_waitfor_was_used_with_api_key_option(self, get_patched): get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) @@ -1336,7 +1336,7 @@ def test_should_send_valid_post_request_when_machines_waitfor_was_used_with_api_ assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_valid_post_request_when_machines_waitfor_was_used_with_wrong_api_key(self, get_patched): get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, status_code=400) @@ -1350,7 +1350,7 @@ def test_should_send_valid_post_request_when_machines_waitfor_was_used_with_wron assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_print_error_message_when_machine_was_not_found(self, get_patched): get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WHEN_MACHINE_WAS_NOT_FOUND, status_code=404) @@ -1365,7 +1365,7 @@ def test_should_print_error_message_when_machine_was_not_found(self, get_patched assert result.output == self.EXPECTED_STDOUT_WHEN_MACHINE_WAS_NOT_FOUND assert result.exit_code == 0 - @mock.patch("paperspace.cli.client.requests.get") + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_print_error_message_when_error_status_code_received_but_no_content_was_provided(self, get_patched): get_patched.return_value = MockResponse(status_code=400) diff --git a/tests/test_click_commands.py b/tests/test_click_commands.py index 2f17827..1638ed1 100644 --- a/tests/test_click_commands.py +++ b/tests/test_click_commands.py @@ -1,11 +1,12 @@ import mock from click.testing import CliRunner -from paperspace import cli, constants +from paperspace import constants +from paperspace.cli import cli -@mock.patch("paperspace.cli.client.API") -@mock.patch("paperspace.cli.experiments_commands") +@mock.patch("paperspace.cli.cli.client.API") +@mock.patch("paperspace.cli.cli.experiments_commands") def test_should_execute_create_experiment_command_when_cli_singlenode_command_was_executed(commands_patched, api_patched): api_patched.return_value = mock.MagicMock() @@ -33,8 +34,8 @@ def 
test_should_execute_create_experiment_command_when_cli_singlenode_command_wa commands_patched.create_experiment.assert_called_once_with(expected_kwargs, api=api_patched()) -@mock.patch("paperspace.cli.client.API") -@mock.patch("paperspace.cli.experiments_commands") +@mock.patch("paperspace.cli.cli.client.API") +@mock.patch("paperspace.cli.cli.experiments_commands") def test_should_execute_create_experiment_command_when_cli_multinode_mpi_command_was_executed(commands_patched, api_patched): api_patched.return_value = mock.MagicMock() @@ -73,8 +74,8 @@ def test_should_execute_create_experiment_command_when_cli_multinode_mpi_command commands_patched.create_experiment.assert_called_once_with(expected_kwargs, api=api_patched()) -@mock.patch("paperspace.cli.client.API") -@mock.patch("paperspace.cli.experiments_commands") +@mock.patch("paperspace.cli.cli.client.API") +@mock.patch("paperspace.cli.cli.experiments_commands") def test_should_execute_create_experiment_command_when_cli_multinode_grpc_command_was_executed(commands_patched, api_patched): api_patched.return_value = mock.MagicMock() @@ -112,8 +113,8 @@ def test_should_execute_create_experiment_command_when_cli_multinode_grpc_comman commands_patched.create_experiment.assert_called_once_with(expected_kwargs, api=api_patched()) -@mock.patch("paperspace.cli.client.API") -@mock.patch("paperspace.cli.experiments_commands") +@mock.patch("paperspace.cli.cli.client.API") +@mock.patch("paperspace.cli.cli.experiments_commands") def test_should_execute_create_experiment_command_when_cli_create_and_start_singlenode_command_was_executed( commands_patched, api_patched): api_patched.return_value = mock.MagicMock() @@ -141,8 +142,8 @@ def test_should_execute_create_experiment_command_when_cli_create_and_start_sing commands_patched.create_and_start_experiment.assert_called_once_with(expected_kwargs, api=api_patched()) -@mock.patch("paperspace.cli.client.API") -@mock.patch("paperspace.cli.experiments_commands") +@mock.patch("paperspace.cli.cli.client.API") +@mock.patch("paperspace.cli.cli.experiments_commands") def test_should_execute_create_experiment_command_when_cli_create_and_start_multinode_mpi_command_was_executed( commands_patched, api_patched): api_patched.return_value = mock.MagicMock() From 14db57233900c3a467656474481efd9dcf6b8d35 Mon Sep 17 00:00:00 2001 From: kossak Date: Thu, 25 Apr 2019 13:10:00 +0200 Subject: [PATCH 04/42] dummy command --- paperspace/cli/jobs/commands.py | 6 ++++++ paperspace/commands/jobs.py | 5 +++++ 2 files changed, 11 insertions(+) diff --git a/paperspace/cli/jobs/commands.py b/paperspace/cli/jobs/commands.py index 9bbb517..fbf272c 100644 --- a/paperspace/cli/jobs/commands.py +++ b/paperspace/cli/jobs/commands.py @@ -19,3 +19,9 @@ def delete_job(job_id, api_key=None): jobs_api = client.API(config.CONFIG_HOST, api_key=api_key) command = jobs_commands.DeleteJobCommand(api=jobs_api) command.execute(job_id) + + +def create_job(api_key=None): + jobs_api = client.API(config.CONFIG_HOST, api_key=api_key) + command = jobs_commands.CreateJobCommand(api=jobs_api) + command.execute() diff --git a/paperspace/commands/jobs.py b/paperspace/commands/jobs.py index 06a222a..c71ea58 100644 --- a/paperspace/commands/jobs.py +++ b/paperspace/commands/jobs.py @@ -26,3 +26,8 @@ def execute(self, job_id): self._log_message(response, "Job deleted", "Unknown error while deleting job") + + +class CreateJobCommand(_JobsCommandBase): + def execute(self, **kwargs): + url = "/jobs/create" From f68ae5e4e89aeeacb1a7db672327c9a32898d0fa Mon Sep 17 
00:00:00 2001 From: Bartosz Cierocki Date: Thu, 25 Apr 2019 11:56:09 +0200 Subject: [PATCH 05/42] Add jobs stop command --- paperspace/cli/__init__.py | 7 +++++++ paperspace/cli/cli.py | 8 +------- paperspace/cli/jobs/commands.py | 15 +++++++++++++++ paperspace/commands/jobs.py | 13 +++++++++++-- 4 files changed, 34 insertions(+), 9 deletions(-) diff --git a/paperspace/cli/__init__.py b/paperspace/cli/__init__.py index e69de29..e434bec 100644 --- a/paperspace/cli/__init__.py +++ b/paperspace/cli/__init__.py @@ -0,0 +1,7 @@ +import click + +api_key_option = click.option( + "--apiKey", + "api_key", + help="API key to use this time only", +) \ No newline at end of file diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py index 189d5ef..7a81ab9 100644 --- a/paperspace/cli/cli.py +++ b/paperspace/cli/cli.py @@ -6,6 +6,7 @@ import click from paperspace import constants, client, config +from paperspace.cli import api_key_option from paperspace.cli.jobs.commands import jobs_group from paperspace.commands import experiments as experiments_commands, deployments as deployments_commands, \ machines as machines_commands, login as login_commands @@ -58,13 +59,6 @@ def del_if_value_is_none(dict_): del dict_[key] -api_key_option = click.option( - "--apiKey", - "api_key", - help="API key to use this time only", -) - - def validate_mutually_exclusive(options_1, options_2, error_message): used_option_in_options_1 = any(option is not None for option in options_1) used_option_in_options_2 = any(option is not None for option in options_2) diff --git a/paperspace/cli/jobs/commands.py b/paperspace/cli/jobs/commands.py index 9bbb517..73668a6 100644 --- a/paperspace/cli/jobs/commands.py +++ b/paperspace/cli/jobs/commands.py @@ -1,6 +1,7 @@ import click from paperspace import client, config +from paperspace.cli import api_key_option from paperspace.commands import jobs as jobs_commands @@ -15,7 +16,21 @@ def jobs_group(): "job_id", required=True, ) +@api_key_option def delete_job(job_id, api_key=None): jobs_api = client.API(config.CONFIG_HOST, api_key=api_key) command = jobs_commands.DeleteJobCommand(api=jobs_api) command.execute(job_id) + + +@jobs_group.command("stop", help="Stop running job") +@click.option( + "--jobId", + "job_id", + required=True, +) +@api_key_option +def stop_job(job_id, api_key=None): + jobs_api = client.API(config.CONFIG_HOST, api_key=api_key) + command = jobs_commands.StopJobCommand(api=jobs_api) + command.execute(job_id) diff --git a/paperspace/commands/jobs.py b/paperspace/commands/jobs.py index 06a222a..2eb1b06 100644 --- a/paperspace/commands/jobs.py +++ b/paperspace/commands/jobs.py @@ -1,7 +1,7 @@ from paperspace.commands import CommandBase -class _JobsCommandBase(CommandBase): +class JobsCommandBase(CommandBase): def _log_message(self, response, success_msg_template, error_msg): if response.ok: try: @@ -19,10 +19,19 @@ def _log_message(self, response, success_msg_template, error_msg): self.logger.log(error_msg) -class DeleteJobCommand(_JobsCommandBase): +class DeleteJobCommand(JobsCommandBase): def execute(self, job_id): url = "/jobs/{}/destroy/".format(job_id) response = self.api.post(url) self._log_message(response, "Job deleted", "Unknown error while deleting job") + + +class StopJobCommand(JobsCommandBase): + def execute(self, job_id): + url = "/jobs/{}/stop/".format(job_id) + response = self.api.post(url) + self._log_message(response, + "Job stopped", + "Unknown error while stopping job") From bd242d6e973c5a34f40d999d17e58071423a6b93 Mon Sep 17 00:00:00 2001 From: Bartosz 
Cierocki Date: Thu, 25 Apr 2019 15:29:01 +0200 Subject: [PATCH 06/42] Move common objects to avoid circular imports --- paperspace/cli/__init__.py | 7 --- paperspace/cli/cli.py | 63 ++------------------ paperspace/cli/common.py | 14 +++++ paperspace/cli/{jobs/commands.py => jobs.py} | 2 +- paperspace/cli/jobs/__init__.py | 0 paperspace/cli/types.py | 35 +++++++++++ paperspace/cli/validators.py | 18 ++++++ 7 files changed, 72 insertions(+), 67 deletions(-) create mode 100644 paperspace/cli/common.py rename paperspace/cli/{jobs/commands.py => jobs.py} (94%) delete mode 100644 paperspace/cli/jobs/__init__.py create mode 100644 paperspace/cli/types.py create mode 100644 paperspace/cli/validators.py diff --git a/paperspace/cli/__init__.py b/paperspace/cli/__init__.py index e434bec..e69de29 100644 --- a/paperspace/cli/__init__.py +++ b/paperspace/cli/__init__.py @@ -1,7 +0,0 @@ -import click - -api_key_option = click.option( - "--apiKey", - "api_key", - help="API key to use this time only", -) \ No newline at end of file diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py index 7a81ab9..5afff71 100644 --- a/paperspace/cli/cli.py +++ b/paperspace/cli/cli.py @@ -1,29 +1,16 @@ import collections import functools -import json -import re import click from paperspace import constants, client, config -from paperspace.cli import api_key_option -from paperspace.cli.jobs.commands import jobs_group +from paperspace.cli.common import api_key_option, del_if_value_is_none +from paperspace.cli.jobs import jobs_group +from paperspace.cli.types import ChoiceType, json_string +from paperspace.cli.validators import validate_mutually_exclusive, validate_email from paperspace.commands import experiments as experiments_commands, deployments as deployments_commands, \ machines as machines_commands, login as login_commands - -class ChoiceType(click.Choice): - """Takes a string-keyed map and converts cli-provided parameter to corresponding value""" - - def __init__(self, type_map, case_sensitive=True): - super(ChoiceType, self).__init__(tuple(type_map.keys()), case_sensitive=case_sensitive) - self.type_map = type_map - - def convert(self, value, param, ctx): - value = super(ChoiceType, self).convert(value, param, ctx).upper() - return self.type_map[value] - - MULTI_NODE_EXPERIMENT_TYPES_MAP = collections.OrderedDict( ( ("GRPC", constants.ExperimentType.GRPC_MULTI_NODE), @@ -32,48 +19,6 @@ def convert(self, value, param, ctx): ) -class Number(click.ParamType): - name = "number" - - def convert(self, value, param, ctx): - try: - number = int(value) - except ValueError: - try: - number = float(value) - except ValueError: - self.fail('{} is not a valid number'.format(value), param, ctx) - - return number - - -def json_string(val): - """Wraps json.loads so the cli help shows proper option's type name instead of 'LOADS'""" - return json.loads(val) - - -def del_if_value_is_none(dict_): - """Remove all elements with value == None""" - for key, val in list(dict_.items()): - if val is None: - del dict_[key] - - -def validate_mutually_exclusive(options_1, options_2, error_message): - used_option_in_options_1 = any(option is not None for option in options_1) - used_option_in_options_2 = any(option is not None for option in options_2) - if used_option_in_options_1 and used_option_in_options_2: - raise click.UsageError(error_message) - - -def validate_email(ctx, param, value): - if value is not None \ - and not re.match(r"[^@]+@[^@]+\.[^@]+", value): - raise click.BadParameter("Bad email address format") - - return value - - 
@click.group() def cli(): pass diff --git a/paperspace/cli/common.py b/paperspace/cli/common.py new file mode 100644 index 0000000..d744117 --- /dev/null +++ b/paperspace/cli/common.py @@ -0,0 +1,14 @@ +import click + +api_key_option = click.option( + "--apiKey", + "api_key", + help="API key to use this time only", +) + + +def del_if_value_is_none(dict_): + """Remove all elements with value == None""" + for key, val in list(dict_.items()): + if val is None: + del dict_[key] \ No newline at end of file diff --git a/paperspace/cli/jobs/commands.py b/paperspace/cli/jobs.py similarity index 94% rename from paperspace/cli/jobs/commands.py rename to paperspace/cli/jobs.py index 73668a6..fc37cbc 100644 --- a/paperspace/cli/jobs/commands.py +++ b/paperspace/cli/jobs.py @@ -1,7 +1,7 @@ import click from paperspace import client, config -from paperspace.cli import api_key_option +from paperspace.cli.common import api_key_option from paperspace.commands import jobs as jobs_commands diff --git a/paperspace/cli/jobs/__init__.py b/paperspace/cli/jobs/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/paperspace/cli/types.py b/paperspace/cli/types.py new file mode 100644 index 0000000..24aa916 --- /dev/null +++ b/paperspace/cli/types.py @@ -0,0 +1,35 @@ +import json + +import click + + +class ChoiceType(click.Choice): + """Takes a string-keyed map and converts cli-provided parameter to corresponding value""" + + def __init__(self, type_map, case_sensitive=True): + super(ChoiceType, self).__init__(tuple(type_map.keys()), case_sensitive=case_sensitive) + self.type_map = type_map + + def convert(self, value, param, ctx): + value = super(ChoiceType, self).convert(value, param, ctx).upper() + return self.type_map[value] + + +class Number(click.ParamType): + name = "number" + + def convert(self, value, param, ctx): + try: + number = int(value) + except ValueError: + try: + number = float(value) + except ValueError: + self.fail('{} is not a valid number'.format(value), param, ctx) + + return number + + +def json_string(val): + """Wraps json.loads so the cli help shows proper option's type name instead of 'LOADS'""" + return json.loads(val) \ No newline at end of file diff --git a/paperspace/cli/validators.py b/paperspace/cli/validators.py new file mode 100644 index 0000000..27d4f63 --- /dev/null +++ b/paperspace/cli/validators.py @@ -0,0 +1,18 @@ +import re + +import click + + +def validate_mutually_exclusive(options_1, options_2, error_message): + used_option_in_options_1 = any(option is not None for option in options_1) + used_option_in_options_2 = any(option is not None for option in options_2) + if used_option_in_options_1 and used_option_in_options_2: + raise click.UsageError(error_message) + + +def validate_email(ctx, param, value): + if value is not None \ + and not re.match(r"[^@]+@[^@]+\.[^@]+", value): + raise click.BadParameter("Bad email address format") + + return value \ No newline at end of file
From eb4caa02d4882bef398c95dcfafe14aba6cd2d55 Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Thu, 25 Apr 2019 19:10:02 +0200 Subject: [PATCH 09/42] Add 'projects list' command --- paperspace/cli/cli.py | 2 + paperspace/cli/projects.py | 18 +++ paperspace/commands/projects.py | 58 +++++++++ paperspace/main.py | 3 +- tests/example_responses.py | 189 ++++++++++++++++++++++++++++++ tests/functional/test_projects.py | 105 +++++++++++++++++ 6 files changed, 374 insertions(+), 1 deletion(-) create mode 100644 paperspace/cli/projects.py create mode 100644 paperspace/commands/projects.py create mode 100644 tests/functional/test_projects.py diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py index 5afff71..c1ef0c0 100644 --- a/paperspace/cli/cli.py +++ b/paperspace/cli/cli.py @@ -6,6 +6,7 @@ from paperspace import constants, client, config from paperspace.cli.common import api_key_option, del_if_value_is_none from paperspace.cli.jobs import jobs_group +from paperspace.cli.projects import projects_group from paperspace.cli.types import ChoiceType, json_string from paperspace.cli.validators import validate_mutually_exclusive, validate_email from paperspace.commands import experiments as experiments_commands, deployments as deployments_commands, \ @@ -1052,3 +1053,4 @@ def version(): cli.add_command(jobs_group) +cli.add_command(projects_group) diff --git a/paperspace/cli/projects.py b/paperspace/cli/projects.py new file mode 100644 index 0000000..26e9a4b --- /dev/null +++ b/paperspace/cli/projects.py @@ -0,0 +1,18 @@ +import click + +from
paperspace import client, config +from paperspace.commands import projects as projects_commands +from . import common + + +@click.group("projects", help="Manage projects") +def projects_group(): + pass + + +@projects_group.command("list", help="List projects") +@common.api_key_option +def delete_job(api_key): + projects_api = client.API(config.CONFIG_HOST, api_key=api_key) + command = projects_commands.ListProjectsCommand(api=projects_api) + command.execute() diff --git a/paperspace/commands/projects.py b/paperspace/commands/projects.py new file mode 100644 index 0000000..5ccdd16 --- /dev/null +++ b/paperspace/commands/projects.py @@ -0,0 +1,58 @@ +import pydoc + +import terminaltables + +from paperspace import client, config, version, logger +from paperspace.utils import get_terminal_lines + +default_headers = {"X-API-Key": config.PAPERSPACE_API_KEY, + "ps_client_name": "paperspace-python", + "ps_client_version": version.version} +deployments_api = client.API(config.CONFIG_HOST, headers=default_headers) + + +class ProjectsCommandBase(object): + def __init__(self, api=deployments_api, logger_=logger): + self.api = api + self.logger = logger_ + + +class ListProjectsCommand(ProjectsCommandBase): + def execute(self): + # TODO: PS_API should not require teamId but it does now, so change the following line + # TODO: to `json_ = None` or whatever works when PS_API is fixed: + json_ = {"teamId": 666} + response = self.api.get("/projects/", json=json_) + + try: + data = response.json() + if not response.ok: + self.logger.log_error_response(data) + return + except (ValueError, KeyError) as e: + self.logger.log("Error while parsing response data: {}".format(e)) + else: + self._log_projects_list(data) + + def _log_projects_list(self, data): + if not data.get("data"): + self.logger.log("No projects found") + else: + table_str = self._make_table(data["data"]) + if len(table_str.splitlines()) > get_terminal_lines(): + pydoc.pager(table_str) + else: + self.logger.log(table_str) + + @staticmethod + def _make_table(projects): + data = [("ID", "Name", "Repository")] + for project in projects: + id_ = project.get("handle") + name = project.get("name") + repo_url = project.get("repoUrl") + data.append((id_, name, repo_url)) + + ascii_table = terminaltables.AsciiTable(data) + table_string = ascii_table.table + return table_string diff --git a/paperspace/main.py b/paperspace/main.py index a013709..a1c8504 100644 --- a/paperspace/main.py +++ b/paperspace/main.py @@ -8,7 +8,8 @@ def main(): - if len(sys.argv) >= 2 and sys.argv[1] in ('experiments', 'deployments', 'machines', 'login', 'logout', 'version'): + if len(sys.argv) >= 2 and sys.argv[1] in ('experiments', 'deployments', 'machines', 'login', 'logout', 'version', + 'projects', 'jobs'): cli(sys.argv[1:]) args = sys.argv[:] diff --git a/tests/example_responses.py b/tests/example_responses.py index 7665f5f..980656d 100644 --- a/tests/example_responses.py +++ b/tests/example_responses.py @@ -1273,3 +1273,192 @@ "billingMonth": "2019-04", }, } + + +LIST_PROJECTS_RESPONSE = { + "data": [ + { + "name": "test_project", + "handle": "prq70zy79", + "dtCreated": "2019-03-18T13:24:46.666Z", + "dtDeleted": None, + "lastJobSeqNum": 2, + "repoNodeId": None, + "repoName": None, + "repoUrl": None, + "experiments": { + "data": [ + { + "dtCreated": "2019-04-05T15:10:55.692629+00:00", + "dtDeleted": None, + "dtFinished": None, + "dtModified": "2019-04-05T15:10:55.692629+00:00", + "dtProvisioningFinished": None, + "dtProvisioningStarted": None, + "dtStarted": None, + 
"dtTeardownFinished": None, + "dtTeardownStarted": None, + "experimentError": None, + "experimentTemplateHistoryId": 22159, + "experimentTemplateId": 60, + "experimentTypeId": 1, + "handle": "estgcoux8igx32", + "id": 22123, + "projectHandle": "prq70zy79", + "projectId": 612, + "started_by_user_id": 1655, + "state": 1, + "templateHistory": { + "dtCreated": "2019-04-05T15:10:54.923725+00:00", + "dtDeleted": None, + "experimentTemplateId": 60, + "id": 22159, + "params": { + "is_preemptible": False, + "name": "dsfads", + "ports": 5000, + "project_handle": "prq70zy79", + "worker_command": "sadas", + "worker_container": "asd", + "worker_machine_type": "C2", + "worker_use_dockerfile": False, + "workspaceUrl": "example.com" + }, + "triggerEvent": None, + "triggerEventId": None + } + } + ], + "meta": { + "itemGroup": { + "key": "projectHandle", + "value": "prq70zy79" + }, + "totalItems": 1 + } + } + }, + { + "name": "keton", + "handle": "prmr22ve0", + "dtCreated": "2019-03-25T14:50:43.202Z", + "dtDeleted": None, + "lastJobSeqNum": 8, + "repoNodeId": None, + "repoName": None, + "repoUrl": None, + "experiments": { + "data": [ + { + "dtCreated": "2019-04-02T15:17:03.393886+00:00", + "dtDeleted": None, + "dtFinished": "2019-04-02T17:02:54.654569+00:00", + "dtModified": "2019-04-02T15:17:03.393886+00:00", + "dtProvisioningFinished": "2019-04-02T15:17:10.978198+00:00", + "dtProvisioningStarted": "2019-04-02T15:17:10.978198+00:00", + "dtStarted": "2019-04-02T15:17:10.978198+00:00", + "dtTeardownFinished": "2019-04-02T17:02:54.654569+00:00", + "dtTeardownStarted": "2019-04-02T17:02:54.654569+00:00", + "experimentError": None, + "experimentTemplateHistoryId": 22130, + "experimentTemplateId": 174, + "experimentTypeId": 1, + "handle": "ehla1kvbwzaco", + "id": 22094, + "projectHandle": "prmr22ve0", + "projectId": 626, + "started_by_user_id": 1655, + "state": 5, + "templateHistory": { + "dtCreated": "2019-04-02T15:17:02.663449+00:00", + "dtDeleted": None, + "experimentTemplateId": 174, + "id": 22130, + "params": { + "is_preemptible": False, + "model_path": "/artifacts", + "model_type": "Tensorflow", + "name": "Test1", + "ports": 5000, + "project_handle": "prmr22ve0", + "worker_command": "python mnist.py --data_format=channels_last", + "worker_container": "tensorflow/tensorflow:1.13.1-py3", + "worker_machine_type": "K80", + "workspaceUrl": "https://github.com/Paperspace/mnist-sample" + }, + "triggerEvent": None, + "triggerEventId": None + } + } + ], + "meta": { + "itemGroup": { + "key": "projectHandle", + "value": "prmr22ve0" + }, + "totalItems": 1 + } + } + }, + { + "name": "paperspace-python", + "handle": "przhbct98", + "dtCreated": "2019-04-04T15:12:34.229Z", + "dtDeleted": None, + "lastJobSeqNum": 3, + "repoNodeId": None, + "repoName": None, + "repoUrl": None, + "experiments": { + "data": [ + { + "dtCreated": "2019-04-24T10:18:30.523193+00:00", + "dtDeleted": None, + "dtFinished": "2019-04-24T10:18:43.613748+00:00", + "dtModified": "2019-04-24T10:18:30.523193+00:00", + "dtProvisioningFinished": "2019-04-24T10:18:35.010792+00:00", + "dtProvisioningStarted": "2019-04-24T10:18:35.010792+00:00", + "dtStarted": "2019-04-24T10:18:35.010792+00:00", + "dtTeardownFinished": "2019-04-24T10:18:43.613748+00:00", + "dtTeardownStarted": "2019-04-24T10:18:43.613748+00:00", + "experimentError": None, + "experimentTemplateHistoryId": 22311, + "experimentTemplateId": 186, + "experimentTypeId": 1, + "handle": "es47og38wzhnuo", + "id": 22270, + "projectHandle": "przhbct98", + "projectId": 649, + "started_by_user_id": 1655, + 
"state": 7, + "templateHistory": { + "dtCreated": "2019-04-24T10:18:30.523193+00:00", + "dtDeleted": None, + "experimentTemplateId": 186, + "id": 22311, + "params": { + "command": ". test.sh\npython2 hello.py", + "container": "paperspace/tensorflow-python", + "machineType": "G1", + "project": "paperspace-python", + "workspaceFileName": "temp.zip" + }, + "triggerEvent": None, + "triggerEventId": None + } + } + ], + "meta": { + "itemGroup": { + "key": "projectHandle", + "value": "przhbct98" + }, + "totalItems": 1 + } + } + } + ], + "meta": { + "totalItems": 3 + } +} diff --git a/tests/functional/test_projects.py b/tests/functional/test_projects.py new file mode 100644 index 0000000..22be33d --- /dev/null +++ b/tests/functional/test_projects.py @@ -0,0 +1,105 @@ +import mock +from click.testing import CliRunner + +import paperspace +from paperspace.cli import cli +from paperspace.client import default_headers +from tests import example_responses, MockResponse + + +class TestListProjects(object): + URL = "https://api.paperspace.io/projects/" + EXPECTED_HEADERS = default_headers.copy() + BASIC_COMMAND = ["projects", "list"] + # TODO: change to `REQUEST_JSON = None` or whatever works when PS_API is fixed + REQUEST_JSON = {'teamId': 666} + EXPECTED_RESPONSE_JSON = example_responses.LIST_PROJECTS_RESPONSE + EXPECTED_STDOUT = """+-----------+-------------------+------------+ +| ID | Name | Repository | ++-----------+-------------------+------------+ +| prq70zy79 | test_project | None | +| prmr22ve0 | keton | None | +| przhbct98 | paperspace-python | None | ++-----------+-------------------+------------+ +""" + + BASIC_COMMAND_WITH_API_KEY = ["projects", "list", "--apiKey", "some_key"] + EXPECTED_HEADERS_WITH_CHANGED_API_KEY = paperspace.client.default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY["X-API-Key"] = "some_key" + + RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"} + EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Invalid API token\n" + + RESPONSE_JSON_WHEN_NO_PROJECTS_WERE_FOUND = {"data": [], "meta": {"totalItems": 0}} + EXPECTED_STDOUT_WHEN_NO_PROJECTS_WERE_FOUND = "No projects found\n" + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_send_valid_post_request_and_print_table_when_projects_list_was_used(self, get_patched): + get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.REQUEST_JSON, + params=None) + assert result.output == self.EXPECTED_STDOUT + assert result.exit_code == 0 + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_send_valid_post_request_when_projects_list_was_used_with_api_key_option(self, get_patched): + get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND_WITH_API_KEY) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=self.REQUEST_JSON, + params=None) + assert result.output == self.EXPECTED_STDOUT + assert result.exit_code == 0 + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_send_valid_post_request_when_projects_list_was_used_with_wrong_api_key(self, get_patched): + get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, 
status_code=400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND_WITH_API_KEY) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=self.REQUEST_JSON, + params=None) + assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN + assert result.exit_code == 0 + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_print_error_message_when_no_project_was_not_found(self, get_patched): + get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WHEN_NO_PROJECTS_WERE_FOUND, + status_code=200) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.REQUEST_JSON, + params=None) + assert result.output == self.EXPECTED_STDOUT_WHEN_NO_PROJECTS_WERE_FOUND + assert result.exit_code == 0 + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_print_error_message_when_error_status_code_received_but_no_content_was_provided(self, get_patched): + get_patched.return_value = MockResponse(status_code=400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.REQUEST_JSON, + params=None) + assert result.output == "Error while parsing response data: No JSON\n" + assert result.exit_code == 0 From 0adf2a90e0d73229d203b5742168c365a62747fe Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Thu, 25 Apr 2019 19:28:17 +0200 Subject: [PATCH 10/42] Add 'Created' column to projects list command --- paperspace/commands/projects.py | 5 +++-- tests/functional/test_projects.py | 14 +++++++------- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/paperspace/commands/projects.py b/paperspace/commands/projects.py index 5ccdd16..a74003f 100644 --- a/paperspace/commands/projects.py +++ b/paperspace/commands/projects.py @@ -46,12 +46,13 @@ def _log_projects_list(self, data): @staticmethod def _make_table(projects): - data = [("ID", "Name", "Repository")] + data = [("ID", "Name", "Repository", "Created")] for project in projects: id_ = project.get("handle") name = project.get("name") repo_url = project.get("repoUrl") - data.append((id_, name, repo_url)) + created = project.get("dtCreated") + data.append((id_, name, repo_url, created)) ascii_table = terminaltables.AsciiTable(data) table_string = ascii_table.table diff --git a/tests/functional/test_projects.py b/tests/functional/test_projects.py index 22be33d..2e34cce 100644 --- a/tests/functional/test_projects.py +++ b/tests/functional/test_projects.py @@ -14,13 +14,13 @@ class TestListProjects(object): # TODO: change to `REQUEST_JSON = None` or whatever works when PS_API is fixed REQUEST_JSON = {'teamId': 666} EXPECTED_RESPONSE_JSON = example_responses.LIST_PROJECTS_RESPONSE - EXPECTED_STDOUT = """+-----------+-------------------+------------+ -| ID | Name | Repository | -+-----------+-------------------+------------+ -| prq70zy79 | test_project | None | -| prmr22ve0 | keton | None | -| przhbct98 | paperspace-python | None | -+-----------+-------------------+------------+ + EXPECTED_STDOUT = """+-----------+-------------------+------------+--------------------------+ +| ID | Name | Repository | Created | ++-----------+-------------------+------------+--------------------------+ +| prq70zy79 | test_project | None | 2019-03-18T13:24:46.666Z | +| prmr22ve0 | keton | None | 
2019-03-25T14:50:43.202Z | +| przhbct98 | paperspace-python | None | 2019-04-04T15:12:34.229Z | ++-----------+-------------------+------------+--------------------------+ """ BASIC_COMMAND_WITH_API_KEY = ["projects", "list", "--apiKey", "some_key"] From ea6d3325ff19911bc23f4e43b78da2f6cec5c51d Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Fri, 26 Apr 2019 12:49:41 +0200 Subject: [PATCH 11/42] Add jobs list command --- paperspace/cli/jobs.py | 14 +- paperspace/commands/jobs.py | 46 ++ tests/example_responses.py | 785 ++++++++++++++++++++++++++++++++++ tests/functional/test_jobs.py | 111 +++++ 4 files changed, 953 insertions(+), 3 deletions(-) create mode 100644 tests/functional/test_jobs.py diff --git a/paperspace/cli/jobs.py b/paperspace/cli/jobs.py index fc37cbc..d34d235 100644 --- a/paperspace/cli/jobs.py +++ b/paperspace/cli/jobs.py @@ -1,7 +1,7 @@ import click from paperspace import client, config -from paperspace.cli.common import api_key_option +from paperspace.cli import common from paperspace.commands import jobs as jobs_commands @@ -16,7 +16,7 @@ def jobs_group(): "job_id", required=True, ) -@api_key_option +@common.api_key_option def delete_job(job_id, api_key=None): jobs_api = client.API(config.CONFIG_HOST, api_key=api_key) command = jobs_commands.DeleteJobCommand(api=jobs_api) @@ -29,8 +29,16 @@ def delete_job(job_id, api_key=None): "job_id", required=True, ) -@api_key_option +@common.api_key_option def stop_job(job_id, api_key=None): jobs_api = client.API(config.CONFIG_HOST, api_key=api_key) command = jobs_commands.StopJobCommand(api=jobs_api) command.execute(job_id) + + +@jobs_group.command("list", help="List jobs with optional filtering") +@common.api_key_option +def list_jobs(api_key): + jobs_api = client.API(config.CONFIG_HOST, api_key=api_key) + command = jobs_commands.ListJobsCommand(api=jobs_api) + command.execute() diff --git a/paperspace/commands/jobs.py b/paperspace/commands/jobs.py index 2eb1b06..5af0b72 100644 --- a/paperspace/commands/jobs.py +++ b/paperspace/commands/jobs.py @@ -1,4 +1,9 @@ +import pydoc + +import terminaltables + from paperspace.commands import CommandBase +from paperspace.utils import get_terminal_lines class JobsCommandBase(CommandBase): @@ -35,3 +40,44 @@ def execute(self, job_id): self._log_message(response, "Job stopped", "Unknown error while stopping job") + + +class ListJobsCommand(JobsCommandBase): + def execute(self): + response = self.api.get("/jobs/getJobs/", json=None) + + try: + data = response.json() + if not response.ok: + self.logger.log_error_response(data) + return + except (ValueError, KeyError) as e: + self.logger.log("Error while parsing response data: {}".format(e)) + else: + self._log_jobs_list(data) + + def _log_jobs_list(self, data): + if not data: + self.logger.log("No jobs found") + else: + table_str = self._make_table(data) + if len(table_str.splitlines()) > get_terminal_lines(): + pydoc.pager(table_str) + else: + self.logger.log(table_str) + + @staticmethod + def _make_table(jobs): + data = [("ID", "Name", "Project", "Cluster", "Machine Type", "Created")] + for job in jobs: + id_ = job.get("id") + name = job.get("name") + project = job.get("project") + cluster = job.get("cluster") + machine_type = job.get("machineType") + created = job.get("dtCreated") + data.append((id_, name, project, cluster, machine_type, created)) + + ascii_table = terminaltables.AsciiTable(data) + table_string = ascii_table.table + return table_string diff --git a/tests/example_responses.py b/tests/example_responses.py index 
980656d..e60e3e6 100644 --- a/tests/example_responses.py +++ b/tests/example_responses.py @@ -1462,3 +1462,788 @@ "totalItems": 3 } } + + +LIST_JOBS_RESPONSE_JSON = [ + { + "id": "jsxeeba5qq99yn", + "name": "job 1", + "state": "Error", + "workspaceUrl": "none", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "nvidia-smi", + "projectId": "prmr22ve0", + "project": "keton", + "container": "Test-Container", + "containerUrl": "paperspace/tensorflow:1.5.0-gpu", + "baseContainer": "Test-Container", + "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", + "machineType": "K80", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "K80 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": "Error quering for experiment for job: sql: Scan error on column index 4, name \"model_path\": unsupported Scan, storing driver.Value type into type *string", + "dtCreated": "2019-03-25T14:51:16.118Z", + "dtModified": "2019-03-25T14:51:16.118Z", + "dtProvisioningStarted": None, + "dtProvisioningFinished": None, + "dtStarted": None, + "dtFinished": "2019-03-27T13:53:34.188Z", + "dtTeardownStarted": None, + "dtTeardownFinished": None, + "dtDeleted": None, + "exitCode": None, + "queuePosition": None, + "seqNum": 1, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1553525480", + "fqdn": "jsxeeba5qq99yn.dgradient.paperspace.com", + "ports": None, + "isPublic": None, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": None, + "cpuCount": None, + "cpuModel": None, + "cpuFlags": None, + "cpuMem": None, + "gpuName": None, + "gpuSerial": None, + "gpuDevice": None, + "gpuDriver": None, + "gpuCount": None, + "gpuMem": None, + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "esk8lny3pxyqd6" + }, + { + "id": "jfl063dsv634h", + "name": "job 2", + "state": "Stopped", + "workspaceUrl": "none", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "nvidia-smi", + "projectId": "prmr22ve0", + "project": "keton", + "container": "Test-Container", + "containerUrl": "paperspace/tensorflow:1.5.0-gpu", + "baseContainer": "Test-Container", + "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", + "machineType": "P100", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "P100 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-03-25T14:54:30.866Z", + "dtModified": "2019-03-25T14:54:30.866Z", + "dtProvisioningStarted": "2019-03-25T14:59:15.818Z", + "dtProvisioningFinished": "2019-03-25T14:59:20.542Z", + "dtStarted": "2019-03-25T14:59:20.542Z", + "dtFinished": "2019-03-25T14:59:25.631Z", + "dtTeardownStarted": "2019-03-25T14:59:25.669Z", + "dtTeardownFinished": "2019-03-25T14:59:25.758Z", + "dtDeleted": None, + "exitCode": 0, + "queuePosition": None, + "seqNum": 2, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1553525723", + "fqdn": "jfl063dsv634h.dgradient.paperspace.com", + "ports": None, + "isPublic": None, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + 
"codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1553525723", + "cpuCount": 4, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "24683148 kB", + "gpuName": "Tesla P100-PCIE-16GB", + "gpuSerial": "0324317067114", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "410.48", + "gpuCount": 1, + "gpuMem": "16280 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "ejd2v80p7cw6m" + }, + { + "id": "jsvau8w47k78zm", + "name": "Clone - jfl063dsv634h", + "state": "Stopped", + "workspaceUrl": "none", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "nvidia-smi", + "projectId": "prmr22ve0", + "project": "keton", + "container": "Test-Container", + "containerUrl": "paperspace/tensorflow:1.5.0-gpu", + "baseContainer": "Test-Container", + "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", + "machineType": "P100", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "P100 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": "jfl063dsv634h", + "jobError": None, + "dtCreated": "2019-03-25T15:04:43.844Z", + "dtModified": "2019-03-25T15:04:43.844Z", + "dtProvisioningStarted": "2019-03-25T15:07:43.854Z", + "dtProvisioningFinished": "2019-03-25T15:07:48.435Z", + "dtStarted": "2019-03-25T15:07:48.435Z", + "dtFinished": "2019-03-25T15:07:53.523Z", + "dtTeardownStarted": "2019-03-25T15:07:53.561Z", + "dtTeardownFinished": "2019-03-25T15:07:53.649Z", + "dtDeleted": None, + "exitCode": 0, + "queuePosition": None, + "seqNum": 3, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1553526309", + "fqdn": "jsvau8w47k78zm.dgradient.paperspace.com", + "ports": None, + "isPublic": None, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1553526309", + "cpuCount": 4, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "24683148 kB", + "gpuName": "Tesla P100-PCIE-16GB", + "gpuSerial": "0324317067114", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "410.48", + "gpuCount": 1, + "gpuMem": "16280 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": 
None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": None + }, + { + "id": "j2eq99xhvgtum", + "name": "keton1-worker-1", + "state": "Pending", + "workspaceUrl": None, + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "echo keton", + "projectId": "prmr22ve0", + "project": "keton", + "container": "Test-Container", + "containerUrl": "paperspace/tensorflow:1.5.0-gpu", + "baseContainer": "Test-Container", + "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", + "machineType": "P100", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "P100 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-03-25T15:07:30.383Z", + "dtModified": "2019-03-25T15:07:30.383Z", + "dtProvisioningStarted": None, + "dtProvisioningFinished": None, + "dtStarted": None, + "dtFinished": "2019-03-25T15:07:30.383Z", + "dtTeardownStarted": None, + "dtTeardownFinished": None, + "dtDeleted": None, + "exitCode": None, + "queuePosition": None, + "seqNum": 4, + "storageRegion": "GCP West", + "clusterMachine": None, + "fqdn": "j2eq99xhvgtum.dgradient.paperspace.com", + "ports": "5000", + "isPublic": False, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": None, + "cpuCount": None, + "cpuModel": None, + "cpuFlags": None, + "cpuMem": None, + "gpuName": None, + "gpuSerial": None, + "gpuDevice": None, + "gpuDriver": None, + "gpuCount": None, + "gpuMem": None, + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "esibxync23szaq" + }, + { + "id": "jzzinybinuxf9", + "name": "keton2-worker-1", + "state": "Stopped", + "workspaceUrl": "git+https://github.com/Paperspace/multinode-mnist", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "echo keton", + "projectId": "prmr22ve0", + "project": "keton", + "container": "Test-Container", + "containerUrl": "paperspace/tensorflow:1.5.0-gpu", + "baseContainer": "Test-Container", + "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", + "machineType": "P100", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "P100 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-03-25T15:18:51.461Z", + "dtModified": "2019-03-25T15:18:51.461Z", + "dtProvisioningStarted": "2019-03-25T15:18:58.089Z", + "dtProvisioningFinished": "2019-03-25T15:19:03.246Z", + "dtStarted": "2019-03-25T15:19:03.246Z", + "dtFinished": "2019-03-25T15:19:08.337Z", + "dtTeardownStarted": "2019-03-25T15:19:08.374Z", + "dtTeardownFinished": "2019-03-25T15:19:08.461Z", + "dtDeleted": None, + "exitCode": 0, + "queuePosition": None, + "seqNum": 5, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1553526309", + "fqdn": "jzzinybinuxf9.dgradient.paperspace.com", + "ports": "5000", + "isPublic": False, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + 
"runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1553526309", + "cpuCount": 4, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "24683148 kB", + "gpuName": "Tesla P100-PCIE-16GB", + "gpuSerial": "0324317067114", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "410.48", + "gpuCount": 1, + "gpuMem": "16280 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "ep6hmawh97q0v" + }, + { + "id": "jsb37duc1zlbz0", + "name": "keton4-worker-1", + "state": "Stopped", + "workspaceUrl": "git+https://github.com/Paperspace/multinode-mnist", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "echo siema", + "projectId": "prmr22ve0", + "project": "keton", + "container": "Test-Container", + "containerUrl": "paperspace/tensorflow:1.5.0-gpu", + "baseContainer": "Test-Container", + "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", + "machineType": "P100", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "P100 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-03-25T15:29:04.601Z", + "dtModified": "2019-03-25T15:29:04.601Z", + "dtProvisioningStarted": "2019-03-25T15:30:42.529Z", + "dtProvisioningFinished": "2019-03-25T15:30:48.252Z", + "dtStarted": "2019-03-25T15:30:48.252Z", + "dtFinished": "2019-03-25T15:30:53.349Z", + "dtTeardownStarted": "2019-03-25T15:30:53.387Z", + "dtTeardownFinished": "2019-03-25T15:30:53.470Z", + "dtDeleted": None, + "exitCode": 0, + "queuePosition": None, + "seqNum": 6, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1553526309", + "fqdn": "jsb37duc1zlbz0.dgradient.paperspace.com", + "ports": "3456", + "isPublic": False, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1553526309", + "cpuCount": 4, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "24683148 kB", + "gpuName": "Tesla P100-PCIE-16GB", + "gpuSerial": "0324317067114", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "410.48", + "gpuCount": 1, + "gpuMem": "16280 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + 
"tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "esgeuvkdokyom2" + }, + { + "id": "jq41vipwy18f7", + "name": "keton4-parameter_server-1", + "state": "Stopped", + "workspaceUrl": "git+https://github.com/Paperspace/multinode-mnist", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "ls", + "projectId": "prmr22ve0", + "project": "keton", + "container": "Test-Container", + "containerUrl": "paperspace/tensorflow:1.5.0-gpu", + "baseContainer": "Test-Container", + "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", + "machineType": "P100", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "P100 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-03-25T15:29:06.765Z", + "dtModified": "2019-03-25T15:29:06.765Z", + "dtProvisioningStarted": "2019-03-25T15:30:41.416Z", + "dtProvisioningFinished": "2019-03-25T15:30:48.004Z", + "dtStarted": "2019-03-25T15:30:48.004Z", + "dtFinished": "2019-03-25T15:30:53.097Z", + "dtTeardownStarted": "2019-03-25T15:30:53.135Z", + "dtTeardownFinished": "2019-03-25T15:30:53.223Z", + "dtDeleted": None, + "exitCode": 0, + "queuePosition": None, + "seqNum": 7, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1553526384", + "fqdn": "jq41vipwy18f7.dgradient.paperspace.com", + "ports": "3456", + "isPublic": False, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1553526384", + "cpuCount": 4, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "24683148 kB", + "gpuName": "Tesla P100-PCIE-16GB", + "gpuSerial": "0324317004340", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "410.48", + "gpuCount": 1, + "gpuMem": "16280 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "esgeuvkdokyom2" + }, + { + "id": "jsigkjnyb6m3qm", + "name": "Test1-worker-1", + "state": "Stopped", + "workspaceUrl": "git+https://github.com/Paperspace/mnist-sample", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "python mnist.py --data_format=channels_last", + "projectId": "prmr22ve0", + "project": "keton", + "container": "tensorflow/tensorflow:1.13.1-py3", + "containerUrl": "tensorflow/tensorflow:1.13.1-py3", + "baseContainer": "tensorflow/tensorflow:1.13.1-py3", + "baseContainerUrl": "tensorflow/tensorflow:1.13.1-py3", + "machineType": "K80", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "K80 hourly", + "startedByUserId": 
"ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-04-02T15:17:05.618Z", + "dtModified": "2019-04-02T15:17:05.618Z", + "dtProvisioningStarted": "2019-04-02T15:17:11.018Z", + "dtProvisioningFinished": "2019-04-02T15:17:56.754Z", + "dtStarted": "2019-04-02T15:17:56.754Z", + "dtFinished": "2019-04-02T17:02:26.950Z", + "dtTeardownStarted": "2019-04-02T17:02:26.987Z", + "dtTeardownFinished": "2019-04-02T17:02:54.596Z", + "dtDeleted": None, + "exitCode": 0, + "queuePosition": None, + "seqNum": 8, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1553793082-82415ed3", + "fqdn": "jsigkjnyb6m3qm.dgradient.paperspace.com", + "ports": "5000:5000", + "isPublic": False, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1553793082-82415ed3", + "cpuCount": 2, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "12297216 kB", + "gpuName": "Tesla K80", + "gpuSerial": "0320617087317", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "410.48", + "gpuCount": 1, + "gpuMem": "11441 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "ehla1kvbwzaco" + }, + { + "id": "j4g76vuppxqao", + "name": "job 1", + "state": "Stopped", + "workspaceUrl": "s3://ps-projects-development/przhbct98/j4g76vuppxqao/__init__.py.zip", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "echo keton", + "projectId": "przhbct98", + "project": "paperspace-python", + "container": "paperspace/tensorflow-python", + "containerUrl": "paperspace/tensorflow-python", + "baseContainer": "paperspace/tensorflow-python", + "baseContainerUrl": "paperspace/tensorflow-python", + "machineType": "K80", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "K80 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-04-04T15:12:34.414Z", + "dtModified": "2019-04-04T15:12:34.414Z", + "dtProvisioningStarted": "2019-04-04T15:12:41.338Z", + "dtProvisioningFinished": "2019-04-04T15:12:47.492Z", + "dtStarted": "2019-04-04T15:12:47.492Z", + "dtFinished": "2019-04-04T15:12:52.582Z", + "dtTeardownStarted": "2019-04-04T15:12:52.621Z", + "dtTeardownFinished": "2019-04-04T15:12:52.752Z", + "dtDeleted": None, + "exitCode": 0, + "queuePosition": None, + "seqNum": 1, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1553793082-82415ed3", + "fqdn": "j4g76vuppxqao.dgradient.paperspace.com", + "ports": None, + "isPublic": None, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": 
"gradient-host-1553793082-82415ed3", + "cpuCount": 2, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "12297216 kB", + "gpuName": "Tesla K80", + "gpuSerial": "0320617087317", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "410.48", + "gpuCount": 1, + "gpuMem": "11441 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "esfmkbql393ut0" + }, + { + "id": "jsbnvdhwb46vr9", + "name": "job 2", + "state": "Failed", + "workspaceUrl": "s3://ps-projects-development/przhbct98/jsbnvdhwb46vr9/temp.zip", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": ". test.sh\npython2 hello.py", + "projectId": "przhbct98", + "project": "paperspace-python", + "container": "paperspace/tensorflow-python", + "containerUrl": "paperspace/tensorflow-python", + "baseContainer": "paperspace/tensorflow-python", + "baseContainerUrl": "paperspace/tensorflow-python", + "machineType": "G1", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "G1 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-04-24T09:09:53.645Z", + "dtModified": "2019-04-24T09:09:53.645Z", + "dtProvisioningStarted": "2019-04-24T09:10:50.771Z", + "dtProvisioningFinished": "2019-04-24T09:11:50.968Z", + "dtStarted": "2019-04-24T09:11:50.968Z", + "dtFinished": "2019-04-24T09:11:56.092Z", + "dtTeardownStarted": "2019-04-24T09:11:56.150Z", + "dtTeardownFinished": "2019-04-24T09:11:56.346Z", + "dtDeleted": None, + "exitCode": 2, + "queuePosition": None, + "seqNum": 2, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1556074006", + "fqdn": "jsbnvdhwb46vr9.dgradient.paperspace.com", + "ports": None, + "isPublic": None, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1556074006", + "cpuCount": 1, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "1783384 kB", + "gpuName": None, + "gpuSerial": None, + "gpuDevice": None, + "gpuDriver": None, + "gpuCount": None, + "gpuMem": None, + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + 
"shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "esmnlol3tavvvf" + }, + { + "id": "jt8alwzv28kha", + "name": "job 3", + "state": "Failed", + "workspaceUrl": "s3://ps-projects-development/przhbct98/jt8alwzv28kha/temp.zip", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": ". test.sh\npython2 hello.py", + "projectId": "przhbct98", + "project": "paperspace-python", + "container": "paperspace/tensorflow-python", + "containerUrl": "paperspace/tensorflow-python", + "baseContainer": "paperspace/tensorflow-python", + "baseContainerUrl": "paperspace/tensorflow-python", + "machineType": "G1", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "G1 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-04-24T10:18:30.620Z", + "dtModified": "2019-04-24T10:18:30.620Z", + "dtProvisioningStarted": "2019-04-24T10:18:35.057Z", + "dtProvisioningFinished": "2019-04-24T10:18:38.241Z", + "dtStarted": "2019-04-24T10:18:38.241Z", + "dtFinished": "2019-04-24T10:18:43.348Z", + "dtTeardownStarted": "2019-04-24T10:18:43.394Z", + "dtTeardownFinished": "2019-04-24T10:18:43.544Z", + "dtDeleted": None, + "exitCode": 2, + "queuePosition": None, + "seqNum": 3, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1556074006", + "fqdn": "jt8alwzv28kha.dgradient.paperspace.com", + "ports": None, + "isPublic": None, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1556074006", + "cpuCount": 1, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "1783384 kB", + "gpuName": None, + "gpuSerial": None, + "gpuDevice": None, + "gpuDriver": None, + "gpuCount": None, + "gpuMem": None, + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "es47og38wzhnuo" + } +] diff --git a/tests/functional/test_jobs.py b/tests/functional/test_jobs.py new file mode 100644 index 0000000..e2bc40d --- /dev/null +++ b/tests/functional/test_jobs.py @@ -0,0 +1,111 @@ +import mock +from click.testing import CliRunner + +import paperspace +from paperspace.cli import cli +from paperspace.client import default_headers +from tests import example_responses, MockResponse + + +class TestListJobs(object): + URL = "https://api.paperspace.io/jobs/getJobs/" + EXPECTED_HEADERS = default_headers.copy() + BASIC_COMMAND = ["jobs", "list"] + EXPECTED_RESPONSE_JSON = example_responses.LIST_JOBS_RESPONSE_JSON + EXPECTED_STDOUT = """+----------------+---------------------------+-------------------+----------------+--------------+--------------------------+ +| ID | Name | 
Project | Cluster | Machine Type | Created | ++----------------+---------------------------+-------------------+----------------+--------------+--------------------------+ +| jsxeeba5qq99yn | job 1 | keton | PS Jobs on GCP | K80 | 2019-03-25T14:51:16.118Z | +| jfl063dsv634h | job 2 | keton | PS Jobs on GCP | P100 | 2019-03-25T14:54:30.866Z | +| jsvau8w47k78zm | Clone - jfl063dsv634h | keton | PS Jobs on GCP | P100 | 2019-03-25T15:04:43.844Z | +| j2eq99xhvgtum | keton1-worker-1 | keton | PS Jobs on GCP | P100 | 2019-03-25T15:07:30.383Z | +| jzzinybinuxf9 | keton2-worker-1 | keton | PS Jobs on GCP | P100 | 2019-03-25T15:18:51.461Z | +| jsb37duc1zlbz0 | keton4-worker-1 | keton | PS Jobs on GCP | P100 | 2019-03-25T15:29:04.601Z | +| jq41vipwy18f7 | keton4-parameter_server-1 | keton | PS Jobs on GCP | P100 | 2019-03-25T15:29:06.765Z | +| jsigkjnyb6m3qm | Test1-worker-1 | keton | PS Jobs on GCP | K80 | 2019-04-02T15:17:05.618Z | +| j4g76vuppxqao | job 1 | paperspace-python | PS Jobs on GCP | K80 | 2019-04-04T15:12:34.414Z | +| jsbnvdhwb46vr9 | job 2 | paperspace-python | PS Jobs on GCP | G1 | 2019-04-24T09:09:53.645Z | +| jt8alwzv28kha | job 3 | paperspace-python | PS Jobs on GCP | G1 | 2019-04-24T10:18:30.620Z | ++----------------+---------------------------+-------------------+----------------+--------------+--------------------------+ +""" + + BASIC_COMMAND_WITH_API_KEY = ["jobs", "list", "--apiKey", "some_key"] + EXPECTED_HEADERS_WITH_CHANGED_API_KEY = paperspace.client.default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY["X-API-Key"] = "some_key" + + RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"} + EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Invalid API token\n" + + RESPONSE_JSON_WHEN_NO_JOBS_WERE_FOUND = [] + EXPECTED_STDOUT_WHEN_NO_JOBS_WERE_FOUND = "No jobs found\n" + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_send_valid_post_request_and_print_table_when_jobs_list_was_used(self, get_patched): + get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=None, + params=None) + assert result.output == self.EXPECTED_STDOUT + assert result.exit_code == 0 + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_send_valid_post_request_when_jobs_list_was_used_with_api_key_option(self, get_patched): + get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND_WITH_API_KEY) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=None, + params=None) + assert result.output == self.EXPECTED_STDOUT + assert result.exit_code == 0 + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_send_valid_post_request_when_jobs_list_was_used_with_wrong_api_key(self, get_patched): + get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, status_code=400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND_WITH_API_KEY) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=None, + params=None) + assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN + assert result.exit_code == 0 + + 
@mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_print_error_message_when_no_job_was_not_found(self, get_patched): + get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WHEN_NO_JOBS_WERE_FOUND, + status_code=200) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=None, + params=None) + assert result.output == self.EXPECTED_STDOUT_WHEN_NO_JOBS_WERE_FOUND + assert result.exit_code == 0 + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_print_error_message_when_error_status_code_received_but_no_content_was_provided(self, get_patched): + get_patched.return_value = MockResponse(status_code=400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=None, + params=None) + assert result.output == "Error while parsing response data: No JSON\n" + assert result.exit_code == 0 From 549ff1be1036c90b5f6367149e1f0337526bb135 Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Fri, 26 Apr 2019 12:50:15 +0200 Subject: [PATCH 12/42] Remove unused Number type --- paperspace/cli/types.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/paperspace/cli/types.py b/paperspace/cli/types.py index 24aa916..7519d4e 100644 --- a/paperspace/cli/types.py +++ b/paperspace/cli/types.py @@ -15,21 +15,6 @@ def convert(self, value, param, ctx): return self.type_map[value] -class Number(click.ParamType): - name = "number" - - def convert(self, value, param, ctx): - try: - number = int(value) - except ValueError: - try: - number = float(value) - except ValueError: - self.fail('{} is not a valid number'.format(value), param, ctx) - - return number - - def json_string(val): """Wraps json.loads so the cli help shows proper option's type name instead of 'LOADS'""" return json.loads(val) \ No newline at end of file From 52999c05382c90248a5dfc914b8062406305093f Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Fri, 26 Apr 2019 15:43:22 +0200 Subject: [PATCH 13/42] Add filtering to 'jobs list' command --- paperspace/cli/jobs.py | 22 +++++++++++-- paperspace/commands/jobs.py | 5 +-- tests/functional/test_jobs.py | 58 +++++++++++++++++++++++++++++++++++ 3 files changed, 81 insertions(+), 4 deletions(-) diff --git a/paperspace/cli/jobs.py b/paperspace/cli/jobs.py index d34d235..4aafccb 100644 --- a/paperspace/cli/jobs.py +++ b/paperspace/cli/jobs.py @@ -15,6 +15,7 @@ def jobs_group(): "--jobId", "job_id", required=True, + help="Delete job with given ID", ) @common.api_key_option def delete_job(job_id, api_key=None): @@ -28,6 +29,7 @@ def delete_job(job_id, api_key=None): "--jobId", "job_id", required=True, + help="Stop job with given ID", ) @common.api_key_option def stop_job(job_id, api_key=None): @@ -37,8 +39,24 @@ def stop_job(job_id, api_key=None): @jobs_group.command("list", help="List jobs with optional filtering") +@click.option( + "--project", + "project", + help="Use to filter jobs by project name", +) +@click.option( + "--projectId", + "projectId", + help="Use to filter jobs by project ID", +) +@click.option( + "--experimentId", + "experimentId", + help="Use to filter jobs by experiment ID", +) @common.api_key_option -def list_jobs(api_key): +def list_jobs(api_key, **filters): + common.del_if_value_is_none(filters) jobs_api = client.API(config.CONFIG_HOST, api_key=api_key) command = 
jobs_commands.ListJobsCommand(api=jobs_api) - command.execute() + command.execute(filters) diff --git a/paperspace/commands/jobs.py b/paperspace/commands/jobs.py index 5af0b72..a43cc29 100644 --- a/paperspace/commands/jobs.py +++ b/paperspace/commands/jobs.py @@ -43,8 +43,9 @@ def execute(self, job_id): class ListJobsCommand(JobsCommandBase): - def execute(self): - response = self.api.get("/jobs/getJobs/", json=None) + def execute(self, filters=None): + json_ = filters or None + response = self.api.get("/jobs/getJobs/", json=json_) try: data = response.json() diff --git a/tests/functional/test_jobs.py b/tests/functional/test_jobs.py index e2bc40d..699ca9e 100644 --- a/tests/functional/test_jobs.py +++ b/tests/functional/test_jobs.py @@ -39,6 +39,35 @@ class TestListJobs(object): RESPONSE_JSON_WHEN_NO_JOBS_WERE_FOUND = [] EXPECTED_STDOUT_WHEN_NO_JOBS_WERE_FOUND = "No jobs found\n" + BASIC_COMMAND_WITH_FILTERING = [ + "jobs", "list", + "--project", "some_project_name", + "--experimentId", "some_experiment_id", + ] + EXPECTED_REQUEST_JSON_WITH_FILTERING = { + "project": "some_project_name", + "experimentId": "some_experiment_id", + } + + BASIC_COMMAND_WITH_MUTUALLY_EXCLUSIVE_FILTERS = [ + "jobs", "list", + "--project", "some_project_name", + "--projectId", "some_project_id", + ] + EXPECTED_REQUEST_JSON_WITH_MUTUALLY_EXCLUSIVE_FILTERS = { + "project": "some_project_name", + "projectId": "some_project_id", + } + RESPONSE_JSON_WITH_MUTUALLY_EXCLUSIVE_FILTERS = { + "error": { + "name": "Error", + "status": 422, + "message": "Incompatible parameters: project and projectId cannot both be specified", + }, + } + EXPECTED_STDOUT_WHEN_MUTUALLY_EXCLUSIVE_FILTERS = "Incompatible parameters: project and projectId " \ + "cannot both be specified\n" + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_valid_post_request_and_print_table_when_jobs_list_was_used(self, get_patched): get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) @@ -109,3 +138,32 @@ def test_should_print_error_message_when_error_status_code_received_but_no_conte params=None) assert result.output == "Error while parsing response data: No JSON\n" assert result.exit_code == 0 + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_send_valid_post_request_when_jobs_list_was_used_with_filter_options(self, get_patched): + get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND_WITH_FILTERING) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.EXPECTED_REQUEST_JSON_WITH_FILTERING, + params=None) + assert result.output == self.EXPECTED_STDOUT + assert result.exit_code == 0 + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_print_proper_message_when_jobs_list_was_used_with_mutually_exclusive_filters(self, get_patched): + get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WITH_MUTUALLY_EXCLUSIVE_FILTERS, + status_code=422) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND_WITH_MUTUALLY_EXCLUSIVE_FILTERS) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.EXPECTED_REQUEST_JSON_WITH_MUTUALLY_EXCLUSIVE_FILTERS, + params=None) + assert result.output == self.EXPECTED_STDOUT_WHEN_MUTUALLY_EXCLUSIVE_FILTERS + assert result.exit_code == 0 From 8901a9169ed75ad82d21d0dd4d43c8792a6751d8 Mon Sep 
17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Mudlaff?= Date: Mon, 29 Apr 2019 12:29:58 +0200 Subject: [PATCH 14/42] Feature PS-9868: Prepare logic for retrieving job logs --- paperspace/cli/cli.py | 2 ++ paperspace/cli/logs.py | 23 +++++++++++++++++++++++ paperspace/cli/types.py | 2 +- paperspace/commands/logs.py | 30 ++++++++++++++++++++++++++++++ 4 files changed, 56 insertions(+), 1 deletion(-) create mode 100644 paperspace/cli/logs.py create mode 100644 paperspace/commands/logs.py diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py index c1ef0c0..61c3c17 100644 --- a/paperspace/cli/cli.py +++ b/paperspace/cli/cli.py @@ -6,6 +6,7 @@ from paperspace import constants, client, config from paperspace.cli.common import api_key_option, del_if_value_is_none from paperspace.cli.jobs import jobs_group +from paperspace.cli.logs import logs_group from paperspace.cli.projects import projects_group from paperspace.cli.types import ChoiceType, json_string from paperspace.cli.validators import validate_mutually_exclusive, validate_email @@ -1054,3 +1055,4 @@ def version(): cli.add_command(jobs_group) cli.add_command(projects_group) +cli.add_command(logs_group) diff --git a/paperspace/cli/logs.py b/paperspace/cli/logs.py new file mode 100644 index 0000000..bf6b913 --- /dev/null +++ b/paperspace/cli/logs.py @@ -0,0 +1,23 @@ +import click + +from paperspace import client, config +from paperspace.cli import common +from paperspace.commands import logs as logs_commands + + +@click.group("logs", help="Manage gradient logs") +def logs_group(): + pass + + +@logs_group.command("list", help="List job logs") +@click.option( + "--jobId", + "job_id", + required=True +) +@common.api_key_option +def list_logs(job_id, api_key=None): + logs_api = client.API(config.CONFIG_LOG_HOST, api_key=api_key) + command = logs_commands.ListLogsCommand(api=logs_api) + command.execute(job_id) diff --git a/paperspace/cli/types.py b/paperspace/cli/types.py index 7519d4e..3ca0d8c 100644 --- a/paperspace/cli/types.py +++ b/paperspace/cli/types.py @@ -17,4 +17,4 @@ def convert(self, value, param, ctx): def json_string(val): """Wraps json.loads so the cli help shows proper option's type name instead of 'LOADS'""" - return json.loads(val) \ No newline at end of file + return json.loads(val) diff --git a/paperspace/commands/logs.py b/paperspace/commands/logs.py new file mode 100644 index 0000000..266e73c --- /dev/null +++ b/paperspace/commands/logs.py @@ -0,0 +1,30 @@ +from paperspace.commands import CommandBase + + +class LogsCommandBase(CommandBase): + def _log_message(self, response, success_msg_template, error_msg): + if response.ok: + try: + handle = response.json() + except (ValueError, KeyError): + self.logger.log(success_msg_template) + else: + msg = success_msg_template.format(**handle) + self.logger.log(msg) + else: + try: + data = response.json() + self.logger.log_error_response(data) + except ValueError: + self.logger.log(error_msg) + + +class ListLogsCommand(LogsCommandBase): + def execute(self, job_id): + url = f"/jobs/logs?jobId={job_id}" + response = self.api.get(url) + self._log_message( + response, + "Job logs retrieved", + "Unknown error while retrieving job logs" + ) From 35c6b836e36390d59fd715214693508ca4cb112c Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Fri, 26 Apr 2019 21:11:14 +0200 Subject: [PATCH 15/42] Minor changes to experiments list command --- paperspace/commands/experiments.py | 5 ++--- paperspace/config.py | 2 +- tests/functional/test_experiments.py | 8 ++++---- 3 files changed, 7 
insertions(+), 8 deletions(-) diff --git a/paperspace/commands/experiments.py b/paperspace/commands/experiments.py index d9941fe..7ef9dea 100644 --- a/paperspace/commands/experiments.py +++ b/paperspace/commands/experiments.py @@ -56,6 +56,7 @@ def __init__(self, api=experiments_api, logger_=logger): self.logger = logger_ def execute(self, project_handles=None): + project_handles = project_handles or [] params = self._get_query_params(project_handles) response = self.api.get("/experiments/", params=params) @@ -68,9 +69,7 @@ def execute(self, project_handles=None): @staticmethod def _get_query_params(project_handles): - # TODO: change to limit: -1 when PS-9535 is deployed to production - # to list all experiments - params = {"limit": 1000000} + params = {"limit": -1} # so the API sends back full list without pagination for i, handle in enumerate(project_handles): key = "projectHandle[{}]".format(i) params[key] = handle diff --git a/paperspace/config.py b/paperspace/config.py index e90efe2..5534359 100644 --- a/paperspace/config.py +++ b/paperspace/config.py @@ -16,7 +16,7 @@ def get_api_key(config_dir_path, config_file_name): _DEFAULT_CONFIG_HOST = "https://api.paperspace.io" _DEFAULT_CONFIG_LOG_HOST = "https://logs.paperspace.io" -_DEFAULT_CONFIG_EXPERIMENTS_HOST = "https://services.paperspace.io/experiments/v1/" # TODO: validate this +_DEFAULT_CONFIG_EXPERIMENTS_HOST = "https://services.paperspace.io/experiments/v1/" _DEFAULT_CONFIG_DIR_PATH = "~/.paperspace" _DEFAULT_CONFIG_FILE_NAME = os.path.expanduser("config.json") diff --git a/tests/functional/test_experiments.py b/tests/functional/test_experiments.py index 357b8b5..fcf8b28 100644 --- a/tests/functional/test_experiments.py +++ b/tests/functional/test_experiments.py @@ -747,7 +747,7 @@ def test_should_send_get_request_and_print_list_of_experiments(self, get_patched get_patched.assert_called_once_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params={"limit": 1000000}) + params={"limit": -1}) assert result.output == self.DETAILS_STDOUT assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" @@ -765,7 +765,7 @@ def test_should_send_get_request_and_paginate_list_when_output_table_len_is_gt_l get_patched.assert_called_once_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params={"limit": 1000000}) + params={"limit": -1}) pydoc_patched.pager.assert_called_once() assert result.exit_code == 0 @@ -781,7 +781,7 @@ def test_should_send_get_request_and_print_list_of_experiments_filtered_with_two get_patched.assert_called_once_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params={"limit": 1000000, + params={"limit": -1, "projectHandle[0]": u"handle1", "projectHandle[1]": u"handle2"}) @@ -799,7 +799,7 @@ def test_should_send_get_request_and_print_list_of_experiments_filtered_with_two get_patched.assert_called_once_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params={"limit": 1000000, + params={"limit": -1, "projectHandle[0]": u"handle1", "projectHandle[1]": u"handle2"}) From 667452e377fd1d2f7b72a85bfa5d2da26d5a8e8d Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Fri, 26 Apr 2019 22:41:53 +0200 Subject: [PATCH 16/42] Add 'models list' command with filtering by experimentId --- paperspace/cli/cli.py | 2 + paperspace/cli/models.py | 24 + paperspace/commands/models.py | 64 + paperspace/main.py | 2 +- tests/example_responses.py | 2572 ++++++++++++++++++----------- tests/functional/test_machines.py | 14 + tests/functional/test_models.py | 90 + 7 files changed, 1807 insertions(+), 961 deletions(-) 
create mode 100644 paperspace/cli/models.py
 create mode 100644 paperspace/commands/models.py
 create mode 100644 tests/functional/test_models.py

diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py
index c1ef0c0..c0b33fc 100644
--- a/paperspace/cli/cli.py
+++ b/paperspace/cli/cli.py
@@ -6,6 +6,7 @@
 from paperspace import constants, client, config
 from paperspace.cli.common import api_key_option, del_if_value_is_none
 from paperspace.cli.jobs import jobs_group
+from paperspace.cli.models import models_group
 from paperspace.cli.projects import projects_group
 from paperspace.cli.types import ChoiceType, json_string
 from paperspace.cli.validators import validate_mutually_exclusive, validate_email
@@ -1054,3 +1055,4 @@ def version():
 
 cli.add_command(jobs_group)
 cli.add_command(projects_group)
+cli.add_command(models_group)
diff --git a/paperspace/cli/models.py b/paperspace/cli/models.py
new file mode 100644
index 0000000..9e42791
--- /dev/null
+++ b/paperspace/cli/models.py
@@ -0,0 +1,24 @@
+import click
+
+from paperspace import client, config
+from paperspace.cli import common
+from paperspace.commands import models as models_commands
+
+
+@click.group("models", help="Manage models")
+def models_group():
+    pass
+
+
+@models_group.command("list", help="List models with optional filtering")
+@click.option(
+    "--experimentId",
+    "experimentId",
+    help="Use to filter models by experiment ID",
+)
+@common.api_key_option
+def list_models(api_key, **filters):
+    common.del_if_value_is_none(filters)
+    models_api = client.API(config.CONFIG_HOST, api_key=api_key)
+    command = models_commands.ListModelsCommand(api=models_api)
+    command.execute(filters)
diff --git a/paperspace/commands/models.py b/paperspace/commands/models.py
new file mode 100644
index 0000000..67db490
--- /dev/null
+++ b/paperspace/commands/models.py
@@ -0,0 +1,64 @@
+import pydoc
+
+import terminaltables
+
+from paperspace.utils import get_terminal_lines
+
+from paperspace.commands import CommandBase
+
+
+class ListModelsCommand(CommandBase):
+    def execute(self, filters):
+        json_ = self._get_request_json(filters)
+        params = {"limit": -1}  # so the api returns full list without pagination
+        response = self.api.get("/mlModels/getModelList/", json=json_, params=params)
+
+        try:
+            models = self._get_models_list(response)
+        except (ValueError, KeyError) as e:
+            self.logger.log("Error while parsing response data: {}".format(e))
+        else:
+            self._log_models_list(models)
+
+    @staticmethod
+    def _get_request_json(filters):
+        experiment_id = filters.get("experimentId")
+        if not experiment_id:
+            return None
+
+        json_ = {"filter": {"where": {"and": [{"experimentId": experiment_id}]}}}
+        return json_
+
+    def _get_models_list(self, response):
+        if not response.ok:
+            raise ValueError("Unknown error")
+
+        data = response.json()["modelList"]
+        self.logger.debug(data)
+        return data
+
+    def _log_models_list(self, models):
+        if not models:
+            self.logger.log("No models found")
+        else:
+            table_str = self._make_models_list_table(models)
+            if len(table_str.splitlines()) > get_terminal_lines():
+                pydoc.pager(table_str)
+            else:
+                self.logger.log(table_str)
+
+    @staticmethod
+    def _make_models_list_table(models):
+        data = [("Name", "ID", "Model Type", "Project ID", "Experiment ID")]
+        for model in models:
+            name = model.get("name")
+            id_ = model.get("id")
+            model_type = model.get("modelType")
+            project_id = model.get("projectId")
+            experiment_id = model.get("experimentId")
+            data.append((name, id_, model_type, project_id, experiment_id))
+
+        ascii_table = 
terminaltables.AsciiTable(data) + table_string = ascii_table.table + return table_string + diff --git a/paperspace/main.py b/paperspace/main.py index a1c8504..3acda14 100644 --- a/paperspace/main.py +++ b/paperspace/main.py @@ -9,7 +9,7 @@ def main(): if len(sys.argv) >= 2 and sys.argv[1] in ('experiments', 'deployments', 'machines', 'login', 'logout', 'version', - 'projects', 'jobs'): + 'projects', 'jobs', 'models'): cli(sys.argv[1:]) args = sys.argv[:] diff --git a/tests/example_responses.py b/tests/example_responses.py index e60e3e6..778e8d2 100644 --- a/tests/example_responses.py +++ b/tests/example_responses.py @@ -1274,976 +1274,1628 @@ }, } - LIST_PROJECTS_RESPONSE = { - "data": [ - { - "name": "test_project", - "handle": "prq70zy79", - "dtCreated": "2019-03-18T13:24:46.666Z", - "dtDeleted": None, - "lastJobSeqNum": 2, - "repoNodeId": None, - "repoName": None, - "repoUrl": None, - "experiments": { - "data": [ - { - "dtCreated": "2019-04-05T15:10:55.692629+00:00", + "data": [ + { + "name": "test_project", + "handle": "prq70zy79", + "dtCreated": "2019-03-18T13:24:46.666Z", "dtDeleted": None, - "dtFinished": None, - "dtModified": "2019-04-05T15:10:55.692629+00:00", - "dtProvisioningFinished": None, - "dtProvisioningStarted": None, - "dtStarted": None, - "dtTeardownFinished": None, - "dtTeardownStarted": None, - "experimentError": None, - "experimentTemplateHistoryId": 22159, - "experimentTemplateId": 60, - "experimentTypeId": 1, - "handle": "estgcoux8igx32", - "id": 22123, - "projectHandle": "prq70zy79", - "projectId": 612, - "started_by_user_id": 1655, - "state": 1, - "templateHistory": { - "dtCreated": "2019-04-05T15:10:54.923725+00:00", - "dtDeleted": None, - "experimentTemplateId": 60, - "id": 22159, - "params": { - "is_preemptible": False, - "name": "dsfads", - "ports": 5000, - "project_handle": "prq70zy79", - "worker_command": "sadas", - "worker_container": "asd", - "worker_machine_type": "C2", - "worker_use_dockerfile": False, - "workspaceUrl": "example.com" - }, - "triggerEvent": None, - "triggerEventId": None + "lastJobSeqNum": 2, + "repoNodeId": None, + "repoName": None, + "repoUrl": None, + "experiments": { + "data": [ + { + "dtCreated": "2019-04-05T15:10:55.692629+00:00", + "dtDeleted": None, + "dtFinished": None, + "dtModified": "2019-04-05T15:10:55.692629+00:00", + "dtProvisioningFinished": None, + "dtProvisioningStarted": None, + "dtStarted": None, + "dtTeardownFinished": None, + "dtTeardownStarted": None, + "experimentError": None, + "experimentTemplateHistoryId": 22159, + "experimentTemplateId": 60, + "experimentTypeId": 1, + "handle": "estgcoux8igx32", + "id": 22123, + "projectHandle": "prq70zy79", + "projectId": 612, + "started_by_user_id": 1655, + "state": 1, + "templateHistory": { + "dtCreated": "2019-04-05T15:10:54.923725+00:00", + "dtDeleted": None, + "experimentTemplateId": 60, + "id": 22159, + "params": { + "is_preemptible": False, + "name": "dsfads", + "ports": 5000, + "project_handle": "prq70zy79", + "worker_command": "sadas", + "worker_container": "asd", + "worker_machine_type": "C2", + "worker_use_dockerfile": False, + "workspaceUrl": "example.com" + }, + "triggerEvent": None, + "triggerEventId": None + } + } + ], + "meta": { + "itemGroup": { + "key": "projectHandle", + "value": "prq70zy79" + }, + "totalItems": 1 + } } - } - ], - "meta": { - "itemGroup": { - "key": "projectHandle", - "value": "prq70zy79" - }, - "totalItems": 1 - } - } - }, - { - "name": "keton", - "handle": "prmr22ve0", - "dtCreated": "2019-03-25T14:50:43.202Z", - "dtDeleted": None, - 
"lastJobSeqNum": 8, - "repoNodeId": None, - "repoName": None, - "repoUrl": None, - "experiments": { - "data": [ - { - "dtCreated": "2019-04-02T15:17:03.393886+00:00", + }, + { + "name": "keton", + "handle": "prmr22ve0", + "dtCreated": "2019-03-25T14:50:43.202Z", "dtDeleted": None, - "dtFinished": "2019-04-02T17:02:54.654569+00:00", - "dtModified": "2019-04-02T15:17:03.393886+00:00", - "dtProvisioningFinished": "2019-04-02T15:17:10.978198+00:00", - "dtProvisioningStarted": "2019-04-02T15:17:10.978198+00:00", - "dtStarted": "2019-04-02T15:17:10.978198+00:00", - "dtTeardownFinished": "2019-04-02T17:02:54.654569+00:00", - "dtTeardownStarted": "2019-04-02T17:02:54.654569+00:00", - "experimentError": None, - "experimentTemplateHistoryId": 22130, - "experimentTemplateId": 174, - "experimentTypeId": 1, - "handle": "ehla1kvbwzaco", - "id": 22094, - "projectHandle": "prmr22ve0", - "projectId": 626, - "started_by_user_id": 1655, - "state": 5, - "templateHistory": { - "dtCreated": "2019-04-02T15:17:02.663449+00:00", - "dtDeleted": None, - "experimentTemplateId": 174, - "id": 22130, - "params": { - "is_preemptible": False, - "model_path": "/artifacts", - "model_type": "Tensorflow", - "name": "Test1", - "ports": 5000, - "project_handle": "prmr22ve0", - "worker_command": "python mnist.py --data_format=channels_last", - "worker_container": "tensorflow/tensorflow:1.13.1-py3", - "worker_machine_type": "K80", - "workspaceUrl": "https://github.com/Paperspace/mnist-sample" - }, - "triggerEvent": None, - "triggerEventId": None + "lastJobSeqNum": 8, + "repoNodeId": None, + "repoName": None, + "repoUrl": None, + "experiments": { + "data": [ + { + "dtCreated": "2019-04-02T15:17:03.393886+00:00", + "dtDeleted": None, + "dtFinished": "2019-04-02T17:02:54.654569+00:00", + "dtModified": "2019-04-02T15:17:03.393886+00:00", + "dtProvisioningFinished": "2019-04-02T15:17:10.978198+00:00", + "dtProvisioningStarted": "2019-04-02T15:17:10.978198+00:00", + "dtStarted": "2019-04-02T15:17:10.978198+00:00", + "dtTeardownFinished": "2019-04-02T17:02:54.654569+00:00", + "dtTeardownStarted": "2019-04-02T17:02:54.654569+00:00", + "experimentError": None, + "experimentTemplateHistoryId": 22130, + "experimentTemplateId": 174, + "experimentTypeId": 1, + "handle": "ehla1kvbwzaco", + "id": 22094, + "projectHandle": "prmr22ve0", + "projectId": 626, + "started_by_user_id": 1655, + "state": 5, + "templateHistory": { + "dtCreated": "2019-04-02T15:17:02.663449+00:00", + "dtDeleted": None, + "experimentTemplateId": 174, + "id": 22130, + "params": { + "is_preemptible": False, + "model_path": "/artifacts", + "model_type": "Tensorflow", + "name": "Test1", + "ports": 5000, + "project_handle": "prmr22ve0", + "worker_command": "python mnist.py --data_format=channels_last", + "worker_container": "tensorflow/tensorflow:1.13.1-py3", + "worker_machine_type": "K80", + "workspaceUrl": "https://github.com/Paperspace/mnist-sample" + }, + "triggerEvent": None, + "triggerEventId": None + } + } + ], + "meta": { + "itemGroup": { + "key": "projectHandle", + "value": "prmr22ve0" + }, + "totalItems": 1 + } } - } - ], - "meta": { - "itemGroup": { - "key": "projectHandle", - "value": "prmr22ve0" - }, - "totalItems": 1 - } - } - }, - { - "name": "paperspace-python", - "handle": "przhbct98", - "dtCreated": "2019-04-04T15:12:34.229Z", - "dtDeleted": None, - "lastJobSeqNum": 3, - "repoNodeId": None, - "repoName": None, - "repoUrl": None, - "experiments": { - "data": [ - { - "dtCreated": "2019-04-24T10:18:30.523193+00:00", + }, + { + "name": "paperspace-python", + 
"handle": "przhbct98", + "dtCreated": "2019-04-04T15:12:34.229Z", "dtDeleted": None, - "dtFinished": "2019-04-24T10:18:43.613748+00:00", - "dtModified": "2019-04-24T10:18:30.523193+00:00", - "dtProvisioningFinished": "2019-04-24T10:18:35.010792+00:00", - "dtProvisioningStarted": "2019-04-24T10:18:35.010792+00:00", - "dtStarted": "2019-04-24T10:18:35.010792+00:00", - "dtTeardownFinished": "2019-04-24T10:18:43.613748+00:00", - "dtTeardownStarted": "2019-04-24T10:18:43.613748+00:00", - "experimentError": None, - "experimentTemplateHistoryId": 22311, - "experimentTemplateId": 186, - "experimentTypeId": 1, - "handle": "es47og38wzhnuo", - "id": 22270, - "projectHandle": "przhbct98", - "projectId": 649, - "started_by_user_id": 1655, - "state": 7, - "templateHistory": { - "dtCreated": "2019-04-24T10:18:30.523193+00:00", - "dtDeleted": None, - "experimentTemplateId": 186, - "id": 22311, - "params": { - "command": ". test.sh\npython2 hello.py", - "container": "paperspace/tensorflow-python", - "machineType": "G1", - "project": "paperspace-python", - "workspaceFileName": "temp.zip" - }, - "triggerEvent": None, - "triggerEventId": None + "lastJobSeqNum": 3, + "repoNodeId": None, + "repoName": None, + "repoUrl": None, + "experiments": { + "data": [ + { + "dtCreated": "2019-04-24T10:18:30.523193+00:00", + "dtDeleted": None, + "dtFinished": "2019-04-24T10:18:43.613748+00:00", + "dtModified": "2019-04-24T10:18:30.523193+00:00", + "dtProvisioningFinished": "2019-04-24T10:18:35.010792+00:00", + "dtProvisioningStarted": "2019-04-24T10:18:35.010792+00:00", + "dtStarted": "2019-04-24T10:18:35.010792+00:00", + "dtTeardownFinished": "2019-04-24T10:18:43.613748+00:00", + "dtTeardownStarted": "2019-04-24T10:18:43.613748+00:00", + "experimentError": None, + "experimentTemplateHistoryId": 22311, + "experimentTemplateId": 186, + "experimentTypeId": 1, + "handle": "es47og38wzhnuo", + "id": 22270, + "projectHandle": "przhbct98", + "projectId": 649, + "started_by_user_id": 1655, + "state": 7, + "templateHistory": { + "dtCreated": "2019-04-24T10:18:30.523193+00:00", + "dtDeleted": None, + "experimentTemplateId": 186, + "id": 22311, + "params": { + "command": ". 
test.sh\npython2 hello.py", + "container": "paperspace/tensorflow-python", + "machineType": "G1", + "project": "paperspace-python", + "workspaceFileName": "temp.zip" + }, + "triggerEvent": None, + "triggerEventId": None + } + } + ], + "meta": { + "itemGroup": { + "key": "projectHandle", + "value": "przhbct98" + }, + "totalItems": 1 + } } - } - ], - "meta": { - "itemGroup": { - "key": "projectHandle", - "value": "przhbct98" - }, - "totalItems": 1 } - } + ], + "meta": { + "totalItems": 3 } - ], - "meta": { - "totalItems": 3 - } } - LIST_JOBS_RESPONSE_JSON = [ - { - "id": "jsxeeba5qq99yn", - "name": "job 1", - "state": "Error", - "workspaceUrl": "none", - "workingDirectory": "/paperspace", - "artifactsDirectory": "/artifacts", - "entrypoint": "nvidia-smi", - "projectId": "prmr22ve0", - "project": "keton", - "container": "Test-Container", - "containerUrl": "paperspace/tensorflow:1.5.0-gpu", - "baseContainer": "Test-Container", - "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", - "machineType": "K80", - "cluster": "PS Jobs on GCP", - "clusterId": "clkyczmyz", - "usageRate": "K80 hourly", - "startedByUserId": "ukgvw4i8", - "parentJobId": None, - "jobError": "Error quering for experiment for job: sql: Scan error on column index 4, name \"model_path\": unsupported Scan, storing driver.Value type into type *string", - "dtCreated": "2019-03-25T14:51:16.118Z", - "dtModified": "2019-03-25T14:51:16.118Z", - "dtProvisioningStarted": None, - "dtProvisioningFinished": None, - "dtStarted": None, - "dtFinished": "2019-03-27T13:53:34.188Z", - "dtTeardownStarted": None, - "dtTeardownFinished": None, - "dtDeleted": None, - "exitCode": None, - "queuePosition": None, - "seqNum": 1, - "storageRegion": "GCP West", - "clusterMachine": "gradient-host-1553525480", - "fqdn": "jsxeeba5qq99yn.dgradient.paperspace.com", - "ports": None, - "isPublic": None, - "containerUser": None, - "hasCode": None, - "codeUploaded": None, - "codeCommit": None, - "runTillCancelled": None, - "pushOnCompletion": None, - "newImageName": None, - "cpuHostname": None, - "cpuCount": None, - "cpuModel": None, - "cpuFlags": None, - "cpuMem": None, - "gpuName": None, - "gpuSerial": None, - "gpuDevice": None, - "gpuDriver": None, - "gpuCount": None, - "gpuMem": None, - "tpuType": None, - "tpuName": None, - "tpuGrpcUrl": None, - "tpuTFVersion": None, - "tpuDatasetDir": None, - "tpuModelDir": None, - "targetNodeAttrs": None, - "jobEnv": None, - "sharedMemMBytes": None, - "shutdownTimeout": None, - "isPreemptible": False, - "metricsURL": "metrics-gcp-dev.paperspace.io", - "customMetrics": None, - "experimentId": "esk8lny3pxyqd6" - }, - { - "id": "jfl063dsv634h", - "name": "job 2", - "state": "Stopped", - "workspaceUrl": "none", - "workingDirectory": "/paperspace", - "artifactsDirectory": "/artifacts", - "entrypoint": "nvidia-smi", - "projectId": "prmr22ve0", - "project": "keton", - "container": "Test-Container", - "containerUrl": "paperspace/tensorflow:1.5.0-gpu", - "baseContainer": "Test-Container", - "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", - "machineType": "P100", - "cluster": "PS Jobs on GCP", - "clusterId": "clkyczmyz", - "usageRate": "P100 hourly", - "startedByUserId": "ukgvw4i8", - "parentJobId": None, - "jobError": None, - "dtCreated": "2019-03-25T14:54:30.866Z", - "dtModified": "2019-03-25T14:54:30.866Z", - "dtProvisioningStarted": "2019-03-25T14:59:15.818Z", - "dtProvisioningFinished": "2019-03-25T14:59:20.542Z", - "dtStarted": "2019-03-25T14:59:20.542Z", - "dtFinished": "2019-03-25T14:59:25.631Z", - "dtTeardownStarted": 
"2019-03-25T14:59:25.669Z", - "dtTeardownFinished": "2019-03-25T14:59:25.758Z", - "dtDeleted": None, - "exitCode": 0, - "queuePosition": None, - "seqNum": 2, - "storageRegion": "GCP West", - "clusterMachine": "gradient-host-1553525723", - "fqdn": "jfl063dsv634h.dgradient.paperspace.com", - "ports": None, - "isPublic": None, - "containerUser": None, - "hasCode": None, - "codeUploaded": None, - "codeCommit": None, - "runTillCancelled": None, - "pushOnCompletion": None, - "newImageName": None, - "cpuHostname": "gradient-host-1553525723", - "cpuCount": 4, - "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", - "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", - "cpuMem": "24683148 kB", - "gpuName": "Tesla P100-PCIE-16GB", - "gpuSerial": "0324317067114", - "gpuDevice": "/dev/nvidia0", - "gpuDriver": "410.48", - "gpuCount": 1, - "gpuMem": "16280 MiB", - "tpuType": None, - "tpuName": None, - "tpuGrpcUrl": None, - "tpuTFVersion": None, - "tpuDatasetDir": None, - "tpuModelDir": None, - "targetNodeAttrs": None, - "jobEnv": None, - "sharedMemMBytes": None, - "shutdownTimeout": None, - "isPreemptible": False, - "metricsURL": "metrics-gcp-dev.paperspace.io", - "customMetrics": None, - "experimentId": "ejd2v80p7cw6m" - }, - { - "id": "jsvau8w47k78zm", - "name": "Clone - jfl063dsv634h", - "state": "Stopped", - "workspaceUrl": "none", - "workingDirectory": "/paperspace", - "artifactsDirectory": "/artifacts", - "entrypoint": "nvidia-smi", - "projectId": "prmr22ve0", - "project": "keton", - "container": "Test-Container", - "containerUrl": "paperspace/tensorflow:1.5.0-gpu", - "baseContainer": "Test-Container", - "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", - "machineType": "P100", - "cluster": "PS Jobs on GCP", - "clusterId": "clkyczmyz", - "usageRate": "P100 hourly", - "startedByUserId": "ukgvw4i8", - "parentJobId": "jfl063dsv634h", - "jobError": None, - "dtCreated": "2019-03-25T15:04:43.844Z", - "dtModified": "2019-03-25T15:04:43.844Z", - "dtProvisioningStarted": "2019-03-25T15:07:43.854Z", - "dtProvisioningFinished": "2019-03-25T15:07:48.435Z", - "dtStarted": "2019-03-25T15:07:48.435Z", - "dtFinished": "2019-03-25T15:07:53.523Z", - "dtTeardownStarted": "2019-03-25T15:07:53.561Z", - "dtTeardownFinished": "2019-03-25T15:07:53.649Z", - "dtDeleted": None, - "exitCode": 0, - "queuePosition": None, - "seqNum": 3, - "storageRegion": "GCP West", - "clusterMachine": "gradient-host-1553526309", - "fqdn": "jsvau8w47k78zm.dgradient.paperspace.com", - "ports": None, - "isPublic": None, - "containerUser": None, - "hasCode": None, - "codeUploaded": None, - "codeCommit": None, - "runTillCancelled": None, - "pushOnCompletion": None, - "newImageName": None, - "cpuHostname": "gradient-host-1553526309", - "cpuCount": 4, - "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", - "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase 
tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", - "cpuMem": "24683148 kB", - "gpuName": "Tesla P100-PCIE-16GB", - "gpuSerial": "0324317067114", - "gpuDevice": "/dev/nvidia0", - "gpuDriver": "410.48", - "gpuCount": 1, - "gpuMem": "16280 MiB", - "tpuType": None, - "tpuName": None, - "tpuGrpcUrl": None, - "tpuTFVersion": None, - "tpuDatasetDir": None, - "tpuModelDir": None, - "targetNodeAttrs": None, - "jobEnv": None, - "sharedMemMBytes": None, - "shutdownTimeout": None, - "isPreemptible": False, - "metricsURL": "metrics-gcp-dev.paperspace.io", - "customMetrics": None, - "experimentId": None - }, - { - "id": "j2eq99xhvgtum", - "name": "keton1-worker-1", - "state": "Pending", - "workspaceUrl": None, - "workingDirectory": "/paperspace", - "artifactsDirectory": "/artifacts", - "entrypoint": "echo keton", - "projectId": "prmr22ve0", - "project": "keton", - "container": "Test-Container", - "containerUrl": "paperspace/tensorflow:1.5.0-gpu", - "baseContainer": "Test-Container", - "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", - "machineType": "P100", - "cluster": "PS Jobs on GCP", - "clusterId": "clkyczmyz", - "usageRate": "P100 hourly", - "startedByUserId": "ukgvw4i8", - "parentJobId": None, - "jobError": None, - "dtCreated": "2019-03-25T15:07:30.383Z", - "dtModified": "2019-03-25T15:07:30.383Z", - "dtProvisioningStarted": None, - "dtProvisioningFinished": None, - "dtStarted": None, - "dtFinished": "2019-03-25T15:07:30.383Z", - "dtTeardownStarted": None, - "dtTeardownFinished": None, - "dtDeleted": None, - "exitCode": None, - "queuePosition": None, - "seqNum": 4, - "storageRegion": "GCP West", - "clusterMachine": None, - "fqdn": "j2eq99xhvgtum.dgradient.paperspace.com", - "ports": "5000", - "isPublic": False, - "containerUser": None, - "hasCode": None, - "codeUploaded": None, - "codeCommit": None, - "runTillCancelled": None, - "pushOnCompletion": None, - "newImageName": None, - "cpuHostname": None, - "cpuCount": None, - "cpuModel": None, - "cpuFlags": None, - "cpuMem": None, - "gpuName": None, - "gpuSerial": None, - "gpuDevice": None, - "gpuDriver": None, - "gpuCount": None, - "gpuMem": None, - "tpuType": None, - "tpuName": None, - "tpuGrpcUrl": None, - "tpuTFVersion": None, - "tpuDatasetDir": None, - "tpuModelDir": None, - "targetNodeAttrs": None, - "jobEnv": None, - "sharedMemMBytes": None, - "shutdownTimeout": None, - "isPreemptible": False, - "metricsURL": "metrics-gcp-dev.paperspace.io", - "customMetrics": None, - "experimentId": "esibxync23szaq" - }, - { - "id": "jzzinybinuxf9", - "name": "keton2-worker-1", - "state": "Stopped", - "workspaceUrl": "git+https://github.com/Paperspace/multinode-mnist", - "workingDirectory": "/paperspace", - "artifactsDirectory": "/artifacts", - "entrypoint": "echo keton", - "projectId": "prmr22ve0", - "project": "keton", - "container": "Test-Container", - "containerUrl": "paperspace/tensorflow:1.5.0-gpu", - "baseContainer": "Test-Container", - "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", - "machineType": "P100", - "cluster": "PS Jobs on GCP", - "clusterId": "clkyczmyz", - "usageRate": "P100 hourly", - "startedByUserId": "ukgvw4i8", - "parentJobId": None, - "jobError": None, - "dtCreated": "2019-03-25T15:18:51.461Z", - "dtModified": "2019-03-25T15:18:51.461Z", - "dtProvisioningStarted": "2019-03-25T15:18:58.089Z", - "dtProvisioningFinished": "2019-03-25T15:19:03.246Z", - "dtStarted": "2019-03-25T15:19:03.246Z", - "dtFinished": "2019-03-25T15:19:08.337Z", - "dtTeardownStarted": "2019-03-25T15:19:08.374Z", - 
"dtTeardownFinished": "2019-03-25T15:19:08.461Z", - "dtDeleted": None, - "exitCode": 0, - "queuePosition": None, - "seqNum": 5, - "storageRegion": "GCP West", - "clusterMachine": "gradient-host-1553526309", - "fqdn": "jzzinybinuxf9.dgradient.paperspace.com", - "ports": "5000", - "isPublic": False, - "containerUser": None, - "hasCode": None, - "codeUploaded": None, - "codeCommit": None, - "runTillCancelled": None, - "pushOnCompletion": None, - "newImageName": None, - "cpuHostname": "gradient-host-1553526309", - "cpuCount": 4, - "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", - "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", - "cpuMem": "24683148 kB", - "gpuName": "Tesla P100-PCIE-16GB", - "gpuSerial": "0324317067114", - "gpuDevice": "/dev/nvidia0", - "gpuDriver": "410.48", - "gpuCount": 1, - "gpuMem": "16280 MiB", - "tpuType": None, - "tpuName": None, - "tpuGrpcUrl": None, - "tpuTFVersion": None, - "tpuDatasetDir": None, - "tpuModelDir": None, - "targetNodeAttrs": None, - "jobEnv": None, - "sharedMemMBytes": None, - "shutdownTimeout": None, - "isPreemptible": False, - "metricsURL": "metrics-gcp-dev.paperspace.io", - "customMetrics": None, - "experimentId": "ep6hmawh97q0v" - }, - { - "id": "jsb37duc1zlbz0", - "name": "keton4-worker-1", - "state": "Stopped", - "workspaceUrl": "git+https://github.com/Paperspace/multinode-mnist", - "workingDirectory": "/paperspace", - "artifactsDirectory": "/artifacts", - "entrypoint": "echo siema", - "projectId": "prmr22ve0", - "project": "keton", - "container": "Test-Container", - "containerUrl": "paperspace/tensorflow:1.5.0-gpu", - "baseContainer": "Test-Container", - "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", - "machineType": "P100", - "cluster": "PS Jobs on GCP", - "clusterId": "clkyczmyz", - "usageRate": "P100 hourly", - "startedByUserId": "ukgvw4i8", - "parentJobId": None, - "jobError": None, - "dtCreated": "2019-03-25T15:29:04.601Z", - "dtModified": "2019-03-25T15:29:04.601Z", - "dtProvisioningStarted": "2019-03-25T15:30:42.529Z", - "dtProvisioningFinished": "2019-03-25T15:30:48.252Z", - "dtStarted": "2019-03-25T15:30:48.252Z", - "dtFinished": "2019-03-25T15:30:53.349Z", - "dtTeardownStarted": "2019-03-25T15:30:53.387Z", - "dtTeardownFinished": "2019-03-25T15:30:53.470Z", - "dtDeleted": None, - "exitCode": 0, - "queuePosition": None, - "seqNum": 6, - "storageRegion": "GCP West", - "clusterMachine": "gradient-host-1553526309", - "fqdn": "jsb37duc1zlbz0.dgradient.paperspace.com", - "ports": "3456", - "isPublic": False, - "containerUser": None, - "hasCode": None, - "codeUploaded": None, - "codeCommit": None, - "runTillCancelled": None, - "pushOnCompletion": None, - "newImageName": None, - "cpuHostname": "gradient-host-1553526309", - "cpuCount": 4, - "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", - "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase 
tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", - "cpuMem": "24683148 kB", - "gpuName": "Tesla P100-PCIE-16GB", - "gpuSerial": "0324317067114", - "gpuDevice": "/dev/nvidia0", - "gpuDriver": "410.48", - "gpuCount": 1, - "gpuMem": "16280 MiB", - "tpuType": None, - "tpuName": None, - "tpuGrpcUrl": None, - "tpuTFVersion": None, - "tpuDatasetDir": None, - "tpuModelDir": None, - "targetNodeAttrs": None, - "jobEnv": None, - "sharedMemMBytes": None, - "shutdownTimeout": None, - "isPreemptible": False, - "metricsURL": "metrics-gcp-dev.paperspace.io", - "customMetrics": None, - "experimentId": "esgeuvkdokyom2" - }, - { - "id": "jq41vipwy18f7", - "name": "keton4-parameter_server-1", - "state": "Stopped", - "workspaceUrl": "git+https://github.com/Paperspace/multinode-mnist", - "workingDirectory": "/paperspace", - "artifactsDirectory": "/artifacts", - "entrypoint": "ls", - "projectId": "prmr22ve0", - "project": "keton", - "container": "Test-Container", - "containerUrl": "paperspace/tensorflow:1.5.0-gpu", - "baseContainer": "Test-Container", - "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", - "machineType": "P100", - "cluster": "PS Jobs on GCP", - "clusterId": "clkyczmyz", - "usageRate": "P100 hourly", - "startedByUserId": "ukgvw4i8", - "parentJobId": None, - "jobError": None, - "dtCreated": "2019-03-25T15:29:06.765Z", - "dtModified": "2019-03-25T15:29:06.765Z", - "dtProvisioningStarted": "2019-03-25T15:30:41.416Z", - "dtProvisioningFinished": "2019-03-25T15:30:48.004Z", - "dtStarted": "2019-03-25T15:30:48.004Z", - "dtFinished": "2019-03-25T15:30:53.097Z", - "dtTeardownStarted": "2019-03-25T15:30:53.135Z", - "dtTeardownFinished": "2019-03-25T15:30:53.223Z", - "dtDeleted": None, - "exitCode": 0, - "queuePosition": None, - "seqNum": 7, - "storageRegion": "GCP West", - "clusterMachine": "gradient-host-1553526384", - "fqdn": "jq41vipwy18f7.dgradient.paperspace.com", - "ports": "3456", - "isPublic": False, - "containerUser": None, - "hasCode": None, - "codeUploaded": None, - "codeCommit": None, - "runTillCancelled": None, - "pushOnCompletion": None, - "newImageName": None, - "cpuHostname": "gradient-host-1553526384", - "cpuCount": 4, - "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", - "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", - "cpuMem": "24683148 kB", - "gpuName": "Tesla P100-PCIE-16GB", - "gpuSerial": "0324317004340", - "gpuDevice": "/dev/nvidia0", - "gpuDriver": "410.48", - "gpuCount": 1, - "gpuMem": "16280 MiB", - "tpuType": None, - "tpuName": None, - "tpuGrpcUrl": None, - "tpuTFVersion": None, - "tpuDatasetDir": None, - "tpuModelDir": None, - "targetNodeAttrs": None, - "jobEnv": None, - "sharedMemMBytes": None, - "shutdownTimeout": None, - "isPreemptible": False, - "metricsURL": "metrics-gcp-dev.paperspace.io", - "customMetrics": None, - "experimentId": "esgeuvkdokyom2" - }, - { - "id": "jsigkjnyb6m3qm", - "name": "Test1-worker-1", - "state": "Stopped", - "workspaceUrl": "git+https://github.com/Paperspace/mnist-sample", - "workingDirectory": "/paperspace", - "artifactsDirectory": "/artifacts", - "entrypoint": "python mnist.py --data_format=channels_last", - "projectId": 
"prmr22ve0", - "project": "keton", - "container": "tensorflow/tensorflow:1.13.1-py3", - "containerUrl": "tensorflow/tensorflow:1.13.1-py3", - "baseContainer": "tensorflow/tensorflow:1.13.1-py3", - "baseContainerUrl": "tensorflow/tensorflow:1.13.1-py3", - "machineType": "K80", - "cluster": "PS Jobs on GCP", - "clusterId": "clkyczmyz", - "usageRate": "K80 hourly", - "startedByUserId": "ukgvw4i8", - "parentJobId": None, - "jobError": None, - "dtCreated": "2019-04-02T15:17:05.618Z", - "dtModified": "2019-04-02T15:17:05.618Z", - "dtProvisioningStarted": "2019-04-02T15:17:11.018Z", - "dtProvisioningFinished": "2019-04-02T15:17:56.754Z", - "dtStarted": "2019-04-02T15:17:56.754Z", - "dtFinished": "2019-04-02T17:02:26.950Z", - "dtTeardownStarted": "2019-04-02T17:02:26.987Z", - "dtTeardownFinished": "2019-04-02T17:02:54.596Z", - "dtDeleted": None, - "exitCode": 0, - "queuePosition": None, - "seqNum": 8, - "storageRegion": "GCP West", - "clusterMachine": "gradient-host-1553793082-82415ed3", - "fqdn": "jsigkjnyb6m3qm.dgradient.paperspace.com", - "ports": "5000:5000", - "isPublic": False, - "containerUser": None, - "hasCode": None, - "codeUploaded": None, - "codeCommit": None, - "runTillCancelled": None, - "pushOnCompletion": None, - "newImageName": None, - "cpuHostname": "gradient-host-1553793082-82415ed3", - "cpuCount": 2, - "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", - "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", - "cpuMem": "12297216 kB", - "gpuName": "Tesla K80", - "gpuSerial": "0320617087317", - "gpuDevice": "/dev/nvidia0", - "gpuDriver": "410.48", - "gpuCount": 1, - "gpuMem": "11441 MiB", - "tpuType": None, - "tpuName": None, - "tpuGrpcUrl": None, - "tpuTFVersion": None, - "tpuDatasetDir": None, - "tpuModelDir": None, - "targetNodeAttrs": None, - "jobEnv": None, - "sharedMemMBytes": None, - "shutdownTimeout": None, - "isPreemptible": False, - "metricsURL": "metrics-gcp-dev.paperspace.io", - "customMetrics": None, - "experimentId": "ehla1kvbwzaco" - }, - { - "id": "j4g76vuppxqao", - "name": "job 1", - "state": "Stopped", - "workspaceUrl": "s3://ps-projects-development/przhbct98/j4g76vuppxqao/__init__.py.zip", - "workingDirectory": "/paperspace", - "artifactsDirectory": "/artifacts", - "entrypoint": "echo keton", - "projectId": "przhbct98", - "project": "paperspace-python", - "container": "paperspace/tensorflow-python", - "containerUrl": "paperspace/tensorflow-python", - "baseContainer": "paperspace/tensorflow-python", - "baseContainerUrl": "paperspace/tensorflow-python", - "machineType": "K80", - "cluster": "PS Jobs on GCP", - "clusterId": "clkyczmyz", - "usageRate": "K80 hourly", - "startedByUserId": "ukgvw4i8", - "parentJobId": None, - "jobError": None, - "dtCreated": "2019-04-04T15:12:34.414Z", - "dtModified": "2019-04-04T15:12:34.414Z", - "dtProvisioningStarted": "2019-04-04T15:12:41.338Z", - "dtProvisioningFinished": "2019-04-04T15:12:47.492Z", - "dtStarted": "2019-04-04T15:12:47.492Z", - "dtFinished": "2019-04-04T15:12:52.582Z", - "dtTeardownStarted": "2019-04-04T15:12:52.621Z", - "dtTeardownFinished": "2019-04-04T15:12:52.752Z", - "dtDeleted": None, - "exitCode": 0, - "queuePosition": None, - 
"seqNum": 1, - "storageRegion": "GCP West", - "clusterMachine": "gradient-host-1553793082-82415ed3", - "fqdn": "j4g76vuppxqao.dgradient.paperspace.com", - "ports": None, - "isPublic": None, - "containerUser": None, - "hasCode": None, - "codeUploaded": None, - "codeCommit": None, - "runTillCancelled": None, - "pushOnCompletion": None, - "newImageName": None, - "cpuHostname": "gradient-host-1553793082-82415ed3", - "cpuCount": 2, - "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", - "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", - "cpuMem": "12297216 kB", - "gpuName": "Tesla K80", - "gpuSerial": "0320617087317", - "gpuDevice": "/dev/nvidia0", - "gpuDriver": "410.48", - "gpuCount": 1, - "gpuMem": "11441 MiB", - "tpuType": None, - "tpuName": None, - "tpuGrpcUrl": None, - "tpuTFVersion": None, - "tpuDatasetDir": None, - "tpuModelDir": None, - "targetNodeAttrs": None, - "jobEnv": None, - "sharedMemMBytes": None, - "shutdownTimeout": None, - "isPreemptible": False, - "metricsURL": "metrics-gcp-dev.paperspace.io", - "customMetrics": None, - "experimentId": "esfmkbql393ut0" - }, - { - "id": "jsbnvdhwb46vr9", - "name": "job 2", - "state": "Failed", - "workspaceUrl": "s3://ps-projects-development/przhbct98/jsbnvdhwb46vr9/temp.zip", - "workingDirectory": "/paperspace", - "artifactsDirectory": "/artifacts", - "entrypoint": ". test.sh\npython2 hello.py", - "projectId": "przhbct98", - "project": "paperspace-python", - "container": "paperspace/tensorflow-python", - "containerUrl": "paperspace/tensorflow-python", - "baseContainer": "paperspace/tensorflow-python", - "baseContainerUrl": "paperspace/tensorflow-python", - "machineType": "G1", - "cluster": "PS Jobs on GCP", - "clusterId": "clkyczmyz", - "usageRate": "G1 hourly", - "startedByUserId": "ukgvw4i8", - "parentJobId": None, - "jobError": None, - "dtCreated": "2019-04-24T09:09:53.645Z", - "dtModified": "2019-04-24T09:09:53.645Z", - "dtProvisioningStarted": "2019-04-24T09:10:50.771Z", - "dtProvisioningFinished": "2019-04-24T09:11:50.968Z", - "dtStarted": "2019-04-24T09:11:50.968Z", - "dtFinished": "2019-04-24T09:11:56.092Z", - "dtTeardownStarted": "2019-04-24T09:11:56.150Z", - "dtTeardownFinished": "2019-04-24T09:11:56.346Z", - "dtDeleted": None, - "exitCode": 2, - "queuePosition": None, - "seqNum": 2, - "storageRegion": "GCP West", - "clusterMachine": "gradient-host-1556074006", - "fqdn": "jsbnvdhwb46vr9.dgradient.paperspace.com", - "ports": None, - "isPublic": None, - "containerUser": None, - "hasCode": None, - "codeUploaded": None, - "codeCommit": None, - "runTillCancelled": None, - "pushOnCompletion": None, - "newImageName": None, - "cpuHostname": "gradient-host-1556074006", - "cpuCount": 1, - "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", - "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat 
arch_capabilities", - "cpuMem": "1783384 kB", - "gpuName": None, - "gpuSerial": None, - "gpuDevice": None, - "gpuDriver": None, - "gpuCount": None, - "gpuMem": None, - "tpuType": None, - "tpuName": None, - "tpuGrpcUrl": None, - "tpuTFVersion": None, - "tpuDatasetDir": None, - "tpuModelDir": None, - "targetNodeAttrs": None, - "jobEnv": None, - "sharedMemMBytes": None, - "shutdownTimeout": None, - "isPreemptible": False, - "metricsURL": "metrics-gcp-dev.paperspace.io", - "customMetrics": None, - "experimentId": "esmnlol3tavvvf" - }, - { - "id": "jt8alwzv28kha", - "name": "job 3", - "state": "Failed", - "workspaceUrl": "s3://ps-projects-development/przhbct98/jt8alwzv28kha/temp.zip", - "workingDirectory": "/paperspace", - "artifactsDirectory": "/artifacts", - "entrypoint": ". test.sh\npython2 hello.py", - "projectId": "przhbct98", - "project": "paperspace-python", - "container": "paperspace/tensorflow-python", - "containerUrl": "paperspace/tensorflow-python", - "baseContainer": "paperspace/tensorflow-python", - "baseContainerUrl": "paperspace/tensorflow-python", - "machineType": "G1", - "cluster": "PS Jobs on GCP", - "clusterId": "clkyczmyz", - "usageRate": "G1 hourly", - "startedByUserId": "ukgvw4i8", - "parentJobId": None, - "jobError": None, - "dtCreated": "2019-04-24T10:18:30.620Z", - "dtModified": "2019-04-24T10:18:30.620Z", - "dtProvisioningStarted": "2019-04-24T10:18:35.057Z", - "dtProvisioningFinished": "2019-04-24T10:18:38.241Z", - "dtStarted": "2019-04-24T10:18:38.241Z", - "dtFinished": "2019-04-24T10:18:43.348Z", - "dtTeardownStarted": "2019-04-24T10:18:43.394Z", - "dtTeardownFinished": "2019-04-24T10:18:43.544Z", - "dtDeleted": None, - "exitCode": 2, - "queuePosition": None, - "seqNum": 3, - "storageRegion": "GCP West", - "clusterMachine": "gradient-host-1556074006", - "fqdn": "jt8alwzv28kha.dgradient.paperspace.com", - "ports": None, - "isPublic": None, - "containerUser": None, - "hasCode": None, - "codeUploaded": None, - "codeCommit": None, - "runTillCancelled": None, - "pushOnCompletion": None, - "newImageName": None, - "cpuHostname": "gradient-host-1556074006", - "cpuCount": 1, - "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", - "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", - "cpuMem": "1783384 kB", - "gpuName": None, - "gpuSerial": None, - "gpuDevice": None, - "gpuDriver": None, - "gpuCount": None, - "gpuMem": None, - "tpuType": None, - "tpuName": None, - "tpuGrpcUrl": None, - "tpuTFVersion": None, - "tpuDatasetDir": None, - "tpuModelDir": None, - "targetNodeAttrs": None, - "jobEnv": None, - "sharedMemMBytes": None, - "shutdownTimeout": None, - "isPreemptible": False, - "metricsURL": "metrics-gcp-dev.paperspace.io", - "customMetrics": None, - "experimentId": "es47og38wzhnuo" - } + { + "id": "jsxeeba5qq99yn", + "name": "job 1", + "state": "Error", + "workspaceUrl": "none", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "nvidia-smi", + "projectId": "prmr22ve0", + "project": "keton", + "container": "Test-Container", + "containerUrl": "paperspace/tensorflow:1.5.0-gpu", + "baseContainer": "Test-Container", + "baseContainerUrl": 
"paperspace/tensorflow:1.5.0-gpu", + "machineType": "K80", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "K80 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": "Error quering for experiment for job: sql: Scan error on column index 4, name \"model_path\": unsupported Scan, storing driver.Value type into type *string", + "dtCreated": "2019-03-25T14:51:16.118Z", + "dtModified": "2019-03-25T14:51:16.118Z", + "dtProvisioningStarted": None, + "dtProvisioningFinished": None, + "dtStarted": None, + "dtFinished": "2019-03-27T13:53:34.188Z", + "dtTeardownStarted": None, + "dtTeardownFinished": None, + "dtDeleted": None, + "exitCode": None, + "queuePosition": None, + "seqNum": 1, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1553525480", + "fqdn": "jsxeeba5qq99yn.dgradient.paperspace.com", + "ports": None, + "isPublic": None, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": None, + "cpuCount": None, + "cpuModel": None, + "cpuFlags": None, + "cpuMem": None, + "gpuName": None, + "gpuSerial": None, + "gpuDevice": None, + "gpuDriver": None, + "gpuCount": None, + "gpuMem": None, + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "esk8lny3pxyqd6" + }, + { + "id": "jfl063dsv634h", + "name": "job 2", + "state": "Stopped", + "workspaceUrl": "none", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "nvidia-smi", + "projectId": "prmr22ve0", + "project": "keton", + "container": "Test-Container", + "containerUrl": "paperspace/tensorflow:1.5.0-gpu", + "baseContainer": "Test-Container", + "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", + "machineType": "P100", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "P100 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-03-25T14:54:30.866Z", + "dtModified": "2019-03-25T14:54:30.866Z", + "dtProvisioningStarted": "2019-03-25T14:59:15.818Z", + "dtProvisioningFinished": "2019-03-25T14:59:20.542Z", + "dtStarted": "2019-03-25T14:59:20.542Z", + "dtFinished": "2019-03-25T14:59:25.631Z", + "dtTeardownStarted": "2019-03-25T14:59:25.669Z", + "dtTeardownFinished": "2019-03-25T14:59:25.758Z", + "dtDeleted": None, + "exitCode": 0, + "queuePosition": None, + "seqNum": 2, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1553525723", + "fqdn": "jfl063dsv634h.dgradient.paperspace.com", + "ports": None, + "isPublic": None, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1553525723", + "cpuCount": 4, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb 
stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "24683148 kB", + "gpuName": "Tesla P100-PCIE-16GB", + "gpuSerial": "0324317067114", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "410.48", + "gpuCount": 1, + "gpuMem": "16280 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "ejd2v80p7cw6m" + }, + { + "id": "jsvau8w47k78zm", + "name": "Clone - jfl063dsv634h", + "state": "Stopped", + "workspaceUrl": "none", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "nvidia-smi", + "projectId": "prmr22ve0", + "project": "keton", + "container": "Test-Container", + "containerUrl": "paperspace/tensorflow:1.5.0-gpu", + "baseContainer": "Test-Container", + "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", + "machineType": "P100", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "P100 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": "jfl063dsv634h", + "jobError": None, + "dtCreated": "2019-03-25T15:04:43.844Z", + "dtModified": "2019-03-25T15:04:43.844Z", + "dtProvisioningStarted": "2019-03-25T15:07:43.854Z", + "dtProvisioningFinished": "2019-03-25T15:07:48.435Z", + "dtStarted": "2019-03-25T15:07:48.435Z", + "dtFinished": "2019-03-25T15:07:53.523Z", + "dtTeardownStarted": "2019-03-25T15:07:53.561Z", + "dtTeardownFinished": "2019-03-25T15:07:53.649Z", + "dtDeleted": None, + "exitCode": 0, + "queuePosition": None, + "seqNum": 3, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1553526309", + "fqdn": "jsvau8w47k78zm.dgradient.paperspace.com", + "ports": None, + "isPublic": None, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1553526309", + "cpuCount": 4, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "24683148 kB", + "gpuName": "Tesla P100-PCIE-16GB", + "gpuSerial": "0324317067114", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "410.48", + "gpuCount": 1, + "gpuMem": "16280 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": None + }, + { + "id": "j2eq99xhvgtum", + "name": "keton1-worker-1", + "state": "Pending", + "workspaceUrl": None, + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "echo keton", + "projectId": "prmr22ve0", + "project": "keton", + "container": "Test-Container", + "containerUrl": 
"paperspace/tensorflow:1.5.0-gpu", + "baseContainer": "Test-Container", + "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", + "machineType": "P100", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "P100 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-03-25T15:07:30.383Z", + "dtModified": "2019-03-25T15:07:30.383Z", + "dtProvisioningStarted": None, + "dtProvisioningFinished": None, + "dtStarted": None, + "dtFinished": "2019-03-25T15:07:30.383Z", + "dtTeardownStarted": None, + "dtTeardownFinished": None, + "dtDeleted": None, + "exitCode": None, + "queuePosition": None, + "seqNum": 4, + "storageRegion": "GCP West", + "clusterMachine": None, + "fqdn": "j2eq99xhvgtum.dgradient.paperspace.com", + "ports": "5000", + "isPublic": False, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": None, + "cpuCount": None, + "cpuModel": None, + "cpuFlags": None, + "cpuMem": None, + "gpuName": None, + "gpuSerial": None, + "gpuDevice": None, + "gpuDriver": None, + "gpuCount": None, + "gpuMem": None, + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "esibxync23szaq" + }, + { + "id": "jzzinybinuxf9", + "name": "keton2-worker-1", + "state": "Stopped", + "workspaceUrl": "git+https://github.com/Paperspace/multinode-mnist", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "echo keton", + "projectId": "prmr22ve0", + "project": "keton", + "container": "Test-Container", + "containerUrl": "paperspace/tensorflow:1.5.0-gpu", + "baseContainer": "Test-Container", + "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", + "machineType": "P100", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "P100 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-03-25T15:18:51.461Z", + "dtModified": "2019-03-25T15:18:51.461Z", + "dtProvisioningStarted": "2019-03-25T15:18:58.089Z", + "dtProvisioningFinished": "2019-03-25T15:19:03.246Z", + "dtStarted": "2019-03-25T15:19:03.246Z", + "dtFinished": "2019-03-25T15:19:08.337Z", + "dtTeardownStarted": "2019-03-25T15:19:08.374Z", + "dtTeardownFinished": "2019-03-25T15:19:08.461Z", + "dtDeleted": None, + "exitCode": 0, + "queuePosition": None, + "seqNum": 5, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1553526309", + "fqdn": "jzzinybinuxf9.dgradient.paperspace.com", + "ports": "5000", + "isPublic": False, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1553526309", + "cpuCount": 4, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase 
tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "24683148 kB", + "gpuName": "Tesla P100-PCIE-16GB", + "gpuSerial": "0324317067114", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "410.48", + "gpuCount": 1, + "gpuMem": "16280 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "ep6hmawh97q0v" + }, + { + "id": "jsb37duc1zlbz0", + "name": "keton4-worker-1", + "state": "Stopped", + "workspaceUrl": "git+https://github.com/Paperspace/multinode-mnist", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "echo siema", + "projectId": "prmr22ve0", + "project": "keton", + "container": "Test-Container", + "containerUrl": "paperspace/tensorflow:1.5.0-gpu", + "baseContainer": "Test-Container", + "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", + "machineType": "P100", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "P100 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-03-25T15:29:04.601Z", + "dtModified": "2019-03-25T15:29:04.601Z", + "dtProvisioningStarted": "2019-03-25T15:30:42.529Z", + "dtProvisioningFinished": "2019-03-25T15:30:48.252Z", + "dtStarted": "2019-03-25T15:30:48.252Z", + "dtFinished": "2019-03-25T15:30:53.349Z", + "dtTeardownStarted": "2019-03-25T15:30:53.387Z", + "dtTeardownFinished": "2019-03-25T15:30:53.470Z", + "dtDeleted": None, + "exitCode": 0, + "queuePosition": None, + "seqNum": 6, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1553526309", + "fqdn": "jsb37duc1zlbz0.dgradient.paperspace.com", + "ports": "3456", + "isPublic": False, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1553526309", + "cpuCount": 4, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "24683148 kB", + "gpuName": "Tesla P100-PCIE-16GB", + "gpuSerial": "0324317067114", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "410.48", + "gpuCount": 1, + "gpuMem": "16280 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "esgeuvkdokyom2" + }, + { + "id": "jq41vipwy18f7", + "name": "keton4-parameter_server-1", + "state": "Stopped", + "workspaceUrl": "git+https://github.com/Paperspace/multinode-mnist", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "ls", + "projectId": "prmr22ve0", + "project": 
"keton", + "container": "Test-Container", + "containerUrl": "paperspace/tensorflow:1.5.0-gpu", + "baseContainer": "Test-Container", + "baseContainerUrl": "paperspace/tensorflow:1.5.0-gpu", + "machineType": "P100", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "P100 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-03-25T15:29:06.765Z", + "dtModified": "2019-03-25T15:29:06.765Z", + "dtProvisioningStarted": "2019-03-25T15:30:41.416Z", + "dtProvisioningFinished": "2019-03-25T15:30:48.004Z", + "dtStarted": "2019-03-25T15:30:48.004Z", + "dtFinished": "2019-03-25T15:30:53.097Z", + "dtTeardownStarted": "2019-03-25T15:30:53.135Z", + "dtTeardownFinished": "2019-03-25T15:30:53.223Z", + "dtDeleted": None, + "exitCode": 0, + "queuePosition": None, + "seqNum": 7, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1553526384", + "fqdn": "jq41vipwy18f7.dgradient.paperspace.com", + "ports": "3456", + "isPublic": False, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1553526384", + "cpuCount": 4, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "24683148 kB", + "gpuName": "Tesla P100-PCIE-16GB", + "gpuSerial": "0324317004340", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "410.48", + "gpuCount": 1, + "gpuMem": "16280 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "esgeuvkdokyom2" + }, + { + "id": "jsigkjnyb6m3qm", + "name": "Test1-worker-1", + "state": "Stopped", + "workspaceUrl": "git+https://github.com/Paperspace/mnist-sample", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "python mnist.py --data_format=channels_last", + "projectId": "prmr22ve0", + "project": "keton", + "container": "tensorflow/tensorflow:1.13.1-py3", + "containerUrl": "tensorflow/tensorflow:1.13.1-py3", + "baseContainer": "tensorflow/tensorflow:1.13.1-py3", + "baseContainerUrl": "tensorflow/tensorflow:1.13.1-py3", + "machineType": "K80", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "K80 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-04-02T15:17:05.618Z", + "dtModified": "2019-04-02T15:17:05.618Z", + "dtProvisioningStarted": "2019-04-02T15:17:11.018Z", + "dtProvisioningFinished": "2019-04-02T15:17:56.754Z", + "dtStarted": "2019-04-02T15:17:56.754Z", + "dtFinished": "2019-04-02T17:02:26.950Z", + "dtTeardownStarted": "2019-04-02T17:02:26.987Z", + "dtTeardownFinished": "2019-04-02T17:02:54.596Z", + "dtDeleted": None, + "exitCode": 0, + "queuePosition": None, + "seqNum": 8, + "storageRegion": "GCP West", + 
"clusterMachine": "gradient-host-1553793082-82415ed3", + "fqdn": "jsigkjnyb6m3qm.dgradient.paperspace.com", + "ports": "5000:5000", + "isPublic": False, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1553793082-82415ed3", + "cpuCount": 2, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "12297216 kB", + "gpuName": "Tesla K80", + "gpuSerial": "0320617087317", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "410.48", + "gpuCount": 1, + "gpuMem": "11441 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "ehla1kvbwzaco" + }, + { + "id": "j4g76vuppxqao", + "name": "job 1", + "state": "Stopped", + "workspaceUrl": "s3://ps-projects-development/przhbct98/j4g76vuppxqao/__init__.py.zip", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": "echo keton", + "projectId": "przhbct98", + "project": "paperspace-python", + "container": "paperspace/tensorflow-python", + "containerUrl": "paperspace/tensorflow-python", + "baseContainer": "paperspace/tensorflow-python", + "baseContainerUrl": "paperspace/tensorflow-python", + "machineType": "K80", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "K80 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-04-04T15:12:34.414Z", + "dtModified": "2019-04-04T15:12:34.414Z", + "dtProvisioningStarted": "2019-04-04T15:12:41.338Z", + "dtProvisioningFinished": "2019-04-04T15:12:47.492Z", + "dtStarted": "2019-04-04T15:12:47.492Z", + "dtFinished": "2019-04-04T15:12:52.582Z", + "dtTeardownStarted": "2019-04-04T15:12:52.621Z", + "dtTeardownFinished": "2019-04-04T15:12:52.752Z", + "dtDeleted": None, + "exitCode": 0, + "queuePosition": None, + "seqNum": 1, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1553793082-82415ed3", + "fqdn": "j4g76vuppxqao.dgradient.paperspace.com", + "ports": None, + "isPublic": None, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1553793082-82415ed3", + "cpuCount": 2, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": 
"12297216 kB", + "gpuName": "Tesla K80", + "gpuSerial": "0320617087317", + "gpuDevice": "/dev/nvidia0", + "gpuDriver": "410.48", + "gpuCount": 1, + "gpuMem": "11441 MiB", + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "esfmkbql393ut0" + }, + { + "id": "jsbnvdhwb46vr9", + "name": "job 2", + "state": "Failed", + "workspaceUrl": "s3://ps-projects-development/przhbct98/jsbnvdhwb46vr9/temp.zip", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": ". test.sh\npython2 hello.py", + "projectId": "przhbct98", + "project": "paperspace-python", + "container": "paperspace/tensorflow-python", + "containerUrl": "paperspace/tensorflow-python", + "baseContainer": "paperspace/tensorflow-python", + "baseContainerUrl": "paperspace/tensorflow-python", + "machineType": "G1", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "G1 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-04-24T09:09:53.645Z", + "dtModified": "2019-04-24T09:09:53.645Z", + "dtProvisioningStarted": "2019-04-24T09:10:50.771Z", + "dtProvisioningFinished": "2019-04-24T09:11:50.968Z", + "dtStarted": "2019-04-24T09:11:50.968Z", + "dtFinished": "2019-04-24T09:11:56.092Z", + "dtTeardownStarted": "2019-04-24T09:11:56.150Z", + "dtTeardownFinished": "2019-04-24T09:11:56.346Z", + "dtDeleted": None, + "exitCode": 2, + "queuePosition": None, + "seqNum": 2, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1556074006", + "fqdn": "jsbnvdhwb46vr9.dgradient.paperspace.com", + "ports": None, + "isPublic": None, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1556074006", + "cpuCount": 1, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "1783384 kB", + "gpuName": None, + "gpuSerial": None, + "gpuDevice": None, + "gpuDriver": None, + "gpuCount": None, + "gpuMem": None, + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "esmnlol3tavvvf" + }, + { + "id": "jt8alwzv28kha", + "name": "job 3", + "state": "Failed", + "workspaceUrl": "s3://ps-projects-development/przhbct98/jt8alwzv28kha/temp.zip", + "workingDirectory": "/paperspace", + "artifactsDirectory": "/artifacts", + "entrypoint": ". 
test.sh\npython2 hello.py", + "projectId": "przhbct98", + "project": "paperspace-python", + "container": "paperspace/tensorflow-python", + "containerUrl": "paperspace/tensorflow-python", + "baseContainer": "paperspace/tensorflow-python", + "baseContainerUrl": "paperspace/tensorflow-python", + "machineType": "G1", + "cluster": "PS Jobs on GCP", + "clusterId": "clkyczmyz", + "usageRate": "G1 hourly", + "startedByUserId": "ukgvw4i8", + "parentJobId": None, + "jobError": None, + "dtCreated": "2019-04-24T10:18:30.620Z", + "dtModified": "2019-04-24T10:18:30.620Z", + "dtProvisioningStarted": "2019-04-24T10:18:35.057Z", + "dtProvisioningFinished": "2019-04-24T10:18:38.241Z", + "dtStarted": "2019-04-24T10:18:38.241Z", + "dtFinished": "2019-04-24T10:18:43.348Z", + "dtTeardownStarted": "2019-04-24T10:18:43.394Z", + "dtTeardownFinished": "2019-04-24T10:18:43.544Z", + "dtDeleted": None, + "exitCode": 2, + "queuePosition": None, + "seqNum": 3, + "storageRegion": "GCP West", + "clusterMachine": "gradient-host-1556074006", + "fqdn": "jt8alwzv28kha.dgradient.paperspace.com", + "ports": None, + "isPublic": None, + "containerUser": None, + "hasCode": None, + "codeUploaded": None, + "codeCommit": None, + "runTillCancelled": None, + "pushOnCompletion": None, + "newImageName": None, + "cpuHostname": "gradient-host-1556074006", + "cpuCount": 1, + "cpuModel": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "cpuFlags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht syscall nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xtopology nonstop_tsc cpuid pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm invpcid_single pti ssbd ibrs ibpb stibp fsgsbase tsc_adjust bmi1 avx2 smep bmi2 erms invpcid xsaveopt arat arch_capabilities", + "cpuMem": "1783384 kB", + "gpuName": None, + "gpuSerial": None, + "gpuDevice": None, + "gpuDriver": None, + "gpuCount": None, + "gpuMem": None, + "tpuType": None, + "tpuName": None, + "tpuGrpcUrl": None, + "tpuTFVersion": None, + "tpuDatasetDir": None, + "tpuModelDir": None, + "targetNodeAttrs": None, + "jobEnv": None, + "sharedMemMBytes": None, + "shutdownTimeout": None, + "isPreemptible": False, + "metricsURL": "metrics-gcp-dev.paperspace.io", + "customMetrics": None, + "experimentId": "es47og38wzhnuo" + } ] + +LIST_MODELS_RESPONSE_JSON = { + "modelList": [ + { + "id": "mosu30xm7q8vb0p", + "projectId": "prmr22ve0", + "experimentId": "ehla1kvbwzaco", + "modelType": "Tensorflow", + "name": None, + "tag": None, + "summary": { + "loss": { + "result": { + "max": 0.028335485607385635, + "min": 0.028335485607385635, + "var": 0, + "mean": 0.028335485607385635, + "median": 0.028335485607385635, + "stddev": 0 + }, + "scalar": "loss" + }, + "accuracy": { + "result": { + "max": 0.991100013256073, + "min": 0.991100013256073, + "var": 0, + "mean": 0.991100013256073, + "median": 0.991100013256073, + "stddev": 0 + }, + "scalar": "accuracy" + } + }, + "detail": [ + { + "scalars": [ + { + "data": [ + { + "operation": "mean", + "checkpoints": [ + { + "value": 0.0621405728161335, + "checkpoint": 1175 + }, + { + "value": 0.039062466472387314, + "checkpoint": 2323 + }, + { + "value": 0.03250512480735779, + "checkpoint": 3471 + }, + { + "value": 0.025037841871380806, + "checkpoint": 4606 + }, + { + "value": 0.02336057461798191, + "checkpoint": 5785 + }, + { + "value": 0.024296583607792854, + "checkpoint": 6956 + }, + { + "value": 0.021974526345729828, + "checkpoint": 8116 + }, + { + "value": 
0.021116264164447784, + "checkpoint": 9262 + }, + { + "value": 0.020851025357842445, + "checkpoint": 10397 + }, + { + "value": 0.022611157968640327, + "checkpoint": 11566 + }, + { + "value": 0.028335485607385635, + "checkpoint": 12000 + } + ] + }, + { + "operation": "amin", + "checkpoints": [ + { + "value": 0.0621405728161335, + "checkpoint": 1175 + }, + { + "value": 0.039062466472387314, + "checkpoint": 2323 + }, + { + "value": 0.03250512480735779, + "checkpoint": 3471 + }, + { + "value": 0.025037841871380806, + "checkpoint": 4606 + }, + { + "value": 0.02336057461798191, + "checkpoint": 5785 + }, + { + "value": 0.024296583607792854, + "checkpoint": 6956 + }, + { + "value": 0.021974526345729828, + "checkpoint": 8116 + }, + { + "value": 0.021116264164447784, + "checkpoint": 9262 + }, + { + "value": 0.020851025357842445, + "checkpoint": 10397 + }, + { + "value": 0.022611157968640327, + "checkpoint": 11566 + }, + { + "value": 0.028335485607385635, + "checkpoint": 12000 + } + ] + }, + { + "operation": "amax", + "checkpoints": [ + { + "value": 0.0621405728161335, + "checkpoint": 1175 + }, + { + "value": 0.039062466472387314, + "checkpoint": 2323 + }, + { + "value": 0.03250512480735779, + "checkpoint": 3471 + }, + { + "value": 0.025037841871380806, + "checkpoint": 4606 + }, + { + "value": 0.02336057461798191, + "checkpoint": 5785 + }, + { + "value": 0.024296583607792854, + "checkpoint": 6956 + }, + { + "value": 0.021974526345729828, + "checkpoint": 8116 + }, + { + "value": 0.021116264164447784, + "checkpoint": 9262 + }, + { + "value": 0.020851025357842445, + "checkpoint": 10397 + }, + { + "value": 0.022611157968640327, + "checkpoint": 11566 + }, + { + "value": 0.028335485607385635, + "checkpoint": 12000 + } + ] + }, + { + "operation": "median", + "checkpoints": [ + { + "value": 0.0621405728161335, + "checkpoint": 1175 + }, + { + "value": 0.039062466472387314, + "checkpoint": 2323 + }, + { + "value": 0.03250512480735779, + "checkpoint": 3471 + }, + { + "value": 0.025037841871380806, + "checkpoint": 4606 + }, + { + "value": 0.02336057461798191, + "checkpoint": 5785 + }, + { + "value": 0.024296583607792854, + "checkpoint": 6956 + }, + { + "value": 0.021974526345729828, + "checkpoint": 8116 + }, + { + "value": 0.021116264164447784, + "checkpoint": 9262 + }, + { + "value": 0.020851025357842445, + "checkpoint": 10397 + }, + { + "value": 0.022611157968640327, + "checkpoint": 11566 + }, + { + "value": 0.028335485607385635, + "checkpoint": 12000 + } + ] + }, + { + "operation": "std", + "checkpoints": [ + { + "value": 0, + "checkpoint": 1175 + }, + { + "value": 0, + "checkpoint": 2323 + }, + { + "value": 0, + "checkpoint": 3471 + }, + { + "value": 0, + "checkpoint": 4606 + }, + { + "value": 0, + "checkpoint": 5785 + }, + { + "value": 0, + "checkpoint": 6956 + }, + { + "value": 0, + "checkpoint": 8116 + }, + { + "value": 0, + "checkpoint": 9262 + }, + { + "value": 0, + "checkpoint": 10397 + }, + { + "value": 0, + "checkpoint": 11566 + }, + { + "value": 0, + "checkpoint": 12000 + } + ] + }, + { + "operation": "var", + "checkpoints": [ + { + "value": 0, + "checkpoint": 1175 + }, + { + "value": 0, + "checkpoint": 2323 + }, + { + "value": 0, + "checkpoint": 3471 + }, + { + "value": 0, + "checkpoint": 4606 + }, + { + "value": 0, + "checkpoint": 5785 + }, + { + "value": 0, + "checkpoint": 6956 + }, + { + "value": 0, + "checkpoint": 8116 + }, + { + "value": 0, + "checkpoint": 9262 + }, + { + "value": 0, + "checkpoint": 10397 + }, + { + "value": 0, + "checkpoint": 11566 + }, + { + "value": 0, + "checkpoint": 12000 
+ } + ] + } + ], + "scalar": "loss" + }, + { + "data": [ + { + "operation": "mean", + "checkpoints": [ + { + "value": 0.9804999828338623, + "checkpoint": 1175 + }, + { + "value": 0.9872000217437744, + "checkpoint": 2323 + }, + { + "value": 0.9904000163078308, + "checkpoint": 3471 + }, + { + "value": 0.9912999868392944, + "checkpoint": 4606 + }, + { + "value": 0.9919999837875366, + "checkpoint": 5785 + }, + { + "value": 0.991599977016449, + "checkpoint": 6956 + }, + { + "value": 0.992900013923645, + "checkpoint": 8116 + }, + { + "value": 0.9922000169754028, + "checkpoint": 9262 + }, + { + "value": 0.9930999875068665, + "checkpoint": 10397 + }, + { + "value": 0.9926000237464905, + "checkpoint": 11566 + }, + { + "value": 0.991100013256073, + "checkpoint": 12000 + } + ] + }, + { + "operation": "amin", + "checkpoints": [ + { + "value": 0.9804999828338623, + "checkpoint": 1175 + }, + { + "value": 0.9872000217437744, + "checkpoint": 2323 + }, + { + "value": 0.9904000163078308, + "checkpoint": 3471 + }, + { + "value": 0.9912999868392944, + "checkpoint": 4606 + }, + { + "value": 0.9919999837875366, + "checkpoint": 5785 + }, + { + "value": 0.991599977016449, + "checkpoint": 6956 + }, + { + "value": 0.992900013923645, + "checkpoint": 8116 + }, + { + "value": 0.9922000169754028, + "checkpoint": 9262 + }, + { + "value": 0.9930999875068665, + "checkpoint": 10397 + }, + { + "value": 0.9926000237464905, + "checkpoint": 11566 + }, + { + "value": 0.991100013256073, + "checkpoint": 12000 + } + ] + }, + { + "operation": "amax", + "checkpoints": [ + { + "value": 0.9804999828338623, + "checkpoint": 1175 + }, + { + "value": 0.9872000217437744, + "checkpoint": 2323 + }, + { + "value": 0.9904000163078308, + "checkpoint": 3471 + }, + { + "value": 0.9912999868392944, + "checkpoint": 4606 + }, + { + "value": 0.9919999837875366, + "checkpoint": 5785 + }, + { + "value": 0.991599977016449, + "checkpoint": 6956 + }, + { + "value": 0.992900013923645, + "checkpoint": 8116 + }, + { + "value": 0.9922000169754028, + "checkpoint": 9262 + }, + { + "value": 0.9930999875068665, + "checkpoint": 10397 + }, + { + "value": 0.9926000237464905, + "checkpoint": 11566 + }, + { + "value": 0.991100013256073, + "checkpoint": 12000 + } + ] + }, + { + "operation": "median", + "checkpoints": [ + { + "value": 0.9804999828338623, + "checkpoint": 1175 + }, + { + "value": 0.9872000217437744, + "checkpoint": 2323 + }, + { + "value": 0.9904000163078308, + "checkpoint": 3471 + }, + { + "value": 0.9912999868392944, + "checkpoint": 4606 + }, + { + "value": 0.9919999837875366, + "checkpoint": 5785 + }, + { + "value": 0.991599977016449, + "checkpoint": 6956 + }, + { + "value": 0.992900013923645, + "checkpoint": 8116 + }, + { + "value": 0.9922000169754028, + "checkpoint": 9262 + }, + { + "value": 0.9930999875068665, + "checkpoint": 10397 + }, + { + "value": 0.9926000237464905, + "checkpoint": 11566 + }, + { + "value": 0.991100013256073, + "checkpoint": 12000 + } + ] + }, + { + "operation": "std", + "checkpoints": [ + { + "value": 0, + "checkpoint": 1175 + }, + { + "value": 0, + "checkpoint": 2323 + }, + { + "value": 0, + "checkpoint": 3471 + }, + { + "value": 0, + "checkpoint": 4606 + }, + { + "value": 0, + "checkpoint": 5785 + }, + { + "value": 0, + "checkpoint": 6956 + }, + { + "value": 0, + "checkpoint": 8116 + }, + { + "value": 0, + "checkpoint": 9262 + }, + { + "value": 0, + "checkpoint": 10397 + }, + { + "value": 0, + "checkpoint": 11566 + }, + { + "value": 0, + "checkpoint": 12000 + } + ] + }, + { + "operation": "var", + "checkpoints": [ + { + 
"value": 0, + "checkpoint": 1175 + }, + { + "value": 0, + "checkpoint": 2323 + }, + { + "value": 0, + "checkpoint": 3471 + }, + { + "value": 0, + "checkpoint": 4606 + }, + { + "value": 0, + "checkpoint": 5785 + }, + { + "value": 0, + "checkpoint": 6956 + }, + { + "value": 0, + "checkpoint": 8116 + }, + { + "value": 0, + "checkpoint": 9262 + }, + { + "value": 0, + "checkpoint": 10397 + }, + { + "value": 0, + "checkpoint": 11566 + }, + { + "value": 0, + "checkpoint": 12000 + } + ] + } + ], + "scalar": "accuracy" + } + ], + "session": "eval" + } + ], + "params": None, + "url": "s3://ps-projects-development/prmr22ve0/ehla1kvbwzaco/model/", + "notes": None, + "isDeleted": False, + "isPublic": False, + "dtCreated": "2019-04-02T17:02:47.157Z", + "dtModified": "2019-04-02T17:02:54.273Z", + "dtUploaded": "2019-04-02T17:02:54.273Z", + "dtDeleted": None, + "modelPath": "/artifacts" + } + ], + "total": 1, + "displayTotal": 1 +} diff --git a/tests/functional/test_machines.py b/tests/functional/test_machines.py index 10faf9b..bf7d1cd 100644 --- a/tests/functional/test_machines.py +++ b/tests/functional/test_machines.py @@ -87,6 +87,20 @@ def test_should_print_valid_error_message_when_no_content_was_received_in_respon assert result.output == "Unknown error while checking machine availability\n" assert result.exit_code == 0 + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_print_valid_error_message_when_no_content_was_received_in_response(self, get_patched): + get_patched.return_value = MockResponse(status_code=400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.COMMAND) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=None, + params=self.PARAMS) + assert result.output == "Unknown error while checking machine availability\n" + assert result.exit_code == 0 + class TestCreateMachine(object): URL = "https://api.paperspace.io/machines/createSingleMachinePublic/" diff --git a/tests/functional/test_models.py b/tests/functional/test_models.py new file mode 100644 index 0000000..465c8e6 --- /dev/null +++ b/tests/functional/test_models.py @@ -0,0 +1,90 @@ +import mock +from click.testing import CliRunner + +import paperspace.client +from paperspace.cli import cli +from tests import example_responses, MockResponse + + +class TestModelsList(object): + URL = "https://api.paperspace.io/mlModels/getModelList/" + COMMAND = ["models", "list"] + EXPECTED_HEADERS = paperspace.client.default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY = paperspace.client.default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY["X-API-Key"] = "some_key" + + COMMAND_WITH_FILTERING_BY_EXPERIMENT_ID = [ + "models", "list", + "--experimentId", "some_experiment_id", + ] + EXPECTED_REQUEST_JSON_WITH_FILTERING = {"filter": {"where": {"and": [{"experimentId": "some_experiment_id"}]}}} + + COMMAND_WITH_API_KEY_PARAMETER_USED = ["models", "list", "--apiKey", "some_key"] + + EXPECTED_RESPONSE_JSON_WHEN_NO_MODELS_WERE_FOUND = {"modelList": [], "total": 1, "displayTotal": 0} + + EXPECTED_STDOUT = """+------+-----------------+------------+------------+---------------+ +| Name | ID | Model Type | Project ID | Experiment ID | ++------+-----------------+------------+------------+---------------+ +| None | mosu30xm7q8vb0p | Tensorflow | prmr22ve0 | ehla1kvbwzaco | ++------+-----------------+------------+------------+---------------+ +""" + + @mock.patch("paperspace.cli.cli.client.requests.get") + def 
test_should_send_get_request_and_print_list_of_experiments(self, get_patched): + get_patched.return_value = MockResponse(example_responses.LIST_MODELS_RESPONSE_JSON, 200, "fake content") + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND) + + get_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=None, + params={"limit": -1}) + + assert result.output == self.EXPECTED_STDOUT + assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_replate_api_key_in_headers_when_api_key_parameter_was_used(self, get_patched): + get_patched.return_value = MockResponse(example_responses.LIST_MODELS_RESPONSE_JSON, 200, "fake content") + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND_WITH_API_KEY_PARAMETER_USED) + + get_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=None, + params={"limit": -1}) + + assert result.output == self.EXPECTED_STDOUT + assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_send_get_request_and_print_list_of_models_filtered_experiment_id(self, get_patched): + get_patched.return_value = MockResponse(example_responses.LIST_MODELS_RESPONSE_JSON, 200) + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND_WITH_FILTERING_BY_EXPERIMENT_ID) + + get_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=self.EXPECTED_REQUEST_JSON_WITH_FILTERING, + params={"limit": -1}) + + assert result.output == self.EXPECTED_STDOUT + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_send_get_request_and_print_proper_message_when_no_models_were_found( + self, get_patched): + get_patched.return_value = MockResponse(self.EXPECTED_RESPONSE_JSON_WHEN_NO_MODELS_WERE_FOUND, 200) + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND) + + get_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=None, + params={"limit": -1}) + + assert result.output == "No models found\n" From e4e69a6befbf99fbfa32e71f962325d7f703ac83 Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Mon, 29 Apr 2019 12:12:40 +0200 Subject: [PATCH 17/42] Add filtering models by project ID and fix message when wrong api key was used --- paperspace/cli/models.py | 5 +++++ paperspace/commands/models.py | 17 ++++++++++------- tests/functional/test_models.py | 16 ++++++++++++++++ 3 files changed, 31 insertions(+), 7 deletions(-) diff --git a/paperspace/cli/models.py b/paperspace/cli/models.py index 9e42791..398c1ed 100644 --- a/paperspace/cli/models.py +++ b/paperspace/cli/models.py @@ -16,6 +16,11 @@ def models_group(): "experimentId", help="Use to filter jobs by experiment ID", ) +@click.option( + "--projectId", + "projectId", + help="Use to filter jobs by project ID", +) @common.api_key_option def list_jobs(api_key, **filters): common.del_if_value_is_none(filters) diff --git a/paperspace/commands/models.py b/paperspace/commands/models.py index 67db490..ad6b839 100644 --- a/paperspace/commands/models.py +++ b/paperspace/commands/models.py @@ -14,7 +14,11 @@ def execute(self, filters): response = self.api.get("/mlModels/getModelList/", json=json_, params=params) try: - models = self._get_models_list(response) + data = response.json() + if not response.ok: + self.logger.log_error_response(data) + return + models = data["modelList"] except (ValueError, KeyError) 
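In isolation, the updated error path behaves roughly like the sketch below. The standalone function name is made up for illustration; in the patch the logic lives inside ListModelsCommand.execute. The point of the change is that response.ok is checked before the payload is parsed, so the API's own message (for example "No such API token") reaches the user instead of a generic parse error.

    def handle_model_list_response(response, logger):
        # Sketch of the updated error path: inspect response.ok before touching
        # the payload so an error body like {"status": 401, "message": "No such
        # API token"} is reported as-is.
        try:
            data = response.json()
            if not response.ok:
                logger.log_error_response(data)
                return None
            return data["modelList"]
        except (ValueError, KeyError) as error:
            logger.log("Error while parsing response data: {}".format(error))
            return None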
as e: self.logger.log("Error while parsing response data: {}".format(e)) else: @@ -22,11 +26,10 @@ def execute(self, filters): @staticmethod def _get_request_json(filters): - experiment_id = filters.get("experimentId") - if not experiment_id: + if not filters: return None - json_ = {"filter": {"where": {"and": [{"experimentId": experiment_id}]}}} + json_ = {"filter": {"where": {"and": [filters]}}} return json_ def _get_models_list(self, response): @@ -37,11 +40,11 @@ def _get_models_list(self, response): self.logger.debug(data) return data - def _log_models_list(self, model): - if not model: + def _log_models_list(self, models): + if not models: self.logger.log("No models found") else: - table_str = self._make_models_list_table(model) + table_str = self._make_models_list_table(models) if len(table_str.splitlines()) > get_terminal_lines(): pydoc.pager(table_str) else: diff --git a/tests/functional/test_models.py b/tests/functional/test_models.py index 465c8e6..8c28c82 100644 --- a/tests/functional/test_models.py +++ b/tests/functional/test_models.py @@ -30,6 +30,8 @@ class TestModelsList(object): +------+-----------------+------------+------------+---------------+ """ + EXPECTED_RESPONSE_WHEN_WRONG_API_KEY_WAS_USED = {"status": 401, "message": "No such API token"} + @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_get_request_and_print_list_of_experiments(self, get_patched): get_patched.return_value = MockResponse(example_responses.LIST_MODELS_RESPONSE_JSON, 200, "fake content") @@ -88,3 +90,17 @@ def test_should_send_get_request_and_print_proper_message_when_no_models_were_fo params={"limit": -1}) assert result.output == "No models found\n" + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_print_proper_message_when_wrong_api_key_was_used(self, get_patched): + get_patched.return_value = MockResponse(self.EXPECTED_RESPONSE_WHEN_WRONG_API_KEY_WAS_USED, 401) + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND) + + get_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=None, + params={"limit": -1}) + + assert result.output == "No such API token\n" From 4480440fbeeaffecc10a86a0ab15f24527f782d9 Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Mon, 29 Apr 2019 15:02:22 +0200 Subject: [PATCH 18/42] Add filtering options to 'deployments list' command --- paperspace/cli/cli.py | 16 +++++++++++++--- paperspace/commands/deployments.py | 21 ++++++++++++--------- paperspace/commands/models.py | 9 +++------ tests/functional/test_deployments.py | 20 +++++++++++++++++--- 4 files changed, 45 insertions(+), 21 deletions(-) diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py index c0b33fc..cfce783 100644 --- a/paperspace/cli/cli.py +++ b/paperspace/cli/cli.py @@ -401,12 +401,22 @@ def create_deployment(api_key=None, **kwargs): type=ChoiceType(DEPLOYMENT_STATES_MAP, case_sensitive=False), help="Filter by deployment state", ) +@click.option( + "--projectId", + "projectId", + help="Use to filter by project ID", +) +@click.option( + "--modelId", + "modelId", + help="Use to filter by project ID", +) @api_key_option -def get_deployments_list(api_key=None, **kwargs): - del_if_value_is_none(kwargs) +def get_deployments_list(api_key=None, **filters): + del_if_value_is_none(filters) deployments_api = client.API(config.CONFIG_HOST, api_key=api_key) command = deployments_commands.ListDeploymentsCommand(api=deployments_api) - command.execute(kwargs) + command.execute(filters) @deployments.command("update", 
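With --projectId added, every filter option the user actually passes ends up in a single "and" clause of the request body, and passing no filters at all sends the request with json=None. A minimal standalone sketch of that behaviour follows; build_filter_json is an illustrative name, the real logic being ListModelsCommand._get_request_json together with del_if_value_is_none:

    def build_filter_json(**filters):
        # Drop options the user did not pass, as del_if_value_is_none() does.
        filters = {key: value for key, value in filters.items() if value is not None}
        if not filters:
            return None  # the command then calls the API with json=None
        return {"filter": {"where": {"and": [filters]}}}

    assert build_filter_json() is None
    assert build_filter_json(experimentId="some_experiment_id") == \
        {"filter": {"where": {"and": [{"experimentId": "some_experiment_id"}]}}}

The second assertion matches the EXPECTED_REQUEST_JSON_WITH_FILTERING value used in the functional tests.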
help="Update deployment properties") diff --git a/paperspace/commands/deployments.py b/paperspace/commands/deployments.py index d923777..4193530 100644 --- a/paperspace/commands/deployments.py +++ b/paperspace/commands/deployments.py @@ -40,25 +40,28 @@ def execute(self, kwargs): class ListDeploymentsCommand(_DeploymentCommandBase): - def execute(self, kwargs): - json_ = self._get_request_json(kwargs) + def execute(self, filters=None): + json_ = self._get_request_json(filters) response = self.api.get("/deployments/getDeploymentList/", json=json_) try: - deployments = self._get_deployments_list(response) + data = response.json() + if not response.ok: + self.logger.log_error_response(data) + return + models = self._get_deployments_list(response) except (ValueError, KeyError) as e: self.logger.log("Error while parsing response data: {}".format(e)) else: - self._log_deployments_list(deployments) + self._log_deployments_list(models) @staticmethod - def _get_request_json(kwargs): - state = kwargs.get("state") - if not state: + def _get_request_json(filters): + if not filters: return None - params = {"filter": {"where": {"and": [{"state": state}]}}} - return params + json_ = {"filter": {"where": {"and": [filters]}}} + return json_ @staticmethod def _get_deployments_list(response): diff --git a/paperspace/commands/models.py b/paperspace/commands/models.py index ad6b839..c154fb7 100644 --- a/paperspace/commands/models.py +++ b/paperspace/commands/models.py @@ -18,7 +18,7 @@ def execute(self, filters): if not response.ok: self.logger.log_error_response(data) return - models = data["modelList"] + models = self._get_objects_list(response) except (ValueError, KeyError) as e: self.logger.log("Error while parsing response data: {}".format(e)) else: @@ -32,12 +32,9 @@ def _get_request_json(filters): json_ = {"filter": {"where": {"and": [filters]}}} return json_ - def _get_models_list(self, response): - if not response.ok: - raise ValueError("Unknown error") - + @staticmethod + def _get_objects_list(response): data = response.json()["modelList"] - self.logger.debug(data) return data def _log_models_list(self, models): diff --git a/tests/functional/test_deployments.py b/tests/functional/test_deployments.py index 3201582..d57af56 100644 --- a/tests/functional/test_deployments.py +++ b/tests/functional/test_deployments.py @@ -187,6 +187,20 @@ def test_should_send_get_request_and_print_list_of_deployments_filtered_with_sta params=None) assert result.output == "No deployments found\n" + @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.get") + def test_should_print_proper_message_when_wrong_api_key_was_used(self, get_patched): + get_patched.return_value = MockResponse({"status": 400, "message": "Invalid API token"}, + 400) + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND) + + get_patched.assert_called_once_with(self.URL, + headers=EXPECTED_HEADERS, + json=None, + params=None) + assert result.output == "Invalid API token\n" + class TestDeploymentsUpdate(object): URL = "https://api.paperspace.io/deployments/updateDeployment/" @@ -264,7 +278,7 @@ class TestStartDeployment(object): @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.post") def test_should_send_proper_data_and_print_message_when_deployments_start_was_used(self, post_patched): - post_patched.return_value = MockResponse(None, 204, "fake content") + post_patched.return_value = MockResponse(status_code=204) runner = CliRunner() result = runner.invoke(cli.cli, self.COMMAND) @@ -297,7 +311,7 @@ 
class TestDeleteDeployment(object): @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.post") def test_should_send_proper_data_and_print_message_when_deployments_delete_was_used(self, post_patched): - post_patched.return_value = MockResponse(None, 204, "fake content") + post_patched.return_value = MockResponse(status_code=204) runner = CliRunner() result = runner.invoke(cli.cli, self.COMMAND) @@ -311,7 +325,7 @@ def test_should_send_proper_data_and_print_message_when_deployments_delete_was_u @mock.patch("paperspace.cli.cli.deployments_commands.client.requests.post") def test_should_send_proper_data_with_custom_api_key_when_api_key_parameter_was_provided(self, post_patched): - post_patched.return_value = MockResponse(None, 204, "fake content") + post_patched.return_value = MockResponse(status_code=204) runner = CliRunner() result = runner.invoke(cli.cli, self.COMMAND_WITH_API_KEY) From 1ea75335eb2e7de7f30230ee42dae2514c56287d Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Mon, 29 Apr 2019 15:16:20 +0200 Subject: [PATCH 19/42] Fixed variable names --- paperspace/cli/cli.py | 2 +- paperspace/cli/models.py | 10 +++++----- paperspace/commands/deployments.py | 18 +++++++++--------- 3 files changed, 15 insertions(+), 15 deletions(-) diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py index cfce783..8b53aff 100644 --- a/paperspace/cli/cli.py +++ b/paperspace/cli/cli.py @@ -456,7 +456,7 @@ def get_deployments_list(api_key=None, **filters): def update_deployment_model(id_, api_key, **kwargs): del_if_value_is_none(kwargs) deployments_api = client.API(config.CONFIG_HOST, api_key=api_key) - command = deployments_commands.UpdateModelCommand(api=deployments_api) + command = deployments_commands.UpdateDeploymentCommand(api=deployments_api) command.execute(id_, kwargs) diff --git a/paperspace/cli/models.py b/paperspace/cli/models.py index 398c1ed..3686b39 100644 --- a/paperspace/cli/models.py +++ b/paperspace/cli/models.py @@ -14,16 +14,16 @@ def models_group(): @click.option( "--experimentId", "experimentId", - help="Use to filter jobs by experiment ID", + help="Use to filter by experiment ID", ) @click.option( "--projectId", "projectId", - help="Use to filter jobs by project ID", + help="Use to filter by project ID", ) @common.api_key_option -def list_jobs(api_key, **filters): +def list_models(api_key, **filters): common.del_if_value_is_none(filters) - jobs_api = client.API(config.CONFIG_HOST, api_key=api_key) - command = models_commands.ListModelsCommand(api=jobs_api) + models_api = client.API(config.CONFIG_HOST, api_key=api_key) + command = models_commands.ListModelsCommand(api=models_api) command.execute(filters) diff --git a/paperspace/commands/deployments.py b/paperspace/commands/deployments.py index 4193530..3e7a768 100644 --- a/paperspace/commands/deployments.py +++ b/paperspace/commands/deployments.py @@ -49,11 +49,11 @@ def execute(self, filters=None): if not response.ok: self.logger.log_error_response(data) return - models = self._get_deployments_list(response) + deployments = self._get_deployments_list(response) except (ValueError, KeyError) as e: self.logger.log("Error while parsing response data: {}".format(e)) else: - self._log_deployments_list(models) + self._log_deployments_list(deployments) @staticmethod def _get_request_json(filters): @@ -98,13 +98,13 @@ def _make_deployments_list_table(deployments): return table_string -class UpdateModelCommand(_DeploymentCommandBase): - def execute(self, model_id, kwargs): +class 
UpdateDeploymentCommand(_DeploymentCommandBase): + def execute(self, deployment_id, kwargs): if not kwargs: self.logger.log("No parameters to update were given. Use --help for more information.") return - json_ = {"id": model_id, + json_ = {"id": deployment_id, "upd": kwargs} response = self.api.post("/deployments/updateDeployment/", json=json_) self._log_message(response, @@ -113,8 +113,8 @@ def execute(self, model_id, kwargs): class StartDeploymentCommand(_DeploymentCommandBase): - def execute(self, model_id): - json_ = {"id": model_id, + def execute(self, deployment_id): + json_ = {"id": deployment_id, "isRunning": True} response = self.api.post("/deployments/updateDeployment/", json=json_) self._log_message(response, @@ -123,8 +123,8 @@ def execute(self, model_id): class DeleteDeploymentCommand(_DeploymentCommandBase): - def execute(self, model_id): - json_ = {"id": model_id, + def execute(self, deployment_id): + json_ = {"id": deployment_id, "upd": {"isDeleted": True}} response = self.api.post("/deployments/updateDeployment/", json=json_) self._log_message(response, From f80f436c9a03e112320c5931a203bede0642ea04 Mon Sep 17 00:00:00 2001 From: kossak Date: Mon, 29 Apr 2019 17:35:11 +0200 Subject: [PATCH 20/42] workspace uploading some minor refactoring --- Pipfile | 3 + paperspace/cli/cli.py | 21 +++- paperspace/client.py | 4 +- paperspace/commands/experiments.py | 160 +++++++++++++++++++++++++---- paperspace/commands/machines.py | 8 +- paperspace/exceptions.py | 14 ++- setup.py | 3 +- 7 files changed, 178 insertions(+), 35 deletions(-) diff --git a/Pipfile b/Pipfile index 5914077..f364f98 100644 --- a/Pipfile +++ b/Pipfile @@ -6,6 +6,8 @@ verify_ssl = true [packages] e1839a8 = {path = ".",editable = true} requests = {extras = ["security"]} +requests-toolbelt = * +progressbar2 = * cryptography = {extras = ["security"]} "boto3" = "*" botocore = "*" @@ -14,6 +16,7 @@ gradient-statsd = "*" click = "*" terminaltables = "*" + [dev-packages] twine = "*" pypandoc = "*" diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py index 189d5ef..9ff0eb8 100644 --- a/paperspace/cli/cli.py +++ b/paperspace/cli/cli.py @@ -112,10 +112,17 @@ def common_experiments_create_options(f): type=int, help="Port to use in new experiment", ), + click.option( + "--workspace", + "workspace", + required=False, + help="Path to workspace directory or archive", + default="." 
+ ), click.option( "--workspaceUrl", "workspaceUrl", - required=True, + required=False, help="Project git repository url", ), click.option( @@ -309,7 +316,8 @@ def common_experiments_create_single_node_options(f): def create_multi_node(api_key, **kwargs): del_if_value_is_none(kwargs) experiments_api = client.API(config.CONFIG_EXPERIMENTS_HOST, api_key=api_key) - experiments_commands.create_experiment(kwargs, api=experiments_api) + command = experiments_commands.CreateExperimentCommand(api=experiments_api) + command.execute(kwargs) @create_experiment.command(name="singlenode", help="Create single node experiment") @@ -319,7 +327,8 @@ def create_single_node(api_key, **kwargs): kwargs["experimentTypeId"] = constants.ExperimentType.SINGLE_NODE del_if_value_is_none(kwargs) experiments_api = client.API(config.CONFIG_EXPERIMENTS_HOST, api_key=api_key) - experiments_commands.create_experiment(kwargs, api=experiments_api) + command = experiments_commands.CreateExperimentCommand(api=experiments_api) + command.execute(kwargs) @create_and_start_experiment.command(name="multinode", help="Create and start new multi node experiment") @@ -328,7 +337,8 @@ def create_single_node(api_key, **kwargs): def create_and_start_multi_node(api_key, **kwargs): del_if_value_is_none(kwargs) experiments_api = client.API(config.CONFIG_EXPERIMENTS_HOST, api_key=api_key) - experiments_commands.create_and_start_experiment(kwargs, api=experiments_api) + command = experiments_commands.CreateAndStartExperimentCommand(api=experiments_api) + command.execute(kwargs) @create_and_start_experiment.command(name="singlenode", help="Create and start new single node experiment") @@ -338,7 +348,8 @@ def create_and_start_single_node(api_key, **kwargs): kwargs["experimentTypeId"] = constants.ExperimentType.SINGLE_NODE del_if_value_is_none(kwargs) experiments_api = client.API(config.CONFIG_EXPERIMENTS_HOST, api_key=api_key) - experiments_commands.create_and_start_experiment(kwargs, api=experiments_api) + command = experiments_commands.CreateAndStartExperimentCommand(api=experiments_api) + command.execute(kwargs) @experiments.command("start", help="Start experiment") diff --git a/paperspace/client.py b/paperspace/client.py index d7b6a57..5c330c7 100644 --- a/paperspace/client.py +++ b/paperspace/client.py @@ -28,9 +28,9 @@ def get_path(self, url): template = "{}{}" if url.startswith("/") else "{}/{}" return template.format(api_url, url) - def post(self, url, json=None, params=None): + def post(self, url, json=None, params=None, files=None): path = self.get_path(url) - response = requests.post(path, json=json, params=params, headers=self.headers) + response = requests.post(path, json=json, params=params, headers=self.headers, files=files) logger.debug("POST request sent to: {} \n\theaders: {}\n\tjson: {}\n\tparams: {}" .format(response.url, self.headers, json, params)) logger.debug("Response status code: {}".format(response.status_code)) diff --git a/paperspace/commands/experiments.py b/paperspace/commands/experiments.py index d9941fe..289e079 100644 --- a/paperspace/commands/experiments.py +++ b/paperspace/commands/experiments.py @@ -1,41 +1,157 @@ +import os import pydoc +import zipfile +import click +import progressbar +import requests import terminaltables +from requests_toolbelt.multipart import encoder from paperspace import logger, constants, client, config +from paperspace.commands import CommandBase +from paperspace.exceptions import PresignedUrlUnreachableException, S3UploadFailedException from paperspace.logger import log_response 
from paperspace.utils import get_terminal_lines +# from clint.textui.progress import Bar as ProgressBar + experiments_api = client.API(config.CONFIG_EXPERIMENTS_HOST, headers=client.default_headers) -def _log_create_experiment(response, success_msg_template, error_msg, logger_=logger): - if response.ok: - j = response.json() - handle = j["handle"] - msg = success_msg_template.format(handle) - logger_.log(msg) - else: - try: - data = response.json() - logger_.log_error_response(data) - except ValueError: - logger_.log(error_msg) +class ExperimentCommand(CommandBase): + def _log_create_experiment(self, response, success_msg_template, error_msg): + if response.ok: + j = response.json() + handle = j["handle"] + msg = success_msg_template.format(handle) + self.logger.log(msg) + else: + try: + data = response.json() + self.logger.log_error_response(data) + except ValueError: + self.logger.log(error_msg) + + +class CreateExperimentCommand(ExperimentCommand): + def retrieve_file_paths(self, dirName): + + # setup file paths variable + filePaths = [] + exclude = ['.git'] + # Read all directory, subdirectories and file lists + for root, dirs, files in os.walk(dirName, topdown=True): + dirs[:] = [d for d in dirs if d not in exclude] + for filename in files: + # Create the full filepath by using os module. + filePath = os.path.join(root, filename) + filePaths.append(filePath) + + # return all paths + return filePaths + + def _zip_workspace(self, workspace_path): + if not workspace_path: + workspace_path = '.' + zip_file_name = os.path.basename(os.getcwd()) + '.zip' + else: + zip_file_name = os.path.basename(workspace_path) + '.zip' + + zip_file_path = os.path.join(workspace_path, zip_file_name) + + if os.path.exists(zip_file_path): + self.logger.log('Removing existing archive') + os.remove(zip_file_path) + + file_paths = self.retrieve_file_paths(workspace_path) + + self.logger.log('Creating zip archive: %s' % zip_file_name) + zip_file = zipfile.ZipFile(zip_file_path, 'w') + + bar = progressbar.ProgressBar(max_value=len(file_paths)) + + with zip_file: + i = 0 + for file in file_paths: + i+=1 + self.logger.debug('Adding %s to archive' % file) + zip_file.write(file) + bar.update(i) + bar.finish() + self.logger.log('\nFinished creating archive: %s' % zip_file_name) + return zip_file_path + + def _create_callback(self, encoder_obj): + bar = progressbar.ProgressBar(max_value=encoder_obj.len) + + def callback(monitor): + bar.update(monitor.bytes_read) + if monitor.bytes_read == monitor.len: + bar.finish() + return callback + + def _upload_workspace(self, input_data): + workspace_url = input_data.get('workspaceUrl') + workspace_path = input_data.get('workspacePath') + workspace_archive = input_data.get('workspaceArchive') + if (workspace_archive and workspace_path) or (workspace_archive and workspace_url) or ( + workspace_path and workspace_url): + raise click.UsageError("Use either:\n\t--workspaceUrl to point repository URL" + "\n\t--workspacePath to point on project directory" + "\n\t--workspaceArchive to point on project ZIP archive" + "\n or neither to use current directory") + + if workspace_url: + return # nothing to do + + if workspace_archive: + archive_path = os.path.abspath(workspace_archive) + else: + archive_path = self._zip_workspace(workspace_path) + + s3_upload_data = self._get_upload_data(os.path.basename(archive_path)) + + self.logger.log('Uploading zipped workspace to S3') + + files = {'file': (archive_path, open(archive_path, 'rb'))} + fields = s3_upload_data['fields'] + fields.update(files) 
+ + s3_encoder = encoder.MultipartEncoder(fields=fields) + monitor = encoder.MultipartEncoderMonitor(s3_encoder, callback=self._create_callback(s3_encoder)) + s3_response = requests.post(s3_upload_data['url'], data=monitor, headers={'Content-Type': monitor.content_type}) + if not s3_response.ok: + raise S3UploadFailedException(s3_response) + + self.logger.log('\nUploading completed') + s3_workspace_url = s3_response.headers.get('Location') + return s3_workspace_url + + def execute(self, json_): + workspace_url = self._upload_workspace(json_) + if workspace_url: + json_['workspaceUrl'] = workspace_url + response = self.api.post("/experiments/", json=json_) -def create_experiment(json_, api=experiments_api): - response = api.post("/experiments/", json=json_) + self._log_create_experiment(response, + "New experiment created with handle: {}", + "Unknown error while creating the experiment") - _log_create_experiment(response, - "New experiment created with handle: {}", - "Unknown error while creating the experiment") + def _get_upload_data(self, file_name): + response = self.api.get("/workspace/get_presigned_url", params={'workspaceName': file_name}) + if response.status_code == 404: + raise PresignedUrlUnreachableException + return response.json() -def create_and_start_experiment(json_, api=experiments_api): - response = api.post("/experiments/create_and_start/", json=json_) - _log_create_experiment(response, - "New experiment created and started with handle: {}", - "Unknown error while creating/starting the experiment") +class CreateAndStartExperimentCommand(ExperimentCommand): + def execute(self, json_): + response = self.api.post("/experiments/create_and_start/", json=json_) + self._log_create_experiment(response, + "New experiment created and started with handle: {}", + "Unknown error while creating/starting the experiment") def start_experiment(experiment_handle, api=experiments_api): diff --git a/paperspace/commands/machines.py b/paperspace/commands/machines.py index 8c4d6b0..528fd5d 100644 --- a/paperspace/commands/machines.py +++ b/paperspace/commands/machines.py @@ -4,7 +4,7 @@ import terminaltables from paperspace.commands import CommandBase -from paperspace.exceptions import BadResponse +from paperspace.exceptions import BadResponseException from paperspace.utils import get_terminal_lines @@ -228,7 +228,7 @@ def execute(self, machine_id, state, interval=5): while True: try: current_state = self._get_machine_state(machine_id) - except BadResponse as e: + except BadResponseException as e: self.logger.log(e) return else: @@ -246,8 +246,8 @@ def _get_machine_state(self, machine_id): json_ = response.json() if not response.ok: self.logger.log_error_response(json_) - raise BadResponse("Error while reading machine state") + raise BadResponseException("Error while reading machine state") state = json_.get("state") except (ValueError, AttributeError): - raise BadResponse("Unknown error while reading machine state") + raise BadResponseException("Unknown error while reading machine state") return state diff --git a/paperspace/exceptions.py b/paperspace/exceptions.py index c389b23..cc43760 100644 --- a/paperspace/exceptions.py +++ b/paperspace/exceptions.py @@ -1,2 +1,14 @@ -class BadResponse(Exception): +class ApplicationException(Exception): + pass + + +class BadResponseException(ApplicationException): + pass + + +class PresignedUrlUnreachableException(ApplicationException): + pass + + +class S3UploadFailedException(ApplicationException): pass diff --git a/setup.py b/setup.py index 
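Reduced to its essentials, the upload half of this flow is the requests-toolbelt streaming-multipart pattern sketched below. This is a simplification rather than the patch's code: the presigned url/fields pair is assumed to be what the get_presigned_url endpoint returns, and the progressbar2 bar is replaced by a bare callback.

    import requests
    from requests_toolbelt.multipart import encoder

    def upload_archive(presigned, archive_path):
        # presigned is assumed to look like {"url": ..., "fields": {...}}.
        with open(archive_path, "rb") as archive:
            fields = dict(presigned["fields"])
            fields["file"] = (archive_path, archive)

            multipart = encoder.MultipartEncoder(fields=fields)

            def progress(monitor):
                # Stand-in for the progressbar2 callback used by the command.
                print("uploaded {} of {} bytes".format(monitor.bytes_read, monitor.len))

            monitor = encoder.MultipartEncoderMonitor(multipart, progress)
            response = requests.post(presigned["url"], data=monitor,
                                     headers={"Content-Type": monitor.content_type})
            response.raise_for_status()
            # The command reads the uploaded workspace URL from this header.
            return response.headers.get("Location")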
9706983..f83b28b 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,8 @@ ], keywords='paperspace api development library', packages=find_packages(exclude=['contrib', 'docs', 'tests', 'old_tests']), - install_requires=['requests[security]', 'boto3', 'botocore', 'six', 'gradient-statsd', 'click', 'terminaltables'], + install_requires=['requests[security]', 'boto3', 'botocore', 'six', 'gradient-statsd', 'click', 'terminaltables', + 'requests-toolbelt', 'progressbar2'], entry_points={'console_scripts': [ 'paperspace-python = paperspace.main:main', ]}, From 7bba905f9551954cc62676c0b13d147f7180766a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Mudlaff?= Date: Tue, 30 Apr 2019 15:48:39 +0200 Subject: [PATCH 21/42] Feature PS-9868: Complete logic for retrieve logs for job. Update project requirements. --- Pipfile | 1 + Pipfile.lock | 150 ++++------------------ paperspace/cli/cli.py | 6 +- paperspace/cli/{types.py => cli_types.py} | 0 paperspace/commands/logs.py | 73 +++++++---- setup.py | 4 +- 6 files changed, 84 insertions(+), 150 deletions(-) rename paperspace/cli/{types.py => cli_types.py} (100%) diff --git a/Pipfile b/Pipfile index 5914077..cbdef86 100644 --- a/Pipfile +++ b/Pipfile @@ -13,6 +13,7 @@ six = "*" gradient-statsd = "*" click = "*" terminaltables = "*" +colorclass = "*" [dev-packages] twine = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 4343f67..6956738 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "8386f8e15ffbb940a60e2ec83003e35cae5b6a86a16fbbbd366100c0970cd2a4" + "sha256": "621baffc562273b5632852b94190868a39c486b1bee0c3e9b4d791d91732902f" }, "pipfile-spec": 6, "requires": {}, @@ -23,19 +23,19 @@ }, "boto3": { "hashes": [ - "sha256:883f7143bcb081a834f7c09659524059b66745ea043fffd40420e88ef0143feb", - "sha256:9c789a775f0499743b083ffd63e0e87dae9a727511bb37f2529da52ccd25a360" + "sha256:882cc4869b47b51dae4b4a900769e72171ff00e0b6bca644b2d7a7ad7378f324", + "sha256:cd503a7e7a04f1c14d2801f9727159dfa88c393b4004e98940fa4aa205d920c8" ], "index": "pypi", - "version": "==1.9.134" + "version": "==1.9.137" }, "botocore": { "hashes": [ - "sha256:5c4d9ea1b0fbb1dc98b6a06ed8780096fca981a1c3599bf8f03f338e6aa389ae", - "sha256:c59a74539eb081f4b3a307fc5c3d69d8459e30bfaf4b94aa78e74a9a05583764" + "sha256:0d95794f6b1239c75e2c5f966221bcd4b68020fddb5676f757531eedbb612ed8", + "sha256:3213cf48cf2ceee10fc3b93221f2cd1c38521cca7584f547d5c086213cc60f35" ], "index": "pypi", - "version": "==1.12.134" + "version": "==1.12.137" }, "certifi": { "hashes": [ @@ -92,11 +92,12 @@ "index": "pypi", "version": "==7.0" }, - "click-completion": { + "colorclass": { "hashes": [ - "sha256:78072eecd5e25ea0d25ceaf99cd5f22aa2667d67231ae0819deab9b1ff3456fb" + "sha256:b05c2a348dfc1aff2d502527d78a5b7b7e2f85da94a96c5081210d8e9ee8e18b" ], - "version": "==0.5.1" + "index": "pypi", + "version": "==2.2.0" }, "cryptography": { "extras": [ @@ -124,6 +125,7 @@ "sha256:e603aa7bb52e4e8ed4119a58a03b60323918467ef209e6ff9db3ac382e5cf2c6" ], "index": "pypi", + "markers": null, "version": "==2.6.1" }, "datadog": { @@ -152,24 +154,6 @@ "editable": true, "path": "." 
}, - "enum34": { - "hashes": [ - "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", - "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", - "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", - "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" - ], - "markers": "python_version < '3'", - "version": "==1.1.6" - }, - "futures": { - "hashes": [ - "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265", - "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1" - ], - "markers": "python_version == '2.6' or python_version == '2.7'", - "version": "==3.2.0" - }, "gradient-statsd": { "hashes": [ "sha256:12965d471cc3e203464229c44839c5a8f67a665ecc4f00e807b88351eb30a565", @@ -183,23 +167,9 @@ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" ], + "markers": "extra == 'security'", "version": "==2.8" }, - "ipaddress": { - "hashes": [ - "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794", - "sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c" - ], - "markers": "python_version < '3'", - "version": "==1.0.22" - }, - "jinja2": { - "hashes": [ - "sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013", - "sha256:14dd6caf1527abb21f08f86c784eac40853ba93edb79552aa1e4b8aef1b61c7b" - ], - "version": "==2.10.1" - }, "jmespath": { "hashes": [ "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6", @@ -207,39 +177,6 @@ ], "version": "==0.9.4" }, - "markupsafe": { - "hashes": [ - "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", - "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", - "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", - "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", - "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", - "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", - "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", - "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", - "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", - "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", - "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", - "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", - "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", - "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", - "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", - "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", - "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", - "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", - "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", - "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", - "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", - "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", - "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", - "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", - 
"sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", - "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", - "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", - "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7" - ], - "version": "==1.1.1" - }, "pycparser": { "hashes": [ "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" @@ -251,6 +188,7 @@ "sha256:aeca66338f6de19d1aa46ed634c3b9ae519a64b458f8468aec688e7e3c20f200", "sha256:c727930ad54b10fc157015014b666f2d8b41f70c0d03e83ab67624fd3dd5d1e6" ], + "markers": "extra == 'security'", "version": "==19.0.0" }, "python-dateutil": { @@ -279,13 +217,6 @@ ], "version": "==0.2.0" }, - "shellingham": { - "hashes": [ - "sha256:77d37a4fd287c1e663006f7ecf1b9deca9ad492d0082587bd813c44eb49e4e62", - "sha256:985b23bbd1feae47ca6a6365eacd314d93d95a8a16f8f346945074c28fe6f3e0" - ], - "version": "==1.3.1" - }, "six": { "hashes": [ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", @@ -306,7 +237,7 @@ "sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0", "sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3" ], - "markers": "python_version == '2.7'", + "markers": "python_version >= '3.4'", "version": "==1.24.2" } }, @@ -391,19 +322,12 @@ ], "version": "==0.14" }, - "funcsigs": { - "hashes": [ - "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", - "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" - ], - "markers": "python_version < '3.3'", - "version": "==1.0.2" - }, "idna": { "hashes": [ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" ], + "markers": "extra == 'security'", "version": "==2.8" }, "mock": { @@ -416,27 +340,18 @@ }, "more-itertools": { "hashes": [ - "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", - "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", - "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" + "sha256:2112d2ca570bb7c3e53ea1a35cd5df42bb0fd10c45f0fb97178679c3c03d64c7", + "sha256:c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a" ], - "markers": "python_version <= '2.7'", - "version": "==5.0.0" - }, - "pathlib2": { - "hashes": [ - "sha256:25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742", - "sha256:5887121d7f7df3603bca2f710e7219f3eca0eb69e0b7cc6e0a022e155ac931a7" - ], - "markers": "python_version < '3.6'", - "version": "==2.3.3" + "markers": "python_version > '2.7'", + "version": "==7.0.0" }, "pbr": { "hashes": [ - "sha256:8257baf496c8522437e8a6cfe0f15e00aedc6c0e0e7c9d55eeeeab31e0853843", - "sha256:8c361cc353d988e4f5b998555c88098b9d5964c2e11acf7b0d21925a66bb5824" + "sha256:6901995b9b686cb90cceba67a0f6d4d14ae003cd59bc12beb61549bdfbe3bc89", + "sha256:d950c64aeea5456bbd147468382a5bb77fe692c13c9f00f0219814ce5b642755" ], - "version": "==5.1.3" + "version": "==5.2.0" }, "pkginfo": { "hashes": [ @@ -506,23 +421,6 @@ ], "version": "==0.9.1" }, - "scandir": { - "hashes": [ - "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", - "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022", - "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f", - "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f", - 
"sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae", - "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173", - "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4", - "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32", - "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188", - "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d", - "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac" - ], - "markers": "python_version < '3.5'", - "version": "==1.10.0" - }, "six": { "hashes": [ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", @@ -551,7 +449,7 @@ "sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0", "sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3" ], - "markers": "python_version == '2.7'", + "markers": "python_version >= '3.4'", "version": "==1.24.2" }, "webencodings": { diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py index 61c3c17..7a24f2d 100644 --- a/paperspace/cli/cli.py +++ b/paperspace/cli/cli.py @@ -8,7 +8,7 @@ from paperspace.cli.jobs import jobs_group from paperspace.cli.logs import logs_group from paperspace.cli.projects import projects_group -from paperspace.cli.types import ChoiceType, json_string +from paperspace.cli.cli_types import ChoiceType, json_string from paperspace.cli.validators import validate_mutually_exclusive, validate_email from paperspace.commands import experiments as experiments_commands, deployments as deployments_commands, \ machines as machines_commands, login as login_commands @@ -1056,3 +1056,7 @@ def version(): cli.add_command(jobs_group) cli.add_command(projects_group) cli.add_command(logs_group) + + +if __name__ == '__main__': + cli() diff --git a/paperspace/cli/types.py b/paperspace/cli/cli_types.py similarity index 100% rename from paperspace/cli/types.py rename to paperspace/cli/cli_types.py diff --git a/paperspace/commands/logs.py b/paperspace/commands/logs.py index 266e73c..35f5d86 100644 --- a/paperspace/commands/logs.py +++ b/paperspace/commands/logs.py @@ -1,30 +1,59 @@ +import pydoc + +import terminaltables +from colorclass import Color + from paperspace.commands import CommandBase +from paperspace.utils import get_terminal_lines + + +class ListLogsCommand(CommandBase): + last_line_number = 0 + base_url = "/jobs/logs?jobId={}&line={}" + is_logs_complete = False + + def execute(self, job_id): + table_data = [("LINE", "MESSAGE")] + table = terminaltables.AsciiTable(table_data, title=f"Job {job_id} logs") + + while not self.is_logs_complete: + response = self._get_logs(job_id) -class LogsCommandBase(CommandBase): - def _log_message(self, response, success_msg_template, error_msg): - if response.ok: try: - handle = response.json() - except (ValueError, KeyError): - self.logger.log(success_msg_template) + data = response.json() + if not response.ok: + self.logger.log_error_response(data) + return + except (ValueError, KeyError) as e: + if response.status_code == 204: + continue + self.logger.log("Error while parsing response data: {}".format(e)) + return else: - msg = success_msg_template.format(**handle) - self.logger.log(msg) + self._log_logs_list(data, table, table_data) + + def _get_logs(self, job_id): + url = self.base_url.format(job_id, self.last_line_number) + return self.api.get(url) + + def _log_logs_list(self, data, table, table_data): + if not data: + self.logger.log("No Logs found") else: - try: - data = 
response.json() - self.logger.log_error_response(data) - except ValueError: - self.logger.log(error_msg) + table_str = self._make_table(data, table, table_data) + if len(table_str.splitlines()) > get_terminal_lines(): + pydoc.pager(table_str) + else: + self.logger.log(table_str) + + def _make_table(self, logs, table, table_data): + if logs[-1].get("message") == "PSEOF": + self.is_logs_complete = True + else: + self.last_line_number = logs[-1].get("line") + for log in logs: + table_data.append((Color.colorize("red", log.get("line")), log.get("message"))) -class ListLogsCommand(LogsCommandBase): - def execute(self, job_id): - url = f"/jobs/logs?jobId={job_id}" - response = self.api.get(url) - self._log_message( - response, - "Job logs retrieved", - "Unknown error while retrieving job logs" - ) + return table.table diff --git a/setup.py b/setup.py index 9706983..1ac4c45 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,9 @@ ], keywords='paperspace api development library', packages=find_packages(exclude=['contrib', 'docs', 'tests', 'old_tests']), - install_requires=['requests[security]', 'boto3', 'botocore', 'six', 'gradient-statsd', 'click', 'terminaltables'], + install_requires=[ + 'requests[security]', 'boto3', 'botocore', 'six', 'gradient-statsd', 'click', 'terminaltables', 'colorclass' + ], entry_points={'console_scripts': [ 'paperspace-python = paperspace.main:main', ]}, From 47868f69e1fb1a50b71d278c134828041f2601f5 Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Mon, 29 Apr 2019 16:23:22 +0200 Subject: [PATCH 22/42] Add git-like 'did you mean ...' --- Pipfile | 1 + Pipfile.lock | 76 ++++++++------------------------------ paperspace/cli/cli.py | 14 ++++--- paperspace/cli/jobs.py | 3 +- paperspace/cli/models.py | 3 +- paperspace/cli/projects.py | 3 +- 6 files changed, 30 insertions(+), 70 deletions(-) diff --git a/Pipfile b/Pipfile index 5914077..490763f 100644 --- a/Pipfile +++ b/Pipfile @@ -13,6 +13,7 @@ six = "*" gradient-statsd = "*" click = "*" terminaltables = "*" +click-didyoumean = "*" [dev-packages] twine = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 4343f67..801beb2 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "8386f8e15ffbb940a60e2ec83003e35cae5b6a86a16fbbbd366100c0970cd2a4" + "sha256": "a378eef190e18d970af4d1252ca154525221fa39a667116adf8f08389c842717" }, "pipfile-spec": 6, "requires": {}, @@ -23,19 +23,19 @@ }, "boto3": { "hashes": [ - "sha256:883f7143bcb081a834f7c09659524059b66745ea043fffd40420e88ef0143feb", - "sha256:9c789a775f0499743b083ffd63e0e87dae9a727511bb37f2529da52ccd25a360" + "sha256:882cc4869b47b51dae4b4a900769e72171ff00e0b6bca644b2d7a7ad7378f324", + "sha256:cd503a7e7a04f1c14d2801f9727159dfa88c393b4004e98940fa4aa205d920c8" ], "index": "pypi", - "version": "==1.9.134" + "version": "==1.9.137" }, "botocore": { "hashes": [ - "sha256:5c4d9ea1b0fbb1dc98b6a06ed8780096fca981a1c3599bf8f03f338e6aa389ae", - "sha256:c59a74539eb081f4b3a307fc5c3d69d8459e30bfaf4b94aa78e74a9a05583764" + "sha256:0d95794f6b1239c75e2c5f966221bcd4b68020fddb5676f757531eedbb612ed8", + "sha256:3213cf48cf2ceee10fc3b93221f2cd1c38521cca7584f547d5c086213cc60f35" ], "index": "pypi", - "version": "==1.12.134" + "version": "==1.12.137" }, "certifi": { "hashes": [ @@ -92,11 +92,12 @@ "index": "pypi", "version": "==7.0" }, - "click-completion": { + "click-didyoumean": { "hashes": [ - "sha256:78072eecd5e25ea0d25ceaf99cd5f22aa2667d67231ae0819deab9b1ff3456fb" + "sha256:112229485c9704ff51362fe34b2d4f0b12fc71cc20f6d2b3afabed4b8bfa6aeb" ], - "version": 
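Stripped of the table rendering, the polling contract the command relies on is: request /jobs/logs?jobId=<id>&line=<last line seen>, append whatever comes back, and stop once a message equal to "PSEOF" appears. A minimal sketch of just that loop, where the api object is assumed to be a paperspace.client.API instance and error handling is reduced to raise_for_status:

    import time

    def fetch_job_logs(api, job_id):
        # Follows the same endpoint and "PSEOF" sentinel as ListLogsCommand.
        last_line = 0
        messages = []
        while True:
            response = api.get("/jobs/logs?jobId={}&line={}".format(job_id, last_line))
            if response.status_code == 204:      # no new log lines yet
                time.sleep(1)                    # brief pause; the command itself polls back-to-back
                continue
            response.raise_for_status()
            data = response.json()
            if not data:
                break
            messages.extend(entry.get("message") for entry in data)
            if data[-1].get("message") == "PSEOF":  # end-of-stream marker from the API
                break
            last_line = data[-1].get("line")
        return messages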
"==0.5.1" + "index": "pypi", + "version": "==0.0.3" }, "cryptography": { "extras": [ @@ -193,13 +194,6 @@ "markers": "python_version < '3'", "version": "==1.0.22" }, - "jinja2": { - "hashes": [ - "sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013", - "sha256:14dd6caf1527abb21f08f86c784eac40853ba93edb79552aa1e4b8aef1b61c7b" - ], - "version": "==2.10.1" - }, "jmespath": { "hashes": [ "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6", @@ -207,39 +201,6 @@ ], "version": "==0.9.4" }, - "markupsafe": { - "hashes": [ - "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", - "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", - "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", - "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", - "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", - "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", - "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", - "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", - "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", - "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", - "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", - "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", - "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", - "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", - "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", - "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", - "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", - "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", - "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", - "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", - "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", - "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", - "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", - "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", - "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", - "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", - "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", - "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7" - ], - "version": "==1.1.1" - }, "pycparser": { "hashes": [ "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" @@ -279,13 +240,6 @@ ], "version": "==0.2.0" }, - "shellingham": { - "hashes": [ - "sha256:77d37a4fd287c1e663006f7ecf1b9deca9ad492d0082587bd813c44eb49e4e62", - "sha256:985b23bbd1feae47ca6a6365eacd314d93d95a8a16f8f346945074c28fe6f3e0" - ], - "version": "==1.3.1" - }, "six": { "hashes": [ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", @@ -396,7 +350,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "idna": { @@ -433,10 +387,10 @@ }, "pbr": { "hashes": [ - 
"sha256:8257baf496c8522437e8a6cfe0f15e00aedc6c0e0e7c9d55eeeeab31e0853843", - "sha256:8c361cc353d988e4f5b998555c88098b9d5964c2e11acf7b0d21925a66bb5824" + "sha256:6901995b9b686cb90cceba67a0f6d4d14ae003cd59bc12beb61549bdfbe3bc89", + "sha256:d950c64aeea5456bbd147468382a5bb77fe692c13c9f00f0219814ce5b642755" ], - "version": "==5.1.3" + "version": "==5.2.0" }, "pkginfo": { "hashes": [ diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py index 8b53aff..ddcd07c 100644 --- a/paperspace/cli/cli.py +++ b/paperspace/cli/cli.py @@ -3,6 +3,8 @@ import click +from click_didyoumean import DYMGroup + from paperspace import constants, client, config from paperspace.cli.common import api_key_option, del_if_value_is_none from paperspace.cli.jobs import jobs_group @@ -21,22 +23,22 @@ ) -@click.group() +@click.group(cls=DYMGroup) def cli(): pass -@cli.group("experiments", help="Manage experiments") +@cli.group("experiments", help="Manage experiments", cls=DYMGroup) def experiments(): pass -@experiments.group("create", help="Create new experiment") +@experiments.group("create", help="Create new experiment", cls=DYMGroup) def create_experiment(): pass -@experiments.group(name="createAndStart", help="Create and start new experiment") +@experiments.group(name="createAndStart", help="Create and start new experiment", cls=DYMGroup) def create_and_start_experiment(): pass @@ -328,7 +330,7 @@ def get_experiment_details(experiment_handle, api_key): ) -@cli.group("deployments", help="Manage deployments") +@cli.group("deployments", help="Manage deployments", cls=DYMGroup) def deployments(): pass @@ -497,7 +499,7 @@ def delete_deployment(id_, api_key=None): ) -@cli.group("machines", help="Manage machines") +@cli.group("machines", help="Manage machines", cls=DYMGroup) def machines_group(): pass diff --git a/paperspace/cli/jobs.py b/paperspace/cli/jobs.py index 4aafccb..0ea378b 100644 --- a/paperspace/cli/jobs.py +++ b/paperspace/cli/jobs.py @@ -1,11 +1,12 @@ import click +from click_didyoumean import DYMGroup from paperspace import client, config from paperspace.cli import common from paperspace.commands import jobs as jobs_commands -@click.group("jobs", help="Manage gradient jobs") +@click.group("jobs", help="Manage gradient jobs", cls=DYMGroup) def jobs_group(): pass diff --git a/paperspace/cli/models.py b/paperspace/cli/models.py index 3686b39..ba27cc9 100644 --- a/paperspace/cli/models.py +++ b/paperspace/cli/models.py @@ -1,11 +1,12 @@ import click +from click_didyoumean import DYMGroup from paperspace import client, config from paperspace.cli import common from paperspace.commands import models as models_commands -@click.group("models", help="Manage models") +@click.group("models", help="Manage models", cls=DYMGroup) def models_group(): pass diff --git a/paperspace/cli/projects.py b/paperspace/cli/projects.py index 26e9a4b..39e6ced 100644 --- a/paperspace/cli/projects.py +++ b/paperspace/cli/projects.py @@ -1,11 +1,12 @@ import click +from click_didyoumean import DYMGroup from paperspace import client, config from paperspace.commands import projects as projects_commands from . 
import common -@click.group("projects", help="Manage projects") +@click.group("projects", help="Manage projects", cls=DYMGroup) def projects_group(): pass From e640ebee40c39cca06c0eeaf07653264568e2d76 Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Tue, 30 Apr 2019 17:23:44 +0200 Subject: [PATCH 23/42] Add colors to help and log errors in red --- Pipfile | 1 + Pipfile.lock | 24 ++++++++++++------ paperspace/cli/cli.py | 18 ++++++++------ paperspace/cli/common.py | 8 +++++- paperspace/cli/jobs.py | 3 +-- paperspace/cli/models.py | 3 +-- paperspace/cli/projects.py | 3 +-- paperspace/commands/deployments.py | 8 +++--- paperspace/commands/experiments.py | 9 +++---- paperspace/commands/jobs.py | 6 ++--- paperspace/commands/machines.py | 12 ++++----- paperspace/commands/models.py | 8 +++--- paperspace/commands/projects.py | 4 +-- paperspace/config.py | 20 +++++++++++++++ paperspace/logger.py | 39 +++++++++++++++++++++--------- setup.py | 17 ++++++++++++- 16 files changed, 122 insertions(+), 61 deletions(-) diff --git a/Pipfile b/Pipfile index 490763f..b518e0c 100644 --- a/Pipfile +++ b/Pipfile @@ -14,6 +14,7 @@ gradient-statsd = "*" click = "*" terminaltables = "*" click-didyoumean = "*" +click-help-colors = "*" [dev-packages] twine = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 801beb2..54c6ce6 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "a378eef190e18d970af4d1252ca154525221fa39a667116adf8f08389c842717" + "sha256": "a622daedf1f812b7e107f97026073540719f1b4d6616498bb15ea6e3d461e5cd" }, "pipfile-spec": 6, "requires": {}, @@ -23,19 +23,19 @@ }, "boto3": { "hashes": [ - "sha256:882cc4869b47b51dae4b4a900769e72171ff00e0b6bca644b2d7a7ad7378f324", - "sha256:cd503a7e7a04f1c14d2801f9727159dfa88c393b4004e98940fa4aa205d920c8" + "sha256:679d8084ad40d18908a97c785d614fed554a424924d4ab30e464c16bfe95722b", + "sha256:d5ccb985caf4ea522f2fbfe4fbf270cd1e2c0c6d46ea7d13e9cda6bb6c36deb6" ], "index": "pypi", - "version": "==1.9.137" + "version": "==1.9.138" }, "botocore": { "hashes": [ - "sha256:0d95794f6b1239c75e2c5f966221bcd4b68020fddb5676f757531eedbb612ed8", - "sha256:3213cf48cf2ceee10fc3b93221f2cd1c38521cca7584f547d5c086213cc60f35" + "sha256:73bf439ba6d97606f8acbe4e037cc7a6e7a2b83f080b472c37c22d810c7dd8a8", + "sha256:ff4171f850cfb221b553f32948b93ea0a8d82e636fe121ff08945f4581c21ad9" ], "index": "pypi", - "version": "==1.12.137" + "version": "==1.12.138" }, "certifi": { "hashes": [ @@ -99,6 +99,14 @@ "index": "pypi", "version": "==0.0.3" }, + "click-help-colors": { + "hashes": [ + "sha256:1012611344706d0da2f8642faa61843118824a54c9950c14f5122518eb01ae6b", + "sha256:a5fbdec2e86c6c2bad2529b7a9ab579087cbb0cf62d16fe93f27fe6c297114f7" + ], + "index": "pypi", + "version": "==0.5" + }, "cryptography": { "extras": [ "security" @@ -350,7 +358,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.3'", "version": "==1.0.2" }, "idna": { diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py index ddcd07c..5f99443 100644 --- a/paperspace/cli/cli.py +++ b/paperspace/cli/cli.py @@ -3,9 +3,8 @@ import click -from click_didyoumean import DYMGroup - from paperspace import constants, client, config +from paperspace.cli import common from paperspace.cli.common import api_key_option, del_if_value_is_none from paperspace.cli.jobs import jobs_group from paperspace.cli.models import models_group @@ 
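click-didyoumean only changes what happens on an unknown subcommand: instead of a bare "No such command" error, the group suggests the closest match. A toy group, independent of the CLI in this patch, shows the effect:

    import click
    from click_didyoumean import DYMGroup

    @click.group(cls=DYMGroup)
    def toy():
        pass

    @toy.command()
    def experiments():
        click.echo("listing experiments")

    # Running `toy experimets` now exits with a "did you mean" hint pointing at
    # the `experiments` command instead of a plain "No such command" error.
    if __name__ == "__main__":
        toy()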
-23,22 +22,22 @@ ) -@click.group(cls=DYMGroup) +@click.group(cls=common.ClickGroup, **config.HELP_COLORS_DICT) def cli(): pass -@cli.group("experiments", help="Manage experiments", cls=DYMGroup) +@cli.group("experiments", help="Manage experiments", cls=common.ClickGroup) def experiments(): pass -@experiments.group("create", help="Create new experiment", cls=DYMGroup) +@experiments.group("create", help="Create new experiment", cls=common.ClickGroup) def create_experiment(): pass -@experiments.group(name="createAndStart", help="Create and start new experiment", cls=DYMGroup) +@experiments.group(name="createAndStart", help="Create and start new experiment", cls=common.ClickGroup) def create_and_start_experiment(): pass @@ -330,7 +329,7 @@ def get_experiment_details(experiment_handle, api_key): ) -@cli.group("deployments", help="Manage deployments", cls=DYMGroup) +@cli.group("deployments", help="Manage deployments", cls=common.ClickGroup) def deployments(): pass @@ -499,7 +498,7 @@ def delete_deployment(id_, api_key=None): ) -@cli.group("machines", help="Manage machines", cls=DYMGroup) +@cli.group("machines", help="Manage machines", cls=common.ClickGroup) def machines_group(): pass @@ -1068,3 +1067,6 @@ def version(): cli.add_command(jobs_group) cli.add_command(projects_group) cli.add_command(models_group) + +if __name__ == '__main__': + cli() diff --git a/paperspace/cli/common.py b/paperspace/cli/common.py index d744117..ee06521 100644 --- a/paperspace/cli/common.py +++ b/paperspace/cli/common.py @@ -1,4 +1,6 @@ import click +from click_didyoumean import DYMMixin +from click_help_colors import HelpColorsGroup api_key_option = click.option( "--apiKey", @@ -11,4 +13,8 @@ def del_if_value_is_none(dict_): """Remove all elements with value == None""" for key, val in list(dict_.items()): if val is None: - del dict_[key] \ No newline at end of file + del dict_[key] + + +class ClickGroup(DYMMixin, HelpColorsGroup): + pass diff --git a/paperspace/cli/jobs.py b/paperspace/cli/jobs.py index 0ea378b..332e958 100644 --- a/paperspace/cli/jobs.py +++ b/paperspace/cli/jobs.py @@ -1,12 +1,11 @@ import click -from click_didyoumean import DYMGroup from paperspace import client, config from paperspace.cli import common from paperspace.commands import jobs as jobs_commands -@click.group("jobs", help="Manage gradient jobs", cls=DYMGroup) +@click.group("jobs", help="Manage gradient jobs", cls=common.ClickGroup) def jobs_group(): pass diff --git a/paperspace/cli/models.py b/paperspace/cli/models.py index ba27cc9..f2b2e5c 100644 --- a/paperspace/cli/models.py +++ b/paperspace/cli/models.py @@ -1,12 +1,11 @@ import click -from click_didyoumean import DYMGroup from paperspace import client, config from paperspace.cli import common from paperspace.commands import models as models_commands -@click.group("models", help="Manage models", cls=DYMGroup) +@click.group("models", help="Manage models", cls=common.ClickGroup) def models_group(): pass diff --git a/paperspace/cli/projects.py b/paperspace/cli/projects.py index 39e6ced..a33cf63 100644 --- a/paperspace/cli/projects.py +++ b/paperspace/cli/projects.py @@ -1,12 +1,11 @@ import click -from click_didyoumean import DYMGroup from paperspace import client, config from paperspace.commands import projects as projects_commands from . 
import common -@click.group("projects", help="Manage projects", cls=DYMGroup) +@click.group("projects", help="Manage projects", cls=common.ClickGroup) def projects_group(): pass diff --git a/paperspace/commands/deployments.py b/paperspace/commands/deployments.py index 3e7a768..7420bd2 100644 --- a/paperspace/commands/deployments.py +++ b/paperspace/commands/deployments.py @@ -19,7 +19,7 @@ def _log_message(self, response, success_msg_template, error_msg): j = response.json() handle = j["deployment"] except (ValueError, KeyError): - self.logger.log(success_msg_template) + self.logger.error(success_msg_template) else: msg = success_msg_template.format(**handle) self.logger.log(msg) @@ -28,7 +28,7 @@ def _log_message(self, response, success_msg_template, error_msg): data = response.json() self.logger.log_error_response(data) except ValueError: - self.logger.log(error_msg) + self.logger.error(error_msg) class CreateDeploymentCommand(_DeploymentCommandBase): @@ -51,7 +51,7 @@ def execute(self, filters=None): return deployments = self._get_deployments_list(response) except (ValueError, KeyError) as e: - self.logger.log("Error while parsing response data: {}".format(e)) + self.logger.error("Error while parsing response data: {}".format(e)) else: self._log_deployments_list(deployments) @@ -74,7 +74,7 @@ def _get_deployments_list(response): def _log_deployments_list(self, deployments): if not deployments: - self.logger.log("No deployments found") + self.logger.warning("No deployments found") else: table_str = self._make_deployments_list_table(deployments) if len(table_str.splitlines()) > get_terminal_lines(): diff --git a/paperspace/commands/experiments.py b/paperspace/commands/experiments.py index 7ef9dea..14cdbf5 100644 --- a/paperspace/commands/experiments.py +++ b/paperspace/commands/experiments.py @@ -20,7 +20,7 @@ def _log_create_experiment(response, success_msg_template, error_msg, logger_=lo data = response.json() logger_.log_error_response(data) except ValueError: - logger_.log(error_msg) + logger_.error(error_msg) def create_experiment(json_, api=experiments_api): @@ -63,7 +63,7 @@ def execute(self, project_handles=None): try: experiments = self._get_experiments_list(response, bool(project_handles)) except (ValueError, KeyError) as e: - self.logger.log("Error while parsing response data: {}".format(e)) + self.logger.error("Error while parsing response data: {}".format(e)) else: self._log_experiments_list(experiments) @@ -106,7 +106,7 @@ def _get_experiments_list(response, filtered=False): def _log_experiments_list(self, experiments): if not experiments: - self.logger.log("No experiments found") + self.logger.warning("No experiments found") else: table_str = self._make_experiments_list_table(experiments) if len(table_str.splitlines()) > get_terminal_lines(): @@ -175,7 +175,6 @@ def get_experiment_details(experiment_handle, api=experiments_api): experiment = response.json()["data"] details = _make_details_table(experiment) except (ValueError, KeyError) as e: - logger.log("Error parsing response data") - logger.debug(e) + logger.error("Error parsing response data") log_response(response, details, "Unknown error while retrieving details of the experiment") diff --git a/paperspace/commands/jobs.py b/paperspace/commands/jobs.py index a43cc29..5de8884 100644 --- a/paperspace/commands/jobs.py +++ b/paperspace/commands/jobs.py @@ -21,7 +21,7 @@ def _log_message(self, response, success_msg_template, error_msg): data = response.json() self.logger.log_error_response(data) except ValueError: - 
self.logger.log(error_msg) + self.logger.error(error_msg) class DeleteJobCommand(JobsCommandBase): @@ -53,13 +53,13 @@ def execute(self, filters=None): self.logger.log_error_response(data) return except (ValueError, KeyError) as e: - self.logger.log("Error while parsing response data: {}".format(e)) + self.logger.error("Error while parsing response data: {}".format(e)) else: self._log_jobs_list(data) def _log_jobs_list(self, data): if not data: - self.logger.log("No jobs found") + self.logger.warning("No jobs found") else: table_str = self._make_table(data) if len(table_str.splitlines()) > get_terminal_lines(): diff --git a/paperspace/commands/machines.py b/paperspace/commands/machines.py index 8c4d6b0..3cf5008 100644 --- a/paperspace/commands/machines.py +++ b/paperspace/commands/machines.py @@ -23,7 +23,7 @@ def _log_message(self, response, success_msg_template, error_msg): data = response.json() self.logger.log_error_response(data) except ValueError: - self.logger.log(error_msg) + self.logger.error(error_msg) class CheckAvailabilityCommand(_MachinesCommandBase): @@ -91,7 +91,7 @@ def execute(self, machine_id): self.logger.log_error_response(data) return except (ValueError, KeyError) as e: - self.logger.log("Error while parsing response data: {}".format(e)) + self.logger.error("Error while parsing response data: {}".format(e)) else: table = self.make_details_table(data) self.logger.log(table) @@ -149,13 +149,13 @@ def execute(self, kwargs): self.logger.log_error_response(data) return except (ValueError, KeyError) as e: - self.logger.log("Error while parsing response data: {}".format(e)) + self.logger.error("Error while parsing response data: {}".format(e)) else: self._log_machines_list(data) def _log_machines_list(self, machines): if not machines: - self.logger.log("No machines found") + self.logger.warning("No machines found") else: table_str = self._make_machines_list_table(machines) if len(table_str.splitlines()) > get_terminal_lines(): @@ -204,7 +204,7 @@ def execute(self, machine_id, billing_month): self.logger.log_error_response(data) return except (ValueError, KeyError) as e: - self.logger.log("Error while parsing response data: {}".format(e)) + self.logger.error("Error while parsing response data: {}".format(e)) else: table = self.make_details_table(data) self.logger.log(table) @@ -229,7 +229,7 @@ def execute(self, machine_id, state, interval=5): try: current_state = self._get_machine_state(machine_id) except BadResponse as e: - self.logger.log(e) + self.logger.error(e) return else: if current_state == state: diff --git a/paperspace/commands/models.py b/paperspace/commands/models.py index c154fb7..7208819 100644 --- a/paperspace/commands/models.py +++ b/paperspace/commands/models.py @@ -2,9 +2,8 @@ import terminaltables -from paperspace.utils import get_terminal_lines - from paperspace.commands import CommandBase +from paperspace.utils import get_terminal_lines class ListModelsCommand(CommandBase): @@ -20,7 +19,7 @@ def execute(self, filters): return models = self._get_objects_list(response) except (ValueError, KeyError) as e: - self.logger.log("Error while parsing response data: {}".format(e)) + self.logger.error("Error while parsing response data: {}".format(e)) else: self._log_models_list(models) @@ -39,7 +38,7 @@ def _get_objects_list(response): def _log_models_list(self, models): if not models: - self.logger.log("No models found") + self.logger.warning("No models found") else: table_str = self._make_models_list_table(models) if len(table_str.splitlines()) > 
get_terminal_lines(): @@ -61,4 +60,3 @@ def _make_models_list_table(models): ascii_table = terminaltables.AsciiTable(data) table_string = ascii_table.table return table_string - diff --git a/paperspace/commands/projects.py b/paperspace/commands/projects.py index a74003f..fb25300 100644 --- a/paperspace/commands/projects.py +++ b/paperspace/commands/projects.py @@ -30,13 +30,13 @@ def execute(self): self.logger.log_error_response(data) return except (ValueError, KeyError) as e: - self.logger.log("Error while parsing response data: {}".format(e)) + self.logger.error("Error while parsing response data: {}".format(e)) else: self._log_projects_list(data) def _log_projects_list(self, data): if not data.get("data"): - self.logger.log("No projects found") + self.logger.warning("No projects found") else: table_str = self._make_table(data["data"]) if len(table_str.splitlines()) > get_terminal_lines(): diff --git a/paperspace/config.py b/paperspace/config.py index 5534359..1311975 100644 --- a/paperspace/config.py +++ b/paperspace/config.py @@ -14,11 +14,25 @@ def get_api_key(config_dir_path, config_file_name): return '' +def get_help_colors_dict(use_colors, help_headers_color, help_options_color): + if not use_colors: + return {} + + d = { + "help_headers_color": help_headers_color, + "help_options_color": help_options_color, + } + return d + + _DEFAULT_CONFIG_HOST = "https://api.paperspace.io" _DEFAULT_CONFIG_LOG_HOST = "https://logs.paperspace.io" _DEFAULT_CONFIG_EXPERIMENTS_HOST = "https://services.paperspace.io/experiments/v1/" _DEFAULT_CONFIG_DIR_PATH = "~/.paperspace" _DEFAULT_CONFIG_FILE_NAME = os.path.expanduser("config.json") +_DEFAULT_HELP_HEADERS_COLOR = "yellow" +_DEFAULT_HELP_OPTIONS_COLOR = "green" +_DEFAULT_USE_CONSOLE_COLORS = True class config(object): @@ -29,3 +43,9 @@ class config(object): CONFIG_DIR_PATH = os.path.expanduser(os.environ.get("PAPERSPACE_CONFIG_PATH", _DEFAULT_CONFIG_DIR_PATH)) CONFIG_FILE_NAME = os.environ.get("PAPERSPACE_CONFIG_FILE_NAME", _DEFAULT_CONFIG_FILE_NAME) PAPERSPACE_API_KEY = os.environ.get("PAPERSPACE_API_KEY", get_api_key(CONFIG_DIR_PATH, CONFIG_FILE_NAME)) + + HELP_HEADERS_COLOR = os.environ.get("PAPERSPACE_HELP_HEADERS_COLOR", _DEFAULT_HELP_HEADERS_COLOR) + HELP_OPTIONS_COLOR = os.environ.get("PAPERSPACE_HELP_OPTIONS_COLOR", _DEFAULT_HELP_OPTIONS_COLOR) + USE_CONSOLE_COLORS = os.environ.get("PAPERSPACE_USE_CONSOLE_COLORS", + _DEFAULT_USE_CONSOLE_COLORS) in (True, "true", "1") + HELP_COLORS_DICT = get_help_colors_dict(USE_CONSOLE_COLORS, HELP_HEADERS_COLOR, HELP_OPTIONS_COLOR) diff --git a/paperspace/logger.py b/paperspace/logger.py index 47f09d2..01b3829 100644 --- a/paperspace/logger.py +++ b/paperspace/logger.py @@ -4,24 +4,39 @@ from .config import config -def log(message, **kwargs): - error = kwargs.get("error", False) - click.echo(message, err=error) +def _log(message, color=None, err=False): + message = str(message) + color = color if config.USE_CONSOLE_COLORS else None + click.secho(message, fg=color, err=err) + + +def log(message, color=None, err=False): + _log(message, color=color, err=err) + + +def error(message): + color = "red" if config.USE_CONSOLE_COLORS else None + _log(message, color=color, err=True) + + +def warning(message): + color = "yellow" if config.USE_CONSOLE_COLORS else None + _log(message, color=color) def log_error_response(data): - error = data.get("error") + error_str = data.get("error") details = data.get("details") message = data.get("message") - if not any((error, details, message)): + if not any((error_str, details, 
message)): raise ValueError("No error messages found") - if error: + if error_str: try: - log(error["message"], error=True) + error(error_str["message"]) except (KeyError, TypeError): - log(str(error), error=True) + error(str(error_str)) if details: if isinstance(details, dict): @@ -31,12 +46,12 @@ def log_error_response(data): for v in val: msg = "{}: {}".format(key, str(v)) - log(msg, error=True) + error(msg) else: - log(details) + error(details) if message: - log(str(message), error=True) + error(str(message)) def debug(messages): @@ -52,4 +67,4 @@ def log_response(response, success_msg, error_msg): data = response.json() log_error_response(data) except ValueError: - log(error_msg) + error(error_msg) diff --git a/setup.py b/setup.py index 9706983..5370318 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,22 @@ ], keywords='paperspace api development library', packages=find_packages(exclude=['contrib', 'docs', 'tests', 'old_tests']), - install_requires=['requests[security]', 'boto3', 'botocore', 'six', 'gradient-statsd', 'click', 'terminaltables'], + install_requires=[ + 'requests[security]', + 'boto3', + 'botocore', + 'six', + 'gradient-statsd', + 'click', + 'terminaltables', + 'click-didyoumean', + 'click-help-colors', + ], + extras_require={ + ':sys_platform == "win32"': [ + 'colorama', + ], + }, entry_points={'console_scripts': [ 'paperspace-python = paperspace.main:main', ]}, From 04295c15145a65c8f08285c53b7833c3a62e9746 Mon Sep 17 00:00:00 2001 From: kossak Date: Wed, 1 May 2019 19:12:07 +0200 Subject: [PATCH 24/42] relative path added to zip, progress bar tweak (it works!) --- paperspace/commands/experiments.py | 43 +++++++++++++++++------------- paperspace/exceptions.py | 4 +++ 2 files changed, 29 insertions(+), 18 deletions(-) diff --git a/paperspace/commands/experiments.py b/paperspace/commands/experiments.py index f50d7f0..151ea65 100644 --- a/paperspace/commands/experiments.py +++ b/paperspace/commands/experiments.py @@ -1,6 +1,7 @@ import os import pydoc import zipfile +from collections import OrderedDict import click import progressbar @@ -10,7 +11,8 @@ from paperspace import logger, constants, client, config from paperspace.commands import CommandBase -from paperspace.exceptions import PresignedUrlUnreachableException, S3UploadFailedException +from paperspace.exceptions import PresignedUrlUnreachableException, S3UploadFailedException, \ + PresignedUrlAccessDeniedException from paperspace.logger import log_response from paperspace.utils import get_terminal_lines @@ -35,21 +37,25 @@ def _log_create_experiment(self, response, success_msg_template, error_msg): class CreateExperimentCommand(ExperimentCommand): - def retrieve_file_paths(self, dirName): + def _retrieve_file_paths(self, dirName): # setup file paths variable - filePaths = [] - exclude = ['.git'] + file_paths = {} + exclude = ['.git', '.idea', '.pytest_cache'] # Read all directory, subdirectories and file lists for root, dirs, files in os.walk(dirName, topdown=True): dirs[:] = [d for d in dirs if d not in exclude] for filename in files: # Create the full filepath by using os module. 
- filePath = os.path.join(root, filename) - filePaths.append(filePath) + relpath = os.path.relpath(root, dirName) + if relpath == '.': + file_path = filename + else: + file_path = os.path.join(os.path.relpath(root, dirName), filename) + file_paths[file_path] = os.path.join(root, filename) # return all paths - return filePaths + return file_paths def _zip_workspace(self, workspace_path): if not workspace_path: @@ -64,7 +70,7 @@ def _zip_workspace(self, workspace_path): self.logger.log('Removing existing archive') os.remove(zip_file_path) - file_paths = self.retrieve_file_paths(workspace_path) + file_paths = self._retrieve_file_paths(workspace_path) self.logger.log('Creating zip archive: %s' % zip_file_name) zip_file = zipfile.ZipFile(zip_file_path, 'w') @@ -73,10 +79,10 @@ def _zip_workspace(self, workspace_path): with zip_file: i = 0 - for file in file_paths: + for relpath, abspath in file_paths.items(): i+=1 - self.logger.debug('Adding %s to archive' % file) - zip_file.write(file) + self.logger.debug('Adding %s to archive' % relpath) + zip_file.write(abspath, arcname=relpath) bar.update(i) bar.finish() self.logger.log('\nFinished creating archive: %s' % zip_file_name) @@ -87,8 +93,6 @@ def _create_callback(self, encoder_obj): def callback(monitor): bar.update(monitor.bytes_read) - if monitor.bytes_read == monitor.len: - bar.finish() return callback def _upload_workspace(self, input_data): @@ -110,14 +114,15 @@ def _upload_workspace(self, input_data): else: archive_path = self._zip_workspace(workspace_path) - s3_upload_data = self._get_upload_data(os.path.basename(archive_path)) + file_name = os.path.basename(archive_path) + s3_upload_data = self._get_upload_data(file_name) + bucket_name = s3_upload_data['bucket_name'] self.logger.log('Uploading zipped workspace to S3') files = {'file': (archive_path, open(archive_path, 'rb'))} - fields = s3_upload_data['fields'] + fields = OrderedDict(s3_upload_data['fields']) fields.update(files) - s3_encoder = encoder.MultipartEncoder(fields=fields) monitor = encoder.MultipartEncoderMonitor(s3_encoder, callback=self._create_callback(s3_encoder)) s3_response = requests.post(s3_upload_data['url'], data=monitor, headers={'Content-Type': monitor.content_type}) @@ -125,8 +130,8 @@ def _upload_workspace(self, input_data): raise S3UploadFailedException(s3_response) self.logger.log('\nUploading completed') - s3_workspace_url = s3_response.headers.get('Location') - return s3_workspace_url + + return 's3://{}/{}'.format(bucket_name, file_name) def execute(self, json_): workspace_url = self._upload_workspace(json_) @@ -143,6 +148,8 @@ def _get_upload_data(self, file_name): response = self.api.get("/workspace/get_presigned_url", params={'workspaceName': file_name}) if response.status_code == 404: raise PresignedUrlUnreachableException + if response.status_code == 403: + raise PresignedUrlAccessDeniedException return response.json() diff --git a/paperspace/exceptions.py b/paperspace/exceptions.py index cc43760..effabe9 100644 --- a/paperspace/exceptions.py +++ b/paperspace/exceptions.py @@ -10,5 +10,9 @@ class PresignedUrlUnreachableException(ApplicationException): pass +class PresignedUrlAccessDeniedException(ApplicationException): + pass + + class S3UploadFailedException(ApplicationException): pass From 17dea50d12e774a7bbc1f2c7ba8c8282bfde5045 Mon Sep 17 00:00:00 2001 From: kossak Date: Wed, 1 May 2019 20:26:44 +0200 Subject: [PATCH 25/42] refactored - workspace handling extracted to module --- paperspace/commands/experiments.py | 125 +++-------------------------- 
paperspace/exceptions.py | 4 + paperspace/workspace.py | 125 +++++++++++++++++++++++++++++ 3 files changed, 139 insertions(+), 115 deletions(-) create mode 100644 paperspace/workspace.py diff --git a/paperspace/commands/experiments.py b/paperspace/commands/experiments.py index c4e0b43..dd534f9 100644 --- a/paperspace/commands/experiments.py +++ b/paperspace/commands/experiments.py @@ -1,27 +1,21 @@ -import os import pydoc -import zipfile -from collections import OrderedDict -import click -import progressbar -import requests import terminaltables -from requests_toolbelt.multipart import encoder from paperspace import logger, constants, client, config from paperspace.commands import CommandBase -from paperspace.exceptions import PresignedUrlUnreachableException, S3UploadFailedException, \ - PresignedUrlAccessDeniedException +from paperspace.workspace import S3WorkspaceHandler from paperspace.logger import log_response from paperspace.utils import get_terminal_lines -# from clint.textui.progress import Bar as ProgressBar - experiments_api = client.API(config.CONFIG_EXPERIMENTS_HOST, headers=client.default_headers) class ExperimentCommand(CommandBase): + def __init__(self, workspace_handler=None, **kwargs): + super(ExperimentCommand, self).__init__(**kwargs) + self._workspace_handler = workspace_handler or S3WorkspaceHandler(api=self.api, logger=self.logger) + def _log_create_experiment(self, response, success_msg_template, error_msg): if response.ok: j = response.json() @@ -37,104 +31,9 @@ def _log_create_experiment(self, response, success_msg_template, error_msg): class CreateExperimentCommand(ExperimentCommand): - def _retrieve_file_paths(self, dirName): - - # setup file paths variable - file_paths = {} - exclude = ['.git', '.idea', '.pytest_cache'] - # Read all directory, subdirectories and file lists - for root, dirs, files in os.walk(dirName, topdown=True): - dirs[:] = [d for d in dirs if d not in exclude] - for filename in files: - # Create the full filepath by using os module. - relpath = os.path.relpath(root, dirName) - if relpath == '.': - file_path = filename - else: - file_path = os.path.join(os.path.relpath(root, dirName), filename) - file_paths[file_path] = os.path.join(root, filename) - - # return all paths - return file_paths - - def _zip_workspace(self, workspace_path): - if not workspace_path: - workspace_path = '.' 
- zip_file_name = os.path.basename(os.getcwd()) + '.zip' - else: - zip_file_name = os.path.basename(workspace_path) + '.zip' - - zip_file_path = os.path.join(workspace_path, zip_file_name) - - if os.path.exists(zip_file_path): - self.logger.log('Removing existing archive') - os.remove(zip_file_path) - - file_paths = self._retrieve_file_paths(workspace_path) - - self.logger.log('Creating zip archive: %s' % zip_file_name) - zip_file = zipfile.ZipFile(zip_file_path, 'w') - - bar = progressbar.ProgressBar(max_value=len(file_paths)) - - with zip_file: - i = 0 - for relpath, abspath in file_paths.items(): - i+=1 - self.logger.debug('Adding %s to archive' % relpath) - zip_file.write(abspath, arcname=relpath) - bar.update(i) - bar.finish() - self.logger.log('\nFinished creating archive: %s' % zip_file_name) - return zip_file_path - - def _create_callback(self, encoder_obj): - bar = progressbar.ProgressBar(max_value=encoder_obj.len) - - def callback(monitor): - bar.update(monitor.bytes_read) - return callback - - def _upload_workspace(self, input_data): - workspace_url = input_data.get('workspaceUrl') - workspace_path = input_data.get('workspacePath') - workspace_archive = input_data.get('workspaceArchive') - if (workspace_archive and workspace_path) or (workspace_archive and workspace_url) or ( - workspace_path and workspace_url): - raise click.UsageError("Use either:\n\t--workspaceUrl to point repository URL" - "\n\t--workspacePath to point on project directory" - "\n\t--workspaceArchive to point on project ZIP archive" - "\n or neither to use current directory") - - if workspace_url: - return # nothing to do - - if workspace_archive: - archive_path = os.path.abspath(workspace_archive) - else: - archive_path = self._zip_workspace(workspace_path) - - file_name = os.path.basename(archive_path) - s3_upload_data = self._get_upload_data(file_name) - bucket_name = s3_upload_data['bucket_name'] - - self.logger.log('Uploading zipped workspace to S3') - - files = {'file': (archive_path, open(archive_path, 'rb'))} - fields = OrderedDict(s3_upload_data['fields']) - fields.update(files) - s3_encoder = encoder.MultipartEncoder(fields=fields) - monitor = encoder.MultipartEncoderMonitor(s3_encoder, callback=self._create_callback(s3_encoder)) - s3_response = requests.post(s3_upload_data['url'], data=monitor, headers={'Content-Type': monitor.content_type}) - if not s3_response.ok: - raise S3UploadFailedException(s3_response) - - self.logger.log('\nUploading completed') - - return 's3://{}/{}'.format(bucket_name, file_name) def execute(self, json_): - workspace_url = self._upload_workspace(json_) + workspace_url = self._workspace_handler.upload_workspace(json_) if workspace_url: json_['workspaceUrl'] = workspace_url @@ -144,17 +43,13 @@ def execute(self, json_): "New experiment created with handle: {}", "Unknown error while creating the experiment") - def _get_upload_data(self, file_name): - response = self.api.get("/workspace/get_presigned_url", params={'workspaceName': file_name}) - if response.status_code == 404: - raise PresignedUrlUnreachableException - if response.status_code == 403: - raise PresignedUrlAccessDeniedException - return response.json() - class CreateAndStartExperimentCommand(ExperimentCommand): def execute(self, json_): + workspace_url = self._workspace_handler.upload_workspace(json_) + if workspace_url: + json_['workspaceUrl'] = workspace_url + response = self.api.post("/experiments/create_and_start/", json=json_) self._log_create_experiment(response, "New experiment created and started with 
handle: {}", diff --git a/paperspace/exceptions.py b/paperspace/exceptions.py index effabe9..0951204 100644 --- a/paperspace/exceptions.py +++ b/paperspace/exceptions.py @@ -14,5 +14,9 @@ class PresignedUrlAccessDeniedException(ApplicationException): pass +class PresignedUrlConnectionException(ApplicationException): + pass + + class S3UploadFailedException(ApplicationException): pass diff --git a/paperspace/workspace.py b/paperspace/workspace.py new file mode 100644 index 0000000..95ea575 --- /dev/null +++ b/paperspace/workspace.py @@ -0,0 +1,125 @@ +import logging +import os +import zipfile +from collections import OrderedDict + +import click +import progressbar +import requests +from requests_toolbelt.multipart import encoder + +from paperspace.exceptions import S3UploadFailedException, PresignedUrlUnreachableException, \ + PresignedUrlAccessDeniedException, PresignedUrlConnectionException + + +class S3WorkspaceHandler: + def __init__(self, api, logger=None): + self.api = api + self.logger = logger or logging.getLogger() + + def _retrieve_file_paths(self, dirName): + + # setup file paths variable + file_paths = {} + exclude = ['.git', '.idea', '.pytest_cache'] + # Read all directory, subdirectories and file lists + for root, dirs, files in os.walk(dirName, topdown=True): + dirs[:] = [d for d in dirs if d not in exclude] + for filename in files: + # Create the full filepath by using os module. + relpath = os.path.relpath(root, dirName) + if relpath == '.': + file_path = filename + else: + file_path = os.path.join(os.path.relpath(root, dirName), filename) + file_paths[file_path] = os.path.join(root, filename) + + # return all paths + return file_paths + + def _zip_workspace(self, workspace_path): + if not workspace_path: + workspace_path = '.' + zip_file_name = os.path.basename(os.getcwd()) + '.zip' + else: + zip_file_name = os.path.basename(workspace_path) + '.zip' + + zip_file_path = os.path.join(workspace_path, zip_file_name) + + if os.path.exists(zip_file_path): + self.logger.log('Removing existing archive') + os.remove(zip_file_path) + + file_paths = self._retrieve_file_paths(workspace_path) + + self.logger.log('Creating zip archive: %s' % zip_file_name) + zip_file = zipfile.ZipFile(zip_file_path, 'w') + + bar = progressbar.ProgressBar(max_value=len(file_paths)) + + with zip_file: + i = 0 + for relpath, abspath in file_paths.items(): + i += 1 + self.logger.debug('Adding %s to archive' % relpath) + zip_file.write(abspath, arcname=relpath) + bar.update(i) + bar.finish() + self.logger.log('\nFinished creating archive: %s' % zip_file_name) + return zip_file_path + + def _create_callback(self, encoder_obj): + bar = progressbar.ProgressBar(max_value=encoder_obj.len) + + def callback(monitor): + bar.update(monitor.bytes_read) + + return callback + + def upload_workspace(self, input_data): + workspace_url = input_data.get('workspaceUrl') + workspace_path = input_data.get('workspacePath') + workspace_archive = input_data.get('workspaceArchive') + if (workspace_archive and workspace_path) or (workspace_archive and workspace_url) or ( + workspace_path and workspace_url): + raise click.UsageError("Use either:\n\t--workspaceUrl to point repository URL" + "\n\t--workspacePath to point on project directory" + "\n\t--workspaceArchive to point on project ZIP archive" + "\n or neither to use current directory") + + if workspace_url: + return # nothing to do + + if workspace_archive: + archive_path = os.path.abspath(workspace_archive) + else: + archive_path = self._zip_workspace(workspace_path) + + 
file_name = os.path.basename(archive_path) + s3_upload_data = self._get_upload_data(file_name) + bucket_name = s3_upload_data['bucket_name'] + + self.logger.log('Uploading zipped workspace to S3') + + files = {'file': (archive_path, open(archive_path, 'rb'))} + fields = OrderedDict(s3_upload_data['fields']) + fields.update(files) + s3_encoder = encoder.MultipartEncoder(fields=fields) + monitor = encoder.MultipartEncoderMonitor(s3_encoder, callback=self._create_callback(s3_encoder)) + s3_response = requests.post(s3_upload_data['url'], data=monitor, headers={'Content-Type': monitor.content_type}) + if not s3_response.ok: + raise S3UploadFailedException(s3_response) + + self.logger.log('\nUploading completed') + + return 's3://{}/{}'.format(bucket_name, file_name) + + def _get_upload_data(self, file_name): + response = self.api.get("/workspace/get_presigned_url", params={'workspaceName': file_name}) + if response.status_code == 404: + raise PresignedUrlUnreachableException + if response.status_code == 403: + raise PresignedUrlAccessDeniedException + if not response.ok: + raise PresignedUrlConnectionException(response.reason) + return response.json() From 0a0a6ee7bf6bd21e2f233386097a1d883b454043 Mon Sep 17 00:00:00 2001 From: kossak Date: Wed, 1 May 2019 23:00:41 +0200 Subject: [PATCH 26/42] creating job + bug fixes --- paperspace/cli/cli.py | 9 +++++++-- paperspace/cli/jobs.py | 22 ++++++++++++++++++++++ paperspace/commands/experiments.py | 2 +- paperspace/commands/jobs.py | 10 +++++++++- paperspace/workspace.py | 12 ++++++------ 5 files changed, 45 insertions(+), 10 deletions(-) diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py index 3c23437..0210a40 100644 --- a/paperspace/cli/cli.py +++ b/paperspace/cli/cli.py @@ -58,8 +58,13 @@ def common_experiments_create_options(f): "--workspace", "workspace", required=False, - help="Path to workspace directory or archive", - default="." 
+ help="Path to workspace directory", + ), + click.option( + "--workspaceArchive", + "workspaceArchive", + required=False, + help="Path to workspace .zip archive", ), click.option( "--workspaceUrl", diff --git a/paperspace/cli/jobs.py b/paperspace/cli/jobs.py index c7c60dd..c687c6b 100644 --- a/paperspace/cli/jobs.py +++ b/paperspace/cli/jobs.py @@ -2,6 +2,7 @@ from paperspace import client, config from paperspace.cli import common +from paperspace.cli.common import del_if_value_is_none from paperspace.commands import jobs as jobs_commands @@ -63,8 +64,29 @@ def list_jobs(api_key, **filters): @jobs_group.command("create", help="Create job") +@click.option("--name", "name", help="Job name", required=True) +@click.option('--machineType', 'machineType', help="Virtual machine type") +@click.option('--container', 'container', help="Docker container") +@click.option('--command', 'command', help="Job command/entrypoint") +@click.option('--ports', 'ports', help="Mapped ports") +@click.option('--isPublic', 'isPublic', help="Flag: is job public") +@click.option("--workspace", "workspace", required=False, help="Path to workspace directory") +@click.option("--workspaceArchive", "workspaceArchive", required=False, help="Path to workspace archive") +@click.option("--workspaceUrl", "workspaceUrl", required=False, help="Project git repository url") +@click.option("--workingDirectory", "workingDirectory", help="Working directory for the experiment", ) +@click.option('--experimentId', 'experimentId', help="Experiment Id") +# @click.option('--envVars', 'envVars', help="Environmental variables ") # TODO +@click.option('--useDockerfile', 'useDockerfile', help="Flag: using Dockerfile") +@click.option('--isPreemptible', 'isPreemptible', help="Flag: isPreemptible") +@click.option('--project', 'project', help="Project name") +@click.option('--projectHandle', '--projectId', 'projectId', help="Project handle", required=True) +@click.option('--startedByUserId', 'startedByUserId', help="User ID") +@click.option('--relDockerfilePath', 'relDockerfilePath', help="Relative path to Dockerfile") +@click.option('--registryUsername', 'registryUsername', help="Docker registry username") +@click.option('--registryPassword', 'registryPassword', help="Docker registry password") @common.api_key_option def create_job(api_key, **kwargs): + del_if_value_is_none(kwargs) jobs_api = client.API(config.CONFIG_HOST, api_key=api_key) command = jobs_commands.CreateJobCommand(api=jobs_api) command.execute(kwargs) diff --git a/paperspace/commands/experiments.py b/paperspace/commands/experiments.py index dd534f9..bd50fb3 100644 --- a/paperspace/commands/experiments.py +++ b/paperspace/commands/experiments.py @@ -14,7 +14,7 @@ class ExperimentCommand(CommandBase): def __init__(self, workspace_handler=None, **kwargs): super(ExperimentCommand, self).__init__(**kwargs) - self._workspace_handler = workspace_handler or S3WorkspaceHandler(api=self.api, logger=self.logger) + self._workspace_handler = workspace_handler or S3WorkspaceHandler(experiments_api=self.api, logger=self.logger) def _log_create_experiment(self, response, success_msg_template, error_msg): if response.ok: diff --git a/paperspace/commands/jobs.py b/paperspace/commands/jobs.py index f591ac5..b0f0dd2 100644 --- a/paperspace/commands/jobs.py +++ b/paperspace/commands/jobs.py @@ -2,6 +2,7 @@ import terminaltables +from paperspace import config, client from paperspace.commands import CommandBase from paperspace.utils import get_terminal_lines from paperspace.workspace import 
S3WorkspaceHandler @@ -88,10 +89,17 @@ def _make_table(jobs): class CreateJobCommand(JobsCommandBase): def __init__(self, workspace_handler=None, **kwargs): super(CreateJobCommand, self).__init__(**kwargs) - self.workspace_handler = workspace_handler or S3WorkspaceHandler(api=self.api, logger=self.logger) + experiments_api = client.API(config.CONFIG_EXPERIMENTS_HOST, api_key=kwargs.get('api_key')) + self._workspace_handler = workspace_handler or S3WorkspaceHandler(experiments_api=experiments_api, + logger=self.logger) def execute(self, json_): url = "/jobs/createJob/" + + workspace_url = self._workspace_handler.upload_workspace(json_) + if workspace_url: + json_['workspaceFileName'] = workspace_url + response = self.api.post(url, json_) self._log_message(response, "Job created", diff --git a/paperspace/workspace.py b/paperspace/workspace.py index 95ea575..d4ee6c4 100644 --- a/paperspace/workspace.py +++ b/paperspace/workspace.py @@ -13,8 +13,8 @@ class S3WorkspaceHandler: - def __init__(self, api, logger=None): - self.api = api + def __init__(self, experiments_api, logger=None): + self.experiments_api = experiments_api self.logger = logger or logging.getLogger() def _retrieve_file_paths(self, dirName): @@ -78,13 +78,13 @@ def callback(monitor): def upload_workspace(self, input_data): workspace_url = input_data.get('workspaceUrl') - workspace_path = input_data.get('workspacePath') + workspace_path = input_data.get('workspace') workspace_archive = input_data.get('workspaceArchive') if (workspace_archive and workspace_path) or (workspace_archive and workspace_url) or ( workspace_path and workspace_url): raise click.UsageError("Use either:\n\t--workspaceUrl to point repository URL" - "\n\t--workspacePath to point on project directory" - "\n\t--workspaceArchive to point on project ZIP archive" + "\n\t--workspace to point on project directory" + "\n\t--workspaceArchive to point on project .zip archive" "\n or neither to use current directory") if workspace_url: @@ -115,7 +115,7 @@ def upload_workspace(self, input_data): return 's3://{}/{}'.format(bucket_name, file_name) def _get_upload_data(self, file_name): - response = self.api.get("/workspace/get_presigned_url", params={'workspaceName': file_name}) + response = self.experiments_api.get("/workspace/get_presigned_url", params={'workspaceName': file_name}) if response.status_code == 404: raise PresignedUrlUnreachableException if response.status_code == 403: From ce423764bf9b4a898b5edac07ef6663c1d7632ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Mudlaff?= Date: Sun, 5 May 2019 16:14:48 +0200 Subject: [PATCH 27/42] Feature PS-9868: Configure tox to run tests on different environments. Fix some issue with code on different python versions. 
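Note on the functional tests added in this patch: they import a MockResponse helper from the tests package that is not included in the diff. As a rough illustration only, a minimal stand-in consistent with how the tests use it (json_data and status_code arguments, plus the .ok and .json() behaviour the command code relies on) could look like the sketch below; this is an assumption for readability, not the repository's actual helper.

    class MockResponse(object):
        # Hypothetical stand-in for tests.MockResponse (not shown in this patch).
        # It mimics only the parts of requests.Response that the commands touch:
        # .ok, .status_code and .json().
        def __init__(self, json_data=None, status_code=200):
            self.json_data = json_data
            self.status_code = status_code

        @property
        def ok(self):
            return self.status_code < 400

        def json(self):
            # Raising here reproduces the "No JSON" message the tests expect
            # when a response with no body is parsed.
            if self.json_data is None:
                raise ValueError("No JSON")
            return self.json_data
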
--- Pipfile | 1 + Pipfile.lock | 82 +++++++++++++++++++++++++---------- paperspace/commands/logs.py | 3 +- tests/example_responses.py | 48 ++++++++++++++++++++ tests/functional/test_logs.py | 44 +++++++++++++++++++ tox.ini | 37 ++++++++++++++++ 6 files changed, 191 insertions(+), 24 deletions(-) create mode 100644 tests/functional/test_logs.py create mode 100644 tox.ini diff --git a/Pipfile b/Pipfile index cbdef86..cd68189 100644 --- a/Pipfile +++ b/Pipfile @@ -14,6 +14,7 @@ gradient-statsd = "*" click = "*" terminaltables = "*" colorclass = "*" +tox = "*" [dev-packages] twine = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 6956738..7821c32 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "621baffc562273b5632852b94190868a39c486b1bee0c3e9b4d791d91732902f" + "sha256": "5234ad016cbd7c13609cb099ae2e54d7729d205586797f7a5c970f2db7052b08" }, "pipfile-spec": 6, "requires": {}, @@ -23,19 +23,19 @@ }, "boto3": { "hashes": [ - "sha256:882cc4869b47b51dae4b4a900769e72171ff00e0b6bca644b2d7a7ad7378f324", - "sha256:cd503a7e7a04f1c14d2801f9727159dfa88c393b4004e98940fa4aa205d920c8" + "sha256:aee5d24bec598b2dbdd8585865646ea4dce6f8ef6059417589de7284e80433a2", + "sha256:b2327f4dc2ca0e45da59b2f4a0085a3c356a0423ba0fa4f3fad4f7919c6be2f7" ], "index": "pypi", - "version": "==1.9.137" + "version": "==1.9.142" }, "botocore": { "hashes": [ - "sha256:0d95794f6b1239c75e2c5f966221bcd4b68020fddb5676f757531eedbb612ed8", - "sha256:3213cf48cf2ceee10fc3b93221f2cd1c38521cca7584f547d5c086213cc60f35" + "sha256:60c40f8c51d308046c737c3f74c4dcf1681e884e6dc87fa2727db2d44f8b70f7", + "sha256:78b9413286c8fbcf094911584f63a81356b4539befec7be76887c6dcd66ace83" ], "index": "pypi", - "version": "==1.12.137" + "version": "==1.12.142" }, "certifi": { "hashes": [ @@ -154,6 +154,13 @@ "editable": true, "path": "." 
}, + "filelock": { + "hashes": [ + "sha256:b8d5ca5ca1c815e1574aee746650ea7301de63d87935b3463d26368b76e31633", + "sha256:d610c1bb404daf85976d7a82eb2ada120f04671007266b708606565dd03b5be6" + ], + "version": "==3.0.10" + }, "gradient-statsd": { "hashes": [ "sha256:12965d471cc3e203464229c44839c5a8f67a665ecc4f00e807b88351eb30a565", @@ -177,6 +184,20 @@ ], "version": "==0.9.4" }, + "pluggy": { + "hashes": [ + "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f", + "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746" + ], + "version": "==0.9.0" + }, + "py": { + "hashes": [ + "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", + "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" + ], + "version": "==1.8.0" + }, "pycparser": { "hashes": [ "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" @@ -232,13 +253,35 @@ "index": "pypi", "version": "==3.1.0" }, + "toml": { + "hashes": [ + "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c", + "sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e" + ], + "version": "==0.10.0" + }, + "tox": { + "hashes": [ + "sha256:1b166b93d2ce66bb7b253ba944d2be89e0c9d432d49eeb9da2988b4902a4684e", + "sha256:665cbdd99f5c196dd80d1d8db8c8cf5d48b1ae1f778bccd1bdf14d5aaf4ca0fc" + ], + "index": "pypi", + "version": "==3.9.0" + }, "urllib3": { "hashes": [ - "sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0", - "sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3" + "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", + "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" ], "markers": "python_version >= '3.4'", - "version": "==1.24.2" + "version": "==1.24.3" + }, + "virtualenv": { + "hashes": [ + "sha256:15ee248d13e4001a691d9583948ad3947bcb8a289775102e4c4aa98a8b7a6d73", + "sha256:bfc98bb9b42a3029ee41b96dc00a34c2f254cbf7716bec824477b2c82741a5c4" + ], + "version": "==16.5.0" } }, "develop": { @@ -332,11 +375,11 @@ }, "mock": { "hashes": [ - "sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1", - "sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba" + "sha256:21a2c07af3bbc4a77f9d14ac18fcc1782e8e7ea363df718740cdeaf61995b5e7", + "sha256:7868db2825a1563578869d4a011a036503a2f1d60f9ff9dd1e3205cd6e25fcec" ], "index": "pypi", - "version": "==2.0.0" + "version": "==3.0.4" }, "more-itertools": { "hashes": [ @@ -346,13 +389,6 @@ "markers": "python_version > '2.7'", "version": "==7.0.0" }, - "pbr": { - "hashes": [ - "sha256:6901995b9b686cb90cceba67a0f6d4d14ae003cd59bc12beb61549bdfbe3bc89", - "sha256:d950c64aeea5456bbd147468382a5bb77fe692c13c9f00f0219814ce5b642755" - ], - "version": "==5.2.0" - }, "pkginfo": { "hashes": [ "sha256:7424f2c8511c186cd5424bbf31045b77435b37a8d604990b79d4e70d741148bb", @@ -446,11 +482,11 @@ }, "urllib3": { "hashes": [ - "sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0", - "sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3" + "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", + "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" ], "markers": "python_version >= '3.4'", - "version": "==1.24.2" + "version": "==1.24.3" }, "webencodings": { "hashes": [ diff --git a/paperspace/commands/logs.py b/paperspace/commands/logs.py index 35f5d86..d291f99 100644 --- a/paperspace/commands/logs.py +++ 
b/paperspace/commands/logs.py @@ -14,8 +14,9 @@ class ListLogsCommand(CommandBase): is_logs_complete = False def execute(self, job_id): + table_title = "Job %s logs" % job_id table_data = [("LINE", "MESSAGE")] - table = terminaltables.AsciiTable(table_data, title=f"Job {job_id} logs") + table = terminaltables.AsciiTable(table_data, title=table_title) while not self.is_logs_complete: response = self._get_logs(job_id) diff --git a/tests/example_responses.py b/tests/example_responses.py index e60e3e6..f3199e2 100644 --- a/tests/example_responses.py +++ b/tests/example_responses.py @@ -2247,3 +2247,51 @@ "experimentId": "es47og38wzhnuo" } ] + +LIST_OF_LOGS_FOR_JOB = [ + { + "line": 1, + "timestamp": "2019-04-03T15:56:35.457Z", + "message": "Traceback (most recent call last):" + }, { + "line": 2, + "timestamp": "2019-04-03T15:56:35.458Z", + "message": " File \"generate_figures.py\", line 15, in \u003cmodule\u003e" + }, { + "line": 3, + "timestamp": "2019-04-03T15:56:35.458Z", + "message": " import dnnlib.tflib as tflib" + }, { + "line": 4, + "timestamp": "2019-04-03T15:56:35.458Z", + "message": " File \"/paperspace/dnnlib/tflib/__init__.py\", line 8, in \u003cmodule\u003e" + }, { + "line": 5, + "timestamp": "2019-04-03T15:56:35.458Z", + "message": " from . import autosummary" + }, { + "line": 6, + "timestamp": "2019-04-03T15:56:35.458Z", + "message": " File \"/paperspace/dnnlib/tflib/autosummary.py\", line 31, in \u003cmodule\u003e" + }, { + "line": 7, + "timestamp": "2019-04-03T15:56:35.458Z", + "message": " from . import tfutil" + }, { + "line": 8, + "timestamp": "2019-04-03T15:56:35.458Z", + "message": " File \"/paperspace/dnnlib/tflib/tfutil.py\", line 34, in \u003cmodule\u003e" + }, { + "line": 9, + "timestamp": "2019-04-03T15:56:35.458Z", + "message": " def shape_to_list(shape: Iterable[tf.Dimension]) -\u003e List[Union[int, None]]:" + }, { + "line": 10, + "timestamp": "2019-04-03T15:56:35.458Z", + "message": "AttributeError: module 'tensorflow' has no attribute 'Dimension'" + }, { + "line": 11, + "timestamp": "2019-04-03T15:56:46.168Z", + "message": "PSEOF" + } +] diff --git a/tests/functional/test_logs.py b/tests/functional/test_logs.py new file mode 100644 index 0000000..1d93336 --- /dev/null +++ b/tests/functional/test_logs.py @@ -0,0 +1,44 @@ +import mock +from click.testing import CliRunner + +from paperspace.cli import cli +from paperspace.client import default_headers +from tests import MockResponse, example_responses + + +class TestListLogs(object): + URL = "https://logs.paperspace.io" + EXPECTED_HEADERS = default_headers.copy() + EXPECTED_RESPONSE_JSON = example_responses.LIST_OF_LOGS_FOR_JOB + BASIC_COMMAND = ["logs", "list"] + EXPECTED_STDOUT = """+Job jztdeungdkzjv logs------------------------------------------------------------------+ +| LINE | MESSAGE | ++------+---------------------------------------------------------------------------------+ +| 1 | Traceback (most recent call last): | +| 2 | File "generate_figures.py", line 15, in | +| 3 | import dnnlib.tflib as tflib | +| 4 | File "/paperspace/dnnlib/tflib/__init__.py", line 8, in | +| 5 | from . import autosummary | +| 6 | File "/paperspace/dnnlib/tflib/autosummary.py", line 31, in | +| 7 | from . 
import tfutil | +| 8 | File "/paperspace/dnnlib/tflib/tfutil.py", line 34, in | +| 9 | def shape_to_list(shape: Iterable[tf.Dimension]) -> List[Union[int, None]]: | +| 10 | AttributeError: module 'tensorflow' has no attribute 'Dimension' | +| 11 | PSEOF | ++------+---------------------------------------------------------------------------------+ +""" + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_send_valid_get_request_and_print_table_with_logs(self, get_patched): + get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=None, + params=None) + + assert result.output == self.EXPECTED_STDOUT + assert result.exit_code == 0 diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..df0afba --- /dev/null +++ b/tox.ini @@ -0,0 +1,37 @@ +[run] +omit = tests/* + +[tool:pytest] +addopts = --cov=paperspace + +[tox] +envlist = + check + {py27,py34,py35,py36,py37}-{pt44}-{ptc26} +skip_missing_interpreters = True + +[testenv] +changedir = tests + +extras = testing +setenv = + PYTHONUNBUFFERED=yes +passenv = + * +deps = + mock + pt44: pytest==4.4.1 + + ptc26: pytest-cov==2.6.1 +pip_pre = true + +commands = + pytest --cov=paperspace --cov-append + +[testenv:check] +deps = + flake8 +skip_install = true +usedevelop = false +commands = + flake8 paperspace tests setup.py From 5477da05bb57a87f2cc26b8e0491f313e8558852 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Mudlaff?= Date: Sun, 5 May 2019 18:04:31 +0200 Subject: [PATCH 28/42] Feature PS-9868: Update logs tests. Prepare Makefile to allow run tests in tox for every available python version. 
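Note on the EXPECTED_STDOUT blocks in these tests: they are the ASCII tables that the log command renders with terminaltables. Ignoring the red click styling applied to line numbers, the shape of that output can be reproduced with a short sketch like the one below; the rows are abbreviated and the snippet is for illustration only, not part of the patch.

    import terminaltables

    # Sample rows in the same shape as the log entries returned by the API.
    logs = [
        {"line": 1, "message": "Traceback (most recent call last):"},
        {"line": 2, "message": "PSEOF"},
    ]

    table_data = [("LINE", "MESSAGE")]
    table_data.extend((str(log["line"]), log["message"]) for log in logs)

    # terminaltables embeds the title in the top border, which is why the
    # expected output in the tests starts with "+Job some_job_id logs----".
    table = terminaltables.AsciiTable(table_data, title="Job some_job_id logs")
    print(table.table)
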
--- Makefile | 5 +++ tests/example_responses.py | 12 +++---- tests/functional/test_logs.py | 62 ++++++++++++++++++++++++++++++++--- 3 files changed, 68 insertions(+), 11 deletions(-) create mode 100644 Makefile diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..0d86641 --- /dev/null +++ b/Makefile @@ -0,0 +1,5 @@ +clean-tox: + rm -rf .tox paperspace.egg-info + +run-tests: clean-tox + tox diff --git a/tests/example_responses.py b/tests/example_responses.py index f888e6f..962ab2b 100644 --- a/tests/example_responses.py +++ b/tests/example_responses.py @@ -2908,7 +2908,7 @@ }, { "line": 2, "timestamp": "2019-04-03T15:56:35.458Z", - "message": " File \"generate_figures.py\", line 15, in \u003cmodule\u003e" + "message": " File \"generate_figures.py\", line 15, in " }, { "line": 3, "timestamp": "2019-04-03T15:56:35.458Z", @@ -2916,7 +2916,7 @@ }, { "line": 4, "timestamp": "2019-04-03T15:56:35.458Z", - "message": " File \"/paperspace/dnnlib/tflib/__init__.py\", line 8, in \u003cmodule\u003e" + "message": " File \"/paperspace/dnnlib/tflib/__init__.py\", line 8, in " }, { "line": 5, "timestamp": "2019-04-03T15:56:35.458Z", @@ -2924,7 +2924,7 @@ }, { "line": 6, "timestamp": "2019-04-03T15:56:35.458Z", - "message": " File \"/paperspace/dnnlib/tflib/autosummary.py\", line 31, in \u003cmodule\u003e" + "message": " File \"/paperspace/dnnlib/tflib/autosummary.py\", line 31, in " }, { "line": 7, "timestamp": "2019-04-03T15:56:35.458Z", @@ -2932,15 +2932,15 @@ }, { "line": 8, "timestamp": "2019-04-03T15:56:35.458Z", - "message": " File \"/paperspace/dnnlib/tflib/tfutil.py\", line 34, in \u003cmodule\u003e" + "message": " File \"/paperspace/dnnlib/tflib/tfutil.py\", line 34, in " }, { "line": 9, "timestamp": "2019-04-03T15:56:35.458Z", - "message": " def shape_to_list(shape: Iterable[tf.Dimension]) -\u003e List[Union[int, None]]:" + "message": " def shape_to_list(shape: Iterable[tf.Dimension]) -> List[Union[int, None]]:" }, { "line": 10, "timestamp": "2019-04-03T15:56:35.458Z", - "message": "AttributeError: module 'tensorflow' has no attribute 'Dimension'" + "message": "AttributeError: module \'tensorflow\' has no attribute \'Dimension\'" }, { "line": 11, "timestamp": "2019-04-03T15:56:46.168Z", diff --git a/tests/functional/test_logs.py b/tests/functional/test_logs.py index 1d93336..7e18934 100644 --- a/tests/functional/test_logs.py +++ b/tests/functional/test_logs.py @@ -7,11 +7,23 @@ class TestListLogs(object): - URL = "https://logs.paperspace.io" + URL = "https://logs.paperspace.io/jobs/logs?jobId=some_job_id&line=0" EXPECTED_HEADERS = default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY = default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY["X-API-Key"] = "some_key" + + RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"} EXPECTED_RESPONSE_JSON = example_responses.LIST_OF_LOGS_FOR_JOB - BASIC_COMMAND = ["logs", "list"] - EXPECTED_STDOUT = """+Job jztdeungdkzjv logs------------------------------------------------------------------+ + BASIC_COMMAND_WITHOUT_PARAMETERS = ["logs", "list"] + BASIC_COMMAND = ["logs", "list", "--jobId", "some_job_id", "--apiKey", "some_key"] + + EXPECTED_STDOUT_WITHOUT_PARAMETERS = """Usage: cli logs list [OPTIONS] +Try "cli logs list --help" for help. + +Error: Missing option "--jobId". 
+""" + + EXPECTED_STDOUT = """+Job some_job_id logs--------------------------------------------------------------------+ | LINE | MESSAGE | +------+---------------------------------------------------------------------------------+ | 1 | Traceback (most recent call last): | @@ -28,17 +40,57 @@ class TestListLogs(object): +------+---------------------------------------------------------------------------------+ """ + EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Invalid API token\n" + @mock.patch("paperspace.cli.cli.client.requests.get") - def test_should_send_valid_get_request_and_print_table_with_logs(self, get_patched): + def test_command_should_not_send_request_without_required_parameters(self, get_patched): + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND_WITHOUT_PARAMETERS) + print(result) + + get_patched.assert_not_called() + assert result.exit_code == 2 + assert result.output == self.EXPECTED_STDOUT_WITHOUT_PARAMETERS + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_send_valid_get_request_and_print_available_logs(self, get_patched): get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) cli_runner = CliRunner() result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) get_patched.assert_called_with(self.URL, - headers=self.EXPECTED_HEADERS, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, json=None, params=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_send_valid_get_request_when_log_list_was_used_with_wrong_api_key(self, get_patched): + get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, status_code=400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=None, + params=None) + assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN + assert result.exit_code == 0 + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_print_error_message_when_error_status_code_received_but_no_content_was_provided(self, get_patched): + get_patched.return_value = MockResponse(status_code=400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=None, + params=None) + assert result.output == "Error while parsing response data: No JSON\n" + assert result.exit_code == 0 From ba5ba73cad757d01e71d851ce4853874de3af024 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Mudlaff?= Date: Sun, 5 May 2019 18:42:52 +0200 Subject: [PATCH 29/42] Feature PS-9868: Change name of makefile step that clean tests and build folders --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 0d86641..05e1dcf 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -clean-tox: +clean-tests: rm -rf .tox paperspace.egg-info run-tests: clean-tox From deb58e81d87becfb415620ad62e9487feff1236a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Mudlaff?= Date: Mon, 6 May 2019 11:10:08 +0200 Subject: [PATCH 30/42] Feature PS-9868: Remove colorclass requirements from CLI because we can use click style function --- Makefile | 2 +- Pipfile | 1 - Pipfile.lock | 3 +-- paperspace/commands/logs.py | 4 ++-- setup.py | 1 - 5 files changed, 4 
insertions(+), 7 deletions(-) diff --git a/Makefile b/Makefile index 05e1dcf..2db1459 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,5 @@ clean-tests: rm -rf .tox paperspace.egg-info -run-tests: clean-tox +run-tests: clean-tests tox diff --git a/Pipfile b/Pipfile index 855aa6d..a22b444 100644 --- a/Pipfile +++ b/Pipfile @@ -15,7 +15,6 @@ click = "*" terminaltables = "*" click-didyoumean = "*" click-help-colors = "*" -colorclass = "*" tox = "*" [dev-packages] diff --git a/Pipfile.lock b/Pipfile.lock index e88a93c..bbcca79 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "a33e49d96d0278ced7462e52e9867e7f7d3d5ffc534c20e6e1821a42cb2b67ae" + "sha256": "9a2e80628f652d225a1a1ced4d7eb2079194196e4727a0c4b1911ae5934d5de0" }, "pipfile-spec": 6, "requires": {}, @@ -111,7 +111,6 @@ "hashes": [ "sha256:b05c2a348dfc1aff2d502527d78a5b7b7e2f85da94a96c5081210d8e9ee8e18b" ], - "index": "pypi", "version": "==2.2.0" }, "cryptography": { diff --git a/paperspace/commands/logs.py b/paperspace/commands/logs.py index d291f99..046999c 100644 --- a/paperspace/commands/logs.py +++ b/paperspace/commands/logs.py @@ -1,7 +1,7 @@ import pydoc import terminaltables -from colorclass import Color +from click import style from paperspace.commands import CommandBase from paperspace.utils import get_terminal_lines @@ -55,6 +55,6 @@ def _make_table(self, logs, table, table_data): self.last_line_number = logs[-1].get("line") for log in logs: - table_data.append((Color.colorize("red", log.get("line")), log.get("message"))) + table_data.append((style(fg="red", text=str(log.get("line"))), log.get("message"))) return table.table diff --git a/setup.py b/setup.py index f248ce9..5370318 100644 --- a/setup.py +++ b/setup.py @@ -50,7 +50,6 @@ 'terminaltables', 'click-didyoumean', 'click-help-colors', - 'colorclass', ], extras_require={ ':sys_platform == "win32"': [ From 3932706233bbdd48f0d81caf01debbdf48287920 Mon Sep 17 00:00:00 2001 From: kossak Date: Mon, 6 May 2019 14:21:24 +0200 Subject: [PATCH 31/42] tmp --- paperspace/workspace.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/paperspace/workspace.py b/paperspace/workspace.py index d4ee6c4..fc9956a 100644 --- a/paperspace/workspace.py +++ b/paperspace/workspace.py @@ -96,7 +96,8 @@ def upload_workspace(self, input_data): archive_path = self._zip_workspace(workspace_path) file_name = os.path.basename(archive_path) - s3_upload_data = self._get_upload_data(file_name) + project_handle = input_data['projectHandle'] + s3_upload_data = self._get_upload_data(file_name, project_handle) bucket_name = s3_upload_data['bucket_name'] self.logger.log('Uploading zipped workspace to S3') @@ -114,8 +115,9 @@ def upload_workspace(self, input_data): return 's3://{}/{}'.format(bucket_name, file_name) - def _get_upload_data(self, file_name): - response = self.experiments_api.get("/workspace/get_presigned_url", params={'workspaceName': file_name}) + def _get_upload_data(self, file_name, project_handle): + response = self.experiments_api.get("/workspace/get_presigned_url", + params={'workspaceName': file_name, 'projectHandle': project_handle}) if response.status_code == 404: raise PresignedUrlUnreachableException if response.status_code == 403: From b96614ec1272336778c264c68f4adaa53b278363 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Mudlaff?= Date: Mon, 6 May 2019 14:30:17 +0200 Subject: [PATCH 32/42] Feature PS-9868: Add Job logs to job group in cli --- paperspace/cli/cli.py | 2 - paperspace/cli/jobs.py | 13 +++++ 
paperspace/cli/logs.py | 23 --------- paperspace/commands/jobs.py | 54 ++++++++++++++++++++ paperspace/commands/logs.py | 60 ---------------------- tests/functional/test_jobs.py | 90 ++++++++++++++++++++++++++++++++ tests/functional/test_logs.py | 96 ----------------------------------- tox.ini | 2 +- 8 files changed, 158 insertions(+), 182 deletions(-) delete mode 100644 paperspace/cli/logs.py delete mode 100644 paperspace/commands/logs.py delete mode 100644 tests/functional/test_logs.py diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py index 0f9bdd6..f05639b 100644 --- a/paperspace/cli/cli.py +++ b/paperspace/cli/cli.py @@ -7,7 +7,6 @@ from paperspace.cli import common from paperspace.cli.common import api_key_option, del_if_value_is_none from paperspace.cli.jobs import jobs_group -from paperspace.cli.logs import logs_group from paperspace.cli.models import models_group from paperspace.cli.projects import projects_group from paperspace.cli.cli_types import ChoiceType, json_string @@ -1068,7 +1067,6 @@ def version(): cli.add_command(jobs_group) cli.add_command(projects_group) cli.add_command(models_group) -cli.add_command(logs_group) if __name__ == '__main__': diff --git a/paperspace/cli/jobs.py b/paperspace/cli/jobs.py index 332e958..7f98590 100644 --- a/paperspace/cli/jobs.py +++ b/paperspace/cli/jobs.py @@ -60,3 +60,16 @@ def list_jobs(api_key, **filters): jobs_api = client.API(config.CONFIG_HOST, api_key=api_key) command = jobs_commands.ListJobsCommand(api=jobs_api) command.execute(filters) + + +@jobs_group.command("log", help="List job logs") +@click.option( + "--jobId", + "job_id", + required=True +) +@common.api_key_option +def list_logs(job_id, api_key=None): + logs_api = client.API(config.CONFIG_LOG_HOST, api_key=api_key) + command = jobs_commands.JobLogsCommand(api=logs_api) + command.execute(job_id) diff --git a/paperspace/cli/logs.py b/paperspace/cli/logs.py deleted file mode 100644 index bf6b913..0000000 --- a/paperspace/cli/logs.py +++ /dev/null @@ -1,23 +0,0 @@ -import click - -from paperspace import client, config -from paperspace.cli import common -from paperspace.commands import logs as logs_commands - - -@click.group("logs", help="Manage gradient logs") -def logs_group(): - pass - - -@logs_group.command("list", help="List job logs") -@click.option( - "--jobId", - "job_id", - required=True -) -@common.api_key_option -def list_logs(job_id, api_key=None): - logs_api = client.API(config.CONFIG_LOG_HOST, api_key=api_key) - command = logs_commands.ListLogsCommand(api=logs_api) - command.execute(job_id) diff --git a/paperspace/commands/jobs.py b/paperspace/commands/jobs.py index 5de8884..39ed696 100644 --- a/paperspace/commands/jobs.py +++ b/paperspace/commands/jobs.py @@ -1,6 +1,7 @@ import pydoc import terminaltables +from click import style from paperspace.commands import CommandBase from paperspace.utils import get_terminal_lines @@ -82,3 +83,56 @@ def _make_table(jobs): ascii_table = terminaltables.AsciiTable(data) table_string = ascii_table.table return table_string + + +class JobLogsCommand(CommandBase): + last_line_number = 0 + base_url = "/jobs/logs?jobId={}&line={}" + + is_logs_complete = False + + def execute(self, job_id): + table_title = "Job %s logs" % job_id + table_data = [("LINE", "MESSAGE")] + table = terminaltables.AsciiTable(table_data, title=table_title) + + while not self.is_logs_complete: + response = self._get_logs(job_id) + + try: + data = response.json() + if not response.ok: + self.logger.log_error_response(data) + return + except 
(ValueError, KeyError) as e: + if response.status_code == 204: + continue + self.logger.log("Error while parsing response data: {}".format(e)) + return + else: + self._log_logs_list(data, table, table_data) + + def _get_logs(self, job_id): + url = self.base_url.format(job_id, self.last_line_number) + return self.api.get(url) + + def _log_logs_list(self, data, table, table_data): + if not data: + self.logger.log("No Logs found") + else: + table_str = self._make_table(data, table, table_data) + if len(table_str.splitlines()) > get_terminal_lines(): + pydoc.pager(table_str) + else: + self.logger.log(table_str) + + def _make_table(self, logs, table, table_data): + if logs[-1].get("message") == "PSEOF": + self.is_logs_complete = True + else: + self.last_line_number = logs[-1].get("line") + + for log in logs: + table_data.append((style(fg="red", text=str(log.get("line"))), log.get("message"))) + + return table.table diff --git a/paperspace/commands/logs.py b/paperspace/commands/logs.py deleted file mode 100644 index 046999c..0000000 --- a/paperspace/commands/logs.py +++ /dev/null @@ -1,60 +0,0 @@ -import pydoc - -import terminaltables -from click import style - -from paperspace.commands import CommandBase -from paperspace.utils import get_terminal_lines - - -class ListLogsCommand(CommandBase): - last_line_number = 0 - base_url = "/jobs/logs?jobId={}&line={}" - - is_logs_complete = False - - def execute(self, job_id): - table_title = "Job %s logs" % job_id - table_data = [("LINE", "MESSAGE")] - table = terminaltables.AsciiTable(table_data, title=table_title) - - while not self.is_logs_complete: - response = self._get_logs(job_id) - - try: - data = response.json() - if not response.ok: - self.logger.log_error_response(data) - return - except (ValueError, KeyError) as e: - if response.status_code == 204: - continue - self.logger.log("Error while parsing response data: {}".format(e)) - return - else: - self._log_logs_list(data, table, table_data) - - def _get_logs(self, job_id): - url = self.base_url.format(job_id, self.last_line_number) - return self.api.get(url) - - def _log_logs_list(self, data, table, table_data): - if not data: - self.logger.log("No Logs found") - else: - table_str = self._make_table(data, table, table_data) - if len(table_str.splitlines()) > get_terminal_lines(): - pydoc.pager(table_str) - else: - self.logger.log(table_str) - - def _make_table(self, logs, table, table_data): - if logs[-1].get("message") == "PSEOF": - self.is_logs_complete = True - else: - self.last_line_number = logs[-1].get("line") - - for log in logs: - table_data.append((style(fg="red", text=str(log.get("line"))), log.get("message"))) - - return table.table diff --git a/tests/functional/test_jobs.py b/tests/functional/test_jobs.py index 699ca9e..4e7c9e0 100644 --- a/tests/functional/test_jobs.py +++ b/tests/functional/test_jobs.py @@ -167,3 +167,93 @@ def test_should_print_proper_message_when_jobs_list_was_used_with_mutually_exclu params=None) assert result.output == self.EXPECTED_STDOUT_WHEN_MUTUALLY_EXCLUSIVE_FILTERS assert result.exit_code == 0 + + +class TestJobLogs(object): + URL = "https://logs.paperspace.io/jobs/logs?jobId=some_job_id&line=0" + EXPECTED_HEADERS = default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY = default_headers.copy() + EXPECTED_HEADERS_WITH_CHANGED_API_KEY["X-API-Key"] = "some_key" + + RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"} + EXPECTED_RESPONSE_JSON = example_responses.LIST_OF_LOGS_FOR_JOB + BASIC_COMMAND_WITHOUT_PARAMETERS 
= ["jobs", "log"] + BASIC_COMMAND = ["jobs", "log", "--jobId", "some_job_id", "--apiKey", "some_key"] + + EXPECTED_STDOUT_WITHOUT_PARAMETERS = """Usage: cli jobs log [OPTIONS] +Try "cli jobs log --help" for help. + +Error: Missing option "--jobId". +""" + + EXPECTED_STDOUT = """+Job some_job_id logs--------------------------------------------------------------------+ +| LINE | MESSAGE | ++------+---------------------------------------------------------------------------------+ +| 1 | Traceback (most recent call last): | +| 2 | File "generate_figures.py", line 15, in | +| 3 | import dnnlib.tflib as tflib | +| 4 | File "/paperspace/dnnlib/tflib/__init__.py", line 8, in | +| 5 | from . import autosummary | +| 6 | File "/paperspace/dnnlib/tflib/autosummary.py", line 31, in | +| 7 | from . import tfutil | +| 8 | File "/paperspace/dnnlib/tflib/tfutil.py", line 34, in | +| 9 | def shape_to_list(shape: Iterable[tf.Dimension]) -> List[Union[int, None]]: | +| 10 | AttributeError: module 'tensorflow' has no attribute 'Dimension' | +| 11 | PSEOF | ++------+---------------------------------------------------------------------------------+ +""" + + EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Invalid API token\n" + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_command_should_not_send_request_without_required_parameters(self, get_patched): + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND_WITHOUT_PARAMETERS) + print(result) + + get_patched.assert_not_called() + assert result.exit_code == 2 + assert result.output == self.EXPECTED_STDOUT_WITHOUT_PARAMETERS + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_send_valid_get_request_and_print_available_logs(self, get_patched): + get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=None, + params=None) + + assert result.output == self.EXPECTED_STDOUT + assert result.exit_code == 0 + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_send_valid_get_request_when_log_list_was_used_with_wrong_api_key(self, get_patched): + get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, status_code=400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=None, + params=None) + assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN + assert result.exit_code == 0 + + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_print_error_message_when_error_status_code_received_but_no_content_was_provided(self, get_patched): + get_patched.return_value = MockResponse(status_code=400) + + cli_runner = CliRunner() + result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) + + get_patched.assert_called_with(self.URL, + headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, + json=None, + params=None) + assert result.output == "Error while parsing response data: No JSON\n" + assert result.exit_code == 0 diff --git a/tests/functional/test_logs.py b/tests/functional/test_logs.py deleted file mode 100644 index 7e18934..0000000 --- a/tests/functional/test_logs.py +++ /dev/null @@ -1,96 +0,0 @@ -import mock -from click.testing import CliRunner - -from paperspace.cli 
import cli -from paperspace.client import default_headers -from tests import MockResponse, example_responses - - -class TestListLogs(object): - URL = "https://logs.paperspace.io/jobs/logs?jobId=some_job_id&line=0" - EXPECTED_HEADERS = default_headers.copy() - EXPECTED_HEADERS_WITH_CHANGED_API_KEY = default_headers.copy() - EXPECTED_HEADERS_WITH_CHANGED_API_KEY["X-API-Key"] = "some_key" - - RESPONSE_JSON_WITH_WRONG_API_TOKEN = {"status": 400, "message": "Invalid API token"} - EXPECTED_RESPONSE_JSON = example_responses.LIST_OF_LOGS_FOR_JOB - BASIC_COMMAND_WITHOUT_PARAMETERS = ["logs", "list"] - BASIC_COMMAND = ["logs", "list", "--jobId", "some_job_id", "--apiKey", "some_key"] - - EXPECTED_STDOUT_WITHOUT_PARAMETERS = """Usage: cli logs list [OPTIONS] -Try "cli logs list --help" for help. - -Error: Missing option "--jobId". -""" - - EXPECTED_STDOUT = """+Job some_job_id logs--------------------------------------------------------------------+ -| LINE | MESSAGE | -+------+---------------------------------------------------------------------------------+ -| 1 | Traceback (most recent call last): | -| 2 | File "generate_figures.py", line 15, in | -| 3 | import dnnlib.tflib as tflib | -| 4 | File "/paperspace/dnnlib/tflib/__init__.py", line 8, in | -| 5 | from . import autosummary | -| 6 | File "/paperspace/dnnlib/tflib/autosummary.py", line 31, in | -| 7 | from . import tfutil | -| 8 | File "/paperspace/dnnlib/tflib/tfutil.py", line 34, in | -| 9 | def shape_to_list(shape: Iterable[tf.Dimension]) -> List[Union[int, None]]: | -| 10 | AttributeError: module 'tensorflow' has no attribute 'Dimension' | -| 11 | PSEOF | -+------+---------------------------------------------------------------------------------+ -""" - - EXPECTED_STDOUT_WITH_WRONG_API_TOKEN = "Invalid API token\n" - - @mock.patch("paperspace.cli.cli.client.requests.get") - def test_command_should_not_send_request_without_required_parameters(self, get_patched): - cli_runner = CliRunner() - result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND_WITHOUT_PARAMETERS) - print(result) - - get_patched.assert_not_called() - assert result.exit_code == 2 - assert result.output == self.EXPECTED_STDOUT_WITHOUT_PARAMETERS - - @mock.patch("paperspace.cli.cli.client.requests.get") - def test_should_send_valid_get_request_and_print_available_logs(self, get_patched): - get_patched.return_value = MockResponse(json_data=self.EXPECTED_RESPONSE_JSON, status_code=200) - - cli_runner = CliRunner() - result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) - - get_patched.assert_called_with(self.URL, - headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, - json=None, - params=None) - - assert result.output == self.EXPECTED_STDOUT - assert result.exit_code == 0 - - @mock.patch("paperspace.cli.cli.client.requests.get") - def test_should_send_valid_get_request_when_log_list_was_used_with_wrong_api_key(self, get_patched): - get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WITH_WRONG_API_TOKEN, status_code=400) - - cli_runner = CliRunner() - result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) - - get_patched.assert_called_with(self.URL, - headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, - json=None, - params=None) - assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN - assert result.exit_code == 0 - - @mock.patch("paperspace.cli.cli.client.requests.get") - def test_should_print_error_message_when_error_status_code_received_but_no_content_was_provided(self, get_patched): - get_patched.return_value = MockResponse(status_code=400) 
- - cli_runner = CliRunner() - result = cli_runner.invoke(cli.cli, self.BASIC_COMMAND) - - get_patched.assert_called_with(self.URL, - headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, - json=None, - params=None) - assert result.output == "Error while parsing response data: No JSON\n" - assert result.exit_code == 0 diff --git a/tox.ini b/tox.ini index df0afba..aa779c0 100644 --- a/tox.ini +++ b/tox.ini @@ -26,7 +26,7 @@ deps = pip_pre = true commands = - pytest --cov=paperspace --cov-append + pytest -vv --cov=paperspace --cov-append [testenv:check] deps = From c7f9f8eee8f77a3469c1c09054f99853e0329a1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Mudlaff?= Date: Mon, 6 May 2019 14:32:40 +0200 Subject: [PATCH 33/42] Feature PS-9868: Fix tox command for tests --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index aa779c0..df0afba 100644 --- a/tox.ini +++ b/tox.ini @@ -26,7 +26,7 @@ deps = pip_pre = true commands = - pytest -vv --cov=paperspace --cov-append + pytest --cov=paperspace --cov-append [testenv:check] deps = From cc3e74507882cf885f90142dddfcfa1c3a55dfa2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Mudlaff?= Date: Mon, 6 May 2019 14:37:22 +0200 Subject: [PATCH 34/42] Feature PS-9868: Remove colorclass from Pipfile.lock --- Pipfile.lock | 6 ------ 1 file changed, 6 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index bbcca79..233087e 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -107,12 +107,6 @@ "index": "pypi", "version": "==0.5" }, - "colorclass": { - "hashes": [ - "sha256:b05c2a348dfc1aff2d502527d78a5b7b7e2f85da94a96c5081210d8e9ee8e18b" - ], - "version": "==2.2.0" - }, "cryptography": { "extras": [ "security" From f8370f288213df3a7522e29dc776178713815824 Mon Sep 17 00:00:00 2001 From: kossak Date: Mon, 6 May 2019 15:49:52 +0200 Subject: [PATCH 35/42] minor tweaks previous commit: support of project handle for auth purposes --- paperspace/workspace.py | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/paperspace/workspace.py b/paperspace/workspace.py index fc9956a..177f1a3 100644 --- a/paperspace/workspace.py +++ b/paperspace/workspace.py @@ -1,4 +1,3 @@ -import logging import os import zipfile from collections import OrderedDict @@ -7,6 +6,7 @@ import progressbar import requests from requests_toolbelt.multipart import encoder +from paperspace import logger as default_logger from paperspace.exceptions import S3UploadFailedException, PresignedUrlUnreachableException, \ PresignedUrlAccessDeniedException, PresignedUrlConnectionException @@ -14,11 +14,15 @@ class S3WorkspaceHandler: def __init__(self, experiments_api, logger=None): + """ + + :param experiments_api: paperspace.client.API + :param logger: paperspace.logger + """ self.experiments_api = experiments_api - self.logger = logger or logging.getLogger() + self.logger = logger or default_logger def _retrieve_file_paths(self, dirName): - # setup file paths variable file_paths = {} exclude = ['.git', '.idea', '.pytest_cache'] @@ -97,24 +101,32 @@ def upload_workspace(self, input_data): file_name = os.path.basename(archive_path) project_handle = input_data['projectHandle'] + s3_upload_data = self._get_upload_data(file_name, project_handle) + bucket_name = s3_upload_data['bucket_name'] + s3_object_path = s3_upload_data['fields']['key'] self.logger.log('Uploading zipped workspace to S3') + self._upload(archive_path, s3_upload_data) + + self.logger.log('\nUploading completed') + + return 's3://{}/{}'.format(bucket_name, 
s3_object_path) + + def _upload(self, archive_path, s3_upload_data): files = {'file': (archive_path, open(archive_path, 'rb'))} fields = OrderedDict(s3_upload_data['fields']) fields.update(files) + s3_encoder = encoder.MultipartEncoder(fields=fields) monitor = encoder.MultipartEncoderMonitor(s3_encoder, callback=self._create_callback(s3_encoder)) s3_response = requests.post(s3_upload_data['url'], data=monitor, headers={'Content-Type': monitor.content_type}) + self.logger.debug("S3 upload response: {}".format(s3_response.headers)) if not s3_response.ok: raise S3UploadFailedException(s3_response) - self.logger.log('\nUploading completed') - - return 's3://{}/{}'.format(bucket_name, file_name) - def _get_upload_data(self, file_name, project_handle): response = self.experiments_api.get("/workspace/get_presigned_url", params={'workspaceName': file_name, 'projectHandle': project_handle}) From 1c8e5dc6654ad00073e27ff9f895d6e6d34157d2 Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Mon, 6 May 2019 20:51:09 +0200 Subject: [PATCH 36/42] Fix coloring help messages and suggesting command names to all groups --- Pipfile | 3 +- Pipfile.lock | 152 ++++++++++++++++++++++++------------- paperspace/cli/__init__.py | 27 +++++++ paperspace/cli/cli.py | 10 +-- paperspace/cli/jobs.py | 3 +- paperspace/cli/models.py | 3 +- paperspace/cli/projects.py | 5 +- setup.py | 6 +- 8 files changed, 138 insertions(+), 71 deletions(-) diff --git a/Pipfile b/Pipfile index a22b444..7c7ddc4 100644 --- a/Pipfile +++ b/Pipfile @@ -15,7 +15,7 @@ click = "*" terminaltables = "*" click-didyoumean = "*" click-help-colors = "*" -tox = "*" +colorama = "*" [dev-packages] twine = "*" @@ -23,3 +23,4 @@ pypandoc = "*" pytest = "*" mock = "*" coverage = "*" +tox = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 233087e..4f45eb9 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "9a2e80628f652d225a1a1ced4d7eb2079194196e4727a0c4b1911ae5934d5de0" + "sha256": "7089bab2f09801361abd4d2f626b60f2994f4b6715989b7150c72d01dd075b04" }, "pipfile-spec": 6, "requires": {}, @@ -107,6 +107,14 @@ "index": "pypi", "version": "==0.5" }, + "colorama": { + "hashes": [ + "sha256:05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d", + "sha256:f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48" + ], + "index": "pypi", + "version": "==0.4.1" + }, "cryptography": { "extras": [ "security" @@ -133,7 +141,6 @@ "sha256:e603aa7bb52e4e8ed4119a58a03b60323918467ef209e6ff9db3ac382e5cf2c6" ], "index": "pypi", - "markers": null, "version": "==2.6.1" }, "datadog": { @@ -162,12 +169,23 @@ "editable": true, "path": "." 
}, - "filelock": { + "enum34": { "hashes": [ - "sha256:b8d5ca5ca1c815e1574aee746650ea7301de63d87935b3463d26368b76e31633", - "sha256:d610c1bb404daf85976d7a82eb2ada120f04671007266b708606565dd03b5be6" + "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", + "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", + "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", + "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" ], - "version": "==3.0.10" + "markers": "python_version < '3'", + "version": "==1.1.6" + }, + "futures": { + "hashes": [ + "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265", + "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1" + ], + "markers": "python_version == '2.6' or python_version == '2.7'", + "version": "==3.2.0" }, "gradient-statsd": { "hashes": [ @@ -182,9 +200,16 @@ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" ], - "markers": "extra == 'security'", "version": "==2.8" }, + "ipaddress": { + "hashes": [ + "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794", + "sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c" + ], + "markers": "python_version < '3'", + "version": "==1.0.22" + }, "jmespath": { "hashes": [ "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6", @@ -192,20 +217,6 @@ ], "version": "==0.9.4" }, - "pluggy": { - "hashes": [ - "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f", - "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746" - ], - "version": "==0.9.0" - }, - "py": { - "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" - ], - "version": "==1.8.0" - }, "pycparser": { "hashes": [ "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" @@ -217,7 +228,6 @@ "sha256:aeca66338f6de19d1aa46ed634c3b9ae519a64b458f8468aec688e7e3c20f200", "sha256:c727930ad54b10fc157015014b666f2d8b41f70c0d03e83ab67624fd3dd5d1e6" ], - "markers": "extra == 'security'", "version": "==19.0.0" }, "python-dateutil": { @@ -261,35 +271,13 @@ "index": "pypi", "version": "==3.1.0" }, - "toml": { - "hashes": [ - "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c", - "sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e" - ], - "version": "==0.10.0" - }, - "tox": { - "hashes": [ - "sha256:1b166b93d2ce66bb7b253ba944d2be89e0c9d432d49eeb9da2988b4902a4684e", - "sha256:665cbdd99f5c196dd80d1d8db8c8cf5d48b1ae1f778bccd1bdf14d5aaf4ca0fc" - ], - "index": "pypi", - "version": "==3.9.0" - }, "urllib3": { "hashes": [ "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" ], - "markers": "python_version >= '3.4'", + "markers": "python_version == '2.7'", "version": "==1.24.3" - }, - "virtualenv": { - "hashes": [ - "sha256:15ee248d13e4001a691d9583948ad3947bcb8a289775102e4c4aa98a8b7a6d73", - "sha256:bfc98bb9b42a3029ee41b96dc00a34c2f254cbf7716bec824477b2c82741a5c4" - ], - "version": "==16.5.0" } }, "develop": { @@ -373,12 +361,26 @@ ], "version": "==0.14" }, + "filelock": { + "hashes": [ + "sha256:b8d5ca5ca1c815e1574aee746650ea7301de63d87935b3463d26368b76e31633", + 
"sha256:d610c1bb404daf85976d7a82eb2ada120f04671007266b708606565dd03b5be6" + ], + "version": "==3.0.10" + }, + "funcsigs": { + "hashes": [ + "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", + "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" + ], + "markers": "python_version < '3.3'", + "version": "==1.0.2" + }, "idna": { "hashes": [ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" ], - "markers": "extra == 'security'", "version": "==2.8" }, "mock": { @@ -391,11 +393,20 @@ }, "more-itertools": { "hashes": [ - "sha256:2112d2ca570bb7c3e53ea1a35cd5df42bb0fd10c45f0fb97178679c3c03d64c7", - "sha256:c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a" + "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", + "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", + "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" ], - "markers": "python_version > '2.7'", - "version": "==7.0.0" + "markers": "python_version <= '2.7'", + "version": "==5.0.0" + }, + "pathlib2": { + "hashes": [ + "sha256:25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742", + "sha256:5887121d7f7df3603bca2f710e7219f3eca0eb69e0b7cc6e0a022e155ac931a7" + ], + "markers": "python_version < '3.6'", + "version": "==2.3.3" }, "pkginfo": { "hashes": [ @@ -465,6 +476,23 @@ ], "version": "==0.9.1" }, + "scandir": { + "hashes": [ + "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", + "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022", + "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f", + "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f", + "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae", + "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173", + "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4", + "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32", + "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188", + "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d", + "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac" + ], + "markers": "python_version < '3.5'", + "version": "==1.10.0" + }, "six": { "hashes": [ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", @@ -473,6 +501,21 @@ "index": "pypi", "version": "==1.12.0" }, + "toml": { + "hashes": [ + "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c", + "sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e" + ], + "version": "==0.10.0" + }, + "tox": { + "hashes": [ + "sha256:1b166b93d2ce66bb7b253ba944d2be89e0c9d432d49eeb9da2988b4902a4684e", + "sha256:665cbdd99f5c196dd80d1d8db8c8cf5d48b1ae1f778bccd1bdf14d5aaf4ca0fc" + ], + "index": "pypi", + "version": "==3.9.0" + }, "tqdm": { "hashes": [ "sha256:d385c95361699e5cf7622485d9b9eae2d4864b21cd5a2374a9c381ffed701021", @@ -493,9 +536,16 @@ "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" ], - "markers": "python_version >= '3.4'", + "markers": "python_version == '2.7'", "version": "==1.24.3" }, + "virtualenv": { + "hashes": [ + "sha256:15ee248d13e4001a691d9583948ad3947bcb8a289775102e4c4aa98a8b7a6d73", + 
"sha256:bfc98bb9b42a3029ee41b96dc00a34c2f254cbf7716bec824477b2c82741a5c4" + ], + "version": "==16.5.0" + }, "webencodings": { "hashes": [ "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", diff --git a/paperspace/cli/__init__.py b/paperspace/cli/__init__.py index e69de29..0902828 100644 --- a/paperspace/cli/__init__.py +++ b/paperspace/cli/__init__.py @@ -0,0 +1,27 @@ +import click +import colorama +from click._compat import get_text_stderr + +import paperspace.cli.jobs +import paperspace.cli.models +import paperspace.cli.projects + + +def show(self, file=None): + if file is None: + file = get_text_stderr() + color = None + hint = '' + if (self.cmd is not None and + self.cmd.get_help_option(self.ctx) is not None): + hint = ('Try "%s %s" for help.\n' + % (self.ctx.command_path, self.ctx.help_option_names[0])) + if self.ctx is not None: + color = self.ctx.color + click.echo(self.ctx.get_usage() + '\n%s' % hint, file=file, color=color) + msg = colorama.Fore.RED + 'Error: %s' % self.format_message() + colorama.Style.RESET_ALL + click.echo(msg, file=file, color=color) + + +# not-very-elegant way to color click's error messages +click.exceptions.UsageError.show = show diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py index f05639b..9216c90 100644 --- a/paperspace/cli/cli.py +++ b/paperspace/cli/cli.py @@ -5,11 +5,8 @@ from paperspace import constants, client, config from paperspace.cli import common -from paperspace.cli.common import api_key_option, del_if_value_is_none -from paperspace.cli.jobs import jobs_group -from paperspace.cli.models import models_group -from paperspace.cli.projects import projects_group from paperspace.cli.cli_types import ChoiceType, json_string +from paperspace.cli.common import api_key_option, del_if_value_is_none from paperspace.cli.validators import validate_mutually_exclusive, validate_email from paperspace.commands import experiments as experiments_commands, deployments as deployments_commands, \ machines as machines_commands, login as login_commands @@ -1064,10 +1061,5 @@ def version(): command.execute() -cli.add_command(jobs_group) -cli.add_command(projects_group) -cli.add_command(models_group) - - if __name__ == '__main__': cli() diff --git a/paperspace/cli/jobs.py b/paperspace/cli/jobs.py index 7f98590..983ae0f 100644 --- a/paperspace/cli/jobs.py +++ b/paperspace/cli/jobs.py @@ -2,10 +2,11 @@ from paperspace import client, config from paperspace.cli import common +from paperspace.cli.cli import cli from paperspace.commands import jobs as jobs_commands -@click.group("jobs", help="Manage gradient jobs", cls=common.ClickGroup) +@cli.group("jobs", help="Manage gradient jobs", cls=common.ClickGroup) def jobs_group(): pass diff --git a/paperspace/cli/models.py b/paperspace/cli/models.py index f2b2e5c..7027783 100644 --- a/paperspace/cli/models.py +++ b/paperspace/cli/models.py @@ -2,10 +2,11 @@ from paperspace import client, config from paperspace.cli import common +from paperspace.cli.cli import cli from paperspace.commands import models as models_commands -@click.group("models", help="Manage models", cls=common.ClickGroup) +@cli.group("models", help="Manage models", cls=common.ClickGroup) def models_group(): pass diff --git a/paperspace/cli/projects.py b/paperspace/cli/projects.py index a33cf63..cfa6b30 100644 --- a/paperspace/cli/projects.py +++ b/paperspace/cli/projects.py @@ -1,11 +1,10 @@ -import click - from paperspace import client, config +from paperspace.cli.cli import cli from paperspace.commands import projects as 
projects_commands from . import common -@click.group("projects", help="Manage projects", cls=common.ClickGroup) +@cli.group("projects", help="Manage projects", cls=common.ClickGroup) def projects_group(): pass diff --git a/setup.py b/setup.py index 5370318..50944fe 100644 --- a/setup.py +++ b/setup.py @@ -50,12 +50,8 @@ 'terminaltables', 'click-didyoumean', 'click-help-colors', + 'colorama', ], - extras_require={ - ':sys_platform == "win32"': [ - 'colorama', - ], - }, entry_points={'console_scripts': [ 'paperspace-python = paperspace.main:main', ]}, From 5a9d764f0df6f7a8a2270b22c3b626444ff30ac4 Mon Sep 17 00:00:00 2001 From: kossak Date: Tue, 7 May 2019 15:01:04 +0200 Subject: [PATCH 37/42] unifying variable names (in other words: bugfix) --- paperspace/cli/jobs.py | 2 +- paperspace/commands/jobs.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/paperspace/cli/jobs.py b/paperspace/cli/jobs.py index e1ed828..7bf3292 100644 --- a/paperspace/cli/jobs.py +++ b/paperspace/cli/jobs.py @@ -79,7 +79,7 @@ def list_jobs(api_key, **filters): @click.option('--useDockerfile', 'useDockerfile', help="Flag: using Dockerfile") @click.option('--isPreemptible', 'isPreemptible', help="Flag: isPreemptible") @click.option('--project', 'project', help="Project name") -@click.option('--projectHandle', '--projectId', 'projectId', help="Project handle", required=True) +@click.option('--projectHandle', '--projectId', 'projectHandle', help="Project handle", required=True) @click.option('--startedByUserId', 'startedByUserId', help="User ID") @click.option('--relDockerfilePath', 'relDockerfilePath', help="Relative path to Dockerfile") @click.option('--registryUsername', 'registryUsername', help="Docker registry username") diff --git a/paperspace/commands/jobs.py b/paperspace/commands/jobs.py index d014c8b..544d32a 100644 --- a/paperspace/commands/jobs.py +++ b/paperspace/commands/jobs.py @@ -153,7 +153,7 @@ def execute(self, json_): workspace_url = self._workspace_handler.upload_workspace(json_) if workspace_url: json_['workspaceFileName'] = workspace_url - + json_['projectId'] = json_.get('projectId', json_.get('projectHandle')) response = self.api.post(url, json_) self._log_message(response, "Job created", From ab980940bc5b4a47a4d0cc3ad01369c996b34f36 Mon Sep 17 00:00:00 2001 From: kossak Date: Tue, 7 May 2019 15:01:40 +0200 Subject: [PATCH 38/42] more clear exception handling + relative import tweak --- paperspace/exceptions.py | 4 ++++ paperspace/main.py | 2 +- paperspace/workspace.py | 8 +++++--- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/paperspace/exceptions.py b/paperspace/exceptions.py index 0951204..f8ac458 100644 --- a/paperspace/exceptions.py +++ b/paperspace/exceptions.py @@ -10,6 +10,10 @@ class PresignedUrlUnreachableException(ApplicationException): pass +class ProjectAccessDeniedException(ApplicationException): + pass + + class PresignedUrlAccessDeniedException(ApplicationException): pass diff --git a/paperspace/main.py b/paperspace/main.py index 3acda14..6d936a1 100644 --- a/paperspace/main.py +++ b/paperspace/main.py @@ -1,7 +1,7 @@ import os import sys -from paperspace.cli.cli import cli +from .cli.cli import cli from .jobs import run, print_json_pretty from .login import set_apikey from .version import version diff --git a/paperspace/workspace.py b/paperspace/workspace.py index 177f1a3..3b5c822 100644 --- a/paperspace/workspace.py +++ b/paperspace/workspace.py @@ -9,7 +9,7 @@ from paperspace import logger as default_logger from paperspace.exceptions import 
S3UploadFailedException, PresignedUrlUnreachableException, \ - PresignedUrlAccessDeniedException, PresignedUrlConnectionException + PresignedUrlAccessDeniedException, PresignedUrlConnectionException, ProjectAccessDeniedException class S3WorkspaceHandler: @@ -130,10 +130,12 @@ def _upload(self, archive_path, s3_upload_data): def _get_upload_data(self, file_name, project_handle): response = self.experiments_api.get("/workspace/get_presigned_url", params={'workspaceName': file_name, 'projectHandle': project_handle}) - if response.status_code == 404: - raise PresignedUrlUnreachableException + if response.status_code == 401: + raise ProjectAccessDeniedException(project_handle) if response.status_code == 403: raise PresignedUrlAccessDeniedException + if response.status_code == 404: + raise PresignedUrlUnreachableException if not response.ok: raise PresignedUrlConnectionException(response.reason) return response.json() From 8cd890524c0d5d48d2849e52a141d5afdb971414 Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Tue, 7 May 2019 14:49:35 +0200 Subject: [PATCH 39/42] Minor changes to experiments options --- paperspace/cli/cli.py | 9 --------- tests/functional/test_experiments.py | 7 ------- 2 files changed, 16 deletions(-) diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py index 9216c90..c045b99 100644 --- a/paperspace/cli/cli.py +++ b/paperspace/cli/cli.py @@ -48,7 +48,6 @@ def common_experiments_create_options(f): ), click.option( "--ports", - type=int, help="Port to use in new experiment", ), click.option( @@ -70,7 +69,6 @@ def common_experiments_create_options(f): click.option( "--clusterId", "clusterId", - type=int, help="Cluster ID", ), click.option( @@ -79,16 +77,9 @@ def common_experiments_create_options(f): type=json_string, help="Environment variables in a JSON", ), - click.option( - "--triggerEventId", - "triggerEventId", - type=int, - help="Trigger event ID", - ), click.option( "--projectId", "projectId", - type=int, help="Project ID", ), click.option( diff --git a/tests/functional/test_experiments.py b/tests/functional/test_experiments.py index fcf8b28..81ac8ee 100644 --- a/tests/functional/test_experiments.py +++ b/tests/functional/test_experiments.py @@ -30,7 +30,6 @@ class TestExperimentsCreateSingleNode(object): "--artifactDirectory", "/artifact/dir/", "--clusterId", 42, "--experimentEnv", '{"key":"val"}', - "--triggerEventId", 45678, "--projectId", 987654, "--projectHandle", "testHandle", "--container", "testContainer", @@ -58,7 +57,6 @@ class TestExperimentsCreateSingleNode(object): "artifactDirectory": u"/artifact/dir/", "clusterId": 42, "experimentEnv": {u"key": u"val"}, - "triggerEventId": 45678, "projectId": 987654, "projectHandle": u"testHandle", "container": u"testContainer", @@ -154,7 +152,6 @@ class TestExperimentsCreateMultiNode(object): "--artifactDirectory", "/artdir", "--clusterId", 2, "--experimentEnv", '{"key":"val"}', - "--triggerEventId", 12, "--projectId", 34, "--projectHandle", "prq70zy79", "--experimentTypeId", "MPI", @@ -197,7 +194,6 @@ class TestExperimentsCreateMultiNode(object): "artifactDirectory": u"/artdir", "clusterId": 2, "experimentEnv": {"key": "val"}, - "triggerEventId": 12, "projectId": 34, "projectHandle": u"prq70zy79", "experimentTypeId": 3, @@ -271,7 +267,6 @@ class TestExperimentsCreateAndStartSingleNode(TestExperimentsCreateSingleNode): "--artifactDirectory", "/artifact/dir/", "--clusterId", 42, "--experimentEnv", '{"key":"val"}', - "--triggerEventId", 45678, "--projectId", 987654, "--projectHandle", "testHandle", "--container", 
"testContainer", @@ -312,7 +307,6 @@ class TestExperimentsCreateAndStartMultiNode(TestExperimentsCreateMultiNode): "--artifactDirectory", "/artdir", "--clusterId", 2, "--experimentEnv", '{"key":"val"}', - "--triggerEventId", 12, "--projectId", 34, "--projectHandle", "prq70zy79", "--experimentTypeId", "MPI", @@ -419,7 +413,6 @@ class TestExperimentDetail(object): "id": 12, "type": "github" }, - "triggerEventId": 12 } }, "message": "success" From db17d63257c9616cad0c7feb699b18eabc1da80f Mon Sep 17 00:00:00 2001 From: kossak Date: Tue, 7 May 2019 18:14:45 +0200 Subject: [PATCH 40/42] tests fixed and expanded exceptions renamed --- Pipfile | 4 +- Pipfile.lock | 76 +++++++++++++--- paperspace/cli/jobs.py | 31 +++---- paperspace/commands/machines.py | 8 +- paperspace/exceptions.py | 14 +-- paperspace/workspace.py | 33 ++++--- tests/functional/test_deployments.py | 30 ++++--- tests/functional/test_experiments.py | 15 ++-- tests/functional/test_machines.py | 99 ++++++++++++++------- tests/test_click_commands.py | 30 +++---- tests/unit/__init__.py | 0 tests/unit/test_workspace.py | 126 +++++++++++++++++++++++++++ 12 files changed, 350 insertions(+), 116 deletions(-) create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/test_workspace.py diff --git a/Pipfile b/Pipfile index f05b850..87cf8ec 100644 --- a/Pipfile +++ b/Pipfile @@ -6,8 +6,8 @@ verify_ssl = true [packages] e1839a8 = {path = ".",editable = true} requests = {extras = ["security"]} -requests-toolbelt = * -progressbar2 = * +requests-toolbelt = "*" +progressbar2 = "*" cryptography = {extras = ["security"]} "boto3" = "*" botocore = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 4f45eb9..50f6074 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "7089bab2f09801361abd4d2f626b60f2994f4b6715989b7150c72d01dd075b04" + "sha256": "414dd057f13ec74db3b2f021281547e2bcf53eec38e2459798ca230964e7b65a" }, "pipfile-spec": 6, "requires": {}, @@ -23,19 +23,19 @@ }, "boto3": { "hashes": [ - "sha256:aee5d24bec598b2dbdd8585865646ea4dce6f8ef6059417589de7284e80433a2", - "sha256:b2327f4dc2ca0e45da59b2f4a0085a3c356a0423ba0fa4f3fad4f7919c6be2f7" + "sha256:484650b86ea843587f484a8f9cc9629465ad805aff0ffaabf95345960168f569", + "sha256:635e1864cd35d78d33fd7ce325f9baa15c93a932403953b2b4801567a791b869" ], "index": "pypi", - "version": "==1.9.142" + "version": "==1.9.143" }, "botocore": { "hashes": [ - "sha256:60c40f8c51d308046c737c3f74c4dcf1681e884e6dc87fa2727db2d44f8b70f7", - "sha256:78b9413286c8fbcf094911584f63a81356b4539befec7be76887c6dcd66ace83" + "sha256:0247ad0da9fdbf4e8025b0dafb3982b945d335bcd7043518fdabe9d99f704e17", + "sha256:94846e90fc4dbe91a9e70f6a24ca823b4f3acc9a4047b497266d003fe12c80ce" ], "index": "pypi", - "version": "==1.12.142" + "version": "==1.12.143" }, "certifi": { "hashes": [ @@ -217,6 +217,14 @@ ], "version": "==0.9.4" }, + "progressbar2": { + "hashes": [ + "sha256:1ea89e2aaa1da85450aabbd2af62cefa04f1ee1c567f3a11ee0d8ded14fd1fea", + "sha256:8e5b5419e04193bb7c3fea71579937bbbcd64c26472b929718c2fe7ec420fe39" + ], + "index": "pypi", + "version": "==3.39.3" + }, "pycparser": { "hashes": [ "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" @@ -238,6 +246,13 @@ "markers": "python_version >= '2.7'", "version": "==2.8.0" }, + "python-utils": { + "hashes": [ + "sha256:34aaf26b39b0b86628008f2ae0ac001b30e7986a8d303b61e1357dfcdad4f6d3", + "sha256:e25f840564554eaded56eaa395bca507b0b9e9f0ae5ecb13a8cb785305c56d25" + ], + "version": "==2.3.0" + }, "requests": { "extras": [ 
"security" @@ -249,6 +264,14 @@ "index": "pypi", "version": "==2.21.0" }, + "requests-toolbelt": { + "hashes": [ + "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f", + "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0" + ], + "index": "pypi", + "version": "==0.9.1" + }, "s3transfer": { "hashes": [ "sha256:7b9ad3213bff7d357f888e0fab5101b56fa1a0548ee77d121c3a3dbfbef4cb2e", @@ -316,6 +339,22 @@ ], "version": "==3.0.4" }, + "configparser": { + "hashes": [ + "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32", + "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75" + ], + "markers": "python_version < '3'", + "version": "==3.7.4" + }, + "contextlib2": { + "hashes": [ + "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48", + "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00" + ], + "markers": "python_version < '3'", + "version": "==0.5.5" + }, "coverage": { "hashes": [ "sha256:3684fabf6b87a369017756b551cef29e505cb155ddb892a7a29277b978da88b9", @@ -373,7 +412,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "idna": { @@ -383,6 +422,13 @@ ], "version": "==2.8" }, + "importlib-metadata": { + "hashes": [ + "sha256:46fc60c34b6ed7547e2a723fc8de6dc2e3a1173f8423246b3ce497f064e9c3de", + "sha256:bc136180e961875af88b1ab85b4009f4f1278f8396a60526c0009f503a1a96ca" + ], + "version": "==0.9" + }, "mock": { "hashes": [ "sha256:21a2c07af3bbc4a77f9d14ac18fcc1782e8e7ea363df718740cdeaf61995b5e7", @@ -417,10 +463,10 @@ }, "pluggy": { "hashes": [ - "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f", - "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746" + "sha256:1c0b297d4d41bc9bdfbdc17991b35f9e1d2cfe8eaa4d7c118e86d705870d34c8", + "sha256:fb2f776b7ec85038ef95860f4e83bfb6ab171a9d0b70b69d7ca4d04130644c2b" ], - "version": "==0.9.0" + "version": "==0.10.0" }, "py": { "hashes": [ @@ -474,6 +520,7 @@ "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f", "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0" ], + "index": "pypi", "version": "==0.9.1" }, "scandir": { @@ -559,6 +606,13 @@ "sha256:8eb4a788b3aec8abf5ff68d4165441bc57420c9f64ca5f471f58c3969fe08668" ], "version": "==0.33.1" + }, + "zipp": { + "hashes": [ + "sha256:139391b239594fd8b91d856bc530fbd2df0892b17dd8d98a91f018715954185f", + "sha256:8047e4575ce8d700370a3301bbfc972896a5845eb62dd535da395b86be95dfad" + ], + "version": "==0.4.0" } } } diff --git a/paperspace/cli/jobs.py b/paperspace/cli/jobs.py index efe537d..26453f6 100644 --- a/paperspace/cli/jobs.py +++ b/paperspace/cli/jobs.py @@ -2,6 +2,7 @@ from paperspace import client, config from paperspace.cli import common +from paperspace.cli.cli_types import json_string from paperspace.cli.common import del_if_value_is_none from paperspace.cli.cli import cli from paperspace.commands import jobs as jobs_commands @@ -66,25 +67,25 @@ def list_jobs(api_key, **filters): @jobs_group.command("create", help="Create job") @click.option("--name", "name", help="Job name", required=True) -@click.option('--machineType', 'machineType', help="Virtual machine type") -@click.option('--container', 'container', help="Docker container") -@click.option('--command', 'command', help="Job command/entrypoint") 
-@click.option('--ports', 'ports', help="Mapped ports") -@click.option('--isPublic', 'isPublic', help="Flag: is job public") +@click.option("--machineType", "machineType", help="Virtual machine type") +@click.option("--container", "container", help="Docker container") +@click.option("--command", "command", help="Job command/entrypoint") +@click.option("--ports", "ports", help="Mapped ports") +@click.option("--isPublic", "isPublic", help="Flag: is job public") @click.option("--workspace", "workspace", required=False, help="Path to workspace directory") @click.option("--workspaceArchive", "workspaceArchive", required=False, help="Path to workspace archive") @click.option("--workspaceUrl", "workspaceUrl", required=False, help="Project git repository url") @click.option("--workingDirectory", "workingDirectory", help="Working directory for the experiment", ) -@click.option('--experimentId', 'experimentId', help="Experiment Id") -# @click.option('--envVars', 'envVars', help="Environmental variables ") # TODO -@click.option('--useDockerfile', 'useDockerfile', help="Flag: using Dockerfile") -@click.option('--isPreemptible', 'isPreemptible', help="Flag: isPreemptible") -@click.option('--project', 'project', help="Project name") -@click.option('--projectHandle', '--projectId', 'projectHandle', help="Project handle", required=True) -@click.option('--startedByUserId', 'startedByUserId', help="User ID") -@click.option('--relDockerfilePath', 'relDockerfilePath', help="Relative path to Dockerfile") -@click.option('--registryUsername', 'registryUsername', help="Docker registry username") -@click.option('--registryPassword', 'registryPassword', help="Docker registry password") +@click.option("--experimentId", "experimentId", help="Experiment Id") +@click.option("--jobEnv", "envVars", type=json_string, help="Environmental variables ") +@click.option("--useDockerfile", "useDockerfile", help="Flag: using Dockerfile") +@click.option("--isPreemptible", "isPreemptible", help="Flag: isPreemptible") +@click.option("--project", "project", help="Project name") +@click.option("--projectHandle", "--projectId", "projectHandle", help="Project handle", required=True) +@click.option("--startedByUserId", "startedByUserId", help="User ID") +@click.option("--relDockerfilePath", "relDockerfilePath", help="Relative path to Dockerfile") +@click.option("--registryUsername", "registryUsername", help="Docker registry username") +@click.option("--registryPassword", "registryPassword", help="Docker registry password") @common.api_key_option def create_job(api_key, **kwargs): del_if_value_is_none(kwargs) diff --git a/paperspace/commands/machines.py b/paperspace/commands/machines.py index a15bc2f..c71ae2e 100644 --- a/paperspace/commands/machines.py +++ b/paperspace/commands/machines.py @@ -4,7 +4,7 @@ import terminaltables from paperspace.commands import CommandBase -from paperspace.exceptions import BadResponseException +from paperspace.exceptions import BadResponseError from paperspace.utils import get_terminal_lines @@ -228,7 +228,7 @@ def execute(self, machine_id, state, interval=5): while True: try: current_state = self._get_machine_state(machine_id) - except BadResponseException as e: + except BadResponseError as e: self.logger.error(e) return else: @@ -246,8 +246,8 @@ def _get_machine_state(self, machine_id): json_ = response.json() if not response.ok: self.logger.log_error_response(json_) - raise BadResponseException("Error while reading machine state") + raise BadResponseError("Error while reading machine state") state = 
json_.get("state") except (ValueError, AttributeError): - raise BadResponseException("Unknown error while reading machine state") + raise BadResponseError("Unknown error while reading machine state") return state diff --git a/paperspace/exceptions.py b/paperspace/exceptions.py index f8ac458..dbc5e98 100644 --- a/paperspace/exceptions.py +++ b/paperspace/exceptions.py @@ -1,26 +1,26 @@ -class ApplicationException(Exception): +class ApplicationError(Exception): pass -class BadResponseException(ApplicationException): +class BadResponseError(ApplicationError): pass -class PresignedUrlUnreachableException(ApplicationException): +class PresignedUrlUnreachableError(ApplicationError): pass -class ProjectAccessDeniedException(ApplicationException): +class ProjectAccessDeniedError(ApplicationError): pass -class PresignedUrlAccessDeniedException(ApplicationException): +class PresignedUrlAccessDeniedError(ApplicationError): pass -class PresignedUrlConnectionException(ApplicationException): +class PresignedUrlConnectionError(ApplicationError): pass -class S3UploadFailedException(ApplicationException): +class S3UploadFailedError(ApplicationError): pass diff --git a/paperspace/workspace.py b/paperspace/workspace.py index 3b5c822..1177ce5 100644 --- a/paperspace/workspace.py +++ b/paperspace/workspace.py @@ -8,8 +8,8 @@ from requests_toolbelt.multipart import encoder from paperspace import logger as default_logger -from paperspace.exceptions import S3UploadFailedException, PresignedUrlUnreachableException, \ - PresignedUrlAccessDeniedException, PresignedUrlConnectionException, ProjectAccessDeniedException +from paperspace.exceptions import S3UploadFailedError, PresignedUrlUnreachableError, \ + PresignedUrlAccessDeniedError, PresignedUrlConnectionError, ProjectAccessDeniedError class S3WorkspaceHandler: @@ -22,23 +22,23 @@ def __init__(self, experiments_api, logger=None): self.experiments_api = experiments_api self.logger = logger or default_logger - def _retrieve_file_paths(self, dirName): + @staticmethod + def _retrieve_file_paths(dir_name): # setup file paths variable file_paths = {} exclude = ['.git', '.idea', '.pytest_cache'] # Read all directory, subdirectories and file lists - for root, dirs, files in os.walk(dirName, topdown=True): + for root, dirs, files in os.walk(dir_name, topdown=True): dirs[:] = [d for d in dirs if d not in exclude] for filename in files: # Create the full filepath by using os module. 
- relpath = os.path.relpath(root, dirName) + relpath = os.path.relpath(root, dir_name) if relpath == '.': file_path = filename else: - file_path = os.path.join(os.path.relpath(root, dirName), filename) + file_path = os.path.join(os.path.relpath(root, dir_name), filename) file_paths[file_path] = os.path.join(root, filename) - # return all paths return file_paths def _zip_workspace(self, workspace_path): @@ -72,7 +72,8 @@ def _zip_workspace(self, workspace_path): self.logger.log('\nFinished creating archive: %s' % zip_file_name) return zip_file_path - def _create_callback(self, encoder_obj): + @staticmethod + def _create_callback(encoder_obj): bar = progressbar.ProgressBar(max_value=encoder_obj.len) def callback(monitor): @@ -116,7 +117,7 @@ def upload_workspace(self, input_data): return 's3://{}/{}'.format(bucket_name, s3_object_path) def _upload(self, archive_path, s3_upload_data): - files = {'file': (archive_path, open(archive_path, 'rb'))} + files = self._get_files_dict(archive_path) fields = OrderedDict(s3_upload_data['fields']) fields.update(files) @@ -125,17 +126,21 @@ def _upload(self, archive_path, s3_upload_data): s3_response = requests.post(s3_upload_data['url'], data=monitor, headers={'Content-Type': monitor.content_type}) self.logger.debug("S3 upload response: {}".format(s3_response.headers)) if not s3_response.ok: - raise S3UploadFailedException(s3_response) + raise S3UploadFailedError(s3_response) + + def _get_files_dict(self, archive_path): + files = {'file': (archive_path, open(archive_path, 'rb'))} + return files def _get_upload_data(self, file_name, project_handle): response = self.experiments_api.get("/workspace/get_presigned_url", params={'workspaceName': file_name, 'projectHandle': project_handle}) if response.status_code == 401: - raise ProjectAccessDeniedException(project_handle) + raise ProjectAccessDeniedError(project_handle) if response.status_code == 403: - raise PresignedUrlAccessDeniedException + raise PresignedUrlAccessDeniedError if response.status_code == 404: - raise PresignedUrlUnreachableException + raise PresignedUrlUnreachableError if not response.ok: - raise PresignedUrlConnectionException(response.reason) + raise PresignedUrlConnectionError(response.reason) return response.json() diff --git a/tests/functional/test_deployments.py b/tests/functional/test_deployments.py index d57af56..b4ae462 100644 --- a/tests/functional/test_deployments.py +++ b/tests/functional/test_deployments.py @@ -57,7 +57,8 @@ def test_should_send_proper_data_and_print_message_when_create_deployment_with_b post_patched.assert_called_once_with(self.URL, headers=EXPECTED_HEADERS, json=self.BASIC_OPTIONS_REQUEST, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -71,7 +72,8 @@ def test_should_send_different_api_key_when_api_key_parameter_was_used(self, pos post_patched.assert_called_once_with(self.URL, headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, json=self.BASIC_OPTIONS_REQUEST, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -86,7 +88,8 @@ def test_should_send_proper_data_and_print_message_when_create_wrong_model_id_wa post_patched.assert_called_once_with(self.URL, headers=EXPECTED_HEADERS, json=self.BASIC_OPTIONS_REQUEST, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT_MODEL_NOT_FOUND assert result.exit_code == 0 @@ -236,7 +239,8 @@ def 
test_should_send_proper_data_and_print_message_when_update_deployment(self, post_patched.assert_called_once_with(self.URL, headers=EXPECTED_HEADERS, json=self.BASIC_OPTIONS_REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -250,7 +254,8 @@ def test_should_send_proper_data_with_custom_api_key_when_api_key_parameter_was_ post_patched.assert_called_once_with(self.URL, headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, json=self.BASIC_OPTIONS_REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -264,7 +269,8 @@ def test_should_send_proper_data_and_print_message_when_update_deployment_used_w post_patched.assert_called_once_with(self.URL, headers=EXPECTED_HEADERS, json=self.BASIC_OPTIONS_REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_ID assert result.exit_code == 0 @@ -286,7 +292,8 @@ def test_should_send_proper_data_and_print_message_when_deployments_start_was_us post_patched.assert_called_once_with(self.URL, headers=EXPECTED_HEADERS, json=self.REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -319,7 +326,8 @@ def test_should_send_proper_data_and_print_message_when_deployments_delete_was_u post_patched.assert_called_once_with(self.URL, headers=EXPECTED_HEADERS, json=self.REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -333,7 +341,8 @@ def test_should_send_proper_data_with_custom_api_key_when_api_key_parameter_was_ post_patched.assert_called_once_with(self.URL, headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, json=self.REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -347,6 +356,7 @@ def test_should_send_proper_data_and_print_message_when_deployments_delete_used_ post_patched.assert_called_once_with(self.URL, headers=EXPECTED_HEADERS, json=self.REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_ID assert result.exit_code == 0 diff --git a/tests/functional/test_experiments.py b/tests/functional/test_experiments.py index 81ac8ee..05840cf 100644 --- a/tests/functional/test_experiments.py +++ b/tests/functional/test_experiments.py @@ -86,7 +86,8 @@ def test_should_send_proper_data_and_print_message_when_create_experiment_was_ru post_patched.assert_called_once_with(self.URL, headers=self.EXPECTED_HEADERS, json=self.BASIC_OPTIONS_REQUEST, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -101,7 +102,8 @@ def test_should_send_proper_data_and_print_message_when_create_experiment_was_ru post_patched.assert_called_once_with(self.URL, headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, json=self.FULL_OPTIONS_REQUEST, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 assert self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY["X-API-Key"] == "some_key" @@ -117,7 +119,8 @@ def test_should_send_proper_data_and_print_message_when_create_wrong_project_han post_patched.assert_called_once_with(self.URL, headers=self.EXPECTED_HEADERS, json=self.BASIC_OPTIONS_REQUEST, - params=None) + params=None, + files=None) assert result.output == 
self.EXPECTED_STDOUT_PROJECT_NOT_FOUND assert result.exit_code == 0 @@ -227,7 +230,8 @@ def test_should_send_proper_data_and_print_message_when_create_experiment_was_ru post_patched.assert_called_once_with(self.URL, headers=self.EXPECTED_HEADERS, json=self.BASIC_OPTIONS_REQUEST, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -242,7 +246,8 @@ def test_should_send_proper_data_and_print_message_when_create_experiment_was_ru post_patched.assert_called_once_with(self.URL, headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, json=self.FULL_OPTIONS_REQUEST, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 diff --git a/tests/functional/test_machines.py b/tests/functional/test_machines.py index bf7d1cd..a4b24b2 100644 --- a/tests/functional/test_machines.py +++ b/tests/functional/test_machines.py @@ -202,7 +202,8 @@ def test_should_send_valid_post_request_when_machine_create_was_used_with_reques get_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=self.REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -216,7 +217,8 @@ def test_should_send_valid_post_request_when_machine_create_was_used_with_all_op get_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=self.ALL_COMMANDS_REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -230,7 +232,8 @@ def test_should_send_changed_headers_when_api_key_option_was_used(self, get_patc get_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, json=self.REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -244,7 +247,8 @@ def test_should_print_error_message_when_wrong_api_key_was_used(self, get_patche get_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=self.REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 @@ -258,7 +262,8 @@ def test_should_print_error_message_when_wrong_template_id_was_used(self, get_pa get_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=self.REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == "templateId not found\n" assert result.exit_code == 0 @@ -272,7 +277,8 @@ def test_should_print_error_message_when_no_content_was_received_in_response(sel get_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=self.REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == "Unknown error while creating machine\n" assert result.exit_code == 0 @@ -333,7 +339,8 @@ def test_should_send_valid_post_request_when_machines_destroy_was_used(self, pos post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -347,7 +354,8 @@ def test_should_send_valid_post_request_when_machines_destroy_was_used_with_all_ post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=self.ALL_COMMANDS_REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -361,7 
+369,8 @@ def test_should_send_valid_post_request_when_machines_destroy_was_used_with_api_ post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -375,7 +384,8 @@ def test_should_send_valid_post_request_when_machines_destroy_was_used_with_wron post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 @@ -390,7 +400,8 @@ def test_should_print_error_message_when_machine_with_given_id_was_not_found(sel post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT_WHEN_MACHINE_WAS_NOT_FOUND assert result.exit_code == 0 @@ -404,7 +415,8 @@ def test_should_print_error_message_when_error_status_code_received_but_no_conte post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == "Unknown error while destroying the machine\n" assert result.exit_code == 0 @@ -650,7 +662,8 @@ def test_should_send_get_request_and_print_valid_message_when_restart_command_wa post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -664,7 +677,8 @@ def test_should_send_changed_headers_when_api_key_option_was_used(self, post_pat post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -678,7 +692,8 @@ def test_should_print_valid_error_message_when_start_command_was_used_with_inval post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 @@ -692,7 +707,8 @@ def test_should_print_valid_error_message_when_no_content_was_received_in_respon post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == "Unknown error while restarting the machine\n" assert result.exit_code == 0 @@ -707,7 +723,8 @@ def test_should_print_error_message_when_machine_with_given_id_was_not_found(sel post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT_WHEN_MACHINE_WAS_NOT_FOUND assert result.exit_code == 0 @@ -882,7 +899,8 @@ def test_should_send_get_request_and_print_valid_message_when_start_command_was_ post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -896,7 +914,8 @@ def test_should_send_changed_headers_when_api_key_option_was_used(self, post_pat post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -910,7 +929,8 @@ 
def test_should_print_valid_error_message_when_start_command_was_used_with_inval post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 @@ -924,7 +944,8 @@ def test_should_print_valid_error_message_when_no_content_was_received_in_respon post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == "Unknown error while starting the machine\n" assert result.exit_code == 0 @@ -939,7 +960,8 @@ def test_should_print_error_message_when_machine_with_given_id_was_not_found(sel post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT_WHEN_MACHINE_WAS_NOT_FOUND assert result.exit_code == 0 @@ -983,7 +1005,8 @@ def test_should_send_get_request_and_print_valid_message_when_stop_command_was_u post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -997,7 +1020,8 @@ def test_should_send_changed_headers_when_api_key_option_was_used(self, post_pat post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -1011,7 +1035,8 @@ def test_should_print_valid_error_message_when_stop_command_was_used_with_invali post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 @@ -1025,7 +1050,8 @@ def test_should_print_valid_error_message_when_no_content_was_received_in_respon post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == "Unknown error while stopping the machine\n" assert result.exit_code == 0 @@ -1040,7 +1066,8 @@ def test_should_print_error_message_when_machine_with_given_id_was_not_found(sel post_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=None, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT_WHEN_MACHINE_WAS_NOT_FOUND assert result.exit_code == 0 @@ -1101,7 +1128,8 @@ def test_should_send_valid_post_request_when_machine_create_was_used_with_reques get_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=self.REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -1115,7 +1143,8 @@ def test_should_send_valid_post_request_when_machine_create_was_used_with_all_op get_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=self.ALL_COMMANDS_REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert result.exit_code == 0 @@ -1129,7 +1158,8 @@ def test_should_send_changed_headers_when_api_key_option_was_used(self, get_patc get_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS_WITH_CHANGED_API_KEY, json=self.REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT assert 
result.exit_code == 0 @@ -1143,7 +1173,8 @@ def test_should_print_error_message_when_wrong_api_key_was_used(self, get_patche get_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=self.REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == self.EXPECTED_STDOUT_WITH_WRONG_API_TOKEN assert result.exit_code == 0 @@ -1157,7 +1188,8 @@ def test_should_print_error_message_when_wrong_machine_id_was_used(self, get_pat get_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=self.REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == "Not found\n" assert result.exit_code == 0 @@ -1171,7 +1203,8 @@ def test_should_print_error_message_when_no_content_was_received_in_response(sel get_patched.assert_called_with(self.URL, headers=self.EXPECTED_HEADERS, json=self.REQUEST_JSON, - params=None) + params=None, + files=None) assert result.output == "Unknown error while updating machine\n" assert result.exit_code == 0 diff --git a/tests/test_click_commands.py b/tests/test_click_commands.py index 1638ed1..8f4afb1 100644 --- a/tests/test_click_commands.py +++ b/tests/test_click_commands.py @@ -6,8 +6,8 @@ @mock.patch("paperspace.cli.cli.client.API") -@mock.patch("paperspace.cli.cli.experiments_commands") -def test_should_execute_create_experiment_command_when_cli_singlenode_command_was_executed(commands_patched, +@mock.patch("paperspace.commands.experiments.CreateExperimentCommand.execute") +def test_should_execute_create_experiment_command_when_cli_singlenode_command_was_executed(command_patched, api_patched): api_patched.return_value = mock.MagicMock() runner = CliRunner() @@ -31,12 +31,12 @@ def test_should_execute_create_experiment_command_when_cli_singlenode_command_wa result = runner.invoke(cli.cli, command.split()) assert result.exit_code == 0 - commands_patched.create_experiment.assert_called_once_with(expected_kwargs, api=api_patched()) + command_patched.assert_called_once_with(expected_kwargs) @mock.patch("paperspace.cli.cli.client.API") -@mock.patch("paperspace.cli.cli.experiments_commands") -def test_should_execute_create_experiment_command_when_cli_multinode_mpi_command_was_executed(commands_patched, +@mock.patch("paperspace.commands.experiments.CreateExperimentCommand.execute") +def test_should_execute_create_experiment_command_when_cli_multinode_mpi_command_was_executed(command_patched, api_patched): api_patched.return_value = mock.MagicMock() runner = CliRunner() @@ -71,12 +71,12 @@ def test_should_execute_create_experiment_command_when_cli_multinode_mpi_command result = runner.invoke(cli.cli, command.split()) assert result.exit_code == 0 - commands_patched.create_experiment.assert_called_once_with(expected_kwargs, api=api_patched()) + command_patched.assert_called_once_with(expected_kwargs) @mock.patch("paperspace.cli.cli.client.API") -@mock.patch("paperspace.cli.cli.experiments_commands") -def test_should_execute_create_experiment_command_when_cli_multinode_grpc_command_was_executed(commands_patched, +@mock.patch("paperspace.commands.experiments.CreateExperimentCommand.execute") +def test_should_execute_create_experiment_command_when_cli_multinode_grpc_command_was_executed(command_patched, api_patched): api_patched.return_value = mock.MagicMock() runner = CliRunner() @@ -110,13 +110,13 @@ def test_should_execute_create_experiment_command_when_cli_multinode_grpc_comman result = runner.invoke(cli.cli, command.split()) assert result.exit_code == 0 - 
commands_patched.create_experiment.assert_called_once_with(expected_kwargs, api=api_patched()) + command_patched.assert_called_once_with(expected_kwargs) @mock.patch("paperspace.cli.cli.client.API") -@mock.patch("paperspace.cli.cli.experiments_commands") +@mock.patch("paperspace.commands.experiments.CreateAndStartExperimentCommand.execute") def test_should_execute_create_experiment_command_when_cli_create_and_start_singlenode_command_was_executed( - commands_patched, api_patched): + command_patched, api_patched): api_patched.return_value = mock.MagicMock() runner = CliRunner() command = "experiments createAndStart singlenode " \ @@ -139,13 +139,13 @@ def test_should_execute_create_experiment_command_when_cli_create_and_start_sing result = runner.invoke(cli.cli, command.split()) assert result.exit_code == 0 - commands_patched.create_and_start_experiment.assert_called_once_with(expected_kwargs, api=api_patched()) + command_patched.assert_called_once_with(expected_kwargs) @mock.patch("paperspace.cli.cli.client.API") -@mock.patch("paperspace.cli.cli.experiments_commands") +@mock.patch("paperspace.commands.experiments.CreateAndStartExperimentCommand.execute") def test_should_execute_create_experiment_command_when_cli_create_and_start_multinode_mpi_command_was_executed( - commands_patched, api_patched): + command_patched, api_patched): api_patched.return_value = mock.MagicMock() runner = CliRunner() command = "experiments createAndStart multinode " \ @@ -178,4 +178,4 @@ def test_should_execute_create_experiment_command_when_cli_create_and_start_mult result = runner.invoke(cli.cli, command.split()) assert result.exit_code == 0 - commands_patched.create_and_start_experiment.assert_called_once_with(expected_kwargs, api=api_patched()) + command_patched.assert_called_once_with(expected_kwargs) diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/test_workspace.py b/tests/unit/test_workspace.py new file mode 100644 index 0000000..43facd5 --- /dev/null +++ b/tests/unit/test_workspace.py @@ -0,0 +1,126 @@ +import os + +import click +import mock +import pytest + +from paperspace import exceptions +from paperspace.workspace import S3WorkspaceHandler + +MOCK_BUCKET_NAME = 'bucket_name' +MOCK_OBJECT_KEY = 'object_key' + +mock_upload_data = { + 'bucket_name': MOCK_BUCKET_NAME, + 'fields': { + 'key': MOCK_OBJECT_KEY + } +} + + +@pytest.fixture +def workspace_handler(): + s3_workspace_handler = S3WorkspaceHandler(mock.MagicMock(), mock.MagicMock()) + s3_workspace_handler._upload = mock.MagicMock() + s3_workspace_handler._get_upload_data = mock.MagicMock() + s3_workspace_handler._get_upload_data.return_value = mock_upload_data + + return s3_workspace_handler + + +class TestWorkspace(object): + + @pytest.mark.parametrize('params', ({'workspace': 'foo', 'workspaceUrl': 'bar'}, + {'workspaceUrl': 'ffo', 'workspaceArchive': 'var'}, + {'workspaceArchive': 'foo', 'workspace': 'bar'}, + {'workspace': 'foo', 'workspaceUrl': 'bar', 'workspaceArchive': 'baz'})) + def test_raise_exception_when_more_than_one_workspace_provided(self, params, workspace_handler): + workspace_handler = S3WorkspaceHandler(mock.MagicMock, mock.MagicMock) + with pytest.raises(click.UsageError): + workspace_handler.upload_workspace(params) + + def test_dont_upload_if_archive_url_provided(self, workspace_handler): + workspace_handler.upload_workspace({'workspaceUrl': 'foo'}) + + workspace_handler._upload.assert_not_called() + + def 
test_zip_files_and_receive_s3_response_when_no_dir_provided(self, workspace_handler): + archive_name = 'foo.zip' + + workspace_handler._zip_workspace = mock.MagicMock() + workspace_handler._zip_workspace.return_value = archive_name + + response_url = workspace_handler.upload_workspace({'projectHandle': 'foo'}) + + workspace_handler._zip_workspace.assert_called_once() + workspace_handler._upload.assert_called_once() + workspace_handler._upload.assert_called_with(archive_name, mock_upload_data) + assert response_url == 's3://{}/{}'.format(MOCK_BUCKET_NAME, MOCK_OBJECT_KEY) + + def test_zip_files_and_receive_s3_response_when_workspace_dir_provided(self, workspace_handler): + archive_name = 'foo.zip' + + workspace_handler._zip_workspace = mock.MagicMock() + workspace_handler._zip_workspace.return_value = archive_name + + response_url = workspace_handler.upload_workspace({'projectHandle': 'foo', 'workspace': 'foo/bar'}) + + workspace_handler._zip_workspace.assert_called_once() + workspace_handler._upload.assert_called_once() + workspace_handler._upload.assert_called_with(archive_name, mock_upload_data) + assert response_url == 's3://{}/{}'.format(MOCK_BUCKET_NAME, MOCK_OBJECT_KEY) + + def test_dont_zip_files_and_receive_s3_response_when_workspace_archive_provided(self, workspace_handler): + workspace_handler._zip_workspace = mock.MagicMock() + + response_url = workspace_handler.upload_workspace({'projectHandle': 'foo', 'workspaceArchive': 'foo.zip'}) + + workspace_handler._zip_workspace.assert_not_called() + workspace_handler._upload.assert_called_once() + workspace_handler._upload.assert_called_with(os.path.abspath('foo.zip'), mock_upload_data) + assert response_url == 's3://{}/{}'.format(MOCK_BUCKET_NAME, MOCK_OBJECT_KEY) + + @pytest.mark.parametrize('code,exception', ((401, exceptions.ProjectAccessDeniedError), + (403, exceptions.PresignedUrlAccessDeniedError), + (404, exceptions.PresignedUrlUnreachableError))) + def test_raise_exception_on_40x_presigned_url_response(self, code, exception): + mock_response = mock.MagicMock() + mock_response.status_code = code + + workspace_handler = S3WorkspaceHandler(mock.MagicMock(), mock.MagicMock()) + workspace_handler.experiments_api.get.return_value = mock_response + + with pytest.raises(exception): + workspace_handler._get_upload_data('foo', 'bar') + + def test_return_json_with_presigned_url_response(self): + mock_response = mock.MagicMock() + mock_response.json.return_value = mock_upload_data + + workspace_handler = S3WorkspaceHandler(mock.MagicMock(), mock.MagicMock()) + workspace_handler.experiments_api.get.return_value = mock_response + + upload_data = workspace_handler._get_upload_data('foo', 'bar') + assert upload_data == mock_upload_data + + @mock.patch("paperspace.workspace.requests.post") + def test_multipart_upload_ok(self, mock_post): + workspace_handler = S3WorkspaceHandler(mock.MagicMock(), mock.MagicMock()) + workspace_handler._get_files_dict = mock.MagicMock() + + mock_response = mock.MagicMock() + mock_response.ok = True + mock_post.return_value = mock_response + + workspace_handler._upload('foo', {'url': 'bar', 'fields': []}) + + @mock.patch("paperspace.workspace.requests.post") + def test_multipart_upload_raises_exception(self, mock_post): + workspace_handler = S3WorkspaceHandler(mock.MagicMock(), mock.MagicMock()) + workspace_handler._get_files_dict = mock.MagicMock() + + mock_response = mock.MagicMock() + mock_response.ok = False + mock_post.return_value = mock_response + with pytest.raises(exceptions.S3UploadFailedError): + 
workspace_handler._upload('foo', {'url': 'bar', 'fields': []}) From 4995134b60f3f1c4120ac48828ba39c288afbe21 Mon Sep 17 00:00:00 2001 From: kossak Date: Tue, 7 May 2019 18:16:04 +0200 Subject: [PATCH 41/42] redundant required params removed --- paperspace/cli/cli.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/paperspace/cli/cli.py b/paperspace/cli/cli.py index 3edcba9..26ee263 100644 --- a/paperspace/cli/cli.py +++ b/paperspace/cli/cli.py @@ -53,19 +53,16 @@ def common_experiments_create_options(f): click.option( "--workspace", "workspace", - required=False, help="Path to workspace directory", ), click.option( "--workspaceArchive", "workspaceArchive", - required=False, help="Path to workspace .zip archive", ), click.option( "--workspaceUrl", "workspaceUrl", - required=False, help="Project git repository url", ), click.option( From 82526263292709d878b9e7bf4208f43dc094f97a Mon Sep 17 00:00:00 2001 From: Bartosz Cierocki Date: Tue, 7 May 2019 18:33:59 +0200 Subject: [PATCH 42/42] Fix handling error response in experiments list (#77) --- paperspace/commands/experiments.py | 17 +-- tests/example_responses.py | 166 ++++++++++++++++++++++++ tests/functional/test_experiments.py | 184 +++------------------------ 3 files changed, 194 insertions(+), 173 deletions(-) diff --git a/paperspace/commands/experiments.py b/paperspace/commands/experiments.py index 14cdbf5..5cf58b8 100644 --- a/paperspace/commands/experiments.py +++ b/paperspace/commands/experiments.py @@ -61,7 +61,12 @@ def execute(self, project_handles=None): response = self.api.get("/experiments/", params=params) try: - experiments = self._get_experiments_list(response, bool(project_handles)) + data = response.json() + if not response.ok: + self.logger.log_error_response(data) + return + + experiments = self._get_experiments_list(data, bool(project_handles)) except (ValueError, KeyError) as e: self.logger.error("Error while parsing response data: {}".format(e)) else: @@ -90,16 +95,12 @@ def _make_experiments_list_table(experiments): return table_string @staticmethod - def _get_experiments_list(response, filtered=False): - if not response.ok: - raise ValueError("Unknown error") - - data = response.json()["data"] + def _get_experiments_list(data, filtered=False): if not filtered: # If filtering by projectHandle response data has different format... 
- return data + return data["data"] experiments = [] - for project_experiments in data: + for project_experiments in data["data"]: for experiment in project_experiments["data"]: experiments.append(experiment) return experiments diff --git a/tests/example_responses.py b/tests/example_responses.py index 962ab2b..8868fcf 100644 --- a/tests/example_responses.py +++ b/tests/example_responses.py @@ -1,3 +1,169 @@ +LIST_OF_EXPERIMENTS_RESPONSE_JSON = { + "data": [ + { + "dtCreated": "2019-03-21T07:47:05.616096+00:00", + "dtDeleted": None, + "dtFinished": None, + "dtModified": "2019-03-21T07:47:05.616096+00:00", + "dtProvisioningFinished": None, + "dtProvisioningStarted": None, + "dtStarted": None, + "dtTeardownFinished": None, + "dtTeardownStarted": None, + "experimentError": None, + "experimentTemplateHistoryId": 6315, + "experimentTemplateId": 60, + "experimentTypeId": 1, + "handle": "ea2lfbbpdyzsq", + "id": 6292, + "projectHandle": "prq70zy79", + "projectId": 612, + "started_by_user_id": 1, + "state": 1, + "templateHistory": { + "dtCreated": "2019-03-21T07:47:04.925852+00:00", + "dtDeleted": None, + "experimentTemplateId": 60, + "id": 6315, + "params": { + "experimentTypeId": 1, + "name": "dsfads", + "ports": 5000, + "project_handle": "prq70zy79", + "worker_command": "sadas", + "worker_container": "asd", + "worker_machine_type": "sadas" + }, + "triggerEvent": None, + "triggerEventId": None + } + }, + { + "dtCreated": "2019-03-21T07:46:57.706055+00:00", + "dtDeleted": None, + "dtFinished": None, + "dtModified": "2019-03-21T07:46:57.706055+00:00", + "dtProvisioningFinished": None, + "dtProvisioningStarted": None, + "dtStarted": None, + "dtTeardownFinished": None, + "dtTeardownStarted": None, + "experimentError": None, + "experimentTemplateHistoryId": 6314, + "experimentTemplateId": 60, + "experimentTypeId": 1, + "handle": "em6btk2vtb7it", + "id": 6291, + "projectHandle": "prq70zy79", + "projectId": 612, + "started_by_user_id": 1, + "state": 1, + "templateHistory": { + "dtCreated": "2019-03-21T07:46:56.949590+00:00", + "dtDeleted": None, + "experimentTemplateId": 60, + "id": 6314, + "params": { + "experimentTypeId": 1, + "name": "dsfads", + "ports": 5000, + "project_handle": "prq70zy79", + "worker_command": "sadas", + "worker_container": "asd", + "worker_machine_type": "sadas" + }, + "triggerEvent": None, + "triggerEventId": None + } + }, + { + "dtCreated": "2019-03-20T19:56:50.154853+00:00", + "dtDeleted": None, + "dtFinished": None, + "dtModified": "2019-03-20T19:56:50.154853+00:00", + "dtProvisioningFinished": None, + "dtProvisioningStarted": None, + "dtStarted": None, + "dtTeardownFinished": None, + "dtTeardownStarted": None, + "experimentError": None, + "experimentTemplateHistoryId": 6297, + "experimentTemplateId": 60, + "experimentTypeId": 3, + "handle": "ew69ls0vy3eto", + "id": 6286, + "projectHandle": "prq70zy79", + "projectId": 612, + "started_by_user_id": 1, + "state": 1, + "templateHistory": { + "dtCreated": "2019-03-20T19:56:49.427354+00:00", + "dtDeleted": None, + "experimentTemplateId": 60, + "id": 6297, + "params": { + "artifactDirectory": "/artdir", + "clusterId": 2, + "experimentEnv": { + "key": "val" + }, + "experimentTypeId": 3, + "name": "multinode_mpi", + "parameter_server_command": "ls", + "parameter_server_container": "pscon", + "parameter_server_container_user": "pscuser", + "parameter_server_count": 2, + "parameter_server_machine_type": "psmtype", + "parameter_server_registry_password": "psrpass", + "parameter_server_registry_username": "psrcus", + "ports": 3456, + 
"project_handle": "prq70zy79", + "project_id": 34, + "trigger_event_id": 12, + "worker_command": "wcom", + "worker_container": "wcon", + "worker_container_user": "usr", + "worker_count": 2, + "worker_machine_type": "mty", + "worker_registry_password": "rpass", + "worker_registry_username": "rusr", + "workingDirectory": "/dir", + "workspaceUrl": "wurl" + }, + "triggerEvent": { + "dtCreated": "2019-03-11T14:47:57+00:00", + "eventData": { + "author": { + "email": "bluckey@paperspace.com", + "login": "ultrabluewolf", + "name": "Britney Luckey" + }, + "branch": "feature/test-1", + "message": "Update readme #2", + "repo_node_id": "MDEwOlJlcG9zaXRvcnkxNzQ3MjI3NDc=", + "sender": { + "id": 4633049, + "login": "ultrabluewolf" + }, + "sha": "daa117a00cd1e0e9b1b55695031e698a560cca29", + "timestamp": "2019-03-11T10:47:57-04:00" + }, + "id": 12, + "type": "github" + }, + "triggerEventId": 12 + } + } + ], + "message": "success", + "meta": { + "filter": [], + "limit": 11, + "offset": 0, + "totalItems": 27 + } +} + LIST_OF_EXPERIMENTS_FILTERED_WITH_TWO_PROJECTS = { "data": [ { diff --git a/tests/functional/test_experiments.py b/tests/functional/test_experiments.py index 81ac8ee..c41864c 100644 --- a/tests/functional/test_experiments.py +++ b/tests/functional/test_experiments.py @@ -556,171 +556,7 @@ class TestExperimentList(object): EXPECTED_HEADERS = paperspace.client.default_headers.copy() EXPECTED_HEADERS_WITH_CHANGED_API_KEY = paperspace.client.default_headers.copy() EXPECTED_HEADERS_WITH_CHANGED_API_KEY["X-API-Key"] = "some_key" - LIST_JSON = { - "data": [ - { - "dtCreated": "2019-03-21T07:47:05.616096+00:00", - "dtDeleted": None, - "dtFinished": None, - "dtModified": "2019-03-21T07:47:05.616096+00:00", - "dtProvisioningFinished": None, - "dtProvisioningStarted": None, - "dtStarted": None, - "dtTeardownFinished": None, - "dtTeardownStarted": None, - "experimentError": None, - "experimentTemplateHistoryId": 6315, - "experimentTemplateId": 60, - "experimentTypeId": 1, - "handle": "ea2lfbbpdyzsq", - "id": 6292, - "projectHandle": "prq70zy79", - "projectId": 612, - "started_by_user_id": 1, - "state": 1, - "templateHistory": { - "dtCreated": "2019-03-21T07:47:04.925852+00:00", - "dtDeleted": None, - "experimentTemplateId": 60, - "id": 6315, - "params": { - "experimentTypeId": 1, - "name": "dsfads", - "ports": 5000, - "project_handle": "prq70zy79", - "worker_command": "sadas", - "worker_container": "asd", - "worker_machine_type": "sadas" - }, - "triggerEvent": None, - "triggerEventId": None - } - }, - { - "dtCreated": "2019-03-21T07:46:57.706055+00:00", - "dtDeleted": None, - "dtFinished": None, - "dtModified": "2019-03-21T07:46:57.706055+00:00", - "dtProvisioningFinished": None, - "dtProvisioningStarted": None, - "dtStarted": None, - "dtTeardownFinished": None, - "dtTeardownStarted": None, - "experimentError": None, - "experimentTemplateHistoryId": 6314, - "experimentTemplateId": 60, - "experimentTypeId": 1, - "handle": "em6btk2vtb7it", - "id": 6291, - "projectHandle": "prq70zy79", - "projectId": 612, - "started_by_user_id": 1, - "state": 1, - "templateHistory": { - "dtCreated": "2019-03-21T07:46:56.949590+00:00", - "dtDeleted": None, - "experimentTemplateId": 60, - "id": 6314, - "params": { - "experimentTypeId": 1, - "name": "dsfads", - "ports": 5000, - "project_handle": "prq70zy79", - "worker_command": "sadas", - "worker_container": "asd", - "worker_machine_type": "sadas" - }, - "triggerEvent": None, - "triggerEventId": None - } - }, - { - "dtCreated": "2019-03-20T19:56:50.154853+00:00", - "dtDeleted": 
None, - "dtFinished": None, - "dtModified": "2019-03-20T19:56:50.154853+00:00", - "dtProvisioningFinished": None, - "dtProvisioningStarted": None, - "dtStarted": None, - "dtTeardownFinished": None, - "dtTeardownStarted": None, - "experimentError": None, - "experimentTemplateHistoryId": 6297, - "experimentTemplateId": 60, - "experimentTypeId": 3, - "handle": "ew69ls0vy3eto", - "id": 6286, - "projectHandle": "prq70zy79", - "projectId": 612, - "started_by_user_id": 1, - "state": 1, - "templateHistory": { - "dtCreated": "2019-03-20T19:56:49.427354+00:00", - "dtDeleted": None, - "experimentTemplateId": 60, - "id": 6297, - "params": { - "artifactDirectory": "/artdir", - "clusterId": 2, - "experimentEnv": { - "key": "val" - }, - "experimentTypeId": 3, - "name": "multinode_mpi", - "parameter_server_command": "ls", - "parameter_server_container": "pscon", - "parameter_server_container_user": "pscuser", - "parameter_server_count": 2, - "parameter_server_machine_type": "psmtype", - "parameter_server_registry_password": "psrpass", - "parameter_server_registry_username": "psrcus", - "ports": 3456, - "project_handle": "prq70zy79", - "project_id": 34, - "trigger_event_id": 12, - "worker_command": "wcom", - "worker_container": "wcon", - "worker_container_user": "usr", - "worker_count": 2, - "worker_machine_type": "mty", - "worker_registry_password": "rpass", - "worker_registry_username": "rusr", - "workingDirectory": "/dir", - "workspaceUrl": "wurl" - }, - "triggerEvent": { - "dtCreated": "2019-03-11T14:47:57+00:00", - "eventData": { - "author": { - "email": "bluckey@paperspace.com", - "login": "ultrabluewolf", - "name": "Britney Luckey" - }, - "branch": "feature/test-1", - "message": "Update readme #2", - "repo_node_id": "MDEwOlJlcG9zaXRvcnkxNzQ3MjI3NDc=", - "sender": { - "id": 4633049, - "login": "ultrabluewolf" - }, - "sha": "daa117a00cd1e0e9b1b55695031e698a560cca29", - "timestamp": "2019-03-11T10:47:57-04:00" - }, - "id": 12, - "type": "github" - }, - "triggerEventId": 12 - } - } - ], - "message": "success", - "meta": { - "filter": [], - "limit": 11, - "offset": 0, - "totalItems": 27 - } - } + LIST_JSON = example_responses.LIST_OF_EXPERIMENTS_RESPONSE_JSON DETAILS_STDOUT = """+---------------+---------------+---------+ | Name | Handle | Status | +---------------+---------------+---------+ @@ -729,6 +565,8 @@ class TestExperimentList(object): | multinode_mpi | ew69ls0vy3eto | created | +---------------+---------------+---------+ """ + RESPONSE_JSON_WHEN_WRONG_API_KEY_WAS_USED = {"details": "Incorrect API Key provided", "error": "Forbidden"} + EXPECTED_STDOUT_WHEN_WRONG_API_KEY_WAS_USED = "Forbidden\nIncorrect API Key provided\n" @mock.patch("paperspace.cli.cli.client.requests.get") def test_should_send_get_request_and_print_list_of_experiments(self, get_patched): @@ -798,6 +636,22 @@ def test_should_send_get_request_and_print_list_of_experiments_filtered_with_two assert result.output == "No experiments found\n" + @mock.patch("paperspace.cli.cli.client.requests.get") + def test_should_print_proper_message_when_wrong_api_key_was_used(self, get_patched): + get_patched.return_value = MockResponse(json_data=self.RESPONSE_JSON_WHEN_WRONG_API_KEY_WAS_USED, + status_code=403) + + runner = CliRunner() + result = runner.invoke(cli.cli, self.COMMAND) + + get_patched.assert_called_once_with(self.URL, + headers=self.EXPECTED_HEADERS, + json=None, + params={"limit": -1}) + + assert result.output == self.EXPECTED_STDOUT_WHEN_WRONG_API_KEY_WAS_USED + assert self.EXPECTED_HEADERS["X-API-Key"] != "some_key" + class 
TestStartExperiment(object): URL = "https://services.paperspace.io/experiments/v1/experiments/some-handle/start/"
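
A minimal, self-contained sketch of the error-handling flow that PATCH 42/42 introduces in ListExperimentsCommand.execute, for readers following the diff above. FakeResponse, FakeLogger, and list_experiments are illustrative stand-ins (not names from the patch), and the project-handle-filtered response shape handled by the real command is omitted here:

    class FakeLogger(object):
        # Stand-in for paperspace.logger; mimics the two-line output that
        # EXPECTED_STDOUT_WHEN_WRONG_API_KEY_WAS_USED checks for in the test.
        def log_error_response(self, data):
            print(data.get("error"))
            print(data.get("details"))

        def error(self, message):
            print(message)


    class FakeResponse(object):
        # Stand-in for requests.Response exposing only what the command uses.
        def __init__(self, ok, json_data):
            self.ok = ok
            self._json_data = json_data

        def json(self):
            return self._json_data


    def list_experiments(response, logger):
        # Mirrors the control flow added to ListExperimentsCommand.execute:
        # parse the body once, short-circuit on error responses by logging the
        # API's own error payload, and reserve the "Error while parsing
        # response data" message for genuine parsing failures.
        try:
            data = response.json()
            if not response.ok:
                logger.log_error_response(data)
                return None
            return data["data"]
        except (ValueError, KeyError) as e:
            logger.error("Error while parsing response data: {}".format(e))
            return None


    if __name__ == "__main__":
        forbidden = FakeResponse(
            ok=False,
            json_data={"error": "Forbidden", "details": "Incorrect API Key provided"},
        )
        list_experiments(forbidden, FakeLogger())
        # prints:
        # Forbidden
        # Incorrect API Key provided

Run against a 403 body like the one in RESPONSE_JSON_WHEN_WRONG_API_KEY_WAS_USED, the sketch prints the error and details lines instead of the old generic ValueError path, which is the behavior the new functional test asserts.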