diff --git a/Pipfile b/Pipfile
index 87cf8ec..c09eb1d 100644
--- a/Pipfile
+++ b/Pipfile
@@ -9,8 +9,6 @@ requests = {extras = ["security"]}
 requests-toolbelt = "*"
 progressbar2 = "*"
 cryptography = {extras = ["security"]}
-"boto3" = "*"
-botocore = "*"
 six = "*"
 gradient-statsd = "*"
 click = "*"
diff --git a/Pipfile.lock b/Pipfile.lock
index 50f6074..b0761ec 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "414dd057f13ec74db3b2f021281547e2bcf53eec38e2459798ca230964e7b65a"
+            "sha256": "07a261a3940a31364e4f54e7b457414316705e767a66d09f0e3c9c87a4ad8eb2"
         },
         "pipfile-spec": 6,
         "requires": {},
@@ -21,22 +21,6 @@
             ],
             "version": "==0.24.0"
         },
-        "boto3": {
-            "hashes": [
-                "sha256:484650b86ea843587f484a8f9cc9629465ad805aff0ffaabf95345960168f569",
-                "sha256:635e1864cd35d78d33fd7ce325f9baa15c93a932403953b2b4801567a791b869"
-            ],
-            "index": "pypi",
-            "version": "==1.9.143"
-        },
-        "botocore": {
-            "hashes": [
-                "sha256:0247ad0da9fdbf4e8025b0dafb3982b945d335bcd7043518fdabe9d99f704e17",
-                "sha256:94846e90fc4dbe91a9e70f6a24ca823b4f3acc9a4047b497266d003fe12c80ce"
-            ],
-            "index": "pypi",
-            "version": "==1.12.143"
-        },
         "certifi": {
             "hashes": [
                 "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5",
@@ -157,14 +141,6 @@
             ],
             "version": "==4.4.0"
         },
-        "docutils": {
-            "hashes": [
-                "sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6",
-                "sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274",
-                "sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6"
-            ],
-            "version": "==0.14"
-        },
         "e1839a8": {
             "editable": true,
             "path": "."
         },
@@ -179,14 +155,6 @@
             "markers": "python_version < '3'",
             "version": "==1.1.6"
         },
-        "futures": {
-            "hashes": [
-                "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265",
-                "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1"
-            ],
-            "markers": "python_version == '2.6' or python_version == '2.7'",
-            "version": "==3.2.0"
-        },
         "gradient-statsd": {
             "hashes": [
                 "sha256:12965d471cc3e203464229c44839c5a8f67a665ecc4f00e807b88351eb30a565",
@@ -210,13 +178,6 @@
             "markers": "python_version < '3'",
             "version": "==1.0.22"
         },
-        "jmespath": {
-            "hashes": [
-                "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6",
-                "sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c"
-            ],
-            "version": "==0.9.4"
-        },
         "progressbar2": {
             "hashes": [
                 "sha256:1ea89e2aaa1da85450aabbd2af62cefa04f1ee1c567f3a11ee0d8ded14fd1fea",
@@ -238,14 +199,6 @@
             ],
             "version": "==19.0.0"
         },
-        "python-dateutil": {
-            "hashes": [
-                "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb",
-                "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"
-            ],
-            "markers": "python_version >= '2.7'",
-            "version": "==2.8.0"
-        },
         "python-utils": {
            "hashes": [
                "sha256:34aaf26b39b0b86628008f2ae0ac001b30e7986a8d303b61e1357dfcdad4f6d3",
@@ -272,13 +225,6 @@
             "index": "pypi",
             "version": "==0.9.1"
         },
-        "s3transfer": {
-            "hashes": [
-                "sha256:7b9ad3213bff7d357f888e0fab5101b56fa1a0548ee77d121c3a3dbfbef4cb2e",
-                "sha256:f23d5cb7d862b104401d9021fc82e5fa0e0cf57b7660a1331425aab0c691d021"
-            ],
-            "version": "==0.2.0"
-        },
         "six": {
             "hashes": [
                 "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
@@ -299,7 +245,6 @@
                 "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4",
                 "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb"
             ],
-            "markers": "python_version == '2.7'",
             "version": "==1.24.3"
         }
     },
@@ -339,22 +284,6 @@
             ],
             "version": "==3.0.4"
         },
-        "configparser": {
-            "hashes": [
-                "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
-                "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
-            ],
-            "markers": "python_version < '3'",
-            "version": "==3.7.4"
-        },
-        "contextlib2": {
-            "hashes": [
-                "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
-                "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
-            ],
-            "markers": "python_version < '3'",
-            "version": "==0.5.5"
-        },
         "coverage": {
             "hashes": [
                 "sha256:3684fabf6b87a369017756b551cef29e505cb155ddb892a7a29277b978da88b9",
@@ -422,20 +351,13 @@
             ],
             "version": "==2.8"
         },
-        "importlib-metadata": {
-            "hashes": [
-                "sha256:46fc60c34b6ed7547e2a723fc8de6dc2e3a1173f8423246b3ce497f064e9c3de",
-                "sha256:bc136180e961875af88b1ab85b4009f4f1278f8396a60526c0009f503a1a96ca"
-            ],
-            "version": "==0.9"
-        },
         "mock": {
             "hashes": [
-                "sha256:21a2c07af3bbc4a77f9d14ac18fcc1782e8e7ea363df718740cdeaf61995b5e7",
-                "sha256:7868db2825a1563578869d4a011a036503a2f1d60f9ff9dd1e3205cd6e25fcec"
+                "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+                "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
             ],
             "index": "pypi",
-            "version": "==3.0.4"
+            "version": "==3.0.5"
         },
         "more-itertools": {
             "hashes": [
@@ -463,10 +385,10 @@
         },
         "pluggy": {
             "hashes": [
-                "sha256:1c0b297d4d41bc9bdfbdc17991b35f9e1d2cfe8eaa4d7c118e86d705870d34c8",
-                "sha256:fb2f776b7ec85038ef95860f4e83bfb6ab171a9d0b70b69d7ca4d04130644c2b"
+                "sha256:25a1bc1d148c9a640211872b4ff859878d422bccb59c9965e04eed468a0aa180",
+                "sha256:964cedd2b27c492fbf0b7f58b3284a09cf7f99b0f715941fb24a439b3af1bd1a"
             ],
-            "version": "==0.10.0"
+            "version": "==0.11.0"
         },
         "py": {
             "hashes": [
@@ -583,7 +505,6 @@
                 "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4",
                 "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb"
             ],
-            "markers": "python_version == '2.7'",
             "version": "==1.24.3"
         },
         "virtualenv": {
             "hashes": [
@@ -606,13 +527,6 @@
                 "sha256:8eb4a788b3aec8abf5ff68d4165441bc57420c9f64ca5f471f58c3969fe08668"
             ],
             "version": "==0.33.1"
-        },
-        "zipp": {
-            "hashes": [
-                "sha256:139391b239594fd8b91d856bc530fbd2df0892b17dd8d98a91f018715954185f",
-                "sha256:8047e4575ce8d700370a3301bbfc972896a5845eb62dd535da395b86be95dfad"
-            ],
-            "version": "==0.4.0"
         }
     }
 }
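Note: boto3 and botocore drop out of the lock file together with their transitive pins (docutils, futures, jmespath, python-dateutil, s3transfer) because the S3-based artifact download in paperspace/jobs.py is deleted further down. Purely as an illustration, and not part of this change, an artifact exposed through a pre-signed URL could be streamed with the requests dependency that remains; the url argument below is a hypothetical pre-signed link, not an API documented in this diff:

    import requests

    def download_artifact(url, dest_path):
        # Stream a (hypothetical) pre-signed artifact URL to disk; no AWS SDK required.
        with requests.get(url, stream=True) as r:
            r.raise_for_status()
            with open(dest_path, 'wb') as f:
                for chunk in r.iter_content(chunk_size=8192):
                    f.write(chunk)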
diff --git a/old_tests/__init__.py b/old_tests/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/old_tests/myscript.py b/old_tests/myscript.py
deleted file mode 100644
index 39d3880..0000000
--- a/old_tests/myscript.py
+++ /dev/null
@@ -1,16 +0,0 @@
-import os
-import subprocess
-import sys
-args = sys.argv[:]
-print('hello from %s' % args[0])
-print('args: ' + ' '.join(args))
-print('current directory: ' + os.getcwd())
-p = subprocess.Popen('ls -al', shell=True, bufsize=1, universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-while True:
-    line = p.stdout.readline()
-    if line != '':
-        print(line.rstrip())
-    else:
-        break
-retval = p.wait()
-print('%s done' % args[0])
diff --git a/old_tests/test.py b/old_tests/test.py
deleted file mode 100644
index ed01aef..0000000
--- a/old_tests/test.py
+++ /dev/null
@@ -1,97 +0,0 @@
-import sys
-import paperspace
-
-# Tests:
-
-project = 'all'
-print('project: %s' % project)
-
-def errorcheck(res):
-    if 'error' in res:
-        paperspace.print_json_pretty(res)
-        sys.exit(1)
-
-
-print("paperspace.jobs.machineTypes()")
-machineTypes = paperspace.jobs.machineTypes()
-errorcheck(machineTypes)
-paperspace.print_json_pretty(machineTypes)
-
-print("paperspace.jobs.list({'project': '%s'})" % project)
-jobs = paperspace.jobs.list({'project': project})
-errorcheck(jobs)
-for job in jobs:
-    print(job['id'])
-
-print("jobs.create({'project': '%s', 'machineType': 'P5000', 'container': 'paperspace/tensorflow-python', 'command': './do.sh', 'workspace': '~/myproject3', 'cluster': 'Gradient-Node'})" % project)
-job = paperspace.jobs.create({'project': project,
-    'machineType': 'P5000', 'container': 'paperspace/tensorflow-python',
-    'command': './do.sh', 'workspace': '~/myproject3'})
-if 'error' in job:
-    sys.exit(1)
-jobId = job['id']
-
-print("paperspace.jobs.artifactsList({'jobId': '%s', 'links': True})" % jobId)
-artifacts = paperspace.jobs.artifactsList({'jobId': jobId, 'links': True})
-errorcheck(artifacts)
-if artifacts:
-    paperspace.print_json_pretty(artifacts)
-
-print("paperspace.jobs.artifactsGet({'jobId': '%s', 'dest': '~/temp1'})" % jobId)
-if not paperspace.jobs.artifactsGet({'jobId': jobId, 'dest': '~/temp1'}):
-    print('paperspace.jobs.artifactsGet returned False')
-    sys.exit(1)
-
-print("paperspace.jobs.show({'jobId': '%s'})" % jobId)
-job = paperspace.jobs.show({'jobId': jobId})
-paperspace.print_json_pretty(job)
-
-print("paperspace.jobs.logs({'jobId': '%s', 'limit': 4}, tail=True)" % jobId)
-if not paperspace.jobs.logs({'jobId': jobId, 'limit': 4}, tail=True):
-    print('logs encountered an error')
-
-print("paperspace.jobs.logs({'jobId': '%s', 'limit': 4}, no_logging=True)" % jobId)
-res = paperspace.jobs.logs({'jobId': jobId, 'limit': 4}, no_logging=True)
-paperspace.print_json_pretty(res)
-
-print("paperspace.jobs.stop({'jobId': '%s'})" % jobId)
-res = paperspace.jobs.stop({'jobId': jobId})
-paperspace.print_json_pretty(res)
-
-print("paperspace.jobs.clone({'jobId': '%s'})" % jobId)
-clonedJob = paperspace.jobs.clone({'jobId': jobId})
-paperspace.print_json_pretty(clonedJob)
-
-print("paperspace.jobs.waitfor({'jobId': '%s', 'state': 'Stopped'})" % clonedJob['id'])
-waitforJob = paperspace.jobs.waitfor({'jobId': clonedJob['id'], 'state': 'Stopped'})
-paperspace.print_json_pretty(waitforJob)
-
-print("paperspace.jobs.artifactsList({'jobId': '%s'})" % clonedJob['id'])
-artifacts = paperspace.jobs.artifactsList({'jobId': clonedJob['id']})
-errorcheck(artifacts)
-if artifacts:
-    paperspace.print_json_pretty(artifacts)
-    print("paperspace.jobs.artifactsDestroy({'jobId': '%s'})" % clonedJob['id'])
-    paperspace.jobs.artifactsDestroy({'jobId': clonedJob['id']})
-
-    print("paperspace.jobs.artifactsList({'jobId': '%s'})" % clonedJob['id'])
-    artifacts = paperspace.jobs.artifactsList({'jobId': clonedJob['id']})
-    errorcheck(artifacts)
-    if artifacts:
-        paperspace.print_json_pretty(artifacts)
-
-print("paperspace.jobs.list({'project': '%s'})" % project)
-jobs = paperspace.jobs.list({'project': project})
-errorcheck(jobs)
-for job in jobs:
-    print(job['id'])
-
-print("paperspace.jobs.destroy({'jobId': '%s'})" % clonedJob['id'])
-res = paperspace.jobs.destroy({'jobId': clonedJob['id']})
-paperspace.print_json_pretty(res)
-
-print("paperspace.jobs.list({'project': '%s'})" % project)
-jobs = paperspace.jobs.list({'project': project})
-errorcheck(jobs)
-for job in jobs:
-    print(job['id'])
diff --git a/old_tests/test_artifactsGet.py b/old_tests/test_artifactsGet.py
deleted file mode 100644
index 641db86..0000000
--- a/old_tests/test_artifactsGet.py
+++ /dev/null
@@ -1,5 +0,0 @@
-import paperspace
-
-print("paperspace.jobs.artifactsGet({'jobId': 'jszkrgijy8ethy', 'dest': '~/temp1'}, no_logging=True)")
-files = paperspace.jobs.artifactsGet({'jobId': 'jszkrgijy8ethy', 'dest': '~/temp1'}, no_logging=True)
-paperspace.print_json_pretty(files)
diff --git a/old_tests/test_login.py b/old_tests/test_login.py
deleted file mode 100644
index cece50e..0000000
--- a/old_tests/test_login.py
+++ /dev/null
@@ -1,13 +0,0 @@
-import sys
-import paperspace
-
-if not paperspace.login():
-    sys.exit(1)
-
-print("paperspace.jobs.list({'project': 'all'})")
-jobs = paperspace.jobs.list({'project': 'all'})
-if 'error' in jobs:
-    paperspace.print_json_pretty(jobs)
-else:
-    for job in jobs:
-        print(job['id'])
diff --git a/old_tests/test_machines.py b/old_tests/test_machines.py
deleted file mode 100644
index eebd7cd..0000000
--- a/old_tests/test_machines.py
+++ /dev/null
@@ -1,160 +0,0 @@
-import sys
-import time
-import paperspace
-
-def errorcheck(res):
-    if 'error' in res:
-        paperspace.print_json_pretty(res)
-        sys.exit(1)
-"""
-print("paperspace.machines.availability({'region': 'East Coast (NY2)', 'machineType': 'P4000'})")
-res = paperspace.machines.availability({'region': 'East Coast (NY2)', 'machineType': 'P4000'})
-errorcheck(res)
-paperspace.print_json_pretty(res)
-
-print("paperspace.networks.list()")
-networks = paperspace.networks.list()
-if 'error' in res:
-    paperspace.print_json_pretty(res)
-else:
-    for network in networks:
-        paperspace.print_json_pretty(network)
-
-print("paperspace.templates.list()")
-templates = paperspace.templates.list()
-errorcheck(templates)
-for template in templates:
-    paperspace.print_json_pretty(template)
-
-print("paperspace.users.list()")
-users = paperspace.users.list()
-errorcheck(users)
-for user in users:
-    paperspace.print_json_pretty(user)
-
-print("paperspace.scripts.create(...)")
-script = paperspace.scripts.create({'scriptName': 'My Python Script', 'scriptText': 'python --version'})
-errorcheck(script)
-paperspace.print_json_pretty(script)
-scriptId = script['id']
-
-print("paperspace.scripts.show(...)")
-script = paperspace.scripts.show({'scriptId': scriptId})
-errorcheck(script)
-paperspace.print_json_pretty(script)
-
-print("paperspace.scripts.destroy(...)")
-res = paperspace.scripts.destroy({'scriptId': scriptId})
-errorcheck(res)
-paperspace.print_json_pretty(res)
-
-print("paperspace.scripts.list()")
-scripts = paperspace.scripts.list()
-errorcheck(scripts)
-scriptId = None
-last_script = None
-for script in scripts:
-    #paperspace.print_json_pretty(script)
-    scriptId = script['id']
-    last_script = script
-if last_script:
-    paperspace.print_json_pretty(last_script)
-
-print("paperspace.scripts.show(...)")
-script = paperspace.scripts.show({'scriptId': scriptId})
-errorcheck(script)
-paperspace.print_json_pretty(script)
-
-print("paperspace.scripts.text(...)")
-script = paperspace.scripts.text({'scriptId': scriptId})
-errorcheck(script)
-paperspace.print_json_pretty(script)
-
-print("paperspace.machines.create(...)")
-machine = paperspace.machines.create({'machineType': 'C1', 'region': 'East Coast (NY2)', 'billingType': 'hourly',
-    'machineName': 'pythoncreate4', 'templateId': 'tqalmii', 'size': '50', 'dynamicPublicIp': True}) #prod: 'tbludl2'
-errorcheck(machine)
-paperspace.print_json_pretty(machine)
-
-machineId = machine['id']
-
-machine = paperspace.machines.waitfor({'machineId': machineId, 'state': 'ready'})
-errorcheck(machine)
-paperspace.print_json_pretty(machine)
-
-print("paperspace.machines.show(...)")
-machine = paperspace.machines.show({'machineId': machineId})
-errorcheck(machine)
-paperspace.print_json_pretty(machine)
-
-print("paperspace.machines.list()")
-machines = paperspace.machines.list()
-errorcheck(machines)
-paperspace.print_json_pretty(machine)
-found = False
-for machine in machines:
-    if machine['id'] == machineId:
-        found = True
-        print('found machineId %s in machines list' % machine['id'])
-if not found:
-    print('failed to find machineId %s in machines list' % machineId)
-    sys.exit(1)
-
-print("paperspace.machines.stop(...)")
-res = paperspace.machines.stop({'machineId': machineId})
-errorcheck(res)
-paperspace.print_json_pretty(res)
-
-machine = paperspace.machines.waitfor({'machineId': machineId, 'state': 'off'})
-errorcheck(machine)
-paperspace.print_json_pretty(machine)
-
-print("paperspace.machines.update(...)")
-res = paperspace.machines.update({'machineId': machineId, 'machineName': 'pythoncreate-6', 'dynamicPublicIp': False})
-errorcheck(res)
-paperspace.print_json_pretty(res)
-
-print("paperspace.machines.show(...)")
-machine = paperspace.machines.show({'machineId': machineId})
-errorcheck(machine)
-paperspace.print_json_pretty(machine)
-
-print("paperspace.machines.start(...)")
-res = paperspace.machines.start({'machineId': machineId})
-errorcheck(res)
-paperspace.print_json_pretty(res)
-
-machine = paperspace.machines.waitfor({'machineId': machineId, 'state': 'ready'})
-errorcheck(machine)
-paperspace.print_json_pretty(machine)
-
-print("paperspace.machines.restart(...)")
-res = paperspace.machines.restart({'machineId': machineId})
-errorcheck(res)
-paperspace.print_json_pretty(res)
-
-time.sleep(10)
-
-machine = paperspace.machines.waitfor({'machineId': machineId, 'state': 'ready'})
-errorcheck(machine)
-paperspace.print_json_pretty(machine)
-
-print("paperspace.machines.stop(...)")
-res = paperspace.machines.stop({'machineId': machineId})
-errorcheck(res)
-paperspace.print_json_pretty(res)
-
-machine = paperspace.machines.waitfor({'machineId': machineId, 'state': 'off'})
-errorcheck(machine)
-paperspace.print_json_pretty(machine)
-
-print("paperspace.machines.destroy(...)")
-res = paperspace.machines.destroy({'machineId': machineId})
-errorcheck(res)
-paperspace.print_json_pretty(res)
-
-print("paperspace.machines.utilization(...)")
-res = paperspace.machines.utilization({'machineId': machineId, 'billingMonth': '2018-04'})
-errorcheck(res)
-paperspace.print_json_pretty(res)
-"""
diff --git a/old_tests/test_remote.py b/old_tests/test_remote.py
deleted file mode 100644
index 90dd80a..0000000
--- a/old_tests/test_remote.py
+++ /dev/null
@@ -1,7 +0,0 @@
-import os
-import paperspace
-
-paperspace.run({'project': 'myproject', 'machineType': 'P5000', 'container': 'paperspace/tensorflow-python'})
-
-print(os.getcwd())
-print('something useful')
diff --git a/old_tests/test_run_script.py b/old_tests/test_run_script.py
deleted file mode 100644
index c28372f..0000000
--- a/old_tests/test_run_script.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#invoke with python, not paperspace-python run
-import paperspace
-
-print("paperspace.run('myscript.py', {'project': 'myproject', 'machineType': 'P5000', 'container': 'paperspace/tensorflow-python'})")
-paperspace.run('myscript.py', {'project': 'myproject', 'machineType': 'P5000', 'container': 'paperspace/tensorflow-python'})
-print('test_run_script completed')
diff --git a/old_tests/test_run_test_tensorflow.py b/old_tests/test_run_test_tensorflow.py
deleted file mode 100644
index f5fcb93..0000000
--- a/old_tests/test_run_test_tensorflow.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#invoke with python, not paperspace-python run
-import paperspace
-
-print("paperspace.run('test_tensorflow.py', {'project': 'myproject', 'machineType': 'P5000', 'container': 'paperspace/tensorflow-python'})")
-paperspace.run('myscript.py', {'project': 'myproject', 'machineType': 'P5000', 'container': 'paperspace/tensorflow-python'})
-print('test_run_script completed')
diff --git a/old_tests/test_tensorflow.py b/old_tests/test_tensorflow.py
deleted file mode 100644
index 94e88b1..0000000
--- a/old_tests/test_tensorflow.py
+++ /dev/null
@@ -1,16 +0,0 @@
-import tensorflow as tf
-
-print('test_tensorflow start')
-print('tensorflow version: %s' % tf.__version__)
-
-sess = tf.Session(config=tf.ConfigProto(log_device_placement=True))
-
-with tf.device('/gpu:0'):
-    a = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[2, 3], name='a')
-    b = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[3, 2], name='b')
-    c = tf.matmul(a, b)
-
-with tf.Session() as sess:
-    print(sess.run(c))
-
-print('test_tensorflow done')
diff --git a/paperspace/__init__.py b/paperspace/__init__.py
index 152841a..73303b2 100644
--- a/paperspace/__init__.py
+++ b/paperspace/__init__.py
@@ -1,14 +1,12 @@
 from gradient_statsd import Client as StatsdClient
-from . import jobs
-from . import machines
-from . import networks
-from . import scripts
-from . import templates
-from . import users
 from .config import config
-from .jobs import run
 from .login import login, logout
-from .method import print_json_pretty
+from .utils import print_json_pretty
+from .cli.cli import cli as _cli_entry_point
 
-_ = StatsdClient  # to keep import save from "Optimize Imports", auto code cleanup, etc.
+_ = StatsdClient  # to keep import safe from "Optimize Imports", auto code cleanup, etc.
+
+
+def main():
+    _cli_entry_point()
diff --git a/paperspace/__main__.py b/paperspace/__main__.py
new file mode 100644
index 0000000..89c63ef
--- /dev/null
+++ b/paperspace/__main__.py
@@ -0,0 +1,4 @@
+from paperspace import main
+
+if __name__ == '__main__':
+    main()
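The new main() in paperspace/__init__.py delegates to the click-based CLI, and the __main__.py stub above makes `python -m paperspace` behave the same way. How the console script itself is wired up is not shown in this diff; a minimal setuptools sketch, with the script name paperspace-python and the metadata fields as assumptions, could look roughly like this:

    # setup.py (not part of this diff) -- a sketch of exposing paperspace.main
    # as a console script; the script name 'paperspace-python' is an assumption.
    from setuptools import setup, find_packages

    setup(
        name='paperspace',
        packages=find_packages(),
        entry_points={
            'console_scripts': [
                'paperspace-python = paperspace:main',
            ],
        },
    )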
diff --git a/paperspace/jobs.py b/paperspace/jobs.py
deleted file mode 100644
index e31b271..0000000
--- a/paperspace/jobs.py
+++ /dev/null
@@ -1,432 +0,0 @@
-import base64
-import inspect
-import os
-import re
-import sys
-import tempfile
-import time
-
-import boto3
-import botocore
-import requests
-import six
-
-from paperspace.config import config
-from .login import apikey
-from .method import method, requests_exception_to_error_obj, print_json_pretty
-
-
-def list(params={}):
-    return method('jobs', 'getJobs', params)
-
-
-def artifactsList(params):
-    return method('jobs', 'artifactsList', params)
-
-
-def artifactsDestroy(params):
-    return method('jobs', 'artifactsDestroy', params)
-
-
-def show(params):
-    return method('jobs', 'getJob', params)
-
-
-def clone(params):
-    return method('jobs', 'clone', params)
-
-
-def stop(params):
-    return method('jobs', 'stop', params)
-
-
-def destroy(params):
-    return method('jobs', 'destroy', params)
-
-
-def machineTypes(params = {}):
-    return method('jobs', 'getClusterAvailableMachineTypes', params)
-
-
-def logs(params, tail=False, no_logging=False):
-    params = params.copy()
-    if 'apiKey' in params:
-        config.PAPERSPACE_API_KEY = params.pop('apiKey')
-    elif not config.PAPERSPACE_API_KEY:
-        config.PAPERSPACE_API_KEY = apikey()
-    tail = params.pop('tail', False) or tail
-    no_logging = no_logging or params.pop('no_logging', False)
-
-    last_line = 0
-    PSEOF = False
-    result = []
-    MAX_BACKOFF = 30
-    backoff = 0
-
-    if 'line' not in params:
-        params['line'] = 0
-
-    while True:
-        try:
-            r = requests.request('GET', config.CONFIG_LOG_HOST + '/jobs/logs',
-                                 headers={'x-api-key': config.PAPERSPACE_API_KEY},
-                                 params=params)
-        except requests.exceptions.RequestException as e:
-            res = requests_exception_to_error_obj(e)
-            if no_logging:
-                return res
-            print_json_pretty(res)
-            return False
-        else:
-            try:
-                res = r.json()
-                if 'error' in res:
-                    if no_logging:
-                        return res
-                    print_json_pretty(res)
-                    if tail:
-                        print('Error: logs tail exited before job completed')
-                    else:
-                        print('Error: logs exited on error')
-                    return False
-
-            except ValueError:
-                res = []
-
-        if no_logging:
-            result += res
-        else:
-            for l in res:
-                m = l['message']
-                if m != 'PSEOF':
-                    print(m)
-
-        if res:
-            last_line = res[-1]['line']
-            PSEOF = res[-1]['message'] == 'PSEOF'
-
-        if PSEOF:
-            break
-
-        if last_line > params['line']:
-            params['line'] = last_line
-            backoff = 0
-            continue
-
-        if tail:
-            if backoff:
-                time.sleep(backoff)
-                backoff = min(backoff * 2, MAX_BACKOFF)
-            else:
-                backoff = 1
-        else:
-            break
-
-    if no_logging:
-        return result
-    return True
-
-
-def waitfor(params):
-    while True:
-        job = method('jobs', 'getJob', params)
-        if 'state' not in job:
-            return job
-        state = job['state']
-
-        if (state == params['state']
-                or (state == 'Running' and params['state'] == 'Pending')
-                or state == 'Error'
-                or state == 'Stopped'
-                or state == 'Failed'
-                or state == 'Preempted'
-                or state == 'Cancelled'):
-            return job
-        time.sleep(5)
-
-
-def create(params, no_logging=False, extra_files=[]):
-    no_logging = no_logging or params.get('no_logging', False)
-    job = method('jobs', 'createJob', params)
-    if no_logging:
-        return job
-    if 'id' not in job:
-        print_json_pretty(job)
-        return job
-    jobId = job['id']
-    print('New jobId: %s' % jobId)
-    print('Cluster: %s' % job['cluster'])
-    if job['codeCommit']:
-        print('Git commit: %s' % job['codeCommit'])
-    print('Job %s' % job['state'])
-
-    if job['state'] == 'Pending':
-        print('Waiting for job to run...')
-        job = waitfor({'jobId': jobId, 'state': 'Running'})
-        if 'state' not in job:
-            print_json_pretty(job)
-            return job
-
-    if job['state'] != 'Error' and job['state'] != 'Cancelled':
-        print('Awaiting logs...')
-        if logs({'jobId': jobId}, tail=True, no_logging=no_logging):
-            job = method('jobs', 'getJob', {'jobId': jobId})
-        else:
-            job = waitfor({'jobId': jobId, 'state': 'Stopped'})
-        if 'state' not in job:
-            print_json_pretty(job)
-            return job
-
-    if job['state'] == 'Error':
-        print('Job %s: %s' % (job['state'], job['jobError']))
-    elif job['state'] == 'Cancelled':
-        print('Job %s' % (job['state']))
-    else:
-        job = waitfor({'jobId': jobId, 'state': 'Stopped'})
-        if job['state'] == 'Error':
-            print('Job %s: %s' % (job['state'], job['jobError']))
-        else:
-            if 'exitCode' not in job:
-                print('Job %s, exitCode %s' % (job['state'], 'None'))
-            else:
-                print('Job %s, exitCode %s' % (job['state'], job['exitCode']))
-    return job
-
-
-def artifactsGet(params, no_logging=False):
-    params = params.copy()
-    no_logging = no_logging or params.get('no_logging', False)
-    result = []
-    if 'dest' in params:
-        dest = os.path.abspath(os.path.expanduser(params['dest']))
-        if not os.path.exists(dest):
-            os.makedirs(dest)
-        else:
-            if not os.path.isdir(dest):
-                print('Destination path not is not directory: %s' % dest)
-                if no_logging:
-                    return result
-                return False
-        del params['dest']
-    else:
-        dest = os.getcwd()
-
-    artifacts_list = method('jobs', 'artifactsList', params)
-    if artifacts_list:
-
-        creds = method('jobs', 'artifactsGet', params)
-        if 'bucket' in creds:
-            bucket = creds['bucket']
-            folder = creds['folder']
-            credentials = creds['Credentials']
-
-            session = boto3.Session(
-                aws_access_key_id=credentials['AccessKeyId'],
-                aws_secret_access_key=credentials['SecretAccessKey'],
-                aws_session_token=credentials['SessionToken']
-            )
-            s3 = session.resource('s3')
-
-            for item in artifacts_list:
-                file = item['file']
-                dest_file = os.path.join(dest, file)
-
-                dest_dir = os.path.dirname(dest_file)
-                if not os.path.exists(dest_dir):
-                    os.makedirs(dest_dir)
-
-                key = folder + '/' + file
-                if not no_logging:
-                    print('Downloading %s' % file)
-
-                try:
-                    s3.Bucket(bucket).download_file(key, dest_file)
-                except botocore.exceptions.ClientError as e:
-                    if e.response['Error']['Code'] == "404":
-                        print("The s3 object does not exist: %s" % key)
-                    else:
-                        raise
-                if no_logging:
-                    result.append({ 'file': file, 'destination': dest_file })
-
-            if no_logging:
-                return result
-            print('Download complete')
-            return True
-        else:
-            if no_logging:
-                return creds
-            print_json_pretty(creds)
-            return False
-
-    if no_logging:
-        return result
-    return False
-
-
-# TO DO:
-# detect running interactively
-# stream file uploads/downloads
-
-
-def run(params={}, no_logging=False):
-    if 'PS_JOB_RUNNER' in os.environ:
-        return
-
-    # handle script is first arg, params is second
-    if isinstance(params, str):
-        script = params
-        if isinstance(no_logging, dict):
-            params = no_logging
-            no_logging = False
-        else:
-            params = {}
-        params['script'] = script
-
-    python_opt = ''
-    python_cmd_str_term = ''
-    run_module = params.pop('run_module', None)
-    if run_module:
-        python_opt = '-m '
-    run_command = params.pop('run_command', None)
-    if run_command:
-        python_opt = "-c '"
-        python_cmd_str_term = "' "
-
-    params = params.copy()
-    run_this = False
-    if 'script' not in params:
-        run_this = True
-
-        stack = inspect.stack()
-        obj = __import__(stack[1][0].f_globals['__name__'])
-        src = inspect.getsource(obj)
-        src_file = os.path.basename(inspect.getsourcefile(obj))
-
-        # TO DO: remove these replacements once we are auto importing paperspace on the job runner
-        src, n = re.subn('^import paperspace', 'def _paperspace_null_func(*args, **kwargs): return None\n#import _paperspace', src, count=1, flags=re.MULTILINE)
-        if n != 0:
-            src = re.sub('^import paperspace*$', '', src, flags=re.MULTILINE)
-            src = re.sub('import paperspace', 'pass #import _paperspace', src)
-        src = re.sub('^from paperspace', '#from _paperspace', src, flags=re.MULTILINE)
-        src = re.sub('from paperspace', 'pass #from _paperspace', src)
-        src = src.replace('paperspace.config.PAPERSPACE_API_KEY', '_paperspace_config_PAPERSPACE_API_KEY')
-        src = src.replace('paperspace.config.CONFIG_HOST', '_paperspace_config_CONFIG_HOST')
-        src = src.replace('paperspace.config.CONFIG_LOG_HOST', '_paperspace_config_CONFIG_LOG_HOST')
-        src = src.replace('paperspace.jobs.run', '_paperspace_null_func')
-        src = src.replace('paperspace.run', '_paperspace_null_func')
-        src = src.replace('paperspace.login', '_paperspace_null_func')
-        src = src.replace('paperspace.logout', '_paperspace_null_func')
-
-        src_path = os.path.join(tempfile.gettempdir(), src_file)
-        with open(src_path, "w") as file:
-            file.write(src)
-    else:
-        if not run_module and not run_command:
-            src_file = os.path.basename(params['script'])
-            src_path = params.pop('script')
-        else:
-            src_file = params.pop('script')
-            src_path = src_file
-
-    if 'project' not in params:
-        params['project'] = 'paperspace-python'
-    # if 'machineType' not in params:
-    #     params['machineType'] = 'P5000'
-    if 'container' not in params:
-        params['container'] = 'paperspace/tensorflow-python'
-
-    python_ver = params.pop('python', str(sys.version_info[0]))  # defaults locally running version
-    # TODO validate python version; handle no version, specific version
-
-    script_args = params.pop('script_args', None)
-    args = ''
-    if script_args:
-        args = ' ' + ' '.join(script_args)
-
-    if 'command' not in params:
-        params['command'] = 'python' + python_ver + ' ' + python_opt + src_file + python_cmd_str_term + args
-
-    params['extraFiles'] = []
-    if not run_module and not run_command:
-        if not os.path.exists(src_path):
-            message = format('error: file not found: %s' % src_path)
-            print(message)
-            if 'no_logging' in params:
-                return { 'error': True, 'message': message }
-            sys.exit(1)
-        elif os.path.isdir(src_path):
-            message = format('error: specified file is a directory: %s' % src_path)
-            print(message)
-            if 'no_logging' in params:
-                return { 'error': True, 'message': message }
-            sys.exit(1)
-        if 'workspace' not in params:
-            params['workspace'] = src_path
-        else:
-            params['extraFiles'].append(src_path)
-    else:
-        if not run_command and os.path.exists(src_path) and not os.path.isdir(src_path):
-            if 'workspace' not in params:
-                params['workspace'] = src_path
-            else:
-                params['extraFiles'].append(src_path)
-
-    if 'ignoreFiles' in params:
-        if isinstance(params['ignoreFiles'], str):
-            params['ignoreFiles'] = params['ignoreFiles'].split(',')
-
-    pipenv = params.pop('pipenv', None)
-    if pipenv:
-        for pipfile in ['Pipfile', 'Pipfile.lock']:
-            if os.path.exists(pipfile):
-                params['extraFiles'].append(pipfile)
-        uses_python_ver = ''
-        if python_ver.startswith('3'):
-            uses_python_ver = '--three '
-        elif python_ver.startswith('2'):
-            uses_python_ver = '--two '
-        params['command'] = 'pipenv ' + uses_python_ver + 'run ' + params['command']
-
-    req = params.pop('req', None)
-    if req:
-        if not isinstance(req, str):
-            req = 'requirements.txt'
-        if os.path.exists(req):
-            params['extraFiles'].append(req)
-            params['command'] = 'pip' + python_ver + ' install -r ' + os.path.basename(req) + '\n' + params['command']
-            if pipenv:
-                params['command'] = 'pipenv ' + uses_python_ver + 'run ' + params['command']
-
-    if pipenv:
-        params['command'] = 'pipenv ' + uses_python_ver + 'install\n' + params['command']
-
-    conda = params.pop('conda', None)
-    if conda:
-        params['command'] = 'conda -env ' + conda + '\n' + params['command']
-
-    init = params.pop('init', None)
-    if init:
-        if not isinstance(init, str):
-            init = 'init.sh'
-        if os.path.exists(init):
-            params['extraFiles'].append(init)
-            params['command'] = '. ' + os.path.basename(init) + '\n' + params['command']
-
-    if params.pop('dryrun', None):
-        print(params['command'])
-        sys.exit(1)
-
-    if six.PY3:
-        params['command'] = bytes(params['command'], 'utf-8')
-
-    params['command'] = base64.b64encode(params['command'])
-    res = create(params, no_logging)
-    if run_this:
-        sys.exit(0)
-    return res
-
-# TO DO:
-# automatic install of imported dependencies
-# allow return results
-# detect/use python environment
diff --git a/paperspace/login.py b/paperspace/login.py
index 99f0dba..c580777 100644
--- a/paperspace/login.py
+++ b/paperspace/login.py
@@ -7,7 +7,7 @@
 from paperspace import logger
 
 from .config import config
-from .method import requests_exception_to_error_obj, response_error_check, status_code_to_error_obj
+from paperspace.utils import response_error_check, requests_exception_to_error_obj, status_code_to_error_obj
 
 UNAUTHORIZED_EXTENDED_INFO = '\n\nNote: Please keep in mind that currently you can login only with the email and ' \
                              'password from your Paperspace account. If you\'re using AD, SAML or GitHub ' \
diff --git a/paperspace/machines.py b/paperspace/machines.py
deleted file mode 100644
index ab5798b..0000000
--- a/paperspace/machines.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import time
-
-import paperspace
-from .method import method
-
-
-def availability(params):
-    return method('machines', 'getAvailability', params)
-
-
-def create(params):
-    return method('machines', 'createSingleMachinePublic', params)
-
-
-def destroy(params):
-    return method('machines', 'destroyMachine', params)
-
-
-def list(params={}):
-    return method('machines', 'getMachines', params)
-
-
-def restart(params):
-    return method('machines', 'restart', params)
-
-
-def show(params):
-    return method('machines', 'getMachinePublic', params)
-
-
-def start(params):
-    return method('machines', 'start', params)
-
-
-def stop(params):
-    return method('machines', 'stop', params)
-
-
-def waitfor(params):
-    params = params.copy()
-    if 'machineId' not in params:
-        print('Error: machineId is a required parameter for paperspace.machines.waitfor method')
-        sys.exit(1)
-    if 'state' not in params:
-        print('Error: state is a required parameter for paperspace.machines.waitfor method')
-        sys.exit(1)
-    target_state = params.pop('state', None)
-    state = None
-    machine = None
-    while state != target_state:
-        time.sleep(5)
-        machine = show(params)
-        if 'error' in machine:
-            paperspace.print_json_pretty(res)
-            sys.exit(1)
-        state = machine['state']
-    return machine
-
-
-def update(params):
-    return method('machines', 'updateMachinePublic', params)
-
-
-def utilization(params):
-    return method('machines', 'getUtilization', params)
diff --git a/paperspace/main.py b/paperspace/main.py
deleted file mode 100644
index 6d936a1..0000000
--- a/paperspace/main.py
+++ /dev/null
@@ -1,140 +0,0 @@
-import os
-import sys
-
-from .cli.cli import cli
-from .jobs import run, print_json_pretty
-from .login import set_apikey
-from .version import version
-
-
-def main():
-    if len(sys.argv) >= 2 and sys.argv[1] in ('experiments', 'deployments', 'machines', 'login', 'logout', 'version',
-                                              'projects', 'jobs', 'models'):
-        cli(sys.argv[1:])
-
-    args = sys.argv[:]
-    prog = os.path.basename(args.pop(0))
-
-    if not args:
-        usage(prog)
-        sys.exit(1)
-
-    cmd = args.pop(0)
-
-    help_opts = ['help', '--help', '-h']
-
-    if cmd in help_opts:
-        usage(prog)
-        sys.exit(0)
-
-    if cmd == 'apikey' or cmd == 'apiKey':
-        if not args or args[0] in help_opts:
-            print('usage: %s' % apikey_usage(prog))
-            sys.exit(not args)
-        return not set_apikey(args[0])
-
-    if cmd == 'run':
-        if not args or args[0] in help_opts:
-            print('run usage: %s' % run_usage(prog))
-            sys.exit(not args)
-        params = {}
-        skip_arg_processing = False
-        while args:
-            opt = args.pop(0)
-            if opt == '-':
-                skip_arg_processing = True
-            elif opt.startswith('--') and not skip_arg_processing:
-                param = opt[2:]
-                if param in ['script', 'python', 'conda', 'ignoreFiles', 'apiKey', 'container', 'machineType', 'name',
-                             'project', 'projectId', 'command', 'workspace', 'dataset', 'registryUsername',
-                             'registryPassword', 'workspaceUsername', 'workspacePassword', 'cluster', 'clusterId',
-                             'ports', 'isPreemptible', 'useDockerfile', 'buildOnly', 'registryTarget',
-                             'registryTargetUsername', 'registryTargetPassword', 'relDockerfilePath', 'customMetrics', 'modelType', 'modelPath']:
-                    if args and not args[0].startswith('--'):
-                        params[param] = args.pop(0)
-                    else:
-                        print('error: missing argument for %s' % opt)
-                        print('usage: %s' % run_usage(prog))
-                        sys.exit(1)
-                elif param in ['init', 'req']:
-                    params[param] = True
-                    if args and not args[0].startswith('-') and not args[0].endswith('.py'):
-                        params[param] = args.pop(0)
-                elif param in ['no_logging', 'nologging', 'noLogging', 'json']:
-                    params['no_logging'] = True
-                elif param in ['dryrun', 'pipenv']:
-                    params[param] = True
-                else:
-                    print('error: invalid option: %s' % opt)
-                    print('usage: %s' % run_usage(prog))
-                    sys.exit(1)
-            elif opt == '-m' and not skip_arg_processing:
-                params['run_module'] = True
-                skip_arg_processing = True
-            elif opt == '-c' and not skip_arg_processing:
-                params['run_command'] = True
-                skip_arg_processing = True
-            elif 'script' not in params:
-                params['script'] = opt
-            else:
-                if 'script_args' not in params:
-                    params['script_args'] = [opt]
-                else:
-                    params['script_args'].append(opt)
-        res = run(params)
-        if 'error' in res:
-            print_json_pretty(res)
-            sys.exit(1)
-        sys.exit(0)
-
-    print('error: invalid command: %s' % cmd)
-    usage(prog)
-    sys.exit(1)
-
-
-def vers(prog):
-    print('%s %s' % (prog, version))
-
-
-def login_usage(prog):
-    return format('%s login [[--email] ] [[--password] ""] [[--apiToken] ""]\n       %s logout' % (prog, prog))
-
-
-def apikey_usage(prog):
-    return format('%s apikey ' % prog)
-
-
-def run_usage(prog):
-    return format('%s run [options] [[-m]