From 80bc2ef19ff22a2b290f7608069dd85a1eed42f5 Mon Sep 17 00:00:00 2001
From: vishal
Date: Thu, 22 Oct 2020 14:16:52 -0400
Subject: [PATCH 1/2] Fix install documentation

---
 docs/cluster-management/install.md    | 9 ++++++---
 pkg/workloads/cortex/client/README.md | 9 ++++++---
 2 files changed, 12 insertions(+), 6 deletions(-)

diff --git a/docs/cluster-management/install.md b/docs/cluster-management/install.md
index 0525b52368..19eb6673e2 100644
--- a/docs/cluster-management/install.md
+++ b/docs/cluster-management/install.md
@@ -19,13 +19,16 @@ See [here](../miscellaneous/cli.md#install-cortex-cli-without-python-client) to
 ```bash
 # clone the Cortex repository
 git clone -b master https://github.com/cortexlabs/cortex.git
+
+# navigate to the Pytorch text generator example
+cd cortex/examples/pytorch/text-generator
 ```
 
 ### Using the CLI
 
 ```bash
 # deploy the model as a realtime api
-cortex deploy cortex/examples/pytorch/text-generator/cortex.yaml
+cortex deploy
 
 # view the status of the api
 cortex get --watch
@@ -39,7 +42,7 @@ cortex get text-generator
 # generate text
 curl \
     -X POST -H "Content-Type: application/json" \
-    -d '{"text": "machine learning is"}' \
+    -d '{"text": "machine learning is"}'
 
 # delete the api
 cortex delete text-generator
@@ -54,7 +57,7 @@ import requests
 local_client = cortex.client("local")
 
 # deploy the model as a realtime api and wait for it to become active
-deployments = local_client.deploy("cortex/examples/pytorch/text-generator/cortex.yaml", wait=True)
+deployments = local_client.deploy("./cortex.yaml", wait=True)
 
 # get the api's endpoint
 url = deployments[0]["api"]["endpoint"]
diff --git a/pkg/workloads/cortex/client/README.md b/pkg/workloads/cortex/client/README.md
index d54fe029d5..38eb3abac4 100644
--- a/pkg/workloads/cortex/client/README.md
+++ b/pkg/workloads/cortex/client/README.md
@@ -38,6 +38,9 @@ You must have [Docker](https://docs.docker.com/install) installed to run Cortex
 ```bash
 # clone the Cortex repository
 git clone -b master https://github.com/cortexlabs/cortex.git
+
+# navigate to the Pytorch text generator example
+cd cortex/examples/pytorch/text-generator
 ```
 
 ### In Python
@@ -48,7 +51,7 @@ import requests
 local_client = cortex.client("local")
 
 # deploy the model as a realtime api and wait for it to become active
-deployments = local_client.deploy("cortex/examples/pytorch/text-generator/cortex.yaml", wait=True)
+deployments = local_client.deploy("./cortex.yaml", wait=True)
 
 # get the api's endpoint
 url = deployments[0]["api"]["endpoint"]
@@ -63,7 +66,7 @@ local_client.delete_api("text-generator")
 ### Using the CLI
 ```bash
 # deploy the model as a realtime api
-cortex deploy cortex/examples/pytorch/text-generator/cortex.yaml
+cortex deploy
 
 # view the status of the api
 cortex get --watch
@@ -77,7 +80,7 @@ cortex get text-generator
 # generate text
 curl \
     -X POST -H "Content-Type: application/json" \
-    -d '{"text": "machine learning is"}' \
+    -d '{"text": "machine learning is"}'
 
 # delete the api
 cortex delete text-generator

From 4d9e8bc6aa2ae005e53c9bdaa9425079ea8a6484 Mon Sep 17 00:00:00 2001
From: vishal
Date: Thu, 22 Oct 2020 14:17:01 -0400
Subject: [PATCH 2/2] Flush standard out

---
 pkg/workloads/cortex/client/cortex/binary/__init__.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pkg/workloads/cortex/client/cortex/binary/__init__.py b/pkg/workloads/cortex/client/cortex/binary/__init__.py
index 0c0b8541f6..f7cf580015 100644
--- a/pkg/workloads/cortex/client/cortex/binary/__init__.py
+++ b/pkg/workloads/cortex/client/cortex/binary/__init__.py
@@ -87,6 +87,7 @@ def run_cli(
         if not hide_output:
             if (not mixed_output) or (mixed_output and not result_found):
                 sys.stdout.write(c)
+                sys.stdout.flush()
 
     process.wait()
 
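
Why the flush in PATCH 2/2 matters: sys.stdout is line-buffered on a terminal and block-buffered when piped, so writing one character at a time can sit in the buffer until a newline arrives or the buffer fills, and flushing after each write makes streamed CLI output appear immediately. The sketch below is an illustration of that streaming pattern only, not the actual run_cli implementation; the echo command and its arguments are placeholders.

# minimal sketch: stream a child process's output one character at a time
import subprocess
import sys

process = subprocess.Popen(
    ["echo", "hello world"],   # placeholder command for illustration
    stdout=subprocess.PIPE,
    stderr=subprocess.STDOUT,
    encoding="utf8",
)

# read the child's stdout one character at a time until EOF ("")
for c in iter(lambda: process.stdout.read(1), ""):
    sys.stdout.write(c)
    sys.stdout.flush()  # push each character out immediately instead of buffering

process.wait()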