diff --git a/cli/cmd/delete.go b/cli/cmd/delete.go
index 04d841a74e..a4b9b15980 100644
--- a/cli/cmd/delete.go
+++ b/cli/cmd/delete.go
@@ -95,7 +95,7 @@ var _deleteCmd = &cobra.Command{
 			if err != nil {
 				exit.Error(err)
 			}
-			fmt.Println(string(bytes))
+			fmt.Print(string(bytes))
 			return
 		}
 
diff --git a/cli/cmd/deploy.go b/cli/cmd/deploy.go
index 7dfc79537f..08fbe81b3f 100644
--- a/cli/cmd/deploy.go
+++ b/cli/cmd/deploy.go
@@ -119,7 +119,7 @@ var _deployCmd = &cobra.Command{
 			if err != nil {
 				exit.Error(err)
 			}
-			fmt.Println(string(bytes))
+			fmt.Print(string(bytes))
 		case flags.MixedOutputType:
 			err := mixedPrint(deployResults)
 			if err != nil {
diff --git a/cli/cmd/env.go b/cli/cmd/env.go
index 18b91e34a9..46e4c8b260 100644
--- a/cli/cmd/env.go
+++ b/cli/cmd/env.go
@@ -134,7 +134,7 @@ var _envListCmd = &cobra.Command{
 			if err != nil {
 				exit.Error(err)
 			}
-			fmt.Println(string(bytes))
+			fmt.Print(string(bytes))
 			return
 		}
 
diff --git a/cli/cmd/lib_realtime_apis.go b/cli/cmd/lib_realtime_apis.go
index f04767eb93..30fd2809fb 100644
--- a/cli/cmd/lib_realtime_apis.go
+++ b/cli/cmd/lib_realtime_apis.go
@@ -68,8 +68,6 @@ func realtimeAPITable(realtimeAPI schema.APIResponse, env cliconfig.Environment)
 	out += "\n" + console.Bold("endpoint: ") + realtimeAPI.Endpoint
 
-	out += fmt.Sprintf("\n%s curl %s -X POST -H \"Content-Type: application/json\" -d @sample.json\n", console.Bold("example curl:"), realtimeAPI.Endpoint)
-
 	if !(realtimeAPI.Spec.Predictor.Type == userconfig.PythonPredictorType && realtimeAPI.Spec.Predictor.ModelPath == nil && realtimeAPI.Spec.Predictor.Models == nil) {
 		out += "\n" + describeModelInput(realtimeAPI.Status, realtimeAPI.Spec.Predictor, realtimeAPI.Endpoint)
 	}
 
diff --git a/cli/cmd/lib_traffic_splitters.go b/cli/cmd/lib_traffic_splitters.go
index 1a9405c38f..8c4de2dcc1 100644
--- a/cli/cmd/lib_traffic_splitters.go
+++ b/cli/cmd/lib_traffic_splitters.go
@@ -17,7 +17,6 @@ limitations under the License.
 package cmd
 
 import (
-	"fmt"
 	"strings"
 	"time"
 
@@ -51,7 +50,6 @@ func trafficSplitterTable(trafficSplitter schema.APIResponse, env cliconfig.Envi
 	out += "\n" + console.Bold("last updated: ") + libtime.SinceStr(&lastUpdated)
 	out += "\n" + console.Bold("endpoint: ") + trafficSplitter.Endpoint
 
-	out += fmt.Sprintf("\n%s curl %s -X POST -H \"Content-Type: application/json\" -d @sample.json\n", console.Bold("example curl:"), trafficSplitter.Endpoint)
 
 	out += "\n" + apiHistoryTable(trafficSplitter.APIVersions)
 
diff --git a/cli/cmd/patch.go b/cli/cmd/patch.go
index b0b7e29207..dd94715bb8 100644
--- a/cli/cmd/patch.go
+++ b/cli/cmd/patch.go
@@ -82,7 +82,7 @@ var _patchCmd = &cobra.Command{
 			if err != nil {
 				exit.Error(err)
 			}
-			fmt.Println(string(bytes))
+			fmt.Print(string(bytes))
 		case flags.PrettyOutputType:
 			message := deployMessage(deployResults, env.Name)
 			if didAnyResultsError(deployResults) {
diff --git a/cli/cmd/refresh.go b/cli/cmd/refresh.go
index ef280bdb4a..51d54514d5 100644
--- a/cli/cmd/refresh.go
+++ b/cli/cmd/refresh.go
@@ -73,7 +73,7 @@ var _refreshCmd = &cobra.Command{
 			if err != nil {
 				exit.Error(err)
 			}
-			fmt.Println(string(bytes))
+			fmt.Print(string(bytes))
 			return
 		}
 
diff --git a/cli/cmd/root.go b/cli/cmd/root.go
index 3f4e2bc948..e10bb2c4e0 100644
--- a/cli/cmd/root.go
+++ b/cli/cmd/root.go
@@ -182,7 +182,6 @@ func Execute() {
 
 	updateRootUsage()
 
-	printLeadingNewLine()
 	_rootCmd.Execute()
 
 	exit.Ok()
@@ -250,18 +249,11 @@ func envStringIfNotSpecified(envName string, cmd *cobra.Command) (string, error)
 	return "", nil
 }
 
-func printLeadingNewLine() {
-	if len(os.Args) == 3 && os.Args[1] == "completion" {
-		return
-	}
-	fmt.Println("")
-}
-
 func mixedPrint(a interface{}) error {
 	jsonBytes, err := libjson.Marshal(a)
 	if err != nil {
 		return err
 	}
-	fmt.Println(fmt.Sprintf("~~cortex~~%s~~cortex~~", base64.StdEncoding.EncodeToString(jsonBytes)))
+	fmt.Print(fmt.Sprintf("~~cortex~~%s~~cortex~~", base64.StdEncoding.EncodeToString(jsonBytes)))
 	return nil
 }
diff --git a/dev/deploy_test.py b/dev/deploy_test.py
index c66d621073..f56eab2013 100644
--- a/dev/deploy_test.py
+++ b/dev/deploy_test.py
@@ -37,7 +37,7 @@ def predict(self, payload):
         return self.model(payload["text"])[0]
 
 
-api = cx.deploy(
+api = cx.create_api(
     api_config,
     predictor=PythonPredictor,
     requirements=["torch", "transformers"],
diff --git a/dev/generate_cli_md.sh b/dev/generate_cli_md.sh
index abd98ae440..c4a9328a76 100755
--- a/dev/generate_cli_md.sh
+++ b/dev/generate_cli_md.sh
@@ -57,7 +57,7 @@ for cmd in "${commands[@]}"; do
   echo '' >> $out_file
   echo "### ${cmd}" >> $out_file
   echo '' >> $out_file
-  echo -n '```text' >> $out_file
+  echo '```text' >> $out_file
   $ROOT/bin/cortex help ${cmd} >> $out_file
   echo '```' >> $out_file
 done
diff --git a/dev/generate_python_client_md.sh b/dev/generate_python_client_md.sh
index daa5a680c8..fd68e9130f 100755
--- a/dev/generate_python_client_md.sh
+++ b/dev/generate_python_client_md.sh
@@ -45,7 +45,7 @@ sed -i "/\* \[Client](#cortex\.client\.Client)/d" $ROOT/docs/miscellaneous/python-client.md
 sed -i "s/\* \[cortex\.client](#cortex\.client)/\* [cortex\.client\.Client](#cortex-client-client)/g" $ROOT/docs/miscellaneous/python-client.md
 sed -i "s/# cortex\.client/# cortex\.client\.Client/g" $ROOT/docs/miscellaneous/python-client.md
 # delete unnecessary section body
-sed -i "/# cortex.client.Client/,/## deploy/{//!d}" $ROOT/docs/miscellaneous/python-client.md
+sed -i "/# cortex.client.Client/,/## create\\\_api/{//!d}" $ROOT/docs/miscellaneous/python-client.md
 sed -i "s/# cortex.client.Client/# cortex.client.Client\n/g" $ROOT/docs/miscellaneous/python-client.md
 
 # fix table of contents links
@@ -61,7 +61,7 @@ sed -i 's/[[:space:]]*$//' $ROOT/docs/miscellaneous/python-client.md
 truncate -s -1 $ROOT/docs/miscellaneous/python-client.md
 
 # Cortex version comment
-sed -i "s/^## deploy/## deploy\n\n/g" $ROOT/docs/miscellaneous/python-client.md
+sed -i "s/^## create\\\_api/## create\\\_api\n\n/g" $ROOT/docs/miscellaneous/python-client.md
 
 pip3 uninstall -y cortex
 rm -rf $ROOT/pkg/workloads/cortex/client/cortex.egg-info
diff --git a/docs/miscellaneous/python-client.md b/docs/miscellaneous/python-client.md
index f8c767672b..6c98188c56 100644
--- a/docs/miscellaneous/python-client.md
+++ b/docs/miscellaneous/python-client.md
@@ -9,7 +9,7 @@ _WARNING: you are on the master branch, please refer to the docs on the branch t
   * [env\_list](#env_list)
   * [env\_delete](#env_delete)
 * [cortex.client.Client](#cortex-client-client)
-  * [deploy](#deploy)
+  * [create\_api](#create_api)
   * [get\_api](#get_api)
   * [list\_apis](#list_apis)
   * [get\_job](#get_job)
@@ -104,12 +104,12 @@ Delete an environment configured on this machine.
 
 # cortex.client.Client
 
-## deploy
+## create\_api
 
 ```python
- | deploy(api_spec: dict, predictor=None, requirements=[], conda_packages=[], project_dir: Optional[str] = None, force: bool = True, wait: bool = False) -> list
+ | create_api(api_spec: dict, predictor=None, requirements=[], conda_packages=[], project_dir: Optional[str] = None, force: bool = True, wait: bool = False) -> list
 ```
 
 Deploy an API.
diff --git a/examples/pytorch/text-generator/README.md b/examples/pytorch/text-generator/README.md
index f99417e3b4..99d1db2b1f 100644
--- a/examples/pytorch/text-generator/README.md
+++ b/examples/pytorch/text-generator/README.md
@@ -59,7 +59,7 @@ api_spec = {
     }
 }
 
-cx_local.deploy(api_spec, project_dir=".", wait=True)
+cx_local.create_api(api_spec, project_dir=".", wait=True)
 ```
 
 ## Consume your API
@@ -126,7 +126,7 @@ api_spec = {
     }
 }
 
-cx_aws.deploy(api_spec, project_dir=".")
+cx_aws.create_api(api_spec, project_dir=".")
 ```
 
 Monitor the status of your APIs using `cortex get` using your CLI:
@@ -169,7 +169,7 @@ api_spec = {
     }
 }
 
-cx_aws.deploy(api_spec, project_dir=".")
+cx_aws.create_api(api_spec, project_dir=".")
 ```
 
 As your new API is initializing, the old API will continue to respond to prediction requests. Once the API's status becomes "live" (with one up-to-date replica), traffic will be routed to the updated version. You can track the status of your API using `cortex get`:
diff --git a/examples/pytorch/text-generator/deploy.ipynb b/examples/pytorch/text-generator/deploy.ipynb
index 5ffbce9caa..dfddc0e213 100644
--- a/examples/pytorch/text-generator/deploy.ipynb
+++ b/examples/pytorch/text-generator/deploy.ipynb
@@ -39,7 +39,7 @@
     "    }\n",
     "}\n",
     "\n",
-    "cx.deploy(api_spec, project_dir=\".\", wait=True)"
+    "cx.create_api(api_spec, project_dir=\".\", wait=True)"
    ]
   },
   {
diff --git a/pkg/workloads/cortex/client/cortex/binary/__init__.py b/pkg/workloads/cortex/client/cortex/binary/__init__.py
index bf78481c09..4c1df04be5 100644
--- a/pkg/workloads/cortex/client/cortex/binary/__init__.py
+++ b/pkg/workloads/cortex/client/cortex/binary/__init__.py
@@ -67,29 +67,36 @@ def run_cli(
     output = ""
     result = ""
-    result_found = False
+    processing_result = False
+    processed_result = False
     for c in iter(lambda: process.stdout.read(1), ""):
         output += c
+
         if mixed_output:
-            if output[-2:] == "\n~":
-                result_found = True
-                result = "~"
-                output = output[:-1]
-            if result_found:
+            if output[-2:] == "\n~" or output == "~":
+                processing_result = True
                 output = output[:-1]
-                if c == "\n":
-                    result_found = False
+            if processing_result:
+                result += c
+                if (
+                    result[: len(MIXED_CORTEX_MARKER)] == MIXED_CORTEX_MARKER
+                    and result[-len(MIXED_CORTEX_MARKER) :] == MIXED_CORTEX_MARKER
+                    and len(result) > len(MIXED_CORTEX_MARKER)
+                ):
                     result = result[len(MIXED_CORTEX_MARKER) : -len(MIXED_CORTEX_MARKER)]
                     result = base64.b64decode(result).decode("utf8")
-                else:
-                    result += c
+                    processed_result = True
+                    output = output[:-1]
 
         if not hide_output:
-            if (not mixed_output) or (mixed_output and not result_found):
+            if (not mixed_output) or (mixed_output and not processing_result):
                 sys.stdout.write(c)
                 sys.stdout.flush()
 
+        if processed_result == True:
+            processing_result = False
+
     process.wait()
 
     if process.returncode == 0:
diff --git a/pkg/workloads/cortex/client/cortex/client.py b/pkg/workloads/cortex/client/cortex/client.py
index 94c41a0cb0..27722bcaa2 100644
--- a/pkg/workloads/cortex/client/cortex/client.py
+++ b/pkg/workloads/cortex/client/cortex/client.py
@@ -45,7 +45,7 @@ def __init__(self, env: dict):
         self.env_name = env["name"]
 
     # CORTEX_VERSION_MINOR x5
-    def deploy(
+    def create_api(
        self,
        api_spec: dict,
        predictor=None,
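A note on the mixed-output convention this diff touches: `mixedPrint` in `cli/cmd/root.go` wraps a base64-encoded JSON payload in `~~cortex~~` markers and now emits it with `fmt.Print` (no trailing newline), while `run_cli` in `pkg/workloads/cortex/client/cortex/binary/__init__.py` withholds that marker-delimited span from the echoed stdout and decodes it once both markers have been seen. The sketch below shows the same idea in a simplified, non-streaming form; it is not the PR's implementation, and the helper name `split_mixed_output` is invented for illustration.

```python
# Illustrative sketch of the ~~cortex~~ mixed-output convention (not the code in
# binary/__init__.py, which parses the stream character by character as it arrives).
import base64
import json

MIXED_CORTEX_MARKER = "~~cortex~~"  # marker written by mixedPrint in cli/cmd/root.go


def split_mixed_output(stdout_text: str):
    """Split a captured CLI stream into (human-readable output, decoded result)."""
    start = stdout_text.find(MIXED_CORTEX_MARKER)
    end = stdout_text.rfind(MIXED_CORTEX_MARKER)
    if start == -1 or end <= start:
        return stdout_text, None  # no embedded result in the stream
    payload = stdout_text[start + len(MIXED_CORTEX_MARKER) : end]
    human = stdout_text[:start] + stdout_text[end + len(MIXED_CORTEX_MARKER) :]
    return human, json.loads(base64.b64decode(payload).decode("utf8"))


if __name__ == "__main__":
    encoded = base64.b64encode(json.dumps([{"name": "text-generator"}]).encode()).decode()
    stream = "creating text-generator\n" + MIXED_CORTEX_MARKER + encoded + MIXED_CORTEX_MARKER
    human, result = split_mixed_output(stream)
    print(human)   # the human-readable text, with the marker span removed
    print(result)  # the decoded payload: [{'name': 'text-generator'}]
```

The character-by-character loop in `run_cli` exists so that progress output can still be echoed live while the marker-delimited result is kept off the terminal; the sketch above only illustrates the framing and decoding step on a fully captured stream.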