diff --git a/dev/generate_python_client_md.sh b/dev/generate_python_client_md.sh index 1973082ca5..0b0215e76f 100755 --- a/dev/generate_python_client_md.sh +++ b/dev/generate_python_client_md.sh @@ -48,7 +48,7 @@ sed -i "/\* \[Client](#cortex\.client\.Client)/d" $docs_path sed -i "s/\* \[cortex\.client](#cortex\.client)/\* [cortex\.client\.Client](#cortex-client-client)/g" $docs_path sed -i "s/# cortex\.client/# cortex\.client\.Client/g" $docs_path # delete unnecessary section body -sed -i "/# cortex.client.Client/,/## create\\\_api/{//!d}" $docs_path +sed -i "/# cortex.client.Client/,/## deploy/{//!d}" $docs_path sed -i "s/# cortex.client.Client/# cortex.client.Client\n/g" $docs_path # fix table of contents links @@ -64,7 +64,12 @@ sed -i 's/[[:space:]]*$//' $docs_path truncate -s -1 $docs_path # Cortex version comment -sed -i "s/^## create\\\_api/## create\\\_api\n\n/g" $docs_path +sed -i "s/^## deploy$/## deploy\n\n/g" $docs_path +sed -i "s/^## deploy\\\_realtime\\\_api$/## deploy\\\_realtime\\\_api\n\n/g" $docs_path +sed -i "s/^## deploy\\\_async\\\_api$/## deploy\\\_async\\\_api\n\n/g" $docs_path +sed -i "s/^## deploy\\\_batch\\\_api$/## deploy\\\_batch\\\_api\n\n/g" $docs_path +sed -i "s/^## deploy\\\_task\\\_api$/## deploy\\\_task\\\_api\n\n/g" $docs_path +sed -i "s/^## deploy\\\_traffic\\\_splitter$/## deploy\\\_traffic\\\_splitter\n\n/g" $docs_path pip3 uninstall -y cortex rm -rf $ROOT/python/client/cortex.egg-info diff --git a/docs/clients/python.md b/docs/clients/python.md index a832c6bea7..a12f1d4ba8 100644 --- a/docs/clients/python.md +++ b/docs/clients/python.md @@ -6,13 +6,18 @@ * [env\_list](#env_list) * [env\_delete](#env_delete) * [cortex.client.Client](#cortex-client-client) - * [create\_api](#create_api) + * [deploy](#deploy) + * [deploy\_realtime\_api](#deploy_realtime_api) + * [deploy\_async\_api](#deploy_async_api) + * [deploy\_batch\_api](#deploy_batch_api) + * [deploy\_task\_api](#deploy_task_api) + * [deploy\_traffic\_splitter](#deploy_traffic_splitter) * [get\_api](#get_api) * [list\_apis](#list_apis) * [get\_job](#get_job) * [refresh](#refresh) * [patch](#patch) - * [delete\_api](#delete_api) + * [delete](#delete) * [stop\_job](#stop_job) * [stream\_api\_logs](#stream_api_logs) * [stream\_job\_logs](#stream_job_logs) @@ -57,7 +62,7 @@ Create a new environment to connect to an existing cluster, and initialize a cli ## env\_list ```python -env_list() -> list +env_list() -> List ``` List all environments configured on this machine. @@ -76,28 +81,134 @@ Delete an environment configured on this machine. # cortex.client.Client -## create\_api +## deploy ```python - | create_api(api_spec: dict, handler=None, task=None, requirements=[], conda_packages=[], project_dir: Optional[str] = None, force: bool = True, wait: bool = False) -> list + | deploy(api_spec: Dict[str, Any], project_dir: str, force: bool = True, wait: bool = False) ``` -Deploy an API. +Deploy API(s) from a project directory. **Arguments**: - `api_spec` - A dictionary defining a single Cortex API. See https://docs.cortex.dev/v/master/ for schema. -- `handler` - A Cortex handler class implementation. Not required for TaskAPI/TrafficSplitter kinds. -- `task` - A callable class/function implementation. Not required for RealtimeAPI/BatchAPI/TrafficSplitter kinds. +- `project_dir` - Path to a python project. +- `force` - Override any in-progress api updates. +- `wait` - Streams logs until the APIs are ready. + + +**Returns**: + + Deployment status, API specification, and endpoint for each API. 
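For example, a minimal sketch of the project-directory flow documented above (the API name, handler file, and spec contents are illustrative assumptions, not taken from this change):

```python
import cortex

# illustrative spec; see the configuration docs for the full schema of each kind
api_spec = {
    "name": "text-generator",  # hypothetical API name
    "kind": "RealtimeAPI",
    "handler": {"type": "python", "path": "handler.py"},  # handler.py assumed to exist in the project
}

cx = cortex.client()  # client pointing to the default environment
# package the current project directory and deploy the API
print(cx.deploy(api_spec, project_dir="."))  # deployment status, spec, and endpoint
```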
+
+## deploy\_realtime\_api
+
+
+
+```python
+ | deploy_realtime_api(api_spec: Dict[str, Any], handler, requirements: Optional[List] = None, conda_packages: Optional[List] = None, force: bool = True, wait: bool = False) -> Dict
+```
+
+Deploy a Realtime API.
+
+**Arguments**:
+
+- `api_spec` - A dictionary defining a single Cortex API. See https://docs.cortex.dev/v/master/workloads/realtime-apis/configuration for schema.
+- `handler` - A Cortex Handler class implementation.
+- `requirements` - A list of PyPI dependencies that will be installed before the handler class implementation is invoked.
+- `conda_packages` - A list of Conda dependencies that will be installed before the handler class implementation is invoked.
+- `force` - Override any in-progress api updates.
+- `wait` - Streams logs until the APIs are ready.
+
+
+**Returns**:
+
+  Deployment status, API specification, and endpoint for each API.
+
+## deploy\_async\_api
+
+
+
+```python
+ | deploy_async_api(api_spec: Dict[str, Any], handler, requirements: Optional[List] = None, conda_packages: Optional[List] = None, force: bool = True) -> Dict
+```
+
+Deploy an Async API.
+
+**Arguments**:
+
+- `api_spec` - A dictionary defining a single Cortex API. See https://docs.cortex.dev/v/master/workloads/async-apis/configuration for schema.
+- `handler` - A Cortex Handler class implementation.
+- `requirements` - A list of PyPI dependencies that will be installed before the handler class implementation is invoked.
+- `conda_packages` - A list of Conda dependencies that will be installed before the handler class implementation is invoked.
+- `force` - Override any in-progress api updates.
+
+
+**Returns**:
+
+  Deployment status, API specification, and endpoint for each API.
+
+## deploy\_batch\_api
+
+
+
+```python
+ | deploy_batch_api(api_spec: Dict[str, Any], handler, requirements: Optional[List] = None, conda_packages: Optional[List] = None) -> Dict
+```
+
+Deploy a Batch API.
+
+**Arguments**:
+
+- `api_spec` - A dictionary defining a single Cortex API. See https://docs.cortex.dev/v/master/workloads/batch-apis/configuration for schema.
+- `handler` - A Cortex Handler class implementation.
+- `requirements` - A list of PyPI dependencies that will be installed before the handler class implementation is invoked.
+- `conda_packages` - A list of Conda dependencies that will be installed before the handler class implementation is invoked.
+
+
+**Returns**:
+
+  Deployment status, API specification, and endpoint for each API.
+
+## deploy\_task\_api
+
+
+
+```python
+ | deploy_task_api(api_spec: Dict[str, Any], task, requirements: Optional[List] = None, conda_packages: Optional[List] = None) -> Dict
+```
+
+Deploy a Task API.
+
+**Arguments**:
+
+- `api_spec` - A dictionary defining a single Cortex API. See https://docs.cortex.dev/v/master/workloads/task-apis/configuration for schema.
+- `task` - A callable class implementation.
+- `requirements` - A list of PyPI dependencies that will be installed before the task implementation is invoked.
+- `conda_packages` - A list of Conda dependencies that will be installed before the task implementation is invoked.
+
+
+**Returns**:
+
+  Deployment status, API specification, and endpoint for each API.
+
+## deploy\_traffic\_splitter
+
+
+
+```python
+ | deploy_traffic_splitter(api_spec: Dict[str, Any]) -> Dict
+```
+
+Deploy a Traffic Splitter.
+
+**Arguments**:
+
+- `api_spec` - A dictionary defining a single Cortex API. 
See https://docs.cortex.dev/v/master/workloads/realtime-apis/traffic-splitter/configuration for schema. + + **Returns**: Deployment status, API specification, and endpoint for each API. @@ -105,7 +216,7 @@ Deploy an API. ## get\_api ```python - | get_api(api_name: str) -> dict + | get_api(api_name: str) -> Dict ``` Get information about an API. @@ -122,7 +233,7 @@ Get information about an API. ## list\_apis ```python - | list_apis() -> list + | list_apis() -> List ``` List all APIs in the environment. @@ -134,7 +245,7 @@ List all APIs in the environment. ## get\_job ```python - | get_job(api_name: str, job_id: str) -> dict + | get_job(api_name: str, job_id: str) -> Dict ``` Get information about a submitted job. @@ -165,7 +276,7 @@ Restart all of the replicas for a Realtime API without downtime. ## patch ```python - | patch(api_spec: dict, force: bool = False) -> dict + | patch(api_spec: Dict, force: bool = False) -> Dict ``` Update the api specification for an API that has already been deployed. @@ -175,10 +286,10 @@ Update the api specification for an API that has already been deployed. - `api_spec` - The new api specification to apply - `force` - Override an already in-progress API update. -## delete\_api +## delete ```python - | delete_api(api_name: str, keep_cache: bool = False) + | delete(api_name: str, keep_cache: bool = False) ``` Delete an API. diff --git a/docs/workloads/batch/handler.md b/docs/workloads/batch/handler.md index 501cc1f17d..74e8050307 100644 --- a/docs/workloads/batch/handler.md +++ b/docs/workloads/batch/handler.md @@ -121,5 +121,5 @@ class Handler: # get client pointing to the default environment client = cortex.client() # deploy API in the existing cluster using the artifacts in the previous step - client.create_api(...) + client.deploy(...) 
``` diff --git a/docs/workloads/dependencies/example.md b/docs/workloads/dependencies/example.md index efb6202f5f..2ff7a58197 100644 --- a/docs/workloads/dependencies/example.md +++ b/docs/workloads/dependencies/example.md @@ -41,7 +41,7 @@ api_spec = { } cx = cortex.client("aws") -cx.create_api(api_spec, project_dir=".") +cx.deploy(api_spec, project_dir=".") ``` ## Deploy using the CLI diff --git a/docs/workloads/realtime/multi-model/example.md b/docs/workloads/realtime/multi-model/example.md index 279abba8af..6eee9d9797 100644 --- a/docs/workloads/realtime/multi-model/example.md +++ b/docs/workloads/realtime/multi-model/example.md @@ -33,7 +33,7 @@ requirements = ["tensorflow", "transformers", "wget", "fasttext"] api_spec = {"name": "multi-model", "kind": "RealtimeAPI"} cx = cortex.client("aws") -cx.create_api(api_spec, handler=Handler, requirements=requirements) +cx.deploy_realtime_api(api_spec, handler=Handler, requirements=requirements) ``` ## Deploy diff --git a/docs/workloads/realtime/traffic-splitter/example.md b/docs/workloads/realtime/traffic-splitter/example.md index 3a26bfebcd..04034dc330 100644 --- a/docs/workloads/realtime/traffic-splitter/example.md +++ b/docs/workloads/realtime/traffic-splitter/example.md @@ -34,8 +34,8 @@ api_spec_gpu = { } cx = cortex.client("aws") -cx.create_api(api_spec_cpu, handler=Handler, requirements=requirements) -cx.create_api(api_spec_gpu, handler=Handler, requirements=requirements) +cx.deploy_realtime_api(api_spec_cpu, handler=Handler, requirements=requirements) +cx.deploy_realtime_api(api_spec_gpu, handler=Handler, requirements=requirements) ``` ## Deploy a traffic splitter @@ -50,7 +50,7 @@ traffic_splitter_spec = { ], } -cx.create_api(traffic_splitter_spec) +cx.deploy_traffic_splitter(traffic_splitter_spec) ``` ## Update the weights of the traffic splitter diff --git a/docs/workloads/task/definitions.md b/docs/workloads/task/definitions.md index 2f930c1cc9..edfffd93e4 100644 --- a/docs/workloads/task/definitions.md +++ b/docs/workloads/task/definitions.md @@ -87,5 +87,5 @@ class Task: # get client pointing to the default environment client = cortex.client() # deploy API in the existing cluster as part of your pipeline workflow - client.create_api(...) + client.deploy(...) ``` diff --git a/python/client/cortex/__init__.py b/python/client/cortex/__init__.py index 5a857999ab..dc400dedc6 100644 --- a/python/client/cortex/__init__.py +++ b/python/client/cortex/__init__.py @@ -13,7 +13,7 @@ # limitations under the License. import json -from typing import Optional +from typing import Optional, List from cortex.binary import run_cli from cortex.client import Client @@ -79,7 +79,7 @@ def new_client( @sentry_wrapper -def env_list() -> list: +def env_list() -> List: """ List all environments configured on this machine. """ diff --git a/python/client/cortex/client.py b/python/client/cortex/client.py index b73b6072ce..22e5bcf333 100644 --- a/python/client/cortex/client.py +++ b/python/client/cortex/client.py @@ -22,19 +22,21 @@ import time import uuid from pathlib import Path -from typing import Optional +from typing import Optional, List, Dict, Any import dill import yaml + from cortex import util from cortex.binary import run_cli, get_cli_path from cortex.consts import EXPECTED_PYTHON_VERSION from cortex.telemetry import sentry_wrapper +from cortex.exceptions import InvalidKindForMethod class Client: @sentry_wrapper - def __init__(self, env: dict): + def __init__(self, env: Dict): """ A client to deploy and manage APIs in the specified environment. 
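In the examples elsewhere in this change, a `Client` is obtained through `cortex.client(...)` rather than constructed directly; a minimal sketch, assuming an environment named "aws" is already configured:

```python
import cortex

cx = cortex.client("aws")  # Client bound to the environment named "aws" (illustrative name)
print(cx.env_name)         # the environment this client deploys into
```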
@@ -45,19 +47,212 @@ def __init__(self, env: dict): self.env = env self.env_name = env["name"] + # CORTEX_VERSION_MINOR + def deploy( + self, + api_spec: Dict[str, Any], + project_dir: str, + force: bool = True, + wait: bool = False, + ): + """ + Deploy API(s) from a project directory. + + Args: + api_spec: A dictionary defining a single Cortex API. See https://docs.cortex.dev/v/master/ for schema. + project_dir: Path to a python project. + force: Override any in-progress api updates. + wait: Streams logs until the APIs are ready. + + Returns: + Deployment status, API specification, and endpoint for each API. + """ + return self._create_api( + api_spec=api_spec, + project_dir=project_dir, + force=force, + wait=wait, + ) + + # CORTEX_VERSION_MINOR + def deploy_realtime_api( + self, + api_spec: Dict[str, Any], + handler, + requirements: Optional[List] = None, + conda_packages: Optional[List] = None, + force: bool = True, + wait: bool = False, + ) -> Dict: + """ + Deploy a Realtime API. + + Args: + api_spec: A dictionary defining a single Cortex API. See https://docs.cortex.dev/v/master/workloads/realtime-apis/configuration for schema. + handler: A Cortex Handler class implementation. + requirements: A list of PyPI dependencies that will be installed before the handler class implementation is invoked. + conda_packages: A list of Conda dependencies that will be installed before the handler class implementation is invoked. + force: Override any in-progress api updates. + wait: Streams logs until the APIs are ready. + + Returns: + Deployment status, API specification, and endpoint for each API. + """ + kind = api_spec.get("kind") + if kind != "RealtimeAPI": + raise InvalidKindForMethod( + f"expected an api_spec with kind 'RealtimeAPI', got kind '{kind}' instead" + ) + + return self._create_api( + api_spec=api_spec, + handler=handler, + requirements=requirements, + conda_packages=conda_packages, + force=force, + wait=wait, + ) + + # CORTEX_VERSION_MINOR + def deploy_async_api( + self, + api_spec: Dict[str, Any], + handler, + requirements: Optional[List] = None, + conda_packages: Optional[List] = None, + force: bool = True, + ) -> Dict: + """ + Deploy an Async API. + + Args: + api_spec: A dictionary defining a single Cortex API. See https://docs.cortex.dev/v/master/workloads/async-apis/configuration for schema. + handler: A Cortex Handler class implementation. + requirements: A list of PyPI dependencies that will be installed before the handler class implementation is invoked. + conda_packages: A list of Conda dependencies that will be installed before the handler class implementation is invoked. + force: Override any in-progress api updates. + + Returns: + Deployment status, API specification, and endpoint for each API. + """ + kind = api_spec.get("kind") + if kind != "AsyncAPI": + raise InvalidKindForMethod( + f"expected an api_spec with kind 'AsyncAPI', got kind '{kind}' instead" + ) + + return self._create_api( + api_spec=api_spec, + handler=handler, + requirements=requirements, + conda_packages=conda_packages, + force=force, + ) + + # CORTEX_VERSION_MINOR + def deploy_batch_api( + self, + api_spec: Dict[str, Any], + handler, + requirements: Optional[List] = None, + conda_packages: Optional[List] = None, + ) -> Dict: + """ + Deploy a Batch API. + + Args: + api_spec: A dictionary defining a single Cortex API. See https://docs.cortex.dev/v/master/workloads/batch-apis/configuration for schema. + handler: A Cortex Handler class implementation. 
+            requirements: A list of PyPI dependencies that will be installed before the handler class implementation is invoked.
+            conda_packages: A list of Conda dependencies that will be installed before the handler class implementation is invoked.
+
+        Returns:
+            Deployment status, API specification, and endpoint for each API.
+        """
+
+        kind = api_spec.get("kind")
+        if kind != "BatchAPI":
+            raise InvalidKindForMethod(
+                f"expected an api_spec with kind 'BatchAPI', got kind '{kind}' instead"
+            )
+
+        return self._create_api(
+            api_spec=api_spec,
+            handler=handler,
+            requirements=requirements,
+            conda_packages=conda_packages,
+        )
+
+    # CORTEX_VERSION_MINOR
+    def deploy_task_api(
+        self,
+        api_spec: Dict[str, Any],
+        task,
+        requirements: Optional[List] = None,
+        conda_packages: Optional[List] = None,
+    ) -> Dict:
+        """
+        Deploy a Task API.
+
+        Args:
+            api_spec: A dictionary defining a single Cortex API. See https://docs.cortex.dev/v/master/workloads/task-apis/configuration for schema.
+            task: A callable class implementation.
+            requirements: A list of PyPI dependencies that will be installed before the task implementation is invoked.
+            conda_packages: A list of Conda dependencies that will be installed before the task implementation is invoked.
+
+        Returns:
+            Deployment status, API specification, and endpoint for each API.
+        """
+        kind = api_spec.get("kind")
+        if kind != "TaskAPI":
+            raise InvalidKindForMethod(
+                f"expected an api_spec with kind 'TaskAPI', got kind '{kind}' instead"
+            )
+
+        return self._create_api(
+            api_spec=api_spec,
+            task=task,
+            requirements=requirements,
+            conda_packages=conda_packages,
+        )
+
+    # CORTEX_VERSION_MINOR
+    def deploy_traffic_splitter(
+        self,
+        api_spec: Dict[str, Any],
+    ) -> Dict:
+        """
+        Deploy a Traffic Splitter.
+
+        Args:
+            api_spec: A dictionary defining a single Cortex API. See https://docs.cortex.dev/v/master/workloads/realtime-apis/traffic-splitter/configuration for schema.
+
+        Returns:
+            Deployment status, API specification, and endpoint for each API.
+        """
+        kind = api_spec.get("kind")
+        if kind != "TrafficSplitter":
+            raise InvalidKindForMethod(
+                f"expected an api_spec with kind 'TrafficSplitter', got kind '{kind}' instead"
+            )
+
+        return self._create_api(
+            api_spec=api_spec,
+        )
+
     # CORTEX_VERSION_MINOR
     @sentry_wrapper
-    def create_api(
+    def _create_api(
         self,
-        api_spec: dict,
+        api_spec: Dict,
         handler=None,
         task=None,
-        requirements=[],
-        conda_packages=[],
+        requirements: Optional[List] = None,
+        conda_packages: Optional[List] = None,
         project_dir: Optional[str] = None,
         force: bool = True,
         wait: bool = False,
-    ) -> list:
+    ) -> Dict:
         """
         Deploy an API.
 
@@ -75,6 +270,9 @@ def create_api(
             Deployment status, API specification, and endpoint for each API.
""" + requirements = requirements if requirements is not None else [] + conda_packages = conda_packages if conda_packages is not None else [] + if project_dir is not None: if handler is not None: raise ValueError( @@ -110,7 +308,8 @@ def create_api( raise ValueError(f"`task` parameter cannot be specified for {api_kind}") else: raise ValueError( - f"invalid {api_kind} kind, `api_spec` must have the `kind` field set to one of the following kinds: {['TrafficSplitter', 'TaskAPI', 'BatchAPI', 'RealtimeAPI']}" + f"invalid {api_kind} kind, `api_spec` must have the `kind` field set to one of the following kinds: " + f"{['TrafficSplitter', 'TaskAPI', 'BatchAPI', 'RealtimeAPI']}" ) if api_spec.get("name") is None: @@ -156,9 +355,6 @@ def create_api( if not inspect.isclass(handler): raise ValueError("`handler` parameter must be a class definition") - impl_rel_path = self._save_impl(handler, project_dir, "handler") - api_spec["handler"]["path"] = impl_rel_path - if api_spec.get("handler") is None: raise ValueError("`api_spec` must have the `handler` section defined") @@ -167,10 +363,14 @@ def create_api( "the `type` field in the `handler` section of the `api_spec` must be set (tensorflow or python)" ) + impl_rel_path = self._save_impl(handler, project_dir, "handler") + api_spec["handler"]["path"] = impl_rel_path + if api_kind == "TaskAPI": if not callable(task): raise ValueError( - "`task` parameter must be a callable (e.g. a function definition or a class definition called `Task` with a `__call__` method implemented" + "`task` parameter must be a callable (e.g. a function definition or a class definition called " + "`Task` with a `__call__` method implemented " ) impl_rel_path = self._save_impl(task, project_dir, "task") @@ -204,7 +404,7 @@ def _deploy( config_file: str, force: bool = False, wait: bool = False, - ) -> list: + ) -> Dict: """ Deploy or update APIs specified in the config_file. @@ -278,7 +478,7 @@ def stream_to_stdout(process): return api @sentry_wrapper - def get_api(self, api_name: str) -> dict: + def get_api(self, api_name: str) -> Dict: """ Get information about an API. @@ -294,7 +494,7 @@ def get_api(self, api_name: str) -> dict: return apis[0] @sentry_wrapper - def list_apis(self) -> list: + def list_apis(self) -> List: """ List all APIs in the environment. @@ -308,7 +508,7 @@ def list_apis(self) -> list: return json.loads(output.strip()) @sentry_wrapper - def get_job(self, api_name: str, job_id: str) -> dict: + def get_job(self, api_name: str, job_id: str) -> Dict: """ Get information about a submitted job. @@ -342,7 +542,7 @@ def refresh(self, api_name: str, force: bool = False): run_cli(args, hide_output=True) @sentry_wrapper - def patch(self, api_spec: dict, force: bool = False) -> dict: + def patch(self, api_spec: Dict, force: bool = False) -> Dict: """ Update the api specification for an API that has already been deployed. @@ -365,7 +565,7 @@ def patch(self, api_spec: dict, force: bool = False) -> dict: return json.loads(output.strip()) @sentry_wrapper - def delete_api(self, api_name: str, keep_cache: bool = False): + def delete(self, api_name: str, keep_cache: bool = False): """ Delete an API. 
diff --git a/python/client/cortex/exceptions.py b/python/client/cortex/exceptions.py index 5505156a65..16acded963 100644 --- a/python/client/cortex/exceptions.py +++ b/python/client/cortex/exceptions.py @@ -35,3 +35,11 @@ class NotFound(CortexException): """ pass + + +class InvalidKindForMethod(CortexException): + """ + Raise when the specified resource kind is not supported by the used python client method. + """ + + pass diff --git a/python/client/cortex/telemetry.py b/python/client/cortex/telemetry.py index d60fe2ffd1..d8674c8ed4 100644 --- a/python/client/cortex/telemetry.py +++ b/python/client/cortex/telemetry.py @@ -13,6 +13,7 @@ # limitations under the License. import os +from typing import Dict from uuid import uuid4 import sentry_sdk @@ -70,7 +71,7 @@ def _sentry_client( return client -def _create_default_scope(optional_tags: dict = {}) -> sentry_sdk.Scope: +def _create_default_scope(optional_tags: Dict = {}) -> sentry_sdk.Scope: """ Creates default scope. Adds user ID as tag to the reported event. Can add optional tags. diff --git a/test/apis/pytorch/iris-classifier/deploy.py b/test/apis/pytorch/iris-classifier/deploy.py index f5e468b948..9124b5d206 100644 --- a/test/apis/pytorch/iris-classifier/deploy.py +++ b/test/apis/pytorch/iris-classifier/deploy.py @@ -17,6 +17,6 @@ }, } -print(cx.create_api(api_spec, project_dir=dir_path)) +print(cx.deploy(api_spec, project_dir=dir_path)) -# cx.delete_api("iris-classifier") +# cx.delete("iris-classifier") diff --git a/test/apis/pytorch/text-generator/deploy_class.py b/test/apis/pytorch/text-generator/deploy_class.py index 5b246b93c2..312d55d21e 100644 --- a/test/apis/pytorch/text-generator/deploy_class.py +++ b/test/apis/pytorch/text-generator/deploy_class.py @@ -22,7 +22,7 @@ def handle_post(self, payload): return self.model(payload["text"])[0] -api = cx.create_api( +api = cx.deploy_realtime_api( api_spec, handler=Handler, requirements=["torch", "transformers"], @@ -37,4 +37,4 @@ def handle_post(self, payload): print(response.status_code) print(response.text) -cx.delete_api(api_spec["name"]) +cx.delete(api_spec["name"]) diff --git a/test/apis/sleep/deploy.py b/test/apis/sleep/deploy.py index f8d277206f..67d226f9eb 100644 --- a/test/apis/sleep/deploy.py +++ b/test/apis/sleep/deploy.py @@ -14,6 +14,6 @@ }, } -print(cx.create_api(api_spec, project_dir=dir_path)) +print(cx.deploy(api_spec, project_dir=dir_path)) -# cx.delete_api("sleep") +# cx.delete("sleep") diff --git a/test/e2e/e2e/tests.py b/test/e2e/e2e/tests.py index f37fcd3b73..cf2a5615a3 100644 --- a/test/e2e/e2e/tests.py +++ b/test/e2e/e2e/tests.py @@ -58,7 +58,7 @@ def delete_apis(client: cx.Client, api_names: List[str]): for name in api_names: - client.delete_api(name) + client.delete(name) def test_realtime_api( @@ -79,7 +79,7 @@ def test_realtime_api( api_name = api_specs[0]["name"] for api_spec in api_specs: - client.create_api(api_spec=api_spec, project_dir=str(api_dir)) + client.deploy(api_spec=api_spec, project_dir=str(api_dir)) try: assert apis_ready( @@ -147,7 +147,7 @@ def test_batch_api( assert len(api_specs) == 1 api_name = api_specs[0]["name"] - client.create_api(api_spec=api_specs[0], project_dir=str(api_dir)) + client.deploy(api_spec=api_specs[0], project_dir=str(api_dir)) try: endpoint_override = f"http://localhost:8888/batch/{api_name}" if local_operator else None @@ -234,7 +234,7 @@ def test_async_api( assert len(api_specs) == 1 api_name = api_specs[0]["name"] - client.create_api(api_spec=api_specs[0], project_dir=str(api_dir)) + 
client.deploy(api_spec=api_specs[0], project_dir=str(api_dir)) try: assert apis_ready( @@ -343,7 +343,7 @@ def test_task_api( assert len(api_specs) == 1 api_name = api_specs[0]["name"] - client.create_api(api_spec=api_specs[0], project_dir=str(api_dir)) + client.deploy(api_spec=api_specs[0], project_dir=str(api_dir)) try: endpoint_override = f"http://localhost:8888/tasks/{api_name}" if local_operator else None @@ -422,7 +422,7 @@ def test_autoscaling( "downscale_stabilization_period": "1m", } all_api_names.append(api_specs[0]["name"]) - client.create_api(api_spec=api_specs[0], project_dir=api_dir) + client.deploy(api_spec=api_specs[0], project_dir=api_dir) primary_api_name = all_api_names[0] autoscaling = client.get_api(primary_api_name)["spec"]["autoscaling"] @@ -534,7 +534,7 @@ def test_load_realtime( "max_replicas": desired_replicas, } api_name = api_specs[0]["name"] - client.create_api(api_spec=api_specs[0], project_dir=str(api_dir)) + client.deploy(api_spec=api_specs[0], project_dir=str(api_dir)) # controls the flow of requests request_stopper = td.Event() @@ -643,7 +643,7 @@ def test_load_async( "max_replicas": desired_replicas, } api_name = api_specs[0]["name"] - client.create_api(api_spec=api_specs[0], project_dir=str(api_dir)) + client.deploy(api_spec=api_specs[0], project_dir=str(api_dir)) request_stopper = td.Event() map_stopper = td.Event() @@ -767,7 +767,7 @@ def test_load_batch( sample_generator = load_generator(sample_generator_path) api_name = api_specs[0]["name"] - client.create_api(api_spec=api_specs[0], project_dir=str(api_dir)) + client.deploy(api_spec=api_specs[0], project_dir=str(api_dir)) api_endpoint = client.get_api(api_name)["endpoint"] try: @@ -860,7 +860,7 @@ def test_load_task( assert len(api_specs) == 1 api_name = api_specs[0]["name"] - client.create_api(api_spec=api_specs[0], project_dir=str(api_dir)) + client.deploy(api_spec=api_specs[0], project_dir=str(api_dir)) request_stopper = td.Event() map_stopper = td.Event() @@ -937,7 +937,7 @@ def test_long_running_realtime( api_name = api_specs[0]["name"] for api_spec in api_specs: - client.create_api(api_spec=api_spec, project_dir=str(api_dir)) + client.deploy(api_spec=api_spec, project_dir=str(api_dir)) try: assert apis_ready(
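Taken together, the call-site changes across the tests and examples amount to a mechanical migration; a sketch of the old versus new client calls (the spec, name, and project layout are illustrative):

```python
import cortex

cx = cortex.client()  # default environment

api_spec = {
    "name": "iris-classifier",  # name borrowed from the test APIs; contents illustrative
    "kind": "RealtimeAPI",
    "handler": {"type": "python", "path": "handler.py"},
}

# before this change:
#   cx.create_api(api_spec, project_dir=".")
#   cx.delete_api("iris-classifier")

# after: project-directory deploys go through deploy(), and delete_api() becomes delete()
print(cx.deploy(api_spec, project_dir="."))
cx.delete("iris-classifier")
```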