From 9b021f02554fd00e00f6fba6d69a1733f9ea85f7 Mon Sep 17 00:00:00 2001
From: Anindyadeep
Date: Fri, 10 May 2024 15:02:53 +0000
Subject: [PATCH 1/9] feat(dspy): add premai python sdk

---
 dsp/modules/premai.py | 151 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 151 insertions(+)
 create mode 100644 dsp/modules/premai.py

diff --git a/dsp/modules/premai.py b/dsp/modules/premai.py
new file mode 100644
index 0000000000..002a8f7b19
--- /dev/null
+++ b/dsp/modules/premai.py
@@ -0,0 +1,151 @@
+import logging
+from typing import Optional
+
+import backoff
+import premai.errors
+
+from dsp.modules.lm import LM
+
+try:
+    import premai
+except ImportError as err:
+    raise ImportError(
+        "Not loading Mistral AI because it is not installed. Install it with `pip install premai`.",
+    ) from err
+
+
+# Set up logging
+logger = logging.getLogger(__name__)
+logging.basicConfig(level=logging.INFO)
+
+
+class ChatPremAPIError(Exception):
+    """Error with the `PremAI` API."""
+
+
+ERROR = ChatPremAPIError
+
+
+def backoff_hdlr(details) -> None:
+    """Handler for the backoff package.
+
+    See more at: https://pypi.org/project/backoff/
+    """
+    logger.info(
+        "Backing off {wait:0.1f} seconds after {tries} tries calling function {target} with kwargs {kwargs}".format(
+            **details,
+        ),
+    )
+
+
+def giveup_hdlr(details) -> bool:
+    """Decides whether to give up on a retry; returns False to keep retrying rate-limit errors."""
+    if "rate limits" in str(details):
+        return False
+    return True
+
+
+class PremAI(LM):
+    """Wrapper around Prem AI's API."""
+
+    def __init__(
+        self,
+        model: str,
+        project_id: int,
+        api_key: str,
+        base_url: Optional[str] = None,
+        session_id: Optional[int] = None,
+        **kwargs,
+    ) -> None:
+        """Parameters
+
+        model: str
+            The name of the model
+        project_id: int
+            The project ID in which the experiments or deployments are carried out. You can find all your projects here: https://app.premai.io/projects/
+        api_key: str
+            Prem AI API key, to connect with the API.
+        session_id: int
+            The ID of the session to use. It helps to track the chat history.
+        **kwargs: dict
+            Additional arguments to pass to the API provider
+        """
+        super().__init__(model)
+        self.kwargs = kwargs if kwargs == {} else self.kwargs
+
+        self.project_id = project_id
+        self.session_id = session_id
+
+        if base_url is not None:
+            self.client = premai.Prem(api_key=api_key, base_url=base_url)
+        else:
+            self.client = premai.Prem(api_key=api_key)
+        self.provider = "premai"
+
+        self.kwargs = {
+            "model": model,
+            "temperature": 0.17,
+            "max_tokens": 150,
+            **kwargs,
+        }
+        if session_id is not None:
+            kwargs["sesion_id"] = session_id
+
+    def _get_all_kwargs(self, **kwargs) -> dict:
+        other_kwargs = {
+            "seed": None,
+            "logit_bias": None,
+            "tools": None,
+            "system_prompt": None,
+        }
+        all_kwargs = {
+            **self.kwargs,
+            **other_kwargs,
+            **kwargs,
+        }
+
+        _keys_that_cannot_be_none = [
+            "system_prompt",
+            "frequency_penalty",
+            "presence_penalty",
+            "tools",
+            "model",
+        ]
+
+        for key in _keys_that_cannot_be_none:
+            if all_kwargs.get(key) is None:
+                all_kwargs.pop(key, None)
+        return all_kwargs
+
+    def basic_request(self, prompt, **kwargs) -> str:
+        """Handles retrieval of completions from Prem AI whilst handling API errors."""
+        all_kwargs = self._get_all_kwargs(**kwargs)
+        message = []
+
+        if "system_prompt" in all_kwargs:
+            message.append({"role": "system", "content": all_kwargs["system_prompt"]})
+        message.append({"role": "user", "content": prompt})
+
+        response = self.client.chat.completions.create(
+            project_id=self.project_id,
+            messages=message,
+            **all_kwargs,
+        )
+        if not response.choices:
+            raise ChatPremAPIError("ChatResponse must have at least one candidate")
+
+        return response.choices[0].message.content or ""
+
+    @backoff.on_exception(
+        backoff.expo,
+        (ERROR),
+        max_time=1000,
+        on_backoff=backoff_hdlr,
+        giveup=giveup_hdlr,
+    )
+    def request(self, prompt, **kwargs) -> str:
+        """Handles retrieval of completions from Prem AI whilst handling API errors."""
+        return self.basic_request(prompt=prompt, **kwargs)
+
+    def __call__(self, prompt, **kwargs):
+        return self.request(prompt, **kwargs)

From 74944b33bac13793a736edebf0ccbcd6f425aaa8 Mon Sep 17 00:00:00 2001
From: Anindyadeep
Date: Fri, 10 May 2024 15:41:25 +0000
Subject: [PATCH 2/9] refactor(dspy): added premai integration in __init__
 files

---
 dsp/modules/__init__.py | 1 +
 dspy/__init__.py        | 1 +
 2 files changed, 2 insertions(+)

diff --git a/dsp/modules/__init__.py b/dsp/modules/__init__.py
index 829fd655b1..0b624965f6 100644
--- a/dsp/modules/__init__.py
+++ b/dsp/modules/__init__.py
@@ -19,6 +19,7 @@
 from .hf_client import Anyscale, HFClientTGI, Together
 from .mistral import *
 from .ollama import *
+from .premai import PremAI
 from .pyserini import *
 from .sbert import *
 from .sentence_vectorizer import *
diff --git a/dspy/__init__.py b/dspy/__init__.py
index 957e4755db..4ed6dbaa5e 100644
--- a/dspy/__init__.py
+++ b/dspy/__init__.py
@@ -43,6 +43,7 @@
 AWSMeta = dsp.AWSMeta
 
 Watsonx = dsp.Watsonx
+PremAI = dsp.PremAI
 
 configure = settings.configure
 context = settings.context

From 0df0730b57737db0eb732f89b0aa438ce327e692 Mon Sep 17 00:00:00 2001
From: Anindyadeep
Date: Mon, 13 May 2024 17:28:26 +0000
Subject: [PATCH 3/9] fix(dspy): lint and ruff fix

---
 docs/api/language_model_clients/PremAI.md | 70 +++++++++++++++++++++++
 dsp/modules/lm.py                         |  5 +-
 dsp/modules/premai.py                     | 50 +++++++++------
 3 files changed, 103 insertions(+), 22 deletions(-)
 create mode 100644 docs/api/language_model_clients/PremAI.md

diff --git a/docs/api/language_model_clients/PremAI.md b/docs/api/language_model_clients/PremAI.md
new file mode 100644
index 0000000000..6bc99c71b6
--- /dev/null
+++ b/docs/api/language_model_clients/PremAI.md
@@ -0,0 +1,70 @@
+---
+sidebar_position: 5
+---
+
+# dsp.PremAI
+
+[PremAI](https://app.premai.io) is a unified platform that lets you build powerful production-ready GenAI-powered applications with the least effort so that you can focus more on user experience and overall growth. With dspy, you can connect with several [best-in-class LLMs](https://models.premai.io/) of your choice with a single interface.
+
+### Prerequisites
+
+Refer to the [quick start](https://docs.premai.io/introduction) guide to get started with the PremAI platform: create your first project and grab your API key.
+
+### Usage
+
+Please make sure you have the premai Python SDK installed. If not, you can install it with this command:
+
+```bash
+pip install -U premai
+```
+
+Here is a quick example of how to use the premai Python SDK with dspy:
+
+```python
+from dspy import PremAI
+
+llm = PremAI(model='mistral-tiny', project_id=123, api_key="your-premai-api-key")
+print(llm("what is a large language model"))
+```
+
+> Please note: Project ID 123 is just an example. You can find your actual project ID on the platform, under the project you created.
+
+### Constructor
+
+The constructor initializes the base class `LM` and verifies the `api_key` provided or defined through the `PREMAI_API_KEY` environment variable.
+
+```python
+class PremAI(LM):
+    def __init__(
+        self,
+        model: str,
+        project_id: int,
+        api_key: Optional[str] = None,
+        session_id: Optional[int] = None,
+        **kwargs,
+    ) -> None:
+```
+
+**Parameters:**
+
+- `model` (_str_): Models supported by PremAI. Example: `mistral-tiny`. We recommend using the model selected in [project launchpad](https://docs.premai.io/get-started/launchpad).
+- `project_id` (_int_): The [project id](https://docs.premai.io/get-started/projects) which contains the model of choice.
+- `api_key` (_Optional[str]_, _optional_): API key from PremAI. If not provided, it is read from the `PREMAI_API_KEY` environment variable. Defaults to None.
+- `session_id` (_Optional[int]_, _optional_): The ID of the session to use. It helps to track the chat history.
+- `**kwargs`: Additional language model arguments that will be passed to the API provider.
+
+### Methods
+
+#### `__call__(self, prompt: str, **kwargs) -> str`
+
+Retrieves completions from PremAI by calling `request`.
+
+Internally, the method handles the specifics of preparing the request prompt and corresponding payload to obtain the response.
+
+**Parameters:**
+
+- `prompt` (_str_): Prompt to send to PremAI.
+- `**kwargs`: Additional keyword arguments for the completion request. Example: parameters like `temperature`, `max_tokens` etc. You can find all the additional kwargs [here](https://docs.premai.io/get-started/sdk#optional-parameters).
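+
+As a quick illustration, such kwargs can be passed per call, and they are forwarded to the PremAI chat completions API on top of the arguments set in the constructor. This is only a sketch: the values shown are placeholders, not defaults.
+
+```python
+from dspy import PremAI
+
+llm = PremAI(model='mistral-tiny', project_id=123, api_key="your-premai-api-key")
+
+# Per-call kwargs such as `temperature` and `max_tokens` override the
+# constructor-level arguments for this single completion request.
+response = llm("what is a large language model", temperature=0.5, max_tokens=100)
+print(response)
+```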
diff --git a/dsp/modules/lm.py b/dsp/modules/lm.py
index f56df569ea..a610d34633 100644
--- a/dsp/modules/lm.py
+++ b/dsp/modules/lm.py
@@ -52,6 +52,7 @@ def inspect_history(self, n: int = 1, skip: int = 0):
             or provider == "groq"
             or provider == "Bedrock"
             or provider == "Sagemaker"
+            or provider == "premai"
         ):
             printed.append((prompt, x["response"]))
         elif provider == "anthropic":
@@ -85,7 +86,7 @@ def inspect_history(self, n: int = 1, skip: int = 0):
         if provider == "cohere" or provider == "Bedrock" or provider == "Sagemaker":
             text = choices
         elif provider == "openai" or provider == "ollama":
-            text = ' ' + self._get_choice_text(choices[0]).strip()
+            text = " " + self._get_choice_text(choices[0]).strip()
         elif provider == "clarifai" or provider == "claude":
             text = choices
         elif provider == "groq":
@@ -98,6 +99,8 @@ def inspect_history(self, n: int = 1, skip: int = 0):
             text = choices[0]
         elif provider == "ibm":
             text = choices
+        elif provider == "premai":
+            text = choices
         else:
             text = choices[0]["text"]
         printing_value += self.print_green(text, end="")
diff --git a/dsp/modules/premai.py b/dsp/modules/premai.py
index 002a8f7b19..91180c9071 100644
--- a/dsp/modules/premai.py
+++ b/dsp/modules/premai.py
@@ -1,8 +1,7 @@
-import logging
-from typing import Optional
+import os
+from typing import Any, Optional
 
 import backoff
-import premai.errors
 
 from dsp.modules.lm import LM
 
@@ -10,15 +9,10 @@
 try:
     import premai
 except ImportError as err:
     raise ImportError(
-        "Not loading Mistral AI because it is not installed. Install it with `pip install premai`.",
+        "Not loading Prem AI because it is not installed. Install it with `pip install -U premai`.",
     ) from err
 
 
-# Set up logging
-logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
-
-
 class ChatPremAPIError(Exception):
     """Error with the `PremAI` API."""
 
@@ -31,7 +25,7 @@ def backoff_hdlr(details) -> None:
 
     See more at: https://pypi.org/project/backoff/
     """
-    logger.info(
+    print(
        "Backing off {wait:0.1f} seconds after {tries} tries calling function {target} with kwargs {kwargs}".format(
             **details,
         ),
     )
@@ -45,6 +39,16 @@ def giveup_hdlr(details) -> bool:
     return True
 
 
+def get_premai_api_key(api_key: Optional[str] = None) -> str:
+    """Retrieve the PremAI API key from a passed argument or environment variable."""
+    api_key = api_key or os.environ.get("PREMAI_API_KEY")
+    if api_key is None:
+        raise RuntimeError(
+            "No API key found. See the quick start guide at https://docs.premai.io/introduction to get your API key.",
+        )
+    return api_key
+
+
 class PremAI(LM):
     """Wrapper around Prem AI's API."""
 
@@ -52,8 +56,7 @@ def __init__(
         self,
         model: str,
         project_id: int,
-        api_key: str,
-        base_url: Optional[str] = None,
+        api_key: Optional[str] = None,
         session_id: Optional[int] = None,
         **kwargs,
     ) -> None:
@@ -63,9 +66,10 @@ def __init__(
             The name of the model
         project_id: int
             The project ID in which the experiments or deployments are carried out. You can find all your projects here: https://app.premai.io/projects/
-        api_key: str
-            Prem AI API key, to connect with the API.
-        session_id: int
+        api_key: Optional[str]
+            Prem AI API key, to connect with the API. If not provided, it will be read from
+            the PREMAI_API_KEY environment variable.
+        session_id: Optional[int]
             The ID of the session to use. It helps to track the chat history.
         **kwargs: dict
             Additional arguments to pass to the API provider
@@ -76,11 +80,10 @@ def __init__(
 
         self.project_id = project_id
         self.session_id = session_id
 
-        if base_url is not None:
-            self.client = premai.Prem(api_key=api_key, base_url=base_url)
-        else:
-            self.client = premai.Prem(api_key=api_key)
+        api_key = get_premai_api_key(api_key=api_key)
+        self.client = premai.Prem(api_key=api_key)
         self.provider = "premai"
+        self.history: list[dict[str, Any]] = []
 
         self.kwargs = {
             "model": model,
@@ -89,7 +92,7 @@ def __init__(
             **kwargs,
         }
         if session_id is not None:
-            kwargs["sesion_id"] = session_id
+            kwargs["session_id"] = session_id
 
     def _get_all_kwargs(self, **kwargs) -> dict:
         other_kwargs = {
@@ -134,7 +137,12 @@ def basic_request(self, prompt, **kwargs) -> str:
         if not response.choices:
             raise ChatPremAPIError("ChatResponse must have at least one candidate")
 
-        return response.choices[0].message.content or ""
+        content = response.choices[0].message.content
+        output_text = content or ""
+
+        self.history.append({"prompt": prompt, "response": content, "kwargs": all_kwargs, "raw_kwargs": kwargs})
+
+        return output_text
 
     @backoff.on_exception(
         backoff.expo,

From 852e3bf0f7818b402a32cf2905ee462310057cd3 Mon Sep 17 00:00:00 2001
From: Anindyadeep
Date: Thu, 16 May 2024 09:55:51 +0000
Subject: [PATCH 4/9] fix(dspy): lint and run

---
 dsp/modules/lm.py | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/dsp/modules/lm.py b/dsp/modules/lm.py
index a610d34633..8bed65aa36 100644
--- a/dsp/modules/lm.py
+++ b/dsp/modules/lm.py
@@ -56,7 +56,11 @@ def inspect_history(self, n: int = 1, skip: int = 0):
         ):
             printed.append((prompt, x["response"]))
         elif provider == "anthropic":
-            blocks = [{"text": block.text} for block in x["response"].content if block.type == "text"]
+            blocks = [
+                {"text": block.text}
+                for block in x["response"].content
+                if block.type == "text"
+            ]
             printed.append((prompt, blocks))
         elif provider == "cohere":
             printed.append((prompt, x["response"].text))
@@ -97,16 +101,16 @@ def inspect_history(self, n: int = 1, skip: int = 0):
             text = choices[0].message.content
         elif provider == "cloudflare":
             text = choices[0]
-        elif provider == "ibm":
-            text = choices
-        elif provider == "premai":
+        elif provider == "ibm" or provider == "premai":
             text = choices
         else:
             text = choices[0]["text"]
         printing_value += self.print_green(text, end="")
 
         if len(choices) > 1 and isinstance(choices, list):
-            printing_value += self.print_red(f" \t (and {len(choices)-1} other completions)", end="")
+            printing_value += self.print_red(
+                f" \t (and {len(choices)-1} other completions)", end="",
+            )
 
         printing_value += "\n\n\n"

From 86f5470fd30997c6dfec3847308103a8ec19ff91 Mon Sep 17 00:00:00 2001
From: Anindyadeep
Date: Thu, 16 May 2024 09:57:54 +0000
Subject: [PATCH 5/9] fix(dspy): imports and minor bugs

---
 dsp/modules/premai.py | 35 ++++++++++++++++++++++-------------
 1 file changed, 22 insertions(+), 13 deletions(-)

diff --git a/dsp/modules/premai.py b/dsp/modules/premai.py
index 91180c9071..e10d6b71de 100644
--- a/dsp/modules/premai.py
+++ b/dsp/modules/premai.py
@@ -7,17 +7,12 @@
 
 try:
     import premai
-except ImportError as err:
-    raise ImportError(
-        "Not loading Prem AI because it is not installed. Install it with `pip install -U premai`.",
-    ) from err
-
-
-class ChatPremAPIError(Exception):
-    """Error with the `PremAI` API."""
-
-
-ERROR = ChatPremAPIError
+
+    premai_error = premai.errors.UnexpectedStatus
+except ImportError:
+    premai_api_error = Exception
+except AttributeError:
+    premai_api_error = Exception
 
 
 def backoff_hdlr(details) -> None:
@@ -75,6 +70,10 @@ def __init__(
             Additional arguments to pass to the API provider
         """
         super().__init__(model)
+        if premai_api_error == Exception:
+            raise ImportError(
+                "Not loading Prem AI because it is not installed. Install it with `pip install premai`.",
+            )
         self.kwargs = kwargs if kwargs == {} else self.kwargs
 
         self.project_id = project_id
@@ -135,18 +134,28 @@ def basic_request(self, prompt, **kwargs) -> str:
             **all_kwargs,
         )
         if not response.choices:
-            raise ChatPremAPIError("ChatResponse must have at least one candidate")
+            raise premai_api_error("ChatResponse must have at least one candidate")
 
         content = response.choices[0].message.content
+        if not content:
+            raise premai_api_error("ChatResponse is none")
+
         output_text = content or ""
 
-        self.history.append({"prompt": prompt, "response": content, "kwargs": all_kwargs, "raw_kwargs": kwargs})
+        self.history.append(
+            {
+                "prompt": prompt,
+                "response": content,
+                "kwargs": all_kwargs,
+                "raw_kwargs": kwargs,
+            },
+        )
 
         return output_text
 
     @backoff.on_exception(
         backoff.expo,
-        (ERROR),
+        (premai_api_error),
         max_time=1000,
         on_backoff=backoff_hdlr,
         giveup=giveup_hdlr,

From 47f919949684d7c10c73d89cc50a3a3d214f1016 Mon Sep 17 00:00:00 2001
From: Anindyadeep
Date: Thu, 16 May 2024 18:14:33 +0000
Subject: [PATCH 6/9] refactor(dspy): added prem integration as remote llm
 inside main doc

---
 docs/docs/building-blocks/1-language_models.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/docs/docs/building-blocks/1-language_models.md b/docs/docs/building-blocks/1-language_models.md
index ddf5f815e0..25fb134c47 100644
--- a/docs/docs/building-blocks/1-language_models.md
+++ b/docs/docs/building-blocks/1-language_models.md
@@ -137,6 +137,7 @@ lm = dspy.{provider_listed_below}(model="your model", model_request_kwargs="...")
 
 4. `dspy.Together` for hosted various open source models.
 
+5. `dspy.PremAI` for hosted best-in-class open source and closed source models.
 
 ### Local LMs.
@@ -173,4 +174,4 @@ model = 'dist/prebuilt/mlc-chat-Llama-2-7b-chat-hf-q4f16_1'
 model_path = 'dist/prebuilt/lib/Llama-2-7b-chat-hf-q4f16_1-cuda.so'
 
 llama = dspy.ChatModuleClient(model=model, model_path=model_path)
-```
+```
\ No newline at end of file

From 62eaf886ebb0c94b308f07612382b08e866a34e1 Mon Sep 17 00:00:00 2001
From: Anindyadeep
Date: Thu, 16 May 2024 18:15:03 +0000
Subject: [PATCH 7/9] removed adding model as a required argument, so that it
 aligns with prem's workflow

---
 dsp/modules/premai.py | 27 ++++++++++++++++-----------
 1 file changed, 16 insertions(+), 11 deletions(-)

diff --git a/dsp/modules/premai.py b/dsp/modules/premai.py
index e10d6b71de..3c9589cae4 100644
--- a/dsp/modules/premai.py
+++ b/dsp/modules/premai.py
@@ -8,7 +8,7 @@
 try:
     import premai
 
-    premai_error = premai.errors.UnexpectedStatus
+    premai_api_error = premai.errors.UnexpectedStatus
 except ImportError:
     premai_api_error = Exception
 except AttributeError:
     premai_api_error = Exception
@@ -49,18 +49,18 @@ class PremAI(LM):
 
     def __init__(
         self,
-        model: str,
         project_id: int,
+        model: Optional[str] = None,
         api_key: Optional[str] = None,
         session_id: Optional[int] = None,
         **kwargs,
     ) -> None:
         """Parameters
 
-        model: str
-            The name of the model
         project_id: int
             The project ID in which the experiments or deployments are carried out. You can find all your projects here: https://app.premai.io/projects/
+        model: Optional[str]
+            The name of the model deployed on launchpad. When None, 'default' is used.
         api_key: Optional[str]
             Prem AI API key, to connect with the API. If not provided, it will be read from
             the PREMAI_API_KEY environment variable.
         session_id: Optional[int]
             The ID of the session to use. It helps to track the chat history.
         **kwargs: dict
             Additional arguments to pass to the API provider
         """
+        model = "default" if model is None else model
         super().__init__(model)
         if premai_api_error == Exception:
             raise ImportError(
@@ -85,13 +86,18 @@ def __init__(
         self.history: list[dict[str, Any]] = []
 
         self.kwargs = {
-            "model": model,
             "temperature": 0.17,
             "max_tokens": 150,
             **kwargs,
         }
         if session_id is not None:
-            kwargs["session_id"] = session_id
+            self.kwargs["session_id"] = session_id
+
+        # Note: it is not recommended to change the model once it has
+        # been deployed from launchpad.
+        if model != "default":
+            self.kwargs["model"] = model
 
     def _get_all_kwargs(self, **kwargs) -> dict:
         other_kwargs = {
@@ -111,7 +117,6 @@ def _get_all_kwargs(self, **kwargs) -> dict:
             "frequency_penalty",
             "presence_penalty",
             "tools",
-            "model",
         ]
 
         for key in _keys_that_cannot_be_none:
@@ -122,15 +127,15 @@ def _get_all_kwargs(self, **kwargs) -> dict:
     def basic_request(self, prompt, **kwargs) -> str:
         """Handles retrieval of completions from Prem AI whilst handling API errors."""
         all_kwargs = self._get_all_kwargs(**kwargs)
-        message = []
+        messages = []
 
         if "system_prompt" in all_kwargs:
-            message.append({"role": "system", "content": all_kwargs["system_prompt"]})
-        message.append({"role": "user", "content": prompt})
+            messages.append({"role": "system", "content": all_kwargs["system_prompt"]})
+        messages.append({"role": "user", "content": prompt})
 
         response = self.client.chat.completions.create(
             project_id=self.project_id,
-            messages=message,
+            messages=messages,
             **all_kwargs,
         )
         if not response.choices:

From d831ad19d97e26234e8df8c08451bf5fa56a88f2 Mon Sep 17 00:00:00 2001
From: Anindyadeep
Date: Thu, 16 May 2024 18:15:25 +0000
Subject: [PATCH 8/9] Added prem sdk documentation under deep dive

---
 .../remote_models/PremAI.mdx | 70 +++++++++++++++++++
 1 file changed, 70 insertions(+)
 create mode 100644 docs/docs/deep-dive/language_model_clients/remote_models/PremAI.mdx
diff --git a/docs/docs/deep-dive/language_model_clients/remote_models/PremAI.mdx b/docs/docs/deep-dive/language_model_clients/remote_models/PremAI.mdx
new file mode 100644
index 0000000000..e94aac1e92
--- /dev/null
+++ b/docs/docs/deep-dive/language_model_clients/remote_models/PremAI.mdx
@@ -0,0 +1,70 @@
+## PremAI
+
+[PremAI](https://app.premai.io) is a unified platform that lets you build powerful production-ready GenAI-powered applications with the least effort so that you can focus more on user experience and overall growth. With dspy, you can connect with several [best-in-class LLMs](https://models.premai.io/) of your choice with a single interface.
+
+### Prerequisites
+
+Refer to the [quick start](https://docs.premai.io/introduction) guide to get started with the PremAI platform: create your first project and grab your API key.
+
+### Setting up the PremAI Client
+
+The constructor initializes the base class `LM` to support prompting requests to PremAI hosted models. It accepts the following parameters:
+
+- `model` (_Optional[str]_, _optional_): Models supported by PremAI. Example: `mistral-tiny`. Defaults to the model selected in [project launchpad](https://docs.premai.io/get-started/launchpad).
+- `project_id` (_int_): The [project id](https://docs.premai.io/get-started/projects) which contains the model of choice.
+- `api_key` (_Optional[str]_, _optional_): API key from PremAI. If not provided, it is read from the `PREMAI_API_KEY` environment variable. Defaults to None.
+- `session_id` (_Optional[int]_, _optional_): The ID of the session to use. It helps to track the chat history.
+- `**kwargs`: Additional language model arguments that will be passed to the API provider.
+
+Example of PremAI constructor:
+
+```python
+class PremAI(LM):
+    def __init__(
+        self,
+        project_id: int,
+        model: Optional[str] = None,
+        api_key: Optional[str] = None,
+        session_id: Optional[int] = None,
+        **kwargs,
+    ) -> None:
+```
+
+### Under the Hood
+
+#### `__call__(self, prompt: str, **kwargs) -> str`
+
+**Parameters:**
+- `prompt` (_str_): Prompt to send to PremAI.
+- `**kwargs`: Additional keyword arguments for the completion request.
+
+**Returns:**
+- `str`: Completion string from the chosen LLM provider.
+
+Internally, the method handles the specifics of preparing the request prompt and corresponding payload to obtain the response.
+
+### Using the PremAI client
+
+```python
+premai_client = dspy.PremAI(project_id=1111)
+```
+
+Please note that this is a dummy `project_id`. You need to change it to the project ID you want to use with dspy.
+
+1) Generate responses by configuring the client as dspy's default LM:
+
+```python
+dspy.configure(lm=premai_client)
+
+# Example DSPy CoT QA program
+qa = dspy.ChainOfThought('question -> answer')
+
+response = qa(question="What is the capital of France?")
+print(response.answer)
+```
+
+2) Generate responses using the client directly:
+
+```python
+response = premai_client(prompt='What is the capital of France?')
+print(response)
+```
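+
+You can also pass a `session_id` so that PremAI tracks the chat history against that session. The sketch below assumes a session already exists on the platform; the `session_id` value is a placeholder:
+
+```python
+# `session_id` is forwarded to the PremAI chat completions API so that
+# requests are tracked under the given session's chat history.
+session_client = dspy.PremAI(project_id=1111, session_id=1234)
+
+response = session_client(prompt='What is the capital of France?')
+print(response)
+```
\ No newline at end of file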
From 4044d384d1d87b821af11658afa79e79746d7764 Mon Sep 17 00:00:00 2001
From: Anindyadeep
Date: Sat, 18 May 2024 11:00:30 +0530
Subject: [PATCH 9/9] Added new description of premai

---
 docs/api/language_model_clients/PremAI.md                     | 2 +-
 .../deep-dive/language_model_clients/remote_models/PremAI.mdx | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/api/language_model_clients/PremAI.md b/docs/api/language_model_clients/PremAI.md
index 6bc99c71b6..1bc7d90474 100644
--- a/docs/api/language_model_clients/PremAI.md
+++ b/docs/api/language_model_clients/PremAI.md
@@ -4,7 +4,7 @@ sidebar_position: 5
 
 # dsp.PremAI
 
-[PremAI](https://app.premai.io) is a unified platform that lets you build powerful production-ready GenAI-powered applications with the least effort so that you can focus more on user experience and overall growth. With dspy, you can connect with several [best-in-class LLMs](https://models.premai.io/) of your choice with a single interface.
+[PremAI](https://app.premai.io) is an all-in-one platform that simplifies the process of creating robust, production-ready applications powered by Generative AI. By streamlining the development process, PremAI allows you to concentrate on enhancing user experience and driving overall growth for your application.
 
 ### Prerequisites
 
diff --git a/docs/docs/deep-dive/language_model_clients/remote_models/PremAI.mdx b/docs/docs/deep-dive/language_model_clients/remote_models/PremAI.mdx
index e94aac1e92..f41bae1394 100644
--- a/docs/docs/deep-dive/language_model_clients/remote_models/PremAI.mdx
+++ b/docs/docs/deep-dive/language_model_clients/remote_models/PremAI.mdx
@@ -1,6 +1,6 @@
 ## PremAI
 
-[PremAI](https://app.premai.io) is a unified platform that lets you build powerful production-ready GenAI-powered applications with the least effort so that you can focus more on user experience and overall growth. With dspy, you can connect with several [best-in-class LLMs](https://models.premai.io/) of your choice with a single interface.
+[PremAI](https://app.premai.io) is an all-in-one platform that simplifies the process of creating robust, production-ready applications powered by Generative AI. By streamlining the development process, PremAI allows you to concentrate on enhancing user experience and driving overall growth for your application.
 
 ### Prerequisites