Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
89 commits
Select commit Hold shift + click to select a range
23b2898
Update Finetuner search metadata functional tests (#172)
lucas-aixplain May 2, 2024
208a081
Downgrade dataclasses-json for compatibility (#170)
thiago-aixplain May 2, 2024
a837e1a
Fix model cost parameters (#179)
thiago-aixplain May 10, 2024
754f478
Treat label URLs (#176)
thiago-aixplain May 15, 2024
f1c9935
Add new metric test (#181)
thiago-aixplain Jun 4, 2024
a48ccfd
LLMModel class and parameters (#184)
thiago-aixplain Jun 5, 2024
c7f59ce
Gpus (#185)
mikelam-us-aixplain Jun 5, 2024
16eb2e1
Create and get Pipelines with api key as input parameter (#187)
thiago-aixplain Jun 7, 2024
2849d6f
Merge branch 'test' into development
thiago-aixplain Jun 11, 2024
04246b1
M 6769474660 save pipelines (#191)
thiago-aixplain Jun 17, 2024
73021a7
M 6769474660 save pipelines (#192)
thiago-aixplain Jun 18, 2024
474602b
Solving bug when LLM parameters are set on data (#196)
thiago-aixplain Jun 26, 2024
c471703
Merge branch 'test' into development
thiago-aixplain Jun 26, 2024
3695686
Fix pipeline functional test (#200)
lucas-aixplain Jul 3, 2024
9014061
M 6656407247 agentification (#197)
thiago-aixplain Jul 13, 2024
e9091c2
Fixing circular import in the SDK (#211)
thiago-aixplain Jul 30, 2024
f437815
create model/pipeline tools from AgentFactory (#214)
thiago-aixplain Aug 2, 2024
8457087
Merge branch 'test' into development
thiago-aixplain Aug 6, 2024
03009c6
Set model ID as a parameter (#216)
thiago-aixplain Aug 7, 2024
02f7482
Content inputs to be processed according to the query. (#215)
thiago-aixplain Aug 7, 2024
4947959
ENG-1: programmatic api introduced (#219)
kadirpekel Aug 9, 2024
ef16dd5
Updated image upload tests (#213)
mikelam-us-aixplain Aug 12, 2024
d0ad51d
Eng 217 local path (#220)
thiago-aixplain Aug 13, 2024
dca1a37
Eng 389 fix tests (#222)
thiago-aixplain Aug 13, 2024
d43f67f
Merge branch 'test' into development
thiago-aixplain Aug 13, 2024
b113368
Tool Validation when creating agents (#226)
xainaz Aug 19, 2024
0032947
Eng 398 sdk get users credits - Initial (#232)
xainaz Aug 20, 2024
a567535
Eng 398 sdk get users credits (#234)
thiago-aixplain Aug 20, 2024
e919fab
Removed wallet_factoy.py (#235)
xainaz Aug 21, 2024
9ffe3f7
Merge branch 'test' into development
thiago-aixplain Aug 22, 2024
115bf13
Adding supervisor/planning options into SDK (#233)
thiago-aixplain Aug 22, 2024
3357e56
Adjustments to get user credits (#237)
xainaz Aug 23, 2024
ee76afd
Put conditions inside try statements according to changes required. (…
xainaz Aug 23, 2024
1660f5f
Fixing none credit (#238)
xainaz Aug 27, 2024
ed20ba7
Merge branch 'test' into development
thiago-aixplain Aug 27, 2024
481dab2
Merge branch 'test' into development
thiago-aixplain Aug 27, 2024
9a89f52
Update click dependency (#241)
thiago-aixplain Aug 28, 2024
cb0d313
Added input and output attributes to model (#244)
xainaz Sep 2, 2024
716d898
Eng 467 ai xplain sdk update finetune functional tests to cover all n…
xainaz Sep 3, 2024
50d7c6a
Merge branch 'test' into development
thiago-aixplain Sep 4, 2024
f3d89ed
Added name to update (#245)
xainaz Sep 5, 2024
1700304
ENG-504: Make the agent architecture configurable (#243)
thiago-aixplain Sep 6, 2024
357e10d
Eng 544 ai xplain sdk update llm functional tests to cover all new ll…
xainaz Sep 9, 2024
731a150
Eng 399 - Introducing Metric Nodes in Designer (#247)
kadirpekel Sep 10, 2024
0e62774
Add TeamAgent factory and module. Fix typos in code comments (#227)
lucas-aixplain Sep 12, 2024
7f64955
Merge branch 'test' into development
ikxplain Sep 19, 2024
b93a706
Add fileMetadata information in script node (#251)
thiago-aixplain Sep 20, 2024
ea846e6
Merge branch 'test' into development
thiago-aixplain Sep 23, 2024
f7bd983
Name Validation of Agents and Team Agents (#253)
thiago-aixplain Sep 23, 2024
b886287
Fixes in pipeline design and reconstructor node (#255)
thiago-aixplain Sep 26, 2024
8bd6460
Add get method to agent and team agent tests (#259)
lucas-aixplain Sep 29, 2024
4bd9bc0
initial API key factory (#261)
xainaz Oct 7, 2024
63e0f82
BUG-177: Fixed pipeline validation (#262)
kadirpekel Oct 7, 2024
0208be1
Get usage limit (#264)
thiago-aixplain Oct 8, 2024
662420e
Update SDK version (#266)
thiago-aixplain Oct 9, 2024
08c925e
Merge branch 'test' into development
thiago-aixplain Oct 9, 2024
828bdee
Eng 739 get api key (#268)
thiago-aixplain Oct 9, 2024
5ece957
Merge branch 'test' into development
thiago-aixplain Oct 9, 2024
b13c21d
Fix Update API Key Bug (#272)
thiago-aixplain Oct 10, 2024
2a2a476
Merge branch 'test' into development
thiago-aixplain Oct 10, 2024
8182293
Eng 735 ai xplain sdk improve error log messages (#271)
xainaz Oct 16, 2024
ecba34f
Max tokens and iterations in agents/teams (#276)
thiago-aixplain Oct 17, 2024
2127cc5
Update model running endpoints from v1 to v2 (#275)
thiago-aixplain Oct 17, 2024
736a7b1
Eng 711 new model endpoints (#278)
thiago-aixplain Oct 17, 2024
fa33531
Group of Improvements in API Key CRUD (#277)
thiago-aixplain Oct 25, 2024
349ea60
Bug 149 - Path validation removed and decision node output param hand…
kadirpekel Oct 25, 2024
dd46dcf
Changed function to required field (#283)
xainaz Oct 29, 2024
b86d5e7
BUG-206: Fixed passthrough parameter reflection to next node (#284)
kadirpekel Oct 29, 2024
0dbfab1
Get model description (#286)
thiago-aixplain Oct 29, 2024
2eebc27
Merge branch 'test' into development
thiago-aixplain Oct 29, 2024
7ce3c73
Fixing default parameters setting (#288)
thiago-aixplain Oct 29, 2024
9fbc3e6
Merge branch 'test' into development
thiago-aixplain Oct 29, 2024
09908ed
Eng 893 ai xplain sdk improve error log message when deleting an agen…
xainaz Oct 30, 2024
c3c0228
Fixes of errors pointed by functional test (#291)
thiago-aixplain Oct 30, 2024
45e0ff0
Merge branch 'test' into development
thiago-aixplain Oct 30, 2024
ef5d61f
Fix agent and team agent functional tests (#294)
lucas-aixplain Oct 31, 2024
1e43ed3
designer pipeline building are now compatible with custom inputs (#296)
kadirpekel Oct 31, 2024
d77348a
Model Response Class (#279)
xainaz Nov 4, 2024
d1538af
Set Model Tool description (#292)
thiago-aixplain Nov 4, 2024
d731ff0
Eng 812 update agents (#285)
thiago-aixplain Nov 4, 2024
8340687
Service mode parameter in the SDK (#295)
thiago-aixplain Nov 4, 2024
4e79c02
Merge branch 'test' into development
ikxplain Nov 5, 2024
f1bd8f3
BUG-233 Prompt variables are now populated and validated automaticall…
kadirpekel Nov 7, 2024
e2d1be1
Fix check_storage_type to not detect folder as a file (#302)
lucas-aixplain Nov 8, 2024
9700903
Merge branch 'test' into development
thiago-aixplain Nov 11, 2024
3754cc4
ENG-979 New functional test for script nodes (#305)
kadirpekel Nov 11, 2024
6353254
Bug 228 sdk always return model response when running model (#304)
xainaz Nov 11, 2024
b5e2285
Make function filter in model search optional (#307)
thiago-aixplain Nov 11, 2024
d273330
Set default 'parameters' to None and adding tests (#300)
lucas-aixplain Nov 11, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ repos:
hooks:
- id: pytest-check
name: pytest-check
entry: coverage run -m pytest tests/unit
entry: coverage run --source=. -m pytest tests/unit
language: python
pass_filenames: false
types: [python]
Expand Down
2 changes: 1 addition & 1 deletion aixplain/enums/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,4 @@
from .supplier import Supplier
from .sort_by import SortBy
from .sort_order import SortOrder
from .model_status import ModelStatus
from .response_status import ResponseStatus
11 changes: 0 additions & 11 deletions aixplain/enums/model_status.py

This file was deleted.

31 changes: 31 additions & 0 deletions aixplain/enums/response_status.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
__author__ = "thiagocastroferreira"

"""
Copyright 2024 The aiXplain SDK authors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Author: Duraikrishna Selvaraju, Thiago Castro Ferreira, Shreyas Sharma and Lucas Pavanelli
Date: February 21st 2024
Description:
    Response Status Enum
"""

from enum import Enum
from typing import Text


class ResponseStatus(Text, Enum):
    """Lifecycle status of an asynchronous execution on the aiXplain platform.

    Mixes in ``Text`` (an alias of ``str``), so members compare equal to their
    plain-string values and serialize transparently in JSON payloads.
    """

    # Execution has been submitted and is still running.
    IN_PROGRESS = "IN_PROGRESS"
    # Execution finished and produced a result.
    SUCCESS = "SUCCESS"
    # Execution terminated with an error (or polling gave up).
    FAILED = "FAILED"
1 change: 1 addition & 0 deletions aixplain/enums/supplier.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@ def load_suppliers():
headers = {"x-aixplain-key": aixplain_key, "Content-Type": "application/json"}
else:
headers = {"x-api-key": api_key, "Content-Type": "application/json"}
logging.debug(f"Start service for GET API Creation - {url} - {headers}")
r = _request_with_retry("get", url, headers=headers)
if not 200 <= r.status_code < 300:
raise Exception(
Expand Down
2 changes: 1 addition & 1 deletion aixplain/factories/file_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ def check_storage_type(cls, input_link: Any) -> StorageType:
Returns:
StorageType: URL, TEXT or FILE
"""
if os.path.exists(input_link) is True:
if os.path.exists(input_link) is True and os.path.isfile(input_link) is True:
return StorageType.FILE
elif (
input_link.startswith("s3://")
Expand Down
2 changes: 1 addition & 1 deletion aixplain/factories/model_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -222,7 +222,7 @@ def _get_assets_from_page(
@classmethod
def list(
cls,
function: Function,
function: Optional[Function] = None,
query: Optional[Text] = "",
suppliers: Optional[Union[Supplier, List[Supplier]]] = None,
source_languages: Optional[Union[Language, List[Language]]] = None,
Expand Down
2 changes: 1 addition & 1 deletion aixplain/factories/pipeline_factory/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ def build_from_response(response: Dict, load_architecture: bool = False) -> Pipe
data_type=custom_input.get("dataType"),
code=custom_input["code"],
value=custom_input.get("value"),
is_required=custom_input.get("isRequired", False),
is_required=custom_input.get("isRequired", True),
)
node.number = node_json["number"]
node.label = node_json["label"]
Expand Down
41 changes: 25 additions & 16 deletions aixplain/modules/model/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
from typing import Union, Optional, Text, Dict
from datetime import datetime
from aixplain.modules.model.response import ModelResponse
from aixplain.enums import ModelStatus
from aixplain.enums.response_status import ResponseStatus


class Model(Asset):
Expand Down Expand Up @@ -106,6 +106,7 @@ def to_dict(self) -> Dict:
return {
"id": self.id,
"name": self.name,
"description": self.description,
"supplier": self.supplier,
"additional_info": clean_additional_info,
"input_params": self.input_params,
Expand All @@ -118,7 +119,9 @@ def __repr__(self):
except Exception:
return f"<Model: {self.name} by {self.supplier}>"

def sync_poll(self, poll_url: Text, name: Text = "model_process", wait_time: float = 0.5, timeout: float = 300) -> Dict:
def sync_poll(
self, poll_url: Text, name: Text = "model_process", wait_time: float = 0.5, timeout: float = 300
) -> ModelResponse:
"""Keeps polling the platform to check whether an asynchronous call is done.

Args:
Expand All @@ -135,7 +138,7 @@ def sync_poll(self, poll_url: Text, name: Text = "model_process", wait_time: flo
# keep wait time as 0.2 seconds the minimum
wait_time = max(wait_time, 0.2)
completed = False
response_body = {"status": "FAILED", "completed": False}
response_body = ModelResponse(status=ResponseStatus.FAILED, completed=False)
while not completed and (end - start) < timeout:
try:
response_body = self.poll(poll_url, name=name)
Expand All @@ -147,13 +150,17 @@ def sync_poll(self, poll_url: Text, name: Text = "model_process", wait_time: flo
if wait_time < 60:
wait_time *= 1.1
except Exception as e:
response_body = {"status": "FAILED", "completed": False, "error_message": "No response from the service."}
response_body = ModelResponse(
status=ResponseStatus.FAILED, completed=False, error_message="No response from the service."
)
logging.error(f"Polling for Model: polling for {name}: {e}")
break
if response_body["completed"] is True:
logging.debug(f"Polling for Model: Final status of polling for {name}: {response_body}")
else:
response_body["status"] = "FAILED"
response_body = ModelResponse(
status=ResponseStatus.FAILED, completed=False, error_message="No response from the service."
)
logging.error(
f"Polling for Model: Final status of polling for {name}: No response in {timeout} seconds - {response_body}"
)
Expand All @@ -174,11 +181,11 @@ def poll(self, poll_url: Text, name: Text = "model_process") -> ModelResponse:
try:
resp = r.json()
if resp["completed"] is True:
status = ModelStatus.SUCCESS
status = ResponseStatus.SUCCESS
if "error_message" in resp or "supplierError" in resp:
status = ModelStatus.FAILED
status = ResponseStatus.FAILED
else:
status = ModelStatus.IN_PROGRESS
status = ResponseStatus.IN_PROGRESS
logging.debug(f"Single Poll for Model: Status of polling for {name}: {resp}")
return ModelResponse(
status=resp.pop("status", status),
Expand All @@ -195,7 +202,7 @@ def poll(self, poll_url: Text, name: Text = "model_process") -> ModelResponse:
resp = {"status": "FAILED"}
logging.error(f"Single Poll for Model: Error of polling for {name}: {e}")
return ModelResponse(
status=ModelStatus.FAILED,
status=ResponseStatus.FAILED,
error_message=str(e),
completed=False,
)
Expand All @@ -205,7 +212,7 @@ def run(
data: Union[Text, Dict],
name: Text = "model_process",
timeout: float = 300,
parameters: Optional[Dict] = {},
parameters: Optional[Dict] = None,
wait_time: float = 0.5,
) -> ModelResponse:
"""Runs a model call.
Expand All @@ -214,7 +221,7 @@ def run(
data (Union[Text, Dict]): link to the input data
name (Text, optional): ID given to a call. Defaults to "model_process".
timeout (float, optional): total polling time. Defaults to 300.
parameters (Dict, optional): optional parameters to the model. Defaults to "{}".
parameters (Dict, optional): optional parameters to the model. Defaults to None.
wait_time (float, optional): wait time in seconds between polling calls. Defaults to 0.5.

Returns:
Expand All @@ -234,9 +241,9 @@ def run(
msg = f"Error in request for {name} - {traceback.format_exc()}"
logging.error(f"Model Run: Error in running for {name}: {e}")
end = time.time()
response = {"status": "FAILED", "error": msg, "runTime": end - start}
response = {"status": "FAILED", "error_message": msg, "runTime": end - start}
return ModelResponse(
status=response.pop("status", ModelStatus.FAILED),
status=response.pop("status", ResponseStatus.FAILED),
data=response.pop("data", ""),
details=response.pop("details", {}),
completed=response.pop("completed", False),
Expand All @@ -247,13 +254,15 @@ def run(
**response,
)

def run_async(self, data: Union[Text, Dict], name: Text = "model_process", parameters: Optional[Dict] = {}) -> ModelResponse:
def run_async(
self, data: Union[Text, Dict], name: Text = "model_process", parameters: Optional[Dict] = None
) -> ModelResponse:
"""Runs asynchronously a model call.

Args:
data (Union[Text, Dict]): link to the input data
name (Text, optional): ID given to a call. Defaults to "model_process".
parameters (Dict, optional): optional parameters to the model. Defaults to "{}".
parameters (Dict, optional): optional parameters to the model. Defaults to None.

Returns:
dict: polling URL in response
Expand All @@ -263,7 +272,7 @@ def run_async(self, data: Union[Text, Dict], name: Text = "model_process", param
payload = build_payload(data=data, parameters=parameters)
response = call_run_endpoint(payload=payload, url=url, api_key=self.api_key)
return ModelResponse(
status=response.pop("status", ModelStatus.FAILED),
status=response.pop("status", ResponseStatus.FAILED),
data=response.pop("data", ""),
details=response.pop("details", {}),
completed=response.pop("completed", False),
Expand Down
42 changes: 23 additions & 19 deletions aixplain/modules/model/llm_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
from aixplain.utils import config
from typing import Union, Optional, List, Text, Dict
from aixplain.modules.model.response import ModelResponse
from aixplain.enums import ModelStatus
from aixplain.enums.response_status import ResponseStatus


class LLM(Model):
Expand Down Expand Up @@ -104,7 +104,7 @@ def run(
top_p: float = 1.0,
name: Text = "model_process",
timeout: float = 300,
parameters: Optional[Dict] = {},
parameters: Optional[Dict] = None,
wait_time: float = 0.5,
) -> ModelResponse:
"""Synchronously running a Large Language Model (LLM) model.
Expand All @@ -119,21 +119,23 @@ def run(
top_p (float, optional): Top P. Defaults to 1.0.
name (Text, optional): ID given to a call. Defaults to "model_process".
timeout (float, optional): total polling time. Defaults to 300.
parameters (Dict, optional): optional parameters to the model. Defaults to "{}".
parameters (Dict, optional): optional parameters to the model. Defaults to None.
wait_time (float, optional): wait time in seconds between polling calls. Defaults to 0.5.

Returns:
Dict: parsed output from model
"""
start = time.time()
if parameters is None:
parameters = {}
parameters.update(
{
"context": parameters["context"] if "context" in parameters else context,
"prompt": parameters["prompt"] if "prompt" in parameters else prompt,
"history": parameters["history"] if "history" in parameters else history,
"temperature": parameters["temperature"] if "temperature" in parameters else temperature,
"max_tokens": parameters["max_tokens"] if "max_tokens" in parameters else max_tokens,
"top_p": parameters["top_p"] if "top_p" in parameters else top_p,
"context": parameters.get("context", context),
"prompt": parameters.get("prompt", prompt),
"history": parameters.get("history", history),
"temperature": parameters.get("temperature", temperature),
"max_tokens": parameters.get("max_tokens", max_tokens),
"top_p": parameters.get("top_p", top_p),
}
)
payload = build_payload(data=data, parameters=parameters)
Expand All @@ -152,7 +154,7 @@ def run(
end = time.time()
response = {"status": "FAILED", "error": msg, "elapsed_time": end - start}
return ModelResponse(
status=response.pop("status", ModelStatus.FAILED),
status=response.pop("status", ResponseStatus.FAILED),
data=response.pop("data", ""),
details=response.pop("details", {}),
completed=response.pop("completed", False),
Expand All @@ -173,7 +175,7 @@ def run_async(
max_tokens: int = 128,
top_p: float = 1.0,
name: Text = "model_process",
parameters: Optional[Dict] = {},
parameters: Optional[Dict] = None,
) -> ModelResponse:
"""Runs asynchronously a model call.

Expand All @@ -186,27 +188,29 @@ def run_async(
max_tokens (int, optional): Maximum Generation Tokens. Defaults to 128.
top_p (float, optional): Top P. Defaults to 1.0.
name (Text, optional): ID given to a call. Defaults to "model_process".
parameters (Dict, optional): optional parameters to the model. Defaults to "{}".
parameters (Dict, optional): optional parameters to the model. Defaults to None.

Returns:
dict: polling URL in response
"""
url = f"{self.url}/{self.id}"
logging.debug(f"Model Run Async: Start service for {name} - {url}")
if parameters is None:
parameters = {}
parameters.update(
{
"context": parameters["context"] if "context" in parameters else context,
"prompt": parameters["prompt"] if "prompt" in parameters else prompt,
"history": parameters["history"] if "history" in parameters else history,
"temperature": parameters["temperature"] if "temperature" in parameters else temperature,
"max_tokens": parameters["max_tokens"] if "max_tokens" in parameters else max_tokens,
"top_p": parameters["top_p"] if "top_p" in parameters else top_p,
"context": parameters.get("context", context),
"prompt": parameters.get("prompt", prompt),
"history": parameters.get("history", history),
"temperature": parameters.get("temperature", temperature),
"max_tokens": parameters.get("max_tokens", max_tokens),
"top_p": parameters.get("top_p", top_p),
}
)
payload = build_payload(data=data, parameters=parameters)
response = call_run_endpoint(payload=payload, url=url, api_key=self.api_key)
return ModelResponse(
status=response.pop("status", ModelStatus.FAILED),
status=response.pop("status", ResponseStatus.FAILED),
data=response.pop("data", ""),
details=response.pop("details", {}),
completed=response.pop("completed", False),
Expand Down
6 changes: 2 additions & 4 deletions aixplain/modules/model/response.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,13 @@
from dataclasses import dataclass
from typing import Text, Any, Optional, Dict, List, Union
from aixplain.enums import ModelStatus
from aixplain.enums import ResponseStatus


@dataclass
class ModelResponse:
"""ModelResponse class to store the response of the model run."""

def __init__(
self,
status: ModelStatus,
status: ResponseStatus,
data: Text = "",
details: Optional[Union[Dict, List]] = {},
completed: bool = False,
Expand Down
7 changes: 5 additions & 2 deletions aixplain/modules/model/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,15 @@
import json
import logging
from aixplain.utils.file_utils import _request_with_retry
from typing import Dict, Text, Union
from typing import Dict, Text, Union, Optional


def build_payload(data: Union[Text, Dict], parameters: Dict = {}):
def build_payload(data: Union[Text, Dict], parameters: Optional[Dict] = None):
from aixplain.factories import FileFactory

if parameters is None:
parameters = {}

data = FileFactory.to_link(data)
if isinstance(data, dict):
payload = data
Expand Down
Loading