Skip to content

Commit

Permalink
Revert "Feature/gsk 2334 talk to my model mvp (#1831)"
Browse files — browse the repository at this point in the history
This reverts commit 5d1894d.
  • Loading branch information
rabah-khalek committed Apr 11, 2024
1 parent 5d1894d commit 451a3a0
Show file tree
Hide file tree
Showing 20 changed files with 120 additions and 1,785 deletions.
223 changes: 0 additions & 223 deletions docs/open_source/ai_quality_copilot/index.md

This file was deleted.

5 changes: 0 additions & 5 deletions docs/open_source/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,11 +29,6 @@ integrate_tests/index
:link: testset_generation/index.html
::::

::::{grid-item-card} <br/><h3>🤖 AI Quality Copilot</h3>
:text-align: center
:link: ai_quality_copilot/index.html
::::

::::{grid-item-card} <br/><h3>🧪 Customize your tests</h3>
:text-align: center
:link: customize_tests/index.html
Expand Down
5 changes: 0 additions & 5 deletions giskard/llm/client/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,6 @@
class ChatMessage:
role: str
content: Optional[str] = None
name: Optional[str] = None
tool_call_id: Optional[str] = None
tool_calls: Optional[list] = None


_logger = LLMLogger()
Expand All @@ -30,8 +27,6 @@ def complete(
temperature: float = 1,
max_tokens: Optional[int] = None,
caller_id: Optional[str] = None,
tools=None,
tool_choice=None,
seed: Optional[int] = None,
format=None,
) -> ChatMessage:
Expand Down
34 changes: 3 additions & 31 deletions giskard/llm/client/openai.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from typing import Optional, Sequence

from dataclasses import asdict
from logging import warning

from ..config import LLMConfigurationError
Expand Down Expand Up @@ -28,29 +29,6 @@ def _supports_json_format(model: str) -> bool:
return False


def _format_message(msg: ChatMessage) -> dict:
    """Serialize a chat message into the OpenAI chat-completions payload shape.

    Only the attributes relevant to the message's role are emitted: ``tool``
    messages additionally carry ``name`` and ``tool_call_id``, and
    ``assistant`` messages carry ``tool_calls`` when one is present.

    Parameters
    ----------
    msg : ChatMessage
        Message to the LLMClient.

    Returns
    -------
    dict
        A dictionary with attributes related to the role.
    """
    formatted = {"role": msg.role, "content": msg.content}
    if msg.role == "tool":
        # Tool responses must reference the originating call.
        formatted["name"] = msg.name
        formatted["tool_call_id"] = msg.tool_call_id
    if msg.role == "assistant" and msg.tool_calls:
        formatted["tool_calls"] = msg.tool_calls
    return formatted


class OpenAIClient(LLMClient):
def __init__(
self, model: str = "gpt-4-turbo-preview", client: openai.Client = None, json_mode: Optional[bool] = None
Expand All @@ -65,17 +43,11 @@ def complete(
temperature: float = 1.0,
max_tokens: Optional[int] = None,
caller_id: Optional[str] = None,
tools=None,
tool_choice=None,
seed: Optional[int] = None,
format=None,
) -> ChatMessage:
extra_params = dict()

if tools is not None:
extra_params["tools"] = tools
if tool_choice is not None:
extra_params["tool_choice"] = tool_choice
if seed is not None:
extra_params["seed"] = seed

Expand All @@ -90,7 +62,7 @@ def complete(
try:
completion = self._client.chat.completions.create(
model=self.model,
messages=[_format_message(m) for m in messages],
messages=[asdict(m) for m in messages],
temperature=temperature,
max_tokens=max_tokens,
**extra_params,
Expand All @@ -108,4 +80,4 @@ def complete(

msg = completion.choices[0].message

return ChatMessage(role=msg.role, content=msg.content, tool_calls=msg.tool_calls)
return ChatMessage(role=msg.role, content=msg.content)
Loading

0 comments on commit 451a3a0

Please sign in to comment.