Commit 2ac25dd: tests covered

eavanvalkenburg committed Apr 23, 2024 (1 parent: 335f5ad)

Showing 7 changed files with 96 additions and 55 deletions.
@@ -11,14 +11,15 @@ class HuggingFacePromptExecutionSettings(PromptExecutionSettings):
     num_return_sequences: int = 1
     stop_sequences: Any = None
     pad_token_id: int = 50256
-    temperature: float = 0.0
+    eos_token_id: int = 50256
+    temperature: float = 1.0
     top_p: float = 1.0

     def get_generation_config(self) -> GenerationConfig:
         return GenerationConfig(
             **self.model_dump(
-                include={"max_new_tokens", "pad_token_id", "temperature", "top_p"},
-                exclude_unset=True,
+                include={"max_new_tokens", "pad_token_id", "eos_token_id", "temperature", "top_p"},
+                exclude_unset=False,
                 exclude_none=True,
                 by_alias=True,
             )
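For context, a minimal sketch of what the updated get_generation_config produces; the import path and the service_id value are assumptions, but the key point is that with exclude_unset=False the class defaults (eos_token_id, temperature, top_p) are always forwarded to the transformers GenerationConfig instead of only when a caller sets them:

    # Hedged sketch: the import path and service_id are assumptions.
    from semantic_kernel.connectors.ai.hugging_face import HuggingFacePromptExecutionSettings

    settings = HuggingFacePromptExecutionSettings(service_id="gpt2")
    config = settings.get_generation_config()
    print(config.temperature)   # 1.0, the new default (previously 0.0)
    print(config.eos_token_id)  # 50256, now included alongside pad_token_id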
python/semantic_kernel/contents/chat_history.py (2 changes: 1 addition & 1 deletion)
@@ -276,7 +276,7 @@ def serialize(self) -> str:
         """
         try:
             return self.model_dump_json(indent=2, exclude_none=True)
-        except Exception as e:
+        except Exception as e:  # pragma: no cover
             raise ContentSerializationError(f"Unable to serialize ChatHistory to JSON: {e}") from e

     @classmethod
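A short usage sketch of the guarded path; the exceptions import location is an assumption based on the ContentInitializationError import in the test file further below:

    from semantic_kernel.contents.chat_history import ChatHistory
    from semantic_kernel.exceptions import ContentSerializationError  # assumed location

    chat_history = ChatHistory()
    chat_history.add_user_message("Hello")
    try:
        history_json = chat_history.serialize()  # indented JSON, None fields excluded
    except ContentSerializationError as e:
        # Reachable only with non-JSON-serializable content, hence the no-cover pragma.
        print(f"Serialization failed: {e}")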
python/semantic_kernel/contents/chat_message_content.py (21 changes: 18 additions & 3 deletions)
@@ -29,13 +29,13 @@
     FUNCTION_RESULT_CONTENT_TAG: FunctionResultContent,
 }

-ITEM_TYPES = Union[TextContent, StreamingTextContent, FunctionCallContent, FunctionResultContent]
+ITEM_TYPES = Union[TextContent, StreamingTextContent, FunctionResultContent, FunctionCallContent]

 logger = logging.getLogger(__name__)


 class ChatMessageContent(KernelContent):
-    """This is the base class for chat message response content.
+    """This is the class for chat message response content.

     All Chat Completion Services should return a instance of this class as response.
     Or they can implement their own subclass of this class and return an instance.
@@ -83,7 +83,7 @@ def __init__(
             ai_model_id: Optional[str] - The id of the AI model that generated this response.
             metadata: Dict[str, Any] - Any metadata that should be attached to the response.
             role: ChatRole - The role of the chat message.
-            items: list[KernelContent] - The inner content.
+            items: list[TextContent, StreamingTextContent, FunctionCallContent, FunctionResultContent] - The content.
             encoding: Optional[str] - The encoding of the text.
         """

@@ -127,6 +127,20 @@ def __init__(  # type: ignore
         metadata: dict[str, Any] | None = None,
         **kwargs: Any,
     ):
+        """All Chat Completion Services should return a instance of this class as response.
+        Or they can implement their own subclass of this class and return an instance.
+
+        Args:
+            inner_content: Optional[Any] - The inner content of the response,
+                this should hold all the information from the response so even
+                when not creating a subclass a developer can leverage the full thing.
+            ai_model_id: Optional[str] - The id of the AI model that generated this response.
+            metadata: Dict[str, Any] - Any metadata that should be attached to the response.
+            role: ChatRole - The role of the chat message.
+            content: str - The text of the response.
+            items: list[TextContent, StreamingTextContent, FunctionCallContent, FunctionResultContent] - The content.
+            encoding: Optional[str] - The encoding of the text.
+        """
         kwargs["role"] = role
         if encoding:
             kwargs["encoding"] = encoding
@@ -191,6 +205,7 @@ def content(self, value: str):
         )

     def __str__(self) -> str:
+        """Get the content of the response as a string."""
         return self.content or ""

     def to_element(self) -> "Element":
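A hedged construction sketch for the documented items parameter; the import paths match the test file below, while the exact constructor behavior (content resolving from the first text item) is an assumption:

    from semantic_kernel.contents.author_role import AuthorRole
    from semantic_kernel.contents.chat_message_content import ChatMessageContent
    from semantic_kernel.contents.text_content import TextContent

    message = ChatMessageContent(
        role=AuthorRole.ASSISTANT,
        items=[TextContent(text="Hello!")],  # one of the four ITEM_TYPES members
    )
    print(str(message))  # "Hello!" if content resolves from the text item, else ""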
python/semantic_kernel/contents/function_result_content.py (17 changes: 0 additions & 17 deletions)
@@ -4,8 +4,6 @@
 from typing import TYPE_CHECKING, Any
 from xml.etree.ElementTree import Element

-from pydantic import model_validator
-
 from semantic_kernel.contents.const import FUNCTION_RESULT_CONTENT_TAG, TEXT_CONTENT_TAG
 from semantic_kernel.contents.kernel_content import KernelContent
 from semantic_kernel.contents.text_content import TextContent
@@ -44,21 +42,6 @@ class FunctionResultContent(KernelContent):
     result: str
     encoding: str | None = None

-    @model_validator(mode="before")
-    def _validate_result(cls, data: dict[str, Any]) -> dict[str, Any]:
-        """Validate the supplied result."""
-        if "result" not in data:
-            # let pydantic validation handle this case
-            return data
-        result = data["result"]
-        try:
-            data["result"] = str(result)
-            if "inner_content" not in data:
-                data["inner_content"] = result
-            return data
-        except Exception as e:
-            raise ValueError(f"Failed to convert result to string: {e}") from e
-
     def __str__(self) -> str:
         return self.result
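With the before-validator gone, result is validated as a plain str and nothing is mirrored into inner_content automatically; a small sketch of the implication (the id value is illustrative):

    from semantic_kernel.contents.function_result_content import FunctionResultContent

    # Callers now convert non-string results themselves.
    frc = FunctionResultContent(id="call-1", result=str(42))
    print(str(frc))  # "42", since __str__ returns self.result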
python/semantic_kernel/contents/streaming_chat_message_content.py (27 changes: 21 additions & 6 deletions)
@@ -19,7 +19,7 @@


 class StreamingChatMessageContent(ChatMessageContent, StreamingContentMixin):
-    """This is the base class for streaming chat message response content.
+    """This is the class for streaming chat message response content.

     All Chat Completion Services should return a instance of this class as streaming response,
     where each part of the response as it is streamed is converted to a instance of this class,
@@ -56,7 +56,7 @@ def __init__(
         ai_model_id: str | None = None,
         metadata: dict[str, Any] | None = None,
     ) -> None:
-        """All Chat Completion Services should return a instance of this class as response.
+        """All Chat Completion Services should return a instance of this class as response for streaming.
         Or they can implement their own subclass of this class and return an instance.

         Args:
@@ -66,7 +66,7 @@
             ai_model_id: Optional[str] - The id of the AI model that generated this response.
             metadata: Dict[str, Any] - Any metadata that should be attached to the response.
             role: ChatRole - The role of the chat message.
-            items: list[KernelContent] - The inner content.
+            items: list[TextContent, FunctionCallContent, FunctionResultContent] - The content.
             encoding: Optional[str] - The encoding of the text.
         """

@@ -83,7 +83,7 @@ def __init__(
         ai_model_id: str | None = None,
         metadata: dict[str, Any] | None = None,
     ) -> None:
-        """All Chat Completion Services should return a instance of this class as response.
+        """All Chat Completion Services should return a instance of this class as response for streaming.
         Or they can implement their own subclass of this class and return an instance.

         Args:
@@ -110,6 +110,20 @@ def __init__(  # type: ignore
         ai_model_id: str | None = None,
         metadata: dict[str, Any] | None = None,
     ):
+        """All Chat Completion Services should return a instance of this class as response for streaming.
+        Or they can implement their own subclass of this class and return an instance.
+
+        Args:
+            inner_content: Optional[Any] - The inner content of the response,
+                this should hold all the information from the response so even
+                when not creating a subclass a developer can leverage the full thing.
+            ai_model_id: Optional[str] - The id of the AI model that generated this response.
+            metadata: Dict[str, Any] - Any metadata that should be attached to the response.
+            role: ChatRole - The role of the chat message.
+            content: str - The text of the response.
+            items: list[TextContent, FunctionCallContent, FunctionResultContent] - The content.
+            encoding: Optional[str] - The encoding of the text.
+        """
         kwargs: dict[str, Any] = {
             "role": role,
             "choice_index": choice_index,
@@ -146,6 +160,7 @@ def __init__(  # type: ignore
         )

     def __bytes__(self) -> bytes:
+        """Return the content of the response encoded in the encoding."""
         return self.content.encode(self.encoding if self.encoding else "utf-8") if self.content else b""

     def __add__(self, other: StreamingChatMessageContent) -> StreamingChatMessageContent:
@@ -195,13 +210,13 @@ def __add__(self, other: StreamingChatMessageContent) -> StreamingChatMessageContent:
         )

     def to_element(self) -> "Element":
-        """Convert the ChatMessageContent to an XML Element.
+        """Convert the StreamingChatMessageContent to an XML Element.

         Args:
             root_key: str - The key to use for the root of the XML Element.

         Returns:
-            Element - The XML Element representing the ChatMessageContent.
+            Element - The XML Element representing the StreamingChatMessageContent.
         """
         root = Element(CHAT_MESSAGE_CONTENT_TAG)
         for field in self.model_fields_set:
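A hedged sketch of the streaming behaviors touched here; the constructor arguments mirror the overloads above, and the requirement that added chunks share a choice_index is an assumption:

    from semantic_kernel.contents.author_role import AuthorRole
    from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent

    chunk1 = StreamingChatMessageContent(role=AuthorRole.ASSISTANT, choice_index=0, content="Hel")
    chunk2 = StreamingChatMessageContent(role=AuthorRole.ASSISTANT, choice_index=0, content="lo")
    merged = chunk1 + chunk2  # __add__ concatenates matching chunks
    print(bytes(merged))      # b"Hello", encoded with self.encoding or utf-8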
@@ -43,9 +43,7 @@ async def test_text_completion(model_name, task, input_str):
         service=sk_hf.HuggingFaceTextCompletion(service_id=model_name, ai_model_id=model_name, task=task),
     )

-    exec_settings = PromptExecutionSettings(
-        service_id=model_name, extension_data={"max_tokens": 25, "temperature": 0.7, "top_p": 0.5}
-    )
+    exec_settings = PromptExecutionSettings(service_id=model_name, extension_data={"max_new_tokens": 25})

     # Define semantic function using SK prompt template language
     prompt = "{{$input}}"
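The test now passes max_new_tokens, matching the field on HuggingFacePromptExecutionSettings shown above (the old max_tokens key had no corresponding field). A hedged sketch of the generic settings object; from_prompt_execution_settings as the conversion helper is an assumption:

    from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings

    exec_settings = PromptExecutionSettings(
        service_id="gpt2",  # illustrative service id
        extension_data={"max_new_tokens": 25},  # key matches the HF settings field
    )
    # Assumed conversion step performed by the service:
    # hf_settings = HuggingFacePromptExecutionSettings.from_prompt_execution_settings(exec_settings)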
python/tests/unit/contents/test_chat_history.py (73 changes: 51 additions & 22 deletions)
@@ -6,6 +6,9 @@
 from semantic_kernel.contents.author_role import AuthorRole
 from semantic_kernel.contents.chat_history import ChatHistory
 from semantic_kernel.contents.chat_message_content import ChatMessageContent
+from semantic_kernel.contents.function_call_content import FunctionCallContent
+from semantic_kernel.contents.function_result_content import FunctionResultContent
+from semantic_kernel.contents.text_content import TextContent
 from semantic_kernel.exceptions import ContentInitializationError
 from semantic_kernel.functions.kernel_arguments import KernelArguments
 from semantic_kernel.kernel import Kernel
@@ -46,6 +49,13 @@ def test_add_system_message(chat_history: ChatHistory):
     assert chat_history.messages[-1].role == AuthorRole.SYSTEM


+def test_add_system_message_item(chat_history: ChatHistory):
+    content = [TextContent(text="System message")]
+    chat_history.add_system_message(content)
+    assert chat_history.messages[-1].content == str(content[0])
+    assert chat_history.messages[-1].role == AuthorRole.SYSTEM
+
+
 def test_add_system_message_at_init():
     content = "System message"
     chat_history = ChatHistory(system_message=content)
@@ -60,20 +70,41 @@ def test_add_user_message(chat_history: ChatHistory):
     assert chat_history.messages[-1].role == AuthorRole.USER


+def test_add_user_message_list(chat_history: ChatHistory):
+    content = [TextContent(text="User message")]
+    chat_history.add_user_message(content)
+    assert chat_history.messages[-1].content == content[0].text
+    assert chat_history.messages[-1].role == AuthorRole.USER
+
+
 def test_add_assistant_message(chat_history: ChatHistory):
     content = "Assistant message"
     chat_history.add_assistant_message(content)
     assert chat_history.messages[-1].content == content
     assert chat_history.messages[-1].role == AuthorRole.ASSISTANT


+def test_add_assistant_message_list(chat_history: ChatHistory):
+    content = [TextContent(text="Assistant message")]
+    chat_history.add_assistant_message(content)
+    assert chat_history.messages[-1].content == content[0].text
+    assert chat_history.messages[-1].role == AuthorRole.ASSISTANT
+
+
 def test_add_tool_message(chat_history: ChatHistory):
     content = "Tool message"
     chat_history.add_tool_message(content)
     assert chat_history.messages[-1].content == content
     assert chat_history.messages[-1].role == AuthorRole.TOOL


+def test_add_tool_message_list(chat_history: ChatHistory):
+    content = [FunctionResultContent(id="test", result="Tool message")]
+    chat_history.add_tool_message(content)
+    assert chat_history.messages[-1].items[0].result == content[0].result
+    assert chat_history.messages[-1].role == AuthorRole.TOOL
+
+
 def test_add_message(chat_history: ChatHistory):
     content = "Test message"
     role = AuthorRole.USER
@@ -410,28 +441,6 @@ async def test_handwritten_xml_as_arg():
     assert chat_history.messages[0].role == AuthorRole.USER


-# @pytest.mark.asyncio
-# async def test_history_openai_cmc(chat_history: ChatHistory):
-#     chat_history.add_message(
-#         message=OpenAIChatMessageContent(
-#             inner_content=None,
-#             role=AuthorRole.ASSISTANT,
-#             function_call=FunctionCall(name="test-test", arguments='{"input": "test"}'),
-#         )
-#     )
-#     template = "{{$chat_history}}"
-#     rendered = await KernelPromptTemplate(
-#         prompt_template_config=PromptTemplateConfig(name="test", description="test", template=template)
-#     ).render(
-#         kernel=Kernel(),
-#         arguments=KernelArguments(chat_history=chat_history),
-#     )
-#     chat_history1 = ChatHistory.from_rendered_prompt(rendered)
-
-#     assert chat_history1.messages[0].role == AuthorRole.ASSISTANT
-#     assert chat_history1.messages[0].function_call.name == "test-test"
-
-
 @pytest.mark.asyncio
 async def test_template_empty_history(chat_history: ChatHistory):
     template = "system stuff{{$chat_history}}{{$input}}"
@@ -447,3 +456,23 @@ async def test_template_empty_history(chat_history: ChatHistory):
     assert chat_history_2.messages[0].role == AuthorRole.SYSTEM
     assert chat_history_2.messages[1].content == "What can you do?"
     assert chat_history_2.messages[1].role == AuthorRole.USER
+
+
+def test_to_from_file(chat_history: ChatHistory, tmp_path):
+    chat_history.add_system_message("You are an AI assistant")
+    chat_history.add_user_message("What is the weather in Seattle?")
+    chat_history.add_assistant_message(
+        [FunctionCallContent(id="test1", name="WeatherPlugin-GetWeather", arguments='{{ "location": "Seattle" }}')]
+    )
+    chat_history.add_tool_message([FunctionResultContent(id="test1", result="It is raining")])
+    chat_history.add_assistant_message("It is raining in Seattle, what else can I help you with?")
+
+    file_path = tmp_path / "chat_history.json"
+    chat_history.store_chat_history_to_file(file_path)
+    chat_history_2 = ChatHistory.load_chat_history_from_file(file_path)
+    assert len(chat_history_2.messages) == len(chat_history.messages)
+    assert chat_history_2.messages[0] == chat_history.messages[0]
+    assert chat_history_2.messages[1] == chat_history.messages[1]
+    assert chat_history_2.messages[2] == chat_history.messages[2]
+    assert chat_history_2.messages[3] == chat_history.messages[3]
+    assert chat_history_2.messages[4] == chat_history.messages[4]
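For reference, the file round-trip exercised by the new test, as a standalone hedged sketch (a str path may also work, but Path mirrors the tmp_path usage above):

    from pathlib import Path

    from semantic_kernel.contents.chat_history import ChatHistory

    chat_history = ChatHistory()
    chat_history.add_user_message("Hi")
    path = Path("chat_history.json")
    chat_history.store_chat_history_to_file(path)
    restored = ChatHistory.load_chat_history_from_file(path)
    assert restored.messages == chat_history.messages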
