Merged
Changes from all commits
17 commits
09a44a6
Fix spelling of "H.265" encoding standard in `reolink` (#152130)
NoRi2909 Sep 12, 2025
4c1364d
Fix wrong type annotation in exposed_entities (#152142)
emontnemery Sep 12, 2025
1ef9018
Add plug mini eu for switchbot integration (#151130)
zerzhang Sep 12, 2025
2c34561
Add humidifier support for switchbot cloud integration (#149039)
zerzhang Sep 12, 2025
299cc5e
Fix sentence-casing of "CPU temperature" in `fritz` (#152149)
NoRi2909 Sep 12, 2025
68d987f
Bump hass-nabucasa from 1.1.0 to 1.1.1 (#152147)
ludeeus Sep 12, 2025
1d214ae
For the met integration Increase the hourly forecast limit to 48 hour…
jm-cook Sep 12, 2025
64ba437
Fix KNX Light - individual color initialisation from UI config (#151815)
farmio Sep 12, 2025
e438b11
Use `native_visibility` property instead of `visibility` for OpenWeat…
bieniu Sep 12, 2025
8003a49
Add guest mode switch to Teslemetry (#151550)
Bre77 Sep 12, 2025
ee506e6
Implement thinking content for Gemini (#150347)
Shulyaka Sep 12, 2025
2b61601
Remove the host from the AI Task generated image URL (#151887)
balloob Sep 12, 2025
207c848
Improve SwitchBot device discovery when Bluetooth adapter is in passi…
bdraco Sep 12, 2025
8412581
Implement snapshot-testing for Plugwise climate platform (#151070)
bouwew Sep 12, 2025
8263ea4
Don't try to connect after exiting loop in ntfy (#152011)
tr4nt0r Sep 12, 2025
baf4382
Miele consumption sensors consistent behavior with RestoreSensor (#15…
aturri Sep 12, 2025
4c22264
Add support for `inH₂O` pressure unit (#148289)
ekobres Sep 12, 2025
3 changes: 1 addition & 2 deletions homeassistant/components/ai_task/task.py
@@ -19,7 +19,6 @@
from homeassistant.helpers import llm
from homeassistant.helpers.chat_session import ChatSession, async_get_chat_session
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.network import get_url
from homeassistant.util import RE_SANITIZE_FILENAME, slugify

from .const import (
@@ -249,7 +248,7 @@ def _purge_image(filename: str, now: datetime) -> None:
if IMAGE_EXPIRY_TIME > 0:
async_call_later(hass, IMAGE_EXPIRY_TIME, partial(_purge_image, filename))

service_result["url"] = get_url(hass) + async_sign_path(
service_result["url"] = async_sign_path(
hass,
f"/api/{DOMAIN}/images/{filename}",
timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
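Note on the ai_task change above: with get_url(hass) removed, service_result["url"] is now a relative signed path instead of an absolute URL, so clients resolve it against whatever Home Assistant origin they are already using. A minimal sketch of that resolution, assuming the signed path keeps the usual ?authSig=... form produced by async_sign_path (the base URL and filename below are placeholders, not taken from the diff):

from urllib.parse import urljoin

# Placeholder values for illustration only.
base_url = "http://homeassistant.local:8123"  # the origin the client is already talking to
signed_path = "/api/ai_task/images/example.png?authSig=eyJhbGciOi..."  # assumed shape of async_sign_path output

# Resolving the relative path on the client side avoids baking a possibly
# wrong internal/external URL into the stored service result.
full_url = urljoin(base_url, signed_path)
print(full_url)  # http://homeassistant.local:8123/api/ai_task/images/example.png?authSig=eyJhbGciOi...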
2 changes: 1 addition & 1 deletion homeassistant/components/cloud/manifest.json
@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.1.0"],
"requirements": ["hass-nabucasa==1.1.1"],
"single_config_entry": true
}
2 changes: 1 addition & 1 deletion homeassistant/components/fritz/strings.json
@@ -176,7 +176,7 @@
"name": "Max connection upload throughput"
},
"cpu_temperature": {
"name": "CPU Temperature"
"name": "CPU temperature"
}
}
},
191 changes: 161 additions & 30 deletions homeassistant/components/google_generative_ai_conversation/entity.py
@@ -3,12 +3,13 @@
from __future__ import annotations

import asyncio
import base64
import codecs
from collections.abc import AsyncGenerator, AsyncIterator, Callable
from dataclasses import replace
from dataclasses import dataclass, replace
import mimetypes
from pathlib import Path
from typing import TYPE_CHECKING, Any, cast
from typing import TYPE_CHECKING, Any, Literal, cast

from google.genai import Client
from google.genai.errors import APIError, ClientError
@@ -27,6 +28,7 @@
PartUnionDict,
SafetySetting,
Schema,
ThinkingConfig,
Tool,
ToolListUnion,
)
@@ -201,6 +203,30 @@ def _create_google_tool_response_content(
)


@dataclass(slots=True)
class PartDetails:
"""Additional data for a content part."""

part_type: Literal["text", "thought", "function_call"]
"""The part type for which this data is relevant for."""

index: int
"""Start position or number of the tool."""

length: int = 0
"""Length of the relevant data."""

thought_signature: str | None = None
"""Base64 encoded thought signature, if available."""


@dataclass(slots=True)
class ContentDetails:
"""Native data for AssistantContent."""

part_details: list[PartDetails]


def _convert_content(
content: (
conversation.UserContent
@@ -209,32 +235,91 @@ def _convert_content(
),
) -> Content:
"""Convert HA content to Google content."""
if content.role != "assistant" or not content.tool_calls:
role = "model" if content.role == "assistant" else content.role
if content.role != "assistant":
return Content(
role=role,
parts=[
Part.from_text(text=content.content if content.content else ""),
],
role=content.role,
parts=[Part.from_text(text=content.content if content.content else "")],
)

# Handle the Assistant content with tool calls.
assert type(content) is conversation.AssistantContent
parts: list[Part] = []
part_details: list[PartDetails] = (
content.native.part_details
if isinstance(content.native, ContentDetails)
else []
)
details: PartDetails | None = None

if content.content:
parts.append(Part.from_text(text=content.content))
index = 0
for details in part_details:
if details.part_type == "text":
if index < details.index:
parts.append(
Part.from_text(text=content.content[index : details.index])
)
index = details.index
parts.append(
Part.from_text(
text=content.content[index : index + details.length],
)
)
if details.thought_signature:
parts[-1].thought_signature = base64.b64decode(
details.thought_signature
)
index += details.length
if index < len(content.content):
parts.append(Part.from_text(text=content.content[index:]))

if content.thinking_content:
index = 0
for details in part_details:
if details.part_type == "thought":
if index < details.index:
parts.append(
Part.from_text(
text=content.thinking_content[index : details.index]
)
)
parts[-1].thought = True
index = details.index
parts.append(
Part.from_text(
text=content.thinking_content[index : index + details.length],
)
)
parts[-1].thought = True
if details.thought_signature:
parts[-1].thought_signature = base64.b64decode(
details.thought_signature
)
index += details.length
if index < len(content.thinking_content):
parts.append(Part.from_text(text=content.thinking_content[index:]))
parts[-1].thought = True

if content.tool_calls:
parts.extend(
[
for index, tool_call in enumerate(content.tool_calls):
parts.append(
Part.from_function_call(
name=tool_call.tool_name,
args=_escape_decode(tool_call.tool_args),
)
for tool_call in content.tool_calls
]
)
)
if details := next(
(
d
for d in part_details
if d.part_type == "function_call" and d.index == index
),
None,
):
if details.thought_signature:
parts[-1].thought_signature = base64.b64decode(
details.thought_signature
)

return Content(role="model", parts=parts)

@@ -243,14 +328,20 @@ async def _transform_stream(
result: AsyncIterator[GenerateContentResponse],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
new_message = True
part_details: list[PartDetails] = []
try:
async for response in result:
LOGGER.debug("Received response chunk: %s", response)
chunk: conversation.AssistantContentDeltaDict = {}

if new_message:
chunk["role"] = "assistant"
if part_details:
yield {"native": ContentDetails(part_details=part_details)}
part_details = []
yield {"role": "assistant"}
new_message = False
content_index = 0
thinking_content_index = 0
tool_call_index = 0

# According to the API docs, this would mean no candidate is returned, so we can safely throw an error here.
if response.prompt_feedback or not response.candidates:
@@ -284,23 +375,62 @@
else []
)

content = "".join([part.text for part in response_parts if part.text])
tool_calls = []
for part in response_parts:
if not part.function_call:
continue
tool_call = part.function_call
tool_name = tool_call.name if tool_call.name else ""
tool_args = _escape_decode(tool_call.args)
tool_calls.append(
llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
)
chunk: conversation.AssistantContentDeltaDict = {}

if part.text:
if part.thought:
chunk["thinking_content"] = part.text
if part.thought_signature:
part_details.append(
PartDetails(
part_type="thought",
index=thinking_content_index,
length=len(part.text),
thought_signature=base64.b64encode(
part.thought_signature
).decode("utf-8"),
)
)
thinking_content_index += len(part.text)
else:
chunk["content"] = part.text
if part.thought_signature:
part_details.append(
PartDetails(
part_type="text",
index=content_index,
length=len(part.text),
thought_signature=base64.b64encode(
part.thought_signature
).decode("utf-8"),
)
)
content_index += len(part.text)

if part.function_call:
tool_call = part.function_call
tool_name = tool_call.name if tool_call.name else ""
tool_args = _escape_decode(tool_call.args)
chunk["tool_calls"] = [
llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
]
if part.thought_signature:
part_details.append(
PartDetails(
part_type="function_call",
index=tool_call_index,
thought_signature=base64.b64encode(
part.thought_signature
).decode("utf-8"),
)
)

yield chunk

if tool_calls:
chunk["tool_calls"] = tool_calls
if part_details:
yield {"native": ContentDetails(part_details=part_details)}

chunk["content"] = content
yield chunk
except (
APIError,
ValueError,
@@ -522,6 +652,7 @@ def create_generate_content_config(self) -> GenerateContentConfig:
),
),
],
thinking_config=ThinkingConfig(include_thoughts=True),
)


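A note on the Gemini thinking-content change above: PartDetails records where each streamed part landed inside the concatenated content / thinking_content strings, plus a base64 copy of its thought_signature, and _convert_content later slices those strings back apart and reattaches the signatures. The following standalone sketch shows that round trip with simplified stand-in types (not the Home Assistant or google-genai classes):

import base64
from dataclasses import dataclass

@dataclass
class FakePartDetails:          # simplified stand-in for PartDetails
    part_type: str              # "text" | "thought" | "function_call"
    index: int                  # start offset in the concatenated string
    length: int = 0
    thought_signature: str | None = None

# Streaming side: two text parts arrive, the second one carries a signature.
parts_in = [("Hello ", None), ("world", b"\x01\x02sig")]
content, details, offset = "", [], 0
for text, sig in parts_in:
    if sig is not None:
        details.append(FakePartDetails("text", offset, len(text),
                                       base64.b64encode(sig).decode("utf-8")))
    content += text
    offset += len(text)

# Rebuild side: slice the concatenated string back into parts and restore signatures.
rebuilt, index = [], 0
for d in details:
    if index < d.index:                       # unsigned text before a signed part
        rebuilt.append((content[index:d.index], None))
        index = d.index
    sig = base64.b64decode(d.thought_signature) if d.thought_signature else None
    rebuilt.append((content[index:index + d.length], sig))
    index += d.length
if index < len(content):                      # trailing unsigned text, if any
    rebuilt.append((content[index:], None))

print(rebuilt)  # [('Hello ', None), ('world', b'\x01\x02sig')]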
2 changes: 1 addition & 1 deletion homeassistant/components/homeassistant/exposed_entities.py
@@ -406,7 +406,7 @@ def ws_expose_entity(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""Expose an entity to an assistant."""
entity_ids: str = msg["entity_ids"]
entity_ids: list[str] = msg["entity_ids"]

if blocked := next(
(
14 changes: 10 additions & 4 deletions homeassistant/components/knx/light.py
@@ -285,13 +285,19 @@ def _create_ui_light(xknx: XKNX, knx_config: ConfigType, name: str) -> XknxLight
group_address_switch_green_state=conf.get_state_and_passive(
CONF_COLOR, CONF_GA_GREEN_SWITCH
),
group_address_brightness_green=conf.get_write(CONF_GA_GREEN_BRIGHTNESS),
group_address_brightness_green=conf.get_write(
CONF_COLOR, CONF_GA_GREEN_BRIGHTNESS
),
group_address_brightness_green_state=conf.get_state_and_passive(
CONF_COLOR, CONF_GA_GREEN_BRIGHTNESS
),
group_address_switch_blue=conf.get_write(CONF_GA_BLUE_SWITCH),
group_address_switch_blue_state=conf.get_state_and_passive(CONF_GA_BLUE_SWITCH),
group_address_brightness_blue=conf.get_write(CONF_GA_BLUE_BRIGHTNESS),
group_address_switch_blue=conf.get_write(CONF_COLOR, CONF_GA_BLUE_SWITCH),
group_address_switch_blue_state=conf.get_state_and_passive(
CONF_COLOR, CONF_GA_BLUE_SWITCH
),
group_address_brightness_blue=conf.get_write(
CONF_COLOR, CONF_GA_BLUE_BRIGHTNESS
),
group_address_brightness_blue_state=conf.get_state_and_passive(
CONF_COLOR, CONF_GA_BLUE_BRIGHTNESS
),
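For context on the KNX fix above: the green and blue group addresses sit under the light's color sub-section of the UI config, so the extractor needs the full key path; passing only the leaf key made it look in the wrong place. A hedged sketch of the difference, assuming the integration's config extractor walks nested dict keys roughly like this (placeholder keys and addresses, not the real KNX schema objects):

# Hypothetical stand-in for the KNX config extractor lookup; keys and
# structure are simplified for illustration only.
knx_config = {
    "color": {
        "ga_green_brightness": {"write": "1/2/3", "state": "1/2/4"},
    },
}

def get_write(config: dict, *path: str) -> str | None:
    node = config
    for key in path:
        if not isinstance(node, dict) or key not in node:
            return None
        node = node[key]
    return node.get("write") if isinstance(node, dict) else None

print(get_write(knx_config, "ga_green_brightness"))           # None, leaf key alone is not enough
print(get_write(knx_config, "color", "ga_green_brightness"))  # "1/2/3", the corrected lookup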
10 changes: 5 additions & 5 deletions homeassistant/components/knx/storage/entity_store_schema.py
@@ -240,19 +240,19 @@ class LightColorMode(StrEnum):
write_required=True, valid_dpt="5.001"
),
"section_blue": KNXSectionFlat(),
vol.Required(CONF_GA_BLUE_BRIGHTNESS): GASelector(
write_required=True, valid_dpt="5.001"
),
vol.Optional(CONF_GA_BLUE_SWITCH): GASelector(
write_required=False, valid_dpt="1"
),
"section_white": KNXSectionFlat(),
vol.Optional(CONF_GA_WHITE_BRIGHTNESS): GASelector(
vol.Required(CONF_GA_BLUE_BRIGHTNESS): GASelector(
write_required=True, valid_dpt="5.001"
),
"section_white": KNXSectionFlat(),
vol.Optional(CONF_GA_WHITE_SWITCH): GASelector(
write_required=False, valid_dpt="1"
),
vol.Optional(CONF_GA_WHITE_BRIGHTNESS): GASelector(
write_required=True, valid_dpt="5.001"
),
},
),
GroupSelectOption(
4 changes: 3 additions & 1 deletion homeassistant/components/met/coordinator.py
@@ -83,7 +83,9 @@ async def fetch_data(self) -> Self:
self.current_weather_data = self._weather_data.get_current_weather()
time_zone = dt_util.get_default_time_zone()
self.daily_forecast = self._weather_data.get_forecast(time_zone, False, 0)
self.hourly_forecast = self._weather_data.get_forecast(time_zone, True)
self.hourly_forecast = self._weather_data.get_forecast(
time_zone, True, range_stop=49
)
return self

