From ca6289a57610b67df74a6099c9f12f0effab1675 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 16 Sep 2025 12:15:43 +0200 Subject: [PATCH 01/16] Refactor template engine: Extract string functions into StringExtension (#152420) --- homeassistant/helpers/template/__init__.py | 33 +--- .../helpers/template/extensions/__init__.py | 9 +- .../helpers/template/extensions/string.py | 58 +++++++ .../template/extensions/test_string.py | 164 ++++++++++++++++++ tests/helpers/template/test_init.py | 54 ------ 5 files changed, 232 insertions(+), 86 deletions(-) create mode 100644 homeassistant/helpers/template/extensions/string.py create mode 100644 tests/helpers/template/extensions/test_string.py diff --git a/homeassistant/helpers/template/__init__.py b/homeassistant/helpers/template/__init__.py index 6635cb139f5461..b876cb2c6aed71 100644 --- a/homeassistant/helpers/template/__init__.py +++ b/homeassistant/helpers/template/__init__.py @@ -31,7 +31,6 @@ cast, overload, ) -from urllib.parse import urlencode as urllib_urlencode import weakref from awesomeversion import AwesomeVersion @@ -82,12 +81,7 @@ from homeassistant.helpers.translation import async_translate_state from homeassistant.helpers.typing import TemplateVarsType from homeassistant.loader import bind_hass -from homeassistant.util import ( - convert, - dt as dt_util, - location as location_util, - slugify as slugify_util, -) +from homeassistant.util import convert, dt as dt_util, location as location_util from homeassistant.util.async_ import run_callback_threadsafe from homeassistant.util.hass_dict import HassKey from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads @@ -2327,16 +2321,6 @@ def from_hex(value: str) -> bytes: return bytes.fromhex(value) -def ordinal(value): - """Perform ordinal conversion.""" - suffixes = ["th", "st", "nd", "rd"] + ["th"] * 6 # codespell:ignore nd - return str(value) + ( - suffixes[(int(str(value)[-1])) % 10] - if int(str(value)[-2:]) % 100 not in range(11, 14) - else "th" - ) - - def from_json(value, default=_SENTINEL): """Convert a JSON string to an object.""" try: @@ -2483,16 +2467,6 @@ def time_until(hass: HomeAssistant, value: Any | datetime, precision: int = 1) - return dt_util.get_time_remaining(value, precision) -def urlencode(value): - """Urlencode dictionary and return as UTF-8 string.""" - return urllib_urlencode(value).encode("utf-8") - - -def slugify(value, separator="_"): - """Convert a string into a slug, such as what is used for entity ids.""" - return slugify_util(value, separator=separator) - - def iif( value: Any, if_true: Any = True, if_false: Any = False, if_none: Any = _SENTINEL ) -> Any: @@ -2789,6 +2763,7 @@ def __init__( self.add_extension("homeassistant.helpers.template.extensions.CryptoExtension") self.add_extension("homeassistant.helpers.template.extensions.MathExtension") self.add_extension("homeassistant.helpers.template.extensions.RegexExtension") + self.add_extension("homeassistant.helpers.template.extensions.StringExtension") self.globals["as_datetime"] = as_datetime self.globals["as_function"] = as_function @@ -2808,7 +2783,6 @@ def __init__( self.globals["pack"] = struct_pack self.globals["set"] = _to_set self.globals["shuffle"] = shuffle - self.globals["slugify"] = slugify self.globals["strptime"] = strptime self.globals["symmetric_difference"] = symmetric_difference self.globals["timedelta"] = timedelta @@ -2816,7 +2790,6 @@ def __init__( self.globals["typeof"] = typeof self.globals["union"] = union self.globals["unpack"] = struct_unpack - 
self.globals["urlencode"] = urlencode self.globals["version"] = version self.globals["zip"] = zip @@ -2842,12 +2815,10 @@ def __init__( self.filters["is_number"] = is_number self.filters["multiply"] = multiply self.filters["ord"] = ord - self.filters["ordinal"] = ordinal self.filters["pack"] = struct_pack self.filters["random"] = random_every_time self.filters["round"] = forgiving_round self.filters["shuffle"] = shuffle - self.filters["slugify"] = slugify self.filters["symmetric_difference"] = symmetric_difference self.filters["timestamp_custom"] = timestamp_custom self.filters["timestamp_local"] = timestamp_local diff --git a/homeassistant/helpers/template/extensions/__init__.py b/homeassistant/helpers/template/extensions/__init__.py index 22ce6b71c4ae6a..b6bb7fb8ad9425 100644 --- a/homeassistant/helpers/template/extensions/__init__.py +++ b/homeassistant/helpers/template/extensions/__init__.py @@ -4,5 +4,12 @@ from .crypto import CryptoExtension from .math import MathExtension from .regex import RegexExtension +from .string import StringExtension -__all__ = ["Base64Extension", "CryptoExtension", "MathExtension", "RegexExtension"] +__all__ = [ + "Base64Extension", + "CryptoExtension", + "MathExtension", + "RegexExtension", + "StringExtension", +] diff --git a/homeassistant/helpers/template/extensions/string.py b/homeassistant/helpers/template/extensions/string.py new file mode 100644 index 00000000000000..ee0af35e2a86dd --- /dev/null +++ b/homeassistant/helpers/template/extensions/string.py @@ -0,0 +1,58 @@ +"""Jinja2 extension for string processing functions.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any +from urllib.parse import urlencode as urllib_urlencode + +from homeassistant.util import slugify as slugify_util + +from .base import BaseTemplateExtension, TemplateFunction + +if TYPE_CHECKING: + from homeassistant.helpers.template import TemplateEnvironment + + +class StringExtension(BaseTemplateExtension): + """Jinja2 extension for string processing functions.""" + + def __init__(self, environment: TemplateEnvironment) -> None: + """Initialize the string extension.""" + super().__init__( + environment, + functions=[ + TemplateFunction( + "ordinal", + self.ordinal, + as_filter=True, + ), + TemplateFunction( + "slugify", + self.slugify, + as_global=True, + as_filter=True, + ), + TemplateFunction( + "urlencode", + self.urlencode, + as_global=True, + ), + ], + ) + + def ordinal(self, value: Any) -> str: + """Perform ordinal conversion.""" + suffixes = ["th", "st", "nd", "rd"] + ["th"] * 6 # codespell:ignore nd + return str(value) + ( + suffixes[(int(str(value)[-1])) % 10] + if int(str(value)[-2:]) % 100 not in range(11, 14) + else "th" + ) + + def slugify(self, value: Any, separator: str = "_") -> str: + """Convert a string into a slug, such as what is used for entity ids.""" + return slugify_util(str(value), separator=separator) + + def urlencode(self, value: Any) -> bytes: + """Urlencode dictionary and return as UTF-8 string.""" + return urllib_urlencode(value).encode("utf-8") diff --git a/tests/helpers/template/extensions/test_string.py b/tests/helpers/template/extensions/test_string.py new file mode 100644 index 00000000000000..241bf40eef1092 --- /dev/null +++ b/tests/helpers/template/extensions/test_string.py @@ -0,0 +1,164 @@ +"""Test string template extension.""" + +from __future__ import annotations + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import template + + +def test_ordinal(hass: HomeAssistant) -> None: + 
"""Test the ordinal filter.""" + tests = [ + (1, "1st"), + (2, "2nd"), + (3, "3rd"), + (4, "4th"), + (5, "5th"), + (12, "12th"), + (100, "100th"), + (101, "101st"), + ] + + for value, expected in tests: + assert ( + template.Template(f"{{{{ {value} | ordinal }}}}", hass).async_render() + == expected + ) + + +def test_slugify(hass: HomeAssistant) -> None: + """Test the slugify filter.""" + # Test as global function + assert ( + template.Template('{{ slugify("Home Assistant") }}', hass).async_render() + == "home_assistant" + ) + + # Test as filter + assert ( + template.Template('{{ "Home Assistant" | slugify }}', hass).async_render() + == "home_assistant" + ) + + # Test with custom separator as global + assert ( + template.Template('{{ slugify("Home Assistant", "-") }}', hass).async_render() + == "home-assistant" + ) + + # Test with custom separator as filter + assert ( + template.Template('{{ "Home Assistant" | slugify("-") }}', hass).async_render() + == "home-assistant" + ) + + +def test_urlencode(hass: HomeAssistant) -> None: + """Test the urlencode method.""" + # Test with dictionary + tpl = template.Template( + "{% set dict = {'foo': 'x&y', 'bar': 42} %}{{ dict | urlencode }}", + hass, + ) + assert tpl.async_render() == "foo=x%26y&bar=42" + + # Test with string + tpl = template.Template( + "{% set string = 'the quick brown fox = true' %}{{ string | urlencode }}", + hass, + ) + assert tpl.async_render() == "the%20quick%20brown%20fox%20%3D%20true" + + +def test_string_functions_with_non_string_input(hass: HomeAssistant) -> None: + """Test string functions with non-string input (automatic conversion).""" + # Test ordinal with integer + assert template.Template("{{ 42 | ordinal }}", hass).async_render() == "42nd" + + # Test slugify with integer - Note: Jinja2 may return integer for simple cases + result = template.Template("{{ 123 | slugify }}", hass).async_render() + # Accept either string or integer result for simple numeric cases + assert result in ["123", 123] + + +def test_ordinal_edge_cases(hass: HomeAssistant) -> None: + """Test ordinal function with edge cases.""" + # Test teens (11th, 12th, 13th should all be 'th') + teens_tests = [ + (11, "11th"), + (12, "12th"), + (13, "13th"), + (111, "111th"), + (112, "112th"), + (113, "113th"), + ] + + for value, expected in teens_tests: + assert ( + template.Template(f"{{{{ {value} | ordinal }}}}", hass).async_render() + == expected + ) + + # Test other numbers ending in 1, 2, 3 + other_tests = [ + (21, "21st"), + (22, "22nd"), + (23, "23rd"), + (121, "121st"), + (122, "122nd"), + (123, "123rd"), + ] + + for value, expected in other_tests: + assert ( + template.Template(f"{{{{ {value} | ordinal }}}}", hass).async_render() + == expected + ) + + +def test_slugify_various_separators(hass: HomeAssistant) -> None: + """Test slugify with various separators.""" + test_cases = [ + ("Hello World", "_", "hello_world"), + ("Hello World", "-", "hello-world"), + ("Hello World", ".", "hello.world"), + ("Hello-World_Test", "~", "hello~world~test"), + ] + + for text, separator, expected in test_cases: + # Test as global function + assert ( + template.Template( + f'{{{{ slugify("{text}", "{separator}") }}}}', hass + ).async_render() + == expected + ) + + # Test as filter + assert ( + template.Template( + f'{{{{ "{text}" | slugify("{separator}") }}}}', hass + ).async_render() + == expected + ) + + +def test_urlencode_various_types(hass: HomeAssistant) -> None: + """Test urlencode with various data types.""" + # Test with nested dictionary values + tpl = 
template.Template( + "{% set data = {'key': 'value with spaces', 'num': 123} %}{{ data | urlencode }}", + hass, + ) + result = tpl.async_render() + # URL encoding can have different order, so check both parts are present + # Note: urllib.parse.urlencode uses + for spaces in form data + assert "key=value+with+spaces" in result + assert "num=123" in result + + # Test with special characters + tpl = template.Template( + "{% set data = {'special': 'a+b=c&d'} %}{{ data | urlencode }}", + hass, + ) + assert tpl.async_render() == "special=a%2Bb%3Dc%26d" diff --git a/tests/helpers/template/test_init.py b/tests/helpers/template/test_init.py index 959eea7ec4e56d..77191af52593e9 100644 --- a/tests/helpers/template/test_init.py +++ b/tests/helpers/template/test_init.py @@ -1202,46 +1202,6 @@ def test_from_hex(hass: HomeAssistant) -> None: ) -def test_slugify(hass: HomeAssistant) -> None: - """Test the slugify filter.""" - assert ( - template.Template('{{ slugify("Home Assistant") }}', hass).async_render() - == "home_assistant" - ) - assert ( - template.Template('{{ "Home Assistant" | slugify }}', hass).async_render() - == "home_assistant" - ) - assert ( - template.Template('{{ slugify("Home Assistant", "-") }}', hass).async_render() - == "home-assistant" - ) - assert ( - template.Template('{{ "Home Assistant" | slugify("-") }}', hass).async_render() - == "home-assistant" - ) - - -def test_ordinal(hass: HomeAssistant) -> None: - """Test the ordinal filter.""" - tests = [ - (1, "1st"), - (2, "2nd"), - (3, "3rd"), - (4, "4th"), - (5, "5th"), - (12, "12th"), - (100, "100th"), - (101, "101st"), - ] - - for value, expected in tests: - assert ( - template.Template(f"{{{{ {value} | ordinal }}}}", hass).async_render() - == expected - ) - - def test_timestamp_utc(hass: HomeAssistant) -> None: """Test the timestamps to local filter.""" now = dt_util.utcnow() @@ -4495,20 +4455,6 @@ def test_render_complex_handling_non_template_values(hass: HomeAssistant) -> Non ) == {True: 1, False: 2} -def test_urlencode(hass: HomeAssistant) -> None: - """Test the urlencode method.""" - tpl = template.Template( - "{% set dict = {'foo': 'x&y', 'bar': 42} %}{{ dict | urlencode }}", - hass, - ) - assert tpl.async_render() == "foo=x%26y&bar=42" - tpl = template.Template( - "{% set string = 'the quick brown fox = true' %}{{ string | urlencode }}", - hass, - ) - assert tpl.async_render() == "the%20quick%20brown%20fox%20%3D%20true" - - def test_as_timedelta(hass: HomeAssistant) -> None: """Test the as_timedelta function/filter.""" tpl = template.Template("{{ as_timedelta('PT10M') }}", hass) From aa8d78622c1293ef30ba388d32ae62c8fed1b46e Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Tue, 16 Sep 2025 12:15:57 +0200 Subject: [PATCH 02/16] Add La Marzocco specific client headers (#152419) --- homeassistant/components/lamarzocco/__init__.py | 17 ++++++++++++++++- .../components/lamarzocco/config_flow.py | 4 ++-- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/__init__.py b/homeassistant/components/lamarzocco/__init__.py index 15ff16346879c1..96d4f4c61ac4cd 100644 --- a/homeassistant/components/lamarzocco/__init__.py +++ b/homeassistant/components/lamarzocco/__init__.py @@ -4,6 +4,7 @@ import logging import uuid +from aiohttp import ClientSession from packaging import version from pylamarzocco import ( LaMarzoccoBluetoothClient, @@ -21,6 +22,7 @@ CONF_TOKEN, CONF_USERNAME, Platform, + __version__, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import 
ConfigEntryAuthFailed, ConfigEntryNotReady @@ -63,7 +65,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD], installation_key=InstallationKey.from_json(entry.data[CONF_INSTALLATION_KEY]), - client=async_create_clientsession(hass), + client=create_client_session(hass), ) try: @@ -185,6 +187,7 @@ async def async_migrate_entry( username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD], installation_key=installation_key, + client=create_client_session(hass), ) try: await cloud_client.async_register_client() @@ -203,3 +206,15 @@ async def async_migrate_entry( _LOGGER.debug("Migrated La Marzocco config entry to version 4") return True + + +def create_client_session(hass: HomeAssistant) -> ClientSession: + """Create a ClientSession with La Marzocco specific headers.""" + + return async_create_clientsession( + hass, + headers={ + "X-Client": "HOME_ASSISTANT", + "X-Client-Build": __version__, + }, + ) diff --git a/homeassistant/components/lamarzocco/config_flow.py b/homeassistant/components/lamarzocco/config_flow.py index 7f08ac9a48edf4..ab99fbbc63fe09 100644 --- a/homeassistant/components/lamarzocco/config_flow.py +++ b/homeassistant/components/lamarzocco/config_flow.py @@ -35,7 +35,6 @@ ) from homeassistant.core import callback from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.aiohttp_client import async_create_clientsession from homeassistant.helpers.selector import ( SelectOptionDict, SelectSelector, @@ -47,6 +46,7 @@ ) from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo +from . import create_client_session from .const import CONF_INSTALLATION_KEY, CONF_USE_BLUETOOTH, DOMAIN from .coordinator import LaMarzoccoConfigEntry @@ -86,7 +86,7 @@ async def async_step_user( **user_input, } - self._client = async_create_clientsession(self.hass) + self._client = create_client_session(self.hass) self._installation_key = generate_installation_key( str(uuid.uuid4()).lower() ) From f9b1c52d653275f24736f9c5247ff1c3c0551364 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 16 Sep 2025 12:42:37 +0200 Subject: [PATCH 03/16] Fix warning in prowl tests (#152424) --- tests/components/prowl/test_notify.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/prowl/test_notify.py b/tests/components/prowl/test_notify.py index 638c9a217e9a99..8047ed177e669a 100644 --- a/tests/components/prowl/test_notify.py +++ b/tests/components/prowl/test_notify.py @@ -69,7 +69,7 @@ async def test_send_notification_service( ( SyntaxError(), SyntaxError, - "", + None, ), ], ) @@ -79,7 +79,7 @@ async def test_fail_send_notification( mock_prowlpy: Mock, prowlpy_side_effect: Exception, raised_exception: type[Exception], - exception_message: str, + exception_message: str | None, ) -> None: """Sending a message via Prowl with a failure.""" mock_prowlpy.send.side_effect = prowlpy_side_effect From 44a95242dc50ed305f025e857e4ca0b0d28791f4 Mon Sep 17 00:00:00 2001 From: Chris Oldfield Date: Tue, 16 Sep 2025 21:06:14 +1000 Subject: [PATCH 04/16] Add downloading and seeding counts to Deluge (#150623) --- homeassistant/components/deluge/const.py | 2 + .../components/deluge/coordinator.py | 45 +++++++++++++++---- homeassistant/components/deluge/icons.json | 12 +++++ homeassistant/components/deluge/sensor.py | 12 +++++ homeassistant/components/deluge/strings.json | 8 ++++ 
tests/components/deluge/__init__.py | 6 +++ tests/components/deluge/test_coordinator.py | 15 +++++++ 7 files changed, 92 insertions(+), 8 deletions(-) create mode 100644 homeassistant/components/deluge/icons.json create mode 100644 tests/components/deluge/test_coordinator.py diff --git a/homeassistant/components/deluge/const.py b/homeassistant/components/deluge/const.py index a76817519da9ef..909fa2e98c3645 100644 --- a/homeassistant/components/deluge/const.py +++ b/homeassistant/components/deluge/const.py @@ -43,3 +43,5 @@ class DelugeSensorType(enum.StrEnum): UPLOAD_SPEED_SENSOR = "upload_speed" PROTOCOL_TRAFFIC_UPLOAD_SPEED_SENSOR = "protocol_traffic_upload_speed" PROTOCOL_TRAFFIC_DOWNLOAD_SPEED_SENSOR = "protocol_traffic_download_speed" + DOWNLOADING_COUNT_SENSOR = "downloading_count" + SEEDING_COUNT_SENSOR = "seeding_count" diff --git a/homeassistant/components/deluge/coordinator.py b/homeassistant/components/deluge/coordinator.py index c5836243b9ddb8..f86f92767ee95b 100644 --- a/homeassistant/components/deluge/coordinator.py +++ b/homeassistant/components/deluge/coordinator.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections import Counter from datetime import timedelta from ssl import SSLError from typing import Any @@ -14,11 +15,22 @@ from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import LOGGER, DelugeGetSessionStatusKeys +from .const import LOGGER, DelugeGetSessionStatusKeys, DelugeSensorType type DelugeConfigEntry = ConfigEntry[DelugeDataUpdateCoordinator] +def count_states(data: dict[str, Any]) -> dict[str, int]: + """Count the states of the provided torrents.""" + + counts = Counter(torrent[b"state"].decode() for torrent in data.values()) + + return { + DelugeSensorType.DOWNLOADING_COUNT_SENSOR.value: counts.get("Downloading", 0), + DelugeSensorType.SEEDING_COUNT_SENSOR.value: counts.get("Seeding", 0), + } + + class DelugeDataUpdateCoordinator( DataUpdateCoordinator[dict[Platform, dict[str, Any]]] ): @@ -39,19 +51,22 @@ def __init__( ) self.api = api - async def _async_update_data(self) -> dict[Platform, dict[str, Any]]: - """Get the latest data from Deluge and updates the state.""" + def _get_deluge_data(self): + """Get the latest data from Deluge.""" + data = {} try: - _data = await self.hass.async_add_executor_job( - self.api.call, + data["session_status"] = self.api.call( "core.get_session_status", [iter_member.value for iter_member in list(DelugeGetSessionStatusKeys)], ) - data[Platform.SENSOR] = {k.decode(): v for k, v in _data.items()} - data[Platform.SWITCH] = await self.hass.async_add_executor_job( - self.api.call, "core.get_torrents_status", {}, ["paused"] + data["torrents_status_state"] = self.api.call( + "core.get_torrents_status", {}, ["state"] ) + data["torrents_status_paused"] = self.api.call( + "core.get_torrents_status", {}, ["paused"] + ) + except ( ConnectionRefusedError, TimeoutError, @@ -66,4 +81,18 @@ async def _async_update_data(self) -> dict[Platform, dict[str, Any]]: ) from ex LOGGER.error("Unknown error connecting to Deluge: %s", ex) raise + + return data + + async def _async_update_data(self) -> dict[Platform, dict[str, Any]]: + """Get the latest data from Deluge and updates the state.""" + + deluge_data = await self.hass.async_add_executor_job(self._get_deluge_data) + + data = {} + data[Platform.SENSOR] = { + k.decode(): v for k, v in deluge_data["session_status"].items() + } + 
data[Platform.SENSOR].update(count_states(deluge_data["torrents_status_state"])) + data[Platform.SWITCH] = deluge_data["torrents_status_paused"] return data diff --git a/homeassistant/components/deluge/icons.json b/homeassistant/components/deluge/icons.json new file mode 100644 index 00000000000000..67805322cdb89f --- /dev/null +++ b/homeassistant/components/deluge/icons.json @@ -0,0 +1,12 @@ +{ + "entity": { + "sensor": { + "downloading_count": { + "default": "mdi:download" + }, + "seeding_count": { + "default": "mdi:upload" + } + } + } +} diff --git a/homeassistant/components/deluge/sensor.py b/homeassistant/components/deluge/sensor.py index d6809967703f66..eb6ac9b27b912a 100644 --- a/homeassistant/components/deluge/sensor.py +++ b/homeassistant/components/deluge/sensor.py @@ -110,6 +110,18 @@ class DelugeSensorEntityDescription(SensorEntityDescription): data, DelugeSensorType.PROTOCOL_TRAFFIC_DOWNLOAD_SPEED_SENSOR.value ), ), + DelugeSensorEntityDescription( + key=DelugeSensorType.DOWNLOADING_COUNT_SENSOR.value, + translation_key=DelugeSensorType.DOWNLOADING_COUNT_SENSOR.value, + state_class=SensorStateClass.TOTAL, + value=lambda data: data[DelugeSensorType.DOWNLOADING_COUNT_SENSOR.value], + ), + DelugeSensorEntityDescription( + key=DelugeSensorType.SEEDING_COUNT_SENSOR.value, + translation_key=DelugeSensorType.SEEDING_COUNT_SENSOR.value, + state_class=SensorStateClass.TOTAL, + value=lambda data: data[DelugeSensorType.SEEDING_COUNT_SENSOR.value], + ), ) diff --git a/homeassistant/components/deluge/strings.json b/homeassistant/components/deluge/strings.json index ddea78b315f49d..be412b7108172b 100644 --- a/homeassistant/components/deluge/strings.json +++ b/homeassistant/components/deluge/strings.json @@ -36,6 +36,10 @@ "idle": "[%key:common::state::idle%]" } }, + "downloading_count": { + "name": "Downloading count", + "unit_of_measurement": "torrents" + }, "download_speed": { "name": "Download speed" }, @@ -45,6 +49,10 @@ "protocol_traffic_upload_speed": { "name": "Protocol traffic upload speed" }, + "seeding_count": { + "name": "Seeding count", + "unit_of_measurement": "[%key:component::deluge::entity::sensor::downloading_count::unit_of_measurement%]" + }, "upload_speed": { "name": "Upload speed" } diff --git a/tests/components/deluge/__init__.py b/tests/components/deluge/__init__.py index c9027f0c11ff58..5d5e6bf3e022c9 100644 --- a/tests/components/deluge/__init__.py +++ b/tests/components/deluge/__init__.py @@ -21,3 +21,9 @@ "dht_upload_rate": 7818.0, "dht_download_rate": 2658.0, } + +GET_TORRENT_STATES_RESPONSE = { + "6dcd3f46d09547b62bf07ba9b2943c95d53ddae3": {b"state": b"Seeding"}, + "1c56ea49918b9baed94cf4bc0ee9f324efc8841a": {b"state": b"Downloading"}, + "fbf4dab701189a344fa5ab06d7b87c11a74e3da0": {b"state": b"Seeding"}, +} diff --git a/tests/components/deluge/test_coordinator.py b/tests/components/deluge/test_coordinator.py new file mode 100644 index 00000000000000..a2ca30d7c94ccc --- /dev/null +++ b/tests/components/deluge/test_coordinator.py @@ -0,0 +1,15 @@ +"""Test Deluge coordinator.py methods.""" + +from homeassistant.components.deluge.const import DelugeSensorType +from homeassistant.components.deluge.coordinator import count_states + +from . 
import GET_TORRENT_STATES_RESPONSE + + +def test_get_count() -> None: + """Tests count_states().""" + + states = count_states(GET_TORRENT_STATES_RESPONSE) + + assert states[DelugeSensorType.DOWNLOADING_COUNT_SENSOR.value] == 1 + assert states[DelugeSensorType.SEEDING_COUNT_SENSOR.value] == 2 From 0254285285da3ff2c37ef6733267bf1b68fa19af Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 16 Sep 2025 13:30:36 +0200 Subject: [PATCH 05/16] Fix warning in template extensions tests (#152425) --- tests/helpers/template/extensions/test_regex.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/helpers/template/extensions/test_regex.py b/tests/helpers/template/extensions/test_regex.py index 7e15be547db797..290b55bad1fed1 100644 --- a/tests/helpers/template/extensions/test_regex.py +++ b/tests/helpers/template/extensions/test_regex.py @@ -193,7 +193,7 @@ def test_regex_with_non_string_input(hass: HomeAssistant) -> None: """Test regex functions with non-string input (automatic conversion).""" # Test with integer tpl = template.Template( - """ + r""" {{ 12345 | regex_match('\\d+') }} """, hass, @@ -202,7 +202,7 @@ def test_regex_with_non_string_input(hass: HomeAssistant) -> None: # Test with list (string conversion) tpl = template.Template( - """ + r""" {{ [1, 2, 3] | regex_search('\\d') }} """, hass, From 892f3f267be95927f56682eca8ee36fdd1921798 Mon Sep 17 00:00:00 2001 From: onsmam Date: Tue, 16 Sep 2025 13:31:43 +0200 Subject: [PATCH 06/16] Added rain_start and lightningstrike event to publish on the event bus (#146652) Co-authored-by: Joost Lekkerkerker --- .../components/weatherflow/__init__.py | 1 + homeassistant/components/weatherflow/event.py | 104 ++++++++++++++++++ .../components/weatherflow/icons.json | 8 ++ .../components/weatherflow/strings.json | 8 ++ 4 files changed, 121 insertions(+) create mode 100644 homeassistant/components/weatherflow/event.py diff --git a/homeassistant/components/weatherflow/__init__.py b/homeassistant/components/weatherflow/__init__.py index 819ad90b3542f6..3e30d15aebee8d 100644 --- a/homeassistant/components/weatherflow/__init__.py +++ b/homeassistant/components/weatherflow/__init__.py @@ -17,6 +17,7 @@ from .const import DOMAIN, LOGGER, format_dispatch_call PLATFORMS = [ + Platform.EVENT, Platform.SENSOR, ] diff --git a/homeassistant/components/weatherflow/event.py b/homeassistant/components/weatherflow/event.py new file mode 100644 index 00000000000000..05f7ecc28651aa --- /dev/null +++ b/homeassistant/components/weatherflow/event.py @@ -0,0 +1,104 @@ +"""Event entities for the WeatherFlow integration.""" + +from __future__ import annotations + +from dataclasses import dataclass + +from pyweatherflowudp.device import EVENT_RAIN_START, EVENT_STRIKE, WeatherFlowDevice + +from homeassistant.components.event import EventEntity, EventEntityDescription +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .const import DOMAIN, LOGGER, format_dispatch_call + + +@dataclass(frozen=True, kw_only=True) +class WeatherFlowEventEntityDescription(EventEntityDescription): + """Describes a WeatherFlow event entity.""" + + wf_event: str + event_types: list[str] + + +EVENT_DESCRIPTIONS: list[WeatherFlowEventEntityDescription] = [ + 
WeatherFlowEventEntityDescription( + key="precip_start_event", + translation_key="precip_start_event", + event_types=["precipitation_start"], + wf_event=EVENT_RAIN_START, + ), + WeatherFlowEventEntityDescription( + key="lightning_strike_event", + translation_key="lightning_strike_event", + event_types=["lightning_strike"], + wf_event=EVENT_STRIKE, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up WeatherFlow event entities using config entry.""" + + @callback + def async_add_events(device: WeatherFlowDevice) -> None: + LOGGER.debug("Adding events for %s", device) + async_add_entities( + WeatherFlowEventEntity(device, description) + for description in EVENT_DESCRIPTIONS + ) + + config_entry.async_on_unload( + async_dispatcher_connect( + hass, + format_dispatch_call(config_entry), + async_add_events, + ) + ) + + +class WeatherFlowEventEntity(EventEntity): + """Generic WeatherFlow event entity.""" + + _attr_has_entity_name = True + entity_description: WeatherFlowEventEntityDescription + + def __init__( + self, + device: WeatherFlowDevice, + description: WeatherFlowEventEntityDescription, + ) -> None: + """Initialize the WeatherFlow event entity.""" + + self.device = device + self.entity_description = description + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, device.serial_number)}, + manufacturer="WeatherFlow", + model=device.model, + name=device.serial_number, + sw_version=device.firmware_revision, + ) + self._attr_unique_id = f"{device.serial_number}_{description.key}" + + async def async_added_to_hass(self) -> None: + """Subscribe to the configured WeatherFlow device event.""" + self.async_on_remove( + self.device.on(self.entity_description.wf_event, self._handle_event) + ) + + @callback + def _handle_event(self, event) -> None: + self._trigger_event( + self.entity_description.event_types[0], + {}, + ) + self.async_write_ha_state() diff --git a/homeassistant/components/weatherflow/icons.json b/homeassistant/components/weatherflow/icons.json index e0d2459b072aac..8e45060681ebcf 100644 --- a/homeassistant/components/weatherflow/icons.json +++ b/homeassistant/components/weatherflow/icons.json @@ -38,6 +38,14 @@ "337.5": "mdi:arrow-up" } } + }, + "event": { + "lightning_strike_event": { + "default": "mdi:weather-lightning" + }, + "precip_start_event": { + "default": "mdi:weather-rainy" + } } } } diff --git a/homeassistant/components/weatherflow/strings.json b/homeassistant/components/weatherflow/strings.json index cf23f02d78166c..a4e3aac8ddd8a3 100644 --- a/homeassistant/components/weatherflow/strings.json +++ b/homeassistant/components/weatherflow/strings.json @@ -79,6 +79,14 @@ "wind_lull": { "name": "Wind lull" } + }, + "event": { + "lightning_strike_event": { + "name": "Lightning strike" + }, + "precip_start_event": { + "name": "Precipitation start" + } } } } From de7e2303a79ce14468f7164d63f5955e4404228c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20L=C3=B3pez=20Diez?= Date: Tue, 16 Sep 2025 13:32:10 +0200 Subject: [PATCH 07/16] Add support for multi-tap action in Lutron Caseta integration (#150551) --- homeassistant/components/lutron_caseta/__init__.py | 5 ++++- homeassistant/components/lutron_caseta/const.py | 1 + .../components/lutron_caseta/device_trigger.py | 3 ++- .../components/lutron_caseta/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../components/lutron_caseta/test_device_trigger.py | 13 ++++++++++++- 
7 files changed, 22 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/lutron_caseta/__init__.py b/homeassistant/components/lutron_caseta/__init__.py index b489fe9dba782b..bde3e7d4ec472c 100644 --- a/homeassistant/components/lutron_caseta/__init__.py +++ b/homeassistant/components/lutron_caseta/__init__.py @@ -8,7 +8,7 @@ import ssl from typing import Any, cast -from pylutron_caseta import BUTTON_STATUS_PRESSED +from pylutron_caseta import BUTTON_STATUS_MULTITAP, BUTTON_STATUS_PRESSED from pylutron_caseta.smartbridge import Smartbridge import voluptuous as vol @@ -25,6 +25,7 @@ from homeassistant.helpers.typing import ConfigType from .const import ( + ACTION_MULTITAP, ACTION_PRESS, ACTION_RELEASE, ATTR_ACTION, @@ -448,6 +449,8 @@ def _async_button_event(button_id, event_type): if event_type == BUTTON_STATUS_PRESSED: action = ACTION_PRESS + elif event_type == BUTTON_STATUS_MULTITAP: + action = ACTION_MULTITAP else: action = ACTION_RELEASE diff --git a/homeassistant/components/lutron_caseta/const.py b/homeassistant/components/lutron_caseta/const.py index 26a83de6f4b720..07f60ae0b96039 100644 --- a/homeassistant/components/lutron_caseta/const.py +++ b/homeassistant/components/lutron_caseta/const.py @@ -29,6 +29,7 @@ ATTR_AREA_NAME = "area_name" ATTR_ACTION = "action" +ACTION_MULTITAP = "multi_tap" ACTION_PRESS = "press" ACTION_RELEASE = "release" diff --git a/homeassistant/components/lutron_caseta/device_trigger.py b/homeassistant/components/lutron_caseta/device_trigger.py index 31c9a0e171d620..b3bfaaa7c62407 100644 --- a/homeassistant/components/lutron_caseta/device_trigger.py +++ b/homeassistant/components/lutron_caseta/device_trigger.py @@ -21,6 +21,7 @@ from homeassistant.helpers.typing import ConfigType from .const import ( + ACTION_MULTITAP, ACTION_PRESS, ACTION_RELEASE, ATTR_ACTION, @@ -39,7 +40,7 @@ def _reverse_dict(forward_dict: dict) -> dict: return {v: k for k, v in forward_dict.items()} -SUPPORTED_INPUTS_EVENTS_TYPES = [ACTION_PRESS, ACTION_RELEASE] +SUPPORTED_INPUTS_EVENTS_TYPES = [ACTION_PRESS, ACTION_MULTITAP, ACTION_RELEASE] LUTRON_BUTTON_TRIGGER_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend( { diff --git a/homeassistant/components/lutron_caseta/manifest.json b/homeassistant/components/lutron_caseta/manifest.json index 96b00a1f392a7c..0f0c199e4482f5 100644 --- a/homeassistant/components/lutron_caseta/manifest.json +++ b/homeassistant/components/lutron_caseta/manifest.json @@ -9,7 +9,7 @@ }, "iot_class": "local_push", "loggers": ["pylutron_caseta"], - "requirements": ["pylutron-caseta==0.24.0"], + "requirements": ["pylutron-caseta==0.25.0"], "zeroconf": [ { "type": "_lutron._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 6a3553ff6c2dd9..8f5f652906d50a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2135,7 +2135,7 @@ pylitejet==0.6.3 pylitterbot==2024.2.4 # homeassistant.components.lutron_caseta -pylutron-caseta==0.24.0 +pylutron-caseta==0.25.0 # homeassistant.components.lutron pylutron==0.2.18 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 70deab03a5fb8e..759cf0f794b1a6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1783,7 +1783,7 @@ pylitejet==0.6.3 pylitterbot==2024.2.4 # homeassistant.components.lutron_caseta -pylutron-caseta==0.24.0 +pylutron-caseta==0.25.0 # homeassistant.components.lutron pylutron==0.2.18 diff --git a/tests/components/lutron_caseta/test_device_trigger.py b/tests/components/lutron_caseta/test_device_trigger.py index 
001bf86ad54b80..061cfca096ae48 100644 --- a/tests/components/lutron_caseta/test_device_trigger.py +++ b/tests/components/lutron_caseta/test_device_trigger.py @@ -148,6 +148,17 @@ async def test_get_triggers(hass: HomeAssistant) -> None: } for subtype in ("on", "stop", "off", "raise", "lower") ] + expected_triggers += [ + { + CONF_DEVICE_ID: device_id, + CONF_DOMAIN: DOMAIN, + CONF_PLATFORM: "device", + CONF_SUBTYPE: subtype, + CONF_TYPE: "multi_tap", + "metadata": {}, + } + for subtype in ("on", "stop", "off", "raise", "lower") + ] triggers = await async_get_device_automations( hass, DeviceAutomationType.TRIGGER, device_id @@ -439,7 +450,7 @@ async def test_validate_trigger_invalid_triggers( }, ) - assert "value must be one of ['press', 'release']" in caplog.text + assert "value must be one of ['multi_tap', 'press', 'release']" in caplog.text async def test_if_fires_on_button_event_late_setup( From 3649e949b11671dd5683cf68997768526cb33843 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafael=20L=C3=B3pez=20Diez?= Date: Tue, 16 Sep 2025 14:06:15 +0200 Subject: [PATCH 08/16] Add support for sending chat actions in Telegram bot integration (#151378) --- .../components/telegram_bot/__init__.py | 39 +++++++++++++++++++ homeassistant/components/telegram_bot/bot.py | 22 +++++++++++ .../components/telegram_bot/const.py | 14 +++++++ .../components/telegram_bot/icons.json | 3 ++ .../components/telegram_bot/services.yaml | 32 +++++++++++++++ .../components/telegram_bot/strings.json | 37 ++++++++++++++++++ .../telegram_bot/test_telegram_bot.py | 34 ++++++++++++++++ 7 files changed, 181 insertions(+) diff --git a/homeassistant/components/telegram_bot/__init__.py b/homeassistant/components/telegram_bot/__init__.py index 50c721e5f37932..91bbc088744c74 100644 --- a/homeassistant/components/telegram_bot/__init__.py +++ b/homeassistant/components/telegram_bot/__init__.py @@ -43,6 +43,7 @@ ATTR_AUTHENTICATION, ATTR_CALLBACK_QUERY_ID, ATTR_CAPTION, + ATTR_CHAT_ACTION, ATTR_CHAT_ID, ATTR_DISABLE_NOTIF, ATTR_DISABLE_WEB_PREV, @@ -71,6 +72,17 @@ ATTR_URL, ATTR_USERNAME, ATTR_VERIFY_SSL, + CHAT_ACTION_CHOOSE_STICKER, + CHAT_ACTION_FIND_LOCATION, + CHAT_ACTION_RECORD_VIDEO, + CHAT_ACTION_RECORD_VIDEO_NOTE, + CHAT_ACTION_RECORD_VOICE, + CHAT_ACTION_TYPING, + CHAT_ACTION_UPLOAD_DOCUMENT, + CHAT_ACTION_UPLOAD_PHOTO, + CHAT_ACTION_UPLOAD_VIDEO, + CHAT_ACTION_UPLOAD_VIDEO_NOTE, + CHAT_ACTION_UPLOAD_VOICE, CONF_ALLOWED_CHAT_IDS, CONF_BOT_COUNT, CONF_CONFIG_ENTRY_ID, @@ -89,6 +101,7 @@ SERVICE_EDIT_REPLYMARKUP, SERVICE_LEAVE_CHAT, SERVICE_SEND_ANIMATION, + SERVICE_SEND_CHAT_ACTION, SERVICE_SEND_DOCUMENT, SERVICE_SEND_LOCATION, SERVICE_SEND_MESSAGE, @@ -153,6 +166,26 @@ {vol.Required(ATTR_MESSAGE): cv.string, vol.Optional(ATTR_TITLE): cv.string} ) +SERVICE_SCHEMA_SEND_CHAT_ACTION = BASE_SERVICE_SCHEMA.extend( + { + vol.Required(ATTR_CHAT_ACTION): vol.In( + ( + CHAT_ACTION_TYPING, + CHAT_ACTION_UPLOAD_PHOTO, + CHAT_ACTION_RECORD_VIDEO, + CHAT_ACTION_UPLOAD_VIDEO, + CHAT_ACTION_RECORD_VOICE, + CHAT_ACTION_UPLOAD_VOICE, + CHAT_ACTION_UPLOAD_DOCUMENT, + CHAT_ACTION_CHOOSE_STICKER, + CHAT_ACTION_FIND_LOCATION, + CHAT_ACTION_RECORD_VIDEO_NOTE, + CHAT_ACTION_UPLOAD_VIDEO_NOTE, + ) + ), + } +) + SERVICE_SCHEMA_SEND_FILE = BASE_SERVICE_SCHEMA.extend( { vol.Optional(ATTR_URL): cv.string, @@ -268,6 +301,7 @@ SERVICE_MAP = { SERVICE_SEND_MESSAGE: SERVICE_SCHEMA_SEND_MESSAGE, + SERVICE_SEND_CHAT_ACTION: SERVICE_SCHEMA_SEND_CHAT_ACTION, SERVICE_SEND_PHOTO: SERVICE_SCHEMA_SEND_FILE, SERVICE_SEND_STICKER: SERVICE_SCHEMA_SEND_STICKER, 
SERVICE_SEND_ANIMATION: SERVICE_SCHEMA_SEND_FILE, @@ -367,6 +401,10 @@ async def async_send_telegram_message(service: ServiceCall) -> ServiceResponse: messages = await notify_service.send_message( context=service.context, **kwargs ) + elif msgtype == SERVICE_SEND_CHAT_ACTION: + messages = await notify_service.send_chat_action( + context=service.context, **kwargs + ) elif msgtype in [ SERVICE_SEND_PHOTO, SERVICE_SEND_ANIMATION, @@ -433,6 +471,7 @@ async def async_send_telegram_message(service: ServiceCall) -> ServiceResponse: if service_notif in [ SERVICE_SEND_MESSAGE, + SERVICE_SEND_CHAT_ACTION, SERVICE_SEND_PHOTO, SERVICE_SEND_ANIMATION, SERVICE_SEND_VIDEO, diff --git a/homeassistant/components/telegram_bot/bot.py b/homeassistant/components/telegram_bot/bot.py index 3145badbed74f5..42bd493489b73e 100644 --- a/homeassistant/components/telegram_bot/bot.py +++ b/homeassistant/components/telegram_bot/bot.py @@ -617,6 +617,28 @@ async def answer_callback_query( context=context, ) + async def send_chat_action( + self, + chat_action: str = "", + target: Any = None, + context: Context | None = None, + **kwargs: Any, + ) -> dict[int, int]: + """Send a chat action to pre-allowed chat IDs.""" + result = {} + for chat_id in self.get_target_chat_ids(target): + _LOGGER.debug("Send action %s in chat ID %s", chat_action, chat_id) + is_successful = await self._send_msg( + self.bot.send_chat_action, + "Error sending action", + None, + chat_id=chat_id, + action=chat_action, + context=context, + ) + result[chat_id] = is_successful + return result + async def send_file( self, file_type: str, diff --git a/homeassistant/components/telegram_bot/const.py b/homeassistant/components/telegram_bot/const.py index 0f1d5193e2cd64..34b8a476c788cf 100644 --- a/homeassistant/components/telegram_bot/const.py +++ b/homeassistant/components/telegram_bot/const.py @@ -32,6 +32,7 @@ DEFAULT_TRUSTED_NETWORKS = [ip_network("149.154.160.0/20"), ip_network("91.108.4.0/22")] +SERVICE_SEND_CHAT_ACTION = "send_chat_action" SERVICE_SEND_MESSAGE = "send_message" SERVICE_SEND_PHOTO = "send_photo" SERVICE_SEND_STICKER = "send_sticker" @@ -59,10 +60,23 @@ PARSER_MD2 = "markdownv2" PARSER_PLAIN_TEXT = "plain_text" +ATTR_CHAT_ACTION = "chat_action" ATTR_DATA = "data" ATTR_MESSAGE = "message" ATTR_TITLE = "title" +CHAT_ACTION_TYPING = "typing" +CHAT_ACTION_UPLOAD_PHOTO = "upload_photo" +CHAT_ACTION_RECORD_VIDEO = "record_video" +CHAT_ACTION_UPLOAD_VIDEO = "upload_video" +CHAT_ACTION_RECORD_VOICE = "record_voice" +CHAT_ACTION_UPLOAD_VOICE = "upload_voice" +CHAT_ACTION_UPLOAD_DOCUMENT = "upload_document" +CHAT_ACTION_CHOOSE_STICKER = "choose_sticker" +CHAT_ACTION_FIND_LOCATION = "find_location" +CHAT_ACTION_RECORD_VIDEO_NOTE = "record_video_note" +CHAT_ACTION_UPLOAD_VIDEO_NOTE = "upload_video_note" + ATTR_ARGS = "args" ATTR_AUTHENTICATION = "authentication" ATTR_CALLBACK_QUERY = "callback_query" diff --git a/homeassistant/components/telegram_bot/icons.json b/homeassistant/components/telegram_bot/icons.json index 3a53e2b4118a02..3208fdfbc3e750 100644 --- a/homeassistant/components/telegram_bot/icons.json +++ b/homeassistant/components/telegram_bot/icons.json @@ -3,6 +3,9 @@ "send_message": { "service": "mdi:send" }, + "send_chat_action": { + "service": "mdi:send" + }, "send_photo": { "service": "mdi:camera" }, diff --git a/homeassistant/components/telegram_bot/services.yaml b/homeassistant/components/telegram_bot/services.yaml index 0ebe7988642f79..e0e03921a932ec 100644 --- a/homeassistant/components/telegram_bot/services.yaml +++ 
b/homeassistant/components/telegram_bot/services.yaml @@ -66,6 +66,38 @@ send_message: number: mode: box +send_chat_action: + fields: + config_entry_id: + selector: + config_entry: + integration: telegram_bot + chat_action: + selector: + select: + options: + - "typing" + - "upload_photo" + - "record_video" + - "upload_video" + - "record_voice" + - "upload_voice" + - "upload_document" + - "choose_sticker" + - "find_location" + - "record_video_note" + - "upload_video_note" + translation_key: "chat_action" + target: + example: "[12345, 67890] or 12345" + selector: + text: + multiple: true + message_thread_id: + selector: + number: + mode: box + send_photo: fields: config_entry_id: diff --git a/homeassistant/components/telegram_bot/strings.json b/homeassistant/components/telegram_bot/strings.json index 29bf51ecd0cf5a..759b22a33683ba 100644 --- a/homeassistant/components/telegram_bot/strings.json +++ b/homeassistant/components/telegram_bot/strings.json @@ -138,6 +138,21 @@ "digest": "Digest", "bearer_token": "Bearer token" } + }, + "chat_action": { + "options": { + "typing": "Typing", + "upload_photo": "Uploading photo", + "record_video": "Recording video", + "upload_video": "Uploading video", + "record_voice": "Recording voice", + "upload_voice": "Uploading voice", + "upload_document": "Uploading document", + "choose_sticker": "Choosing sticker", + "find_location": "Finding location", + "record_video_note": "Recording video note", + "upload_video_note": "Uploading video note" + } } }, "services": { @@ -199,6 +214,28 @@ } } }, + "send_chat_action": { + "name": "Send chat action", + "description": "Sends a chat action.", + "fields": { + "config_entry_id": { + "name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]", + "description": "The config entry representing the Telegram bot to send the chat action." + }, + "chat_action": { + "name": "Chat action", + "description": "Chat action to be sent." + }, + "target": { + "name": "Target", + "description": "An array of pre-authorized chat IDs to send the chat action to. If not present, first allowed chat ID is the default." 
+ }, + "message_thread_id": { + "name": "[%key:component::telegram_bot::services::send_message::fields::message_thread_id::name%]", + "description": "[%key:component::telegram_bot::services::send_message::fields::message_thread_id::description%]" + } + } + }, "send_photo": { "name": "Send photo", "description": "Sends a photo.", diff --git a/tests/components/telegram_bot/test_telegram_bot.py b/tests/components/telegram_bot/test_telegram_bot.py index eec2bd5ecf7e1b..cda2583e74bb84 100644 --- a/tests/components/telegram_bot/test_telegram_bot.py +++ b/tests/components/telegram_bot/test_telegram_bot.py @@ -26,6 +26,7 @@ ATTR_AUTHENTICATION, ATTR_CALLBACK_QUERY_ID, ATTR_CAPTION, + ATTR_CHAT_ACTION, ATTR_CHAT_ID, ATTR_DISABLE_NOTIF, ATTR_DISABLE_WEB_PREV, @@ -48,6 +49,7 @@ ATTR_URL, ATTR_USERNAME, ATTR_VERIFY_SSL, + CHAT_ACTION_TYPING, CONF_CONFIG_ENTRY_ID, DOMAIN, PARSER_PLAIN_TEXT, @@ -60,6 +62,7 @@ SERVICE_EDIT_REPLYMARKUP, SERVICE_LEAVE_CHAT, SERVICE_SEND_ANIMATION, + SERVICE_SEND_CHAT_ACTION, SERVICE_SEND_DOCUMENT, SERVICE_SEND_LOCATION, SERVICE_SEND_MESSAGE, @@ -300,6 +303,37 @@ def _read_file_as_bytesio_mock(file_path): return _file +async def test_send_chat_action( + hass: HomeAssistant, + webhook_platform, + mock_broadcast_config_entry: MockConfigEntry, +) -> None: + """Test the send_chat_action service.""" + mock_broadcast_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_broadcast_config_entry.entry_id) + await hass.async_block_till_done() + + with patch( + "homeassistant.components.telegram_bot.bot.Bot.send_chat_action", + AsyncMock(return_value=True), + ) as mock: + await hass.services.async_call( + DOMAIN, + SERVICE_SEND_CHAT_ACTION, + { + CONF_CONFIG_ENTRY_ID: mock_broadcast_config_entry.entry_id, + ATTR_TARGET: [123456], + ATTR_CHAT_ACTION: CHAT_ACTION_TYPING, + }, + blocking=True, + return_response=True, + ) + + await hass.async_block_till_done() + mock.assert_called_once() + mock.assert_called_with(chat_id=123456, action=CHAT_ACTION_TYPING) + + @pytest.mark.parametrize( "service", [ From b2c53f2d78369049ac972e05e2905ed2f7005a00 Mon Sep 17 00:00:00 2001 From: marc7s <34547876+marc7s@users.noreply.github.com> Date: Tue, 16 Sep 2025 14:13:54 +0200 Subject: [PATCH 09/16] Add geocaching cache sensors (#145453) --- homeassistant/components/geocaching/entity.py | 39 +++++++ .../components/geocaching/icons.json | 18 +++ homeassistant/components/geocaching/sensor.py | 109 +++++++++++++++--- .../components/geocaching/strings.json | 35 +++++- 4 files changed, 182 insertions(+), 19 deletions(-) create mode 100644 homeassistant/components/geocaching/entity.py diff --git a/homeassistant/components/geocaching/entity.py b/homeassistant/components/geocaching/entity.py new file mode 100644 index 00000000000000..6912b65ec0469e --- /dev/null +++ b/homeassistant/components/geocaching/entity.py @@ -0,0 +1,39 @@ +"""Sensor entities for Geocaching.""" + +from typing import cast + +from geocachingapi.models import GeocachingCache + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import GeocachingDataUpdateCoordinator + + +# Base class for all platforms +class GeocachingBaseEntity(CoordinatorEntity[GeocachingDataUpdateCoordinator]): + """Base class for Geocaching sensors.""" + + _attr_has_entity_name = True + + +# Base class for cache entities +class GeocachingCacheEntity(GeocachingBaseEntity): + """Base class for Geocaching 
cache entities.""" + + def __init__( + self, coordinator: GeocachingDataUpdateCoordinator, cache: GeocachingCache + ) -> None: + """Initialize the Geocaching cache entity.""" + super().__init__(coordinator) + self.cache = cache + + # A device can have multiple entities, and for a cache which requires multiple entities we want to group them together. + # Therefore, we create a device for each cache, which holds all related entities. + self._attr_device_info = DeviceInfo( + name=f"Geocache {cache.name}", + identifiers={(DOMAIN, cast(str, cache.reference_code))}, + entry_type=DeviceEntryType.SERVICE, + manufacturer=cache.owner.username, + ) diff --git a/homeassistant/components/geocaching/icons.json b/homeassistant/components/geocaching/icons.json index 7dce199672b12b..1431efee62b86d 100644 --- a/homeassistant/components/geocaching/icons.json +++ b/homeassistant/components/geocaching/icons.json @@ -15,6 +15,24 @@ }, "awarded_favorite_points": { "default": "mdi:heart" + }, + "cache_name": { + "default": "mdi:label" + }, + "cache_owner": { + "default": "mdi:account" + }, + "cache_found_date": { + "default": "mdi:calendar-search" + }, + "cache_found": { + "default": "mdi:package-variant-closed-check" + }, + "cache_favorite_points": { + "default": "mdi:star-check" + }, + "cache_hidden_date": { + "default": "mdi:calendar-badge" } } } diff --git a/homeassistant/components/geocaching/sensor.py b/homeassistant/components/geocaching/sensor.py index 5ceef21dfbfdc2..daf64546f47d27 100644 --- a/homeassistant/components/geocaching/sensor.py +++ b/homeassistant/components/geocaching/sensor.py @@ -4,18 +4,25 @@ from collections.abc import Callable from dataclasses import dataclass +import datetime from typing import cast -from geocachingapi.models import GeocachingStatus +from geocachingapi.models import GeocachingCache, GeocachingStatus -from homeassistant.components.sensor import SensorEntity, SensorEntityDescription +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.helpers.typing import StateType from .const import DOMAIN from .coordinator import GeocachingConfigEntry, GeocachingDataUpdateCoordinator +from .entity import GeocachingBaseEntity, GeocachingCacheEntity @dataclass(frozen=True, kw_only=True) @@ -25,43 +32,63 @@ class GeocachingSensorEntityDescription(SensorEntityDescription): value_fn: Callable[[GeocachingStatus], str | int | None] -SENSORS: tuple[GeocachingSensorEntityDescription, ...] = ( +PROFILE_SENSORS: tuple[GeocachingSensorEntityDescription, ...] 
= ( GeocachingSensorEntityDescription( key="find_count", translation_key="find_count", - native_unit_of_measurement="caches", value_fn=lambda status: status.user.find_count, ), GeocachingSensorEntityDescription( key="hide_count", translation_key="hide_count", - native_unit_of_measurement="caches", entity_registry_visible_default=False, value_fn=lambda status: status.user.hide_count, ), GeocachingSensorEntityDescription( key="favorite_points", translation_key="favorite_points", - native_unit_of_measurement="points", entity_registry_visible_default=False, value_fn=lambda status: status.user.favorite_points, ), GeocachingSensorEntityDescription( key="souvenir_count", translation_key="souvenir_count", - native_unit_of_measurement="souvenirs", value_fn=lambda status: status.user.souvenir_count, ), GeocachingSensorEntityDescription( key="awarded_favorite_points", translation_key="awarded_favorite_points", - native_unit_of_measurement="points", entity_registry_visible_default=False, value_fn=lambda status: status.user.awarded_favorite_points, ), ) +@dataclass(frozen=True, kw_only=True) +class GeocachingCacheSensorDescription(SensorEntityDescription): + """Define Sensor entity description class.""" + + value_fn: Callable[[GeocachingCache], StateType | datetime.date] + + +CACHE_SENSORS: tuple[GeocachingCacheSensorDescription, ...] = ( + GeocachingCacheSensorDescription( + key="found_date", + device_class=SensorDeviceClass.DATE, + value_fn=lambda cache: cache.found_date_time, + ), + GeocachingCacheSensorDescription( + key="favorite_points", + value_fn=lambda cache: cache.favorite_points, + ), + GeocachingCacheSensorDescription( + key="hidden_date", + device_class=SensorDeviceClass.DATE, + value_fn=lambda cache: cache.hidden_date, + ), +) + + async def async_setup_entry( hass: HomeAssistant, entry: GeocachingConfigEntry, @@ -69,14 +96,68 @@ async def async_setup_entry( ) -> None: """Set up a Geocaching sensor entry.""" coordinator = entry.runtime_data - async_add_entities( - GeocachingSensor(coordinator, description) for description in SENSORS + + entities: list[Entity] = [] + + entities.extend( + GeocachingProfileSensor(coordinator, description) + for description in PROFILE_SENSORS + ) + + status = coordinator.data + + # Add entities for tracked caches + entities.extend( + GeoEntityCacheSensorEntity(coordinator, cache, description) + for cache in status.tracked_caches + for description in CACHE_SENSORS ) + async_add_entities(entities) + + +# Base class for a cache entity. +# Sets the device, ID and translation settings to correctly group the entity to the correct cache device and give it the correct name. +class GeoEntityBaseCache(GeocachingCacheEntity, SensorEntity): + """Base class for cache entities.""" + + def __init__( + self, + coordinator: GeocachingDataUpdateCoordinator, + cache: GeocachingCache, + key: str, + ) -> None: + """Initialize the Geocaching sensor.""" + super().__init__(coordinator, cache) + + self._attr_unique_id = f"{cache.reference_code}_{key}" + + # The translation key determines the name of the entity as this is the lookup for the `strings.json` file. 
+ self._attr_translation_key = f"cache_{key}" + + +class GeoEntityCacheSensorEntity(GeoEntityBaseCache, SensorEntity): + """Representation of a cache sensor.""" + + entity_description: GeocachingCacheSensorDescription + + def __init__( + self, + coordinator: GeocachingDataUpdateCoordinator, + cache: GeocachingCache, + description: GeocachingCacheSensorDescription, + ) -> None: + """Initialize the Geocaching sensor.""" + super().__init__(coordinator, cache, description.key) + self.entity_description = description + + @property + def native_value(self) -> StateType | datetime.date: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.cache) + -class GeocachingSensor( - CoordinatorEntity[GeocachingDataUpdateCoordinator], SensorEntity -): +class GeocachingProfileSensor(GeocachingBaseEntity, SensorEntity): """Representation of a Sensor.""" entity_description: GeocachingSensorEntityDescription diff --git a/homeassistant/components/geocaching/strings.json b/homeassistant/components/geocaching/strings.json index ca6e9d5e67f59b..990ebf9f0f81bf 100644 --- a/homeassistant/components/geocaching/strings.json +++ b/homeassistant/components/geocaching/strings.json @@ -33,11 +33,36 @@ }, "entity": { "sensor": { - "find_count": { "name": "Total finds" }, - "hide_count": { "name": "Total hides" }, - "favorite_points": { "name": "Favorite points" }, - "souvenir_count": { "name": "Total souvenirs" }, - "awarded_favorite_points": { "name": "Awarded favorite points" } + "find_count": { + "name": "Total finds", + "unit_of_measurement": "caches" + }, + "hide_count": { + "name": "Total hides", + "unit_of_measurement": "caches" + }, + "favorite_points": { + "name": "Favorite points", + "unit_of_measurement": "points" + }, + "souvenir_count": { + "name": "Total souvenirs", + "unit_of_measurement": "souvenirs" + }, + "awarded_favorite_points": { + "name": "Awarded favorite points", + "unit_of_measurement": "points" + }, + "cache_found_date": { + "name": "Found date" + }, + "cache_favorite_points": { + "name": "Favorite points", + "unit_of_measurement": "points" + }, + "cache_hidden_date": { + "name": "Hidden date" + } } } } From df0cfd69a93eef579a3607a9f63f437b52ba1280 Mon Sep 17 00:00:00 2001 From: Samuel Xiao <40679757+XiaoLing-git@users.noreply.github.com> Date: Tue, 16 Sep 2025 20:14:09 +0800 Subject: [PATCH 10/16] Add Climate Panel support to Switchbot Cloud (#152427) --- homeassistant/components/switchbot_cloud/__init__.py | 6 ++++++ homeassistant/components/switchbot_cloud/binary_sensor.py | 4 ++++ homeassistant/components/switchbot_cloud/sensor.py | 7 +++++-- 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/switchbot_cloud/__init__.py b/homeassistant/components/switchbot_cloud/__init__.py index 7eaac3af8f9d5f..1b6ed062563a20 100644 --- a/homeassistant/components/switchbot_cloud/__init__.py +++ b/homeassistant/components/switchbot_cloud/__init__.py @@ -270,6 +270,12 @@ async def make_device_data( ) devices_data.humidifiers.append((device, coordinator)) devices_data.sensors.append((device, coordinator)) + if isinstance(device, Device) and device.device_type == "Climate Panel": + coordinator = await coordinator_for_device( + hass, entry, api, device, coordinators_by_id + ) + devices_data.binary_sensors.append((device, coordinator)) + devices_data.sensors.append((device, coordinator)) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/switchbot_cloud/binary_sensor.py 
b/homeassistant/components/switchbot_cloud/binary_sensor.py index 936300621f22d8..a9148076ae730d 100644 --- a/homeassistant/components/switchbot_cloud/binary_sensor.py +++ b/homeassistant/components/switchbot_cloud/binary_sensor.py @@ -104,6 +104,10 @@ class SwitchBotCloudBinarySensorEntityDescription(BinarySensorEntityDescription) ), "Hub 3": (MOVE_DETECTED_DESCRIPTION,), "Water Detector": (LEAK_DESCRIPTION,), + "Climate Panel": ( + IS_LIGHT_DESCRIPTION, + MOVE_DETECTED_DESCRIPTION, + ), } diff --git a/homeassistant/components/switchbot_cloud/sensor.py b/homeassistant/components/switchbot_cloud/sensor.py index 5b5274909b35d3..7e1324717051d6 100644 --- a/homeassistant/components/switchbot_cloud/sensor.py +++ b/homeassistant/components/switchbot_cloud/sensor.py @@ -119,7 +119,6 @@ class SwitchbotCloudSensorEntityDescription(SensorEntityDescription): state_class=SensorStateClass.MEASUREMENT, ) - SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES = { "Bot": (BATTERY_DESCRIPTION,), "Battery Circulator Fan": (BATTERY_DESCRIPTION,), @@ -189,6 +188,11 @@ class SwitchbotCloudSensorEntityDescription(SensorEntityDescription): "Contact Sensor": (BATTERY_DESCRIPTION,), "Water Detector": (BATTERY_DESCRIPTION,), "Humidifier": (TEMPERATURE_DESCRIPTION,), + "Climate Panel": ( + TEMPERATURE_DESCRIPTION, + HUMIDITY_DESCRIPTION, + BATTERY_DESCRIPTION, + ), } @@ -226,7 +230,6 @@ def _set_attributes(self) -> None: """Set attributes from coordinator data.""" if not self.coordinator.data: return - if isinstance( self.entity_description, SwitchbotCloudSensorEntityDescription, From 031b12752fe38152038fcbb62d21d36f6b393081 Mon Sep 17 00:00:00 2001 From: yufeng Date: Tue, 16 Sep 2025 21:34:21 +0800 Subject: [PATCH 11/16] Add sensors for Tuya energy storage systems (xnyjcn) (#149237) Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/tuya/const.py | 11 + homeassistant/components/tuya/sensor.py | 73 +++ homeassistant/components/tuya/strings.json | 30 + .../tuya/snapshots/test_sensor.ambr | 613 ++++++++++++++++++ 4 files changed, 727 insertions(+) diff --git a/homeassistant/components/tuya/const.py b/homeassistant/components/tuya/const.py index 862e10c6fa14c9..81ef495dabc88b 100644 --- a/homeassistant/components/tuya/const.py +++ b/homeassistant/components/tuya/const.py @@ -124,6 +124,7 @@ class DPCode(StrEnum): BASIC_WDR = "basic_wdr" BATTERY = "battery" # Used by non-standard contact sensor implementations BATTERY_PERCENTAGE = "battery_percentage" # Battery percentage + BATTERY_POWER = "battery_power" BATTERY_STATE = "battery_state" # Battery state BATTERY_VALUE = "battery_value" # Battery value BRIGHT_CONTROLLER = "bright_controller" @@ -184,11 +185,17 @@ class DPCode(StrEnum): COUNTDOWN_LEFT = "countdown_left" COUNTDOWN_SET = "countdown_set" # Countdown setting CRY_DETECTION_SWITCH = "cry_detection_switch" + CUML_E_EXPORT_OFFGRID1 = "cuml_e_export_offgrid1" + CUMULATIVE_ENERGY_CHARGED = "cumulative_energy_charged" + CUMULATIVE_ENERGY_DISCHARGED = "cumulative_energy_discharged" + CUMULATIVE_ENERGY_GENERATED_PV = "cumulative_energy_generated_pv" + CUMULATIVE_ENERGY_OUTPUT_INV = "cumulative_energy_output_inv" CUP_NUMBER = "cup_number" # NUmber of cups CUR_CURRENT = "cur_current" # Actual current CUR_NEUTRAL = "cur_neutral" # Total reverse energy CUR_POWER = "cur_power" # Actual power CUR_VOLTAGE = "cur_voltage" # Actual voltage + CURRENT_SOC = "current_soc" DECIBEL_SENSITIVITY = "decibel_sensitivity" DECIBEL_SWITCH = "decibel_switch" DEHUMIDITY_SET_ENUM = "dehumidify_set_enum" @@ -240,6 
+247,7 @@ class DPCode(StrEnum): HUMIDITY_SET = "humidity_set" # Humidity setting HUMIDITY_VALUE = "humidity_value" # Humidity INSTALLATION_HEIGHT = "installation_height" + INVERTER_OUTPUT_POWER = "inverter_output_power" IPC_WORK_MODE = "ipc_work_mode" LED_TYPE_1 = "led_type_1" LED_TYPE_2 = "led_type_2" @@ -305,6 +313,9 @@ class DPCode(StrEnum): PUMP = "pump" PUMP_RESET = "pump_reset" # Water pump reset PUMP_TIME = "pump_time" # Water pump duration + PV_POWER_CHANNEL_1 = "pv_power_channel_1" + PV_POWER_CHANNEL_2 = "pv_power_channel_2" + PV_POWER_TOTAL = "pv_power_total" RAIN_24H = "rain_24h" # Total daily rainfall in mm RAIN_RATE = "rain_rate" # Rain intensity in mm/h RECORD_MODE = "record_mode" diff --git a/homeassistant/components/tuya/sensor.py b/homeassistant/components/tuya/sensor.py index 021830b2073426..0c2c1e8f9247bf 100644 --- a/homeassistant/components/tuya/sensor.py +++ b/homeassistant/components/tuya/sensor.py @@ -1413,6 +1413,79 @@ class TuyaSensorEntityDescription(SensorEntityDescription): # Wireless Switch # https://developer.tuya.com/en/docs/iot/s?id=Kbeoa9fkv6brp "wxkg": BATTERY_SENSORS, # Pressure Sensor + # Micro Storage Inverter + # Energy storage and solar PV inverter system with monitoring capabilities + "xnyjcn": ( + TuyaSensorEntityDescription( + key=DPCode.CURRENT_SOC, + translation_key="battery_soc", + device_class=SensorDeviceClass.BATTERY, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + ), + TuyaSensorEntityDescription( + key=DPCode.PV_POWER_TOTAL, + translation_key="total_pv_power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.PV_POWER_CHANNEL_1, + translation_key="pv_channel_power", + translation_placeholders={"index": "1"}, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.PV_POWER_CHANNEL_2, + translation_key="pv_channel_power", + translation_placeholders={"index": "2"}, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.BATTERY_POWER, + translation_key="battery_power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.INVERTER_OUTPUT_POWER, + translation_key="inverter_output_power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.CUMULATIVE_ENERGY_GENERATED_PV, + translation_key="lifetime_pv_energy", + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + TuyaSensorEntityDescription( + key=DPCode.CUMULATIVE_ENERGY_OUTPUT_INV, + translation_key="lifetime_inverter_output_energy", + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + TuyaSensorEntityDescription( + key=DPCode.CUMULATIVE_ENERGY_DISCHARGED, + translation_key="lifetime_battery_discharge_energy", + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + TuyaSensorEntityDescription( + key=DPCode.CUMULATIVE_ENERGY_CHARGED, + translation_key="lifetime_battery_charge_energy", + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + TuyaSensorEntityDescription( + key=DPCode.CUML_E_EXPORT_OFFGRID1, + translation_key="lifetime_offgrid_port_energy", + device_class=SensorDeviceClass.ENERGY, + 
state_class=SensorStateClass.TOTAL_INCREASING, + ), + ), # https://developer.tuya.com/en/docs/iot/categoryylcg?id=Kaiuz3kc2e4gm "ylcg": ( TuyaSensorEntityDescription( diff --git a/homeassistant/components/tuya/strings.json b/homeassistant/components/tuya/strings.json index bdb10d7984b26d..816827d991d4f2 100644 --- a/homeassistant/components/tuya/strings.json +++ b/homeassistant/components/tuya/strings.json @@ -621,6 +621,36 @@ "battery_state": { "name": "Battery state" }, + "battery_soc": { + "name": "Battery SOC" + }, + "battery_power": { + "name": "Battery power" + }, + "total_pv_power": { + "name": "Total PV power" + }, + "pv_channel_power": { + "name": "PV channel {index} power" + }, + "inverter_output_power": { + "name": "Inverter output power" + }, + "lifetime_pv_energy": { + "name": "Lifetime PV energy" + }, + "lifetime_inverter_output_energy": { + "name": "Lifetime inverter output energy" + }, + "lifetime_battery_discharge_energy": { + "name": "Lifetime battery discharge energy" + }, + "lifetime_battery_charge_energy": { + "name": "Lifetime battery charge energy" + }, + "lifetime_offgrid_port_energy": { + "name": "Lifetime off-grid port energy" + }, "gas": { "name": "Gas" }, diff --git a/tests/components/tuya/snapshots/test_sensor.ambr b/tests/components/tuya/snapshots/test_sensor.ambr index 1ec5a6c32310de..0b428f8e30d74e 100644 --- a/tests/components/tuya/snapshots/test_sensor.ambr +++ b/tests/components/tuya/snapshots/test_sensor.ambr @@ -3178,6 +3178,619 @@ 'state': '1.0', }) # --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_battery_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.cbe_pro_2_battery_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery power', + 'platform': 'tuya', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'battery_power', + 'unique_id': 'tuya.gbq8kiahk57ct0bpncjynxbattery_power', + 'unit_of_measurement': 'kW', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_battery_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'CBE Pro 2 Battery power', + 'state_class': , + 'unit_of_measurement': 'kW', + }), + 'context': , + 'entity_id': 'sensor.cbe_pro_2_battery_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-2.0', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_battery_soc-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.cbe_pro_2_battery_soc', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery SOC', + 'platform': 'tuya', + 
'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'battery_soc', + 'unique_id': 'tuya.gbq8kiahk57ct0bpncjynxcurrent_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_battery_soc-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'CBE Pro 2 Battery SOC', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.cbe_pro_2_battery_soc', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '43.0', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_inverter_output_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.cbe_pro_2_inverter_output_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Inverter output power', + 'platform': 'tuya', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'inverter_output_power', + 'unique_id': 'tuya.gbq8kiahk57ct0bpncjynxinverter_output_power', + 'unit_of_measurement': 'kW', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_inverter_output_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'CBE Pro 2 Inverter output power', + 'state_class': , + 'unit_of_measurement': 'kW', + }), + 'context': , + 'entity_id': 'sensor.cbe_pro_2_inverter_output_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_lifetime_battery_charge_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.cbe_pro_2_lifetime_battery_charge_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lifetime battery charge energy', + 'platform': 'tuya', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charge_energy', + 'unique_id': 'tuya.gbq8kiahk57ct0bpncjynxcumulative_energy_charged', + 'unit_of_measurement': , + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_lifetime_battery_charge_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'CBE Pro 2 Lifetime battery charge energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.cbe_pro_2_lifetime_battery_charge_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'13.288', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_lifetime_battery_discharge_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.cbe_pro_2_lifetime_battery_discharge_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lifetime battery discharge energy', + 'platform': 'tuya', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharge_energy', + 'unique_id': 'tuya.gbq8kiahk57ct0bpncjynxcumulative_energy_discharged', + 'unit_of_measurement': , + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_lifetime_battery_discharge_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'CBE Pro 2 Lifetime battery discharge energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.cbe_pro_2_lifetime_battery_discharge_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8.183', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_lifetime_inverter_output_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.cbe_pro_2_lifetime_inverter_output_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lifetime inverter output energy', + 'platform': 'tuya', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_inverter_output_energy', + 'unique_id': 'tuya.gbq8kiahk57ct0bpncjynxcumulative_energy_output_inv', + 'unit_of_measurement': , + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_lifetime_inverter_output_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'CBE Pro 2 Lifetime inverter output energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.cbe_pro_2_lifetime_inverter_output_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '13.46', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_lifetime_off_grid_port_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.cbe_pro_2_lifetime_off_grid_port_energy', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lifetime off-grid port energy', + 'platform': 'tuya', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_offgrid_port_energy', + 'unique_id': 'tuya.gbq8kiahk57ct0bpncjynxcuml_e_export_offgrid1', + 'unit_of_measurement': 'Wh', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_lifetime_off_grid_port_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'CBE Pro 2 Lifetime off-grid port energy', + 'state_class': , + 'unit_of_measurement': 'Wh', + }), + 'context': , + 'entity_id': 'sensor.cbe_pro_2_lifetime_off_grid_port_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_lifetime_pv_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.cbe_pro_2_lifetime_pv_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lifetime PV energy', + 'platform': 'tuya', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_pv_energy', + 'unique_id': 'tuya.gbq8kiahk57ct0bpncjynxcumulative_energy_generated_pv', + 'unit_of_measurement': , + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_lifetime_pv_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'CBE Pro 2 Lifetime PV energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.cbe_pro_2_lifetime_pv_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '18.565', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_pv_channel_1_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.cbe_pro_2_pv_channel_1_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PV channel 1 power', + 'platform': 'tuya', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'pv_channel_power', + 'unique_id': 'tuya.gbq8kiahk57ct0bpncjynxpv_power_channel_1', + 'unit_of_measurement': 'kW', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_pv_channel_1_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'power', + 'friendly_name': 'CBE Pro 2 PV channel 1 power', + 'state_class': , + 'unit_of_measurement': 'kW', + }), + 'context': , + 'entity_id': 'sensor.cbe_pro_2_pv_channel_1_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_pv_channel_2_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.cbe_pro_2_pv_channel_2_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PV channel 2 power', + 'platform': 'tuya', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'pv_channel_power', + 'unique_id': 'tuya.gbq8kiahk57ct0bpncjynxpv_power_channel_2', + 'unit_of_measurement': 'kW', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_pv_channel_2_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'CBE Pro 2 PV channel 2 power', + 'state_class': , + 'unit_of_measurement': 'kW', + }), + 'context': , + 'entity_id': 'sensor.cbe_pro_2_pv_channel_2_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_total_pv_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.cbe_pro_2_total_pv_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total PV power', + 'platform': 'tuya', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'total_pv_power', + 'unique_id': 'tuya.gbq8kiahk57ct0bpncjynxpv_power_total', + 'unit_of_measurement': 'kW', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.cbe_pro_2_total_pv_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'CBE Pro 2 Total PV power', + 'state_class': , + 'unit_of_measurement': 'kW', + }), + 'context': , + 'entity_id': 'sensor.cbe_pro_2_total_pv_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- # name: test_platform_setup_and_discovery[sensor.cleverio_pf100_last_amount-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ From e70b147c0c826ed60dca1250581ff4374b385a8b Mon Sep 17 00:00:00 2001 From: Timothy <6560631+TimoPtr@users.noreply.github.com> Date: Tue, 16 Sep 2025 15:45:21 +0200 Subject: [PATCH 12/16] Add missing content type to backup http endpoint (#152433) --- homeassistant/components/backup/http.py | 5 +++-- tests/components/backup/test_http.py | 17 
++++++++++++++--- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/backup/http.py b/homeassistant/components/backup/http.py index 11d8199bdc55ed..b71859611b4287 100644 --- a/homeassistant/components/backup/http.py +++ b/homeassistant/components/backup/http.py @@ -8,7 +8,7 @@ from typing import IO, cast from aiohttp import BodyPartReader -from aiohttp.hdrs import CONTENT_DISPOSITION +from aiohttp.hdrs import CONTENT_DISPOSITION, CONTENT_TYPE from aiohttp.web import FileResponse, Request, Response, StreamResponse from multidict import istr @@ -76,7 +76,8 @@ async def get( return Response(status=HTTPStatus.NOT_FOUND) headers = { - CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar" + CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar", + CONTENT_TYPE: "application/x-tar", } try: diff --git a/tests/components/backup/test_http.py b/tests/components/backup/test_http.py index b3845b1209a9ab..0d5bdfd650444d 100644 --- a/tests/components/backup/test_http.py +++ b/tests/components/backup/test_http.py @@ -4,11 +4,13 @@ from collections.abc import AsyncIterator from io import BytesIO, StringIO import json +import re import tarfile from typing import Any from unittest.mock import patch from aiohttp import web +from aiohttp.hdrs import CONTENT_DISPOSITION, CONTENT_TYPE import pytest from homeassistant.components.backup import ( @@ -166,10 +168,19 @@ async def _test_downloading_encrypted_backup( agent_id: str, ) -> None: """Test downloading an encrypted backup file.""" + + def assert_tar_download_response(resp: web.Response) -> None: + assert resp.status == 200 + assert resp.headers.get(CONTENT_TYPE, "") == "application/x-tar" + assert re.match( + r"attachment; filename=.*\.tar", resp.headers.get(CONTENT_DISPOSITION, "") + ) + # Try downloading without supplying a password client = await hass_client() resp = await client.get(f"/api/backup/download/c0cb53bd?agent_id={agent_id}") - assert resp.status == 200 + assert_tar_download_response(resp) + backup = await resp.read() # We expect a valid outer tar file, but the inner tar file is encrypted and # can't be read @@ -187,7 +198,7 @@ async def _test_downloading_encrypted_backup( resp = await client.get( f"/api/backup/download/c0cb53bd?agent_id={agent_id}&password=wrong" ) - assert resp.status == 200 + assert_tar_download_response(resp) backup = await resp.read() # We expect a truncated outer tar file with ( @@ -200,7 +211,7 @@ async def _test_downloading_encrypted_backup( resp = await client.get( f"/api/backup/download/c0cb53bd?agent_id={agent_id}&password=hunter2" ) - assert resp.status == 200 + assert_tar_download_response(resp) backup = await resp.read() # We expect a valid outer tar file, the inner tar file is decrypted and can be read with ( From aadaf87c160ab21777edf5a019f6b67a8cba0b50 Mon Sep 17 00:00:00 2001 From: Retha Runolfsson <137745329+zerzhang@users.noreply.github.com> Date: Tue, 16 Sep 2025 21:59:13 +0800 Subject: [PATCH 13/16] Add switchbot relayswitch 2PM (#146140) --- .../components/switchbot/__init__.py | 2 + homeassistant/components/switchbot/const.py | 4 + homeassistant/components/switchbot/entity.py | 4 + homeassistant/components/switchbot/sensor.py | 42 +++++-- homeassistant/components/switchbot/switch.py | 63 +++++++++- tests/components/switchbot/__init__.py | 25 ++++ tests/components/switchbot/test_sensor.py | 111 ++++++++++++++++++ tests/components/switchbot/test_switch.py | 109 ++++++++++++++++- 8 files changed, 349 insertions(+), 11 deletions(-) diff 
--git a/homeassistant/components/switchbot/__init__.py b/homeassistant/components/switchbot/__init__.py index f5e587f0d9c65c..ce0e8412b868cd 100644 --- a/homeassistant/components/switchbot/__init__.py +++ b/homeassistant/components/switchbot/__init__.py @@ -98,6 +98,7 @@ SupportedModels.RGBICWW_FLOOR_LAMP.value: [Platform.LIGHT, Platform.SENSOR], SupportedModels.RGBICWW_STRIP_LIGHT.value: [Platform.LIGHT, Platform.SENSOR], SupportedModels.PLUG_MINI_EU.value: [Platform.SWITCH, Platform.SENSOR], + SupportedModels.RELAY_SWITCH_2PM.value: [Platform.SWITCH, Platform.SENSOR], } CLASS_BY_DEVICE = { SupportedModels.CEILING_LIGHT.value: switchbot.SwitchbotCeilingLight, @@ -129,6 +130,7 @@ SupportedModels.RGBICWW_FLOOR_LAMP.value: switchbot.SwitchbotRgbicLight, SupportedModels.RGBICWW_STRIP_LIGHT.value: switchbot.SwitchbotRgbicLight, SupportedModels.PLUG_MINI_EU.value: switchbot.SwitchbotRelaySwitch, + SupportedModels.RELAY_SWITCH_2PM.value: switchbot.SwitchbotRelaySwitch2PM, } diff --git a/homeassistant/components/switchbot/const.py b/homeassistant/components/switchbot/const.py index 549a602c3ff569..c10609299d4bb9 100644 --- a/homeassistant/components/switchbot/const.py +++ b/homeassistant/components/switchbot/const.py @@ -54,6 +54,7 @@ class SupportedModels(StrEnum): RGBICWW_STRIP_LIGHT = "rgbicww_strip_light" RGBICWW_FLOOR_LAMP = "rgbicww_floor_lamp" PLUG_MINI_EU = "plug_mini_eu" + RELAY_SWITCH_2PM = "relay_switch_2pm" CONNECTABLE_SUPPORTED_MODEL_TYPES = { @@ -87,6 +88,7 @@ class SupportedModels(StrEnum): SwitchbotModel.RGBICWW_STRIP_LIGHT: SupportedModels.RGBICWW_STRIP_LIGHT, SwitchbotModel.RGBICWW_FLOOR_LAMP: SupportedModels.RGBICWW_FLOOR_LAMP, SwitchbotModel.PLUG_MINI_EU: SupportedModels.PLUG_MINI_EU, + SwitchbotModel.RELAY_SWITCH_2PM: SupportedModels.RELAY_SWITCH_2PM, } NON_CONNECTABLE_SUPPORTED_MODEL_TYPES = { @@ -121,6 +123,7 @@ class SupportedModels(StrEnum): SwitchbotModel.RGBICWW_STRIP_LIGHT, SwitchbotModel.RGBICWW_FLOOR_LAMP, SwitchbotModel.PLUG_MINI_EU, + SwitchbotModel.RELAY_SWITCH_2PM, } ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS: dict[ @@ -140,6 +143,7 @@ class SupportedModels(StrEnum): SwitchbotModel.RGBICWW_STRIP_LIGHT: switchbot.SwitchbotRgbicLight, SwitchbotModel.RGBICWW_FLOOR_LAMP: switchbot.SwitchbotRgbicLight, SwitchbotModel.PLUG_MINI_EU: switchbot.SwitchbotRelaySwitch, + SwitchbotModel.RELAY_SWITCH_2PM: switchbot.SwitchbotRelaySwitch2PM, } HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL = { diff --git a/homeassistant/components/switchbot/entity.py b/homeassistant/components/switchbot/entity.py index b7ee36fc1aec52..a64950c0f7d06b 100644 --- a/homeassistant/components/switchbot/entity.py +++ b/homeassistant/components/switchbot/entity.py @@ -6,6 +6,7 @@ import logging from typing import Any, Concatenate +import switchbot from switchbot import Switchbot, SwitchbotDevice from switchbot.devices.device import SwitchbotOperationError @@ -46,6 +47,7 @@ def __init__(self, coordinator: SwitchbotDataUpdateCoordinator) -> None: model=coordinator.model, # Sometimes the modelName is missing from the advertisement data name=coordinator.device_name, ) + self._channel: int | None = None if ":" not in self._address: # MacOS Bluetooth addresses are not mac addresses return @@ -60,6 +62,8 @@ def __init__(self, coordinator: SwitchbotDataUpdateCoordinator) -> None: @property def parsed_data(self) -> dict[str, Any]: """Return parsed device data for this entity.""" + if isinstance(self.coordinator.device, switchbot.SwitchbotRelaySwitch2PM): + return self.coordinator.device.get_parsed_data(self._channel) return 
self.coordinator.device.parsed_data @property diff --git a/homeassistant/components/switchbot/sensor.py b/homeassistant/components/switchbot/sensor.py index 9196453e98c8f5..ab400b5806512d 100644 --- a/homeassistant/components/switchbot/sensor.py +++ b/homeassistant/components/switchbot/sensor.py @@ -2,6 +2,7 @@ from __future__ import annotations +import switchbot from switchbot import HumidifierWaterLevel from switchbot.const.air_purifier import AirQualityLevel @@ -25,8 +26,10 @@ UnitOfTemperature, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from .const import DOMAIN from .coordinator import SwitchbotConfigEntry, SwitchbotDataUpdateCoordinator from .entity import SwitchbotEntity @@ -133,13 +136,22 @@ async def async_setup_entry( ) -> None: """Set up Switchbot sensor based on a config entry.""" coordinator = entry.runtime_data - entities = [ - SwitchBotSensor(coordinator, sensor) - for sensor in coordinator.device.parsed_data - if sensor in SENSOR_TYPES - ] - entities.append(SwitchbotRSSISensor(coordinator, "rssi")) - async_add_entities(entities) + sensor_entities: list[SensorEntity] = [] + if isinstance(coordinator.device, switchbot.SwitchbotRelaySwitch2PM): + sensor_entities.extend( + SwitchBotSensor(coordinator, sensor, channel) + for channel in range(1, coordinator.device.channel + 1) + for sensor in coordinator.device.get_parsed_data(channel) + if sensor in SENSOR_TYPES + ) + else: + sensor_entities.extend( + SwitchBotSensor(coordinator, sensor) + for sensor in coordinator.device.parsed_data + if sensor in SENSOR_TYPES + ) + sensor_entities.append(SwitchbotRSSISensor(coordinator, "rssi")) + async_add_entities(sensor_entities) class SwitchBotSensor(SwitchbotEntity, SensorEntity): @@ -149,13 +161,27 @@ def __init__( self, coordinator: SwitchbotDataUpdateCoordinator, sensor: str, + channel: int | None = None, ) -> None: """Initialize the Switchbot sensor.""" super().__init__(coordinator) self._sensor = sensor - self._attr_unique_id = f"{coordinator.base_unique_id}-{sensor}" + self._channel = channel self.entity_description = SENSOR_TYPES[sensor] + if channel: + self._attr_unique_id = f"{coordinator.base_unique_id}-{sensor}-{channel}" + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, f"{coordinator.base_unique_id}-channel-{channel}") + }, + manufacturer="SwitchBot", + model_id="RelaySwitch2PM", + name=f"{coordinator.device_name} Channel {channel}", + ) + else: + self._attr_unique_id = f"{coordinator.base_unique_id}-{sensor}" + @property def native_value(self) -> str | int | None: """Return the state of the sensor.""" diff --git a/homeassistant/components/switchbot/switch.py b/homeassistant/components/switchbot/switch.py index fd1e8bb6393e17..d67aaed3412c82 100644 --- a/homeassistant/components/switchbot/switch.py +++ b/homeassistant/components/switchbot/switch.py @@ -2,6 +2,7 @@ from __future__ import annotations +import logging from typing import Any import switchbot @@ -9,13 +10,16 @@ from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity from homeassistant.const import STATE_ON from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity +from .const import DOMAIN from .coordinator import SwitchbotConfigEntry, 
SwitchbotDataUpdateCoordinator -from .entity import SwitchbotSwitchedEntity +from .entity import SwitchbotSwitchedEntity, exception_handler PARALLEL_UPDATES = 0 +_LOGGER = logging.getLogger(__name__) async def async_setup_entry( @@ -24,7 +28,16 @@ async def async_setup_entry( async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up Switchbot based on a config entry.""" - async_add_entities([SwitchBotSwitch(entry.runtime_data)]) + coordinator = entry.runtime_data + + if isinstance(coordinator.device, switchbot.SwitchbotRelaySwitch2PM): + entries = [ + SwitchbotMultiChannelSwitch(coordinator, channel) + for channel in range(1, coordinator.device.channel + 1) + ] + async_add_entities(entries) + else: + async_add_entities([SwitchBotSwitch(coordinator)]) class SwitchBotSwitch(SwitchbotSwitchedEntity, SwitchEntity, RestoreEntity): @@ -67,3 +80,49 @@ def extra_state_attributes(self) -> dict[str, Any]: **super().extra_state_attributes, "switch_mode": self._device.switch_mode(), } + + +class SwitchbotMultiChannelSwitch(SwitchbotSwitchedEntity, SwitchEntity): + """Representation of a Switchbot multi-channel switch.""" + + _attr_device_class = SwitchDeviceClass.SWITCH + _device: switchbot.Switchbot + _attr_name = None + + def __init__( + self, coordinator: SwitchbotDataUpdateCoordinator, channel: int + ) -> None: + """Initialize the Switchbot.""" + super().__init__(coordinator) + self._channel = channel + self._attr_unique_id = f"{coordinator.base_unique_id}-{channel}" + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, f"{coordinator.base_unique_id}-channel-{channel}")}, + manufacturer="SwitchBot", + model_id="RelaySwitch2PM", + name=f"{coordinator.device_name} Channel {channel}", + ) + + @property + def is_on(self) -> bool | None: + """Return true if device is on.""" + return self._device.is_on(self._channel) + + @exception_handler + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn device on.""" + _LOGGER.debug( + "Turn Switchbot device on %s, channel %d", self._address, self._channel + ) + await self._device.turn_on(self._channel) + self.async_write_ha_state() + + @exception_handler + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn device off.""" + _LOGGER.debug( + "Turn Switchbot device off %s, channel %d", self._address, self._channel + ) + await self._device.turn_off(self._channel) + self.async_write_ha_state() diff --git a/tests/components/switchbot/__init__.py b/tests/components/switchbot/__init__.py index 0cbab0f13bd369..72dc62b0b0965a 100644 --- a/tests/components/switchbot/__init__.py +++ b/tests/components/switchbot/__init__.py @@ -1080,3 +1080,28 @@ def make_advertisement( connectable=True, tx_power=-127, ) + + +RELAY_SWITCH_2PM_SERVICE_INFO = BluetoothServiceInfoBleak( + name="Relay Switch 2PM", + manufacturer_data={ + 2409: b"\xc0N0\xdd\xb9\xf2\x8a\xc1\x00\x00\x00\x00\x00F\x00\x00" + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"=\x00\x00\x00"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + address="AA:BB:CC:DD:EE:FF", + rssi=-60, + source="local", + advertisement=generate_advertisement_data( + local_name="Relay Switch 2PM", + manufacturer_data={ + 2409: b"\xc0N0\xdd\xb9\xf2\x8a\xc1\x00\x00\x00\x00\x00F\x00\x00" + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"=\x00\x00\x00"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + ), + device=generate_ble_device("AA:BB:CC:DD:EE:FF", "Relay Switch 2PM"), + time=0, + connectable=True, + tx_power=-127, +) diff --git 
a/tests/components/switchbot/test_sensor.py b/tests/components/switchbot/test_sensor.py index c9c28b7d94eb95..0e46324076696d 100644 --- a/tests/components/switchbot/test_sensor.py +++ b/tests/components/switchbot/test_sensor.py @@ -29,6 +29,7 @@ HUBMINI_MATTER_SERVICE_INFO, LEAK_SERVICE_INFO, PLUG_MINI_EU_SERVICE_INFO, + RELAY_SWITCH_2PM_SERVICE_INFO, REMOTE_SERVICE_INFO, WOHAND_SERVICE_INFO, WOHUB2_SERVICE_INFO, @@ -617,3 +618,113 @@ async def test_plug_mini_eu_sensor(hass: HomeAssistant) -> None: assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_relay_switch_2pm_sensor(hass: HomeAssistant) -> None: + """Test setting up creates the relay switch 2PM sensor.""" + await async_setup_component(hass, DOMAIN, {}) + inject_bluetooth_service_info(hass, RELAY_SWITCH_2PM_SERVICE_INFO) + + with patch( + "homeassistant.components.switchbot.switch.switchbot.SwitchbotRelaySwitch2PM.get_basic_info", + new=AsyncMock( + return_value={ + 1: { + "power": 4.9, + "current": 0.1, + "voltage": 25, + "energy": 0.2, + }, + 2: { + "power": 7.9, + "current": 0.6, + "voltage": 25, + "energy": 2.5, + }, + } + ), + ): + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + CONF_NAME: "test-name", + CONF_SENSOR_TYPE: "relay_switch_2pm", + CONF_KEY_ID: "ff", + CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", + }, + unique_id="aabbccddeeaa", + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all("sensor")) == 9 + + power_sensor_1 = hass.states.get("sensor.test_name_channel_1_power") + power_sensor_attrs = power_sensor_1.attributes + assert power_sensor_1.state == "4.9" + assert power_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Channel 1 Power" + assert power_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "W" + assert power_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + voltage_sensor_1 = hass.states.get("sensor.test_name_channel_1_voltage") + voltage_sensor_1_attrs = voltage_sensor_1.attributes + assert voltage_sensor_1.state == "25" + assert voltage_sensor_1_attrs[ATTR_FRIENDLY_NAME] == "test-name Channel 1 Voltage" + assert voltage_sensor_1_attrs[ATTR_UNIT_OF_MEASUREMENT] == "V" + assert voltage_sensor_1_attrs[ATTR_STATE_CLASS] == "measurement" + + current_sensor_1 = hass.states.get("sensor.test_name_channel_1_current") + current_sensor_1_attrs = current_sensor_1.attributes + assert current_sensor_1.state == "0.1" + assert current_sensor_1_attrs[ATTR_FRIENDLY_NAME] == "test-name Channel 1 Current" + assert current_sensor_1_attrs[ATTR_UNIT_OF_MEASUREMENT] == "A" + assert current_sensor_1_attrs[ATTR_STATE_CLASS] == "measurement" + + energy_sensor_1 = hass.states.get("sensor.test_name_channel_1_energy") + energy_sensor_1_attrs = energy_sensor_1.attributes + assert energy_sensor_1.state == "0.2" + assert energy_sensor_1_attrs[ATTR_FRIENDLY_NAME] == "test-name Channel 1 Energy" + assert energy_sensor_1_attrs[ATTR_UNIT_OF_MEASUREMENT] == "kWh" + assert energy_sensor_1_attrs[ATTR_STATE_CLASS] == "total_increasing" + + power_sensor_2 = hass.states.get("sensor.test_name_channel_2_power") + power_sensor_2_attrs = power_sensor_2.attributes + assert power_sensor_2.state == "7.9" + assert power_sensor_2_attrs[ATTR_FRIENDLY_NAME] == "test-name Channel 2 Power" + assert power_sensor_2_attrs[ATTR_UNIT_OF_MEASUREMENT] == "W" + assert 
power_sensor_2_attrs[ATTR_STATE_CLASS] == "measurement" + + voltage_sensor_2 = hass.states.get("sensor.test_name_channel_2_voltage") + voltage_sensor_2_attrs = voltage_sensor_2.attributes + assert voltage_sensor_2.state == "25" + assert voltage_sensor_2_attrs[ATTR_FRIENDLY_NAME] == "test-name Channel 2 Voltage" + assert voltage_sensor_2_attrs[ATTR_UNIT_OF_MEASUREMENT] == "V" + assert voltage_sensor_2_attrs[ATTR_STATE_CLASS] == "measurement" + + current_sensor_2 = hass.states.get("sensor.test_name_channel_2_current") + current_sensor_2_attrs = current_sensor_2.attributes + assert current_sensor_2.state == "0.6" + assert current_sensor_2_attrs[ATTR_FRIENDLY_NAME] == "test-name Channel 2 Current" + assert current_sensor_2_attrs[ATTR_UNIT_OF_MEASUREMENT] == "A" + assert current_sensor_2_attrs[ATTR_STATE_CLASS] == "measurement" + + energy_sensor_2 = hass.states.get("sensor.test_name_channel_2_energy") + energy_sensor_2_attrs = energy_sensor_2.attributes + assert energy_sensor_2.state == "2.5" + assert energy_sensor_2_attrs[ATTR_FRIENDLY_NAME] == "test-name Channel 2 Energy" + assert energy_sensor_2_attrs[ATTR_UNIT_OF_MEASUREMENT] == "kWh" + assert energy_sensor_2_attrs[ATTR_STATE_CLASS] == "total_increasing" + + rssi_sensor = hass.states.get("sensor.test_name_bluetooth_signal") + rssi_sensor_attrs = rssi_sensor.attributes + assert rssi_sensor.state == "-60" + assert rssi_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Bluetooth signal" + assert rssi_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "dBm" + assert rssi_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/switchbot/test_switch.py b/tests/components/switchbot/test_switch.py index c3740eb8b8e762..edab2fdaddcf6e 100644 --- a/tests/components/switchbot/test_switch.py +++ b/tests/components/switchbot/test_switch.py @@ -17,7 +17,11 @@ from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError -from . import PLUG_MINI_EU_SERVICE_INFO, WOHAND_SERVICE_INFO +from . 
import ( + PLUG_MINI_EU_SERVICE_INFO, + RELAY_SWITCH_2PM_SERVICE_INFO, + WOHAND_SERVICE_INFO, +) from tests.common import MockConfigEntry, mock_restore_cache from tests.components.bluetooth import inject_bluetooth_service_info @@ -152,3 +156,106 @@ async def test_relay_switch_control( ) mocked_instance.assert_awaited_once() + + +@pytest.mark.parametrize( + ("service", "mock_method"), + [(SERVICE_TURN_ON, "turn_on"), (SERVICE_TURN_OFF, "turn_off")], +) +async def test_relay_switch_2pm_control( + hass: HomeAssistant, + mock_entry_encrypted_factory: Callable[[str], MockConfigEntry], + service: str, + mock_method: str, +) -> None: + """Test Relay Switch 2PM control.""" + inject_bluetooth_service_info(hass, RELAY_SWITCH_2PM_SERVICE_INFO) + + entry = mock_entry_encrypted_factory(sensor_type="relay_switch_2pm") + entry.add_to_hass(hass) + + mocked_instance = AsyncMock(return_value=True) + with patch.multiple( + "homeassistant.components.switchbot.switch.switchbot.SwitchbotRelaySwitch2PM", + update=AsyncMock(return_value=None), + **{mock_method: mocked_instance}, + ): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + entity_id_1 = "switch.test_name_channel_1" + + await hass.services.async_call( + SWITCH_DOMAIN, + service, + {ATTR_ENTITY_ID: entity_id_1}, + blocking=True, + ) + + mocked_instance.assert_called_with(1) + + entity_id_2 = "switch.test_name_channel_2" + + await hass.services.async_call( + SWITCH_DOMAIN, + service, + {ATTR_ENTITY_ID: entity_id_2}, + blocking=True, + ) + + mocked_instance.assert_called_with(2) + + +@pytest.mark.parametrize( + ("exception", "error_message"), + [ + ( + SwitchbotOperationError("Operation failed"), + "An error occurred while performing the action: Operation failed", + ), + ], +) +@pytest.mark.parametrize( + ("service", "mock_method"), + [ + (SERVICE_TURN_ON, "turn_on"), + (SERVICE_TURN_OFF, "turn_off"), + ], +) +@pytest.mark.parametrize( + "entry_id", + [ + "switch.test_name_channel_1", + "switch.test_name_channel_2", + ], +) +async def test_relay_switch_2pm_exception( + hass: HomeAssistant, + mock_entry_encrypted_factory: Callable[[str], MockConfigEntry], + exception: Exception, + error_message: str, + service: str, + mock_method: str, + entry_id: str, +) -> None: + """Test Relay Switch 2PM exception handling.""" + inject_bluetooth_service_info(hass, RELAY_SWITCH_2PM_SERVICE_INFO) + + entry = mock_entry_encrypted_factory(sensor_type="relay_switch_2pm") + entry.add_to_hass(hass) + + with patch.multiple( + "homeassistant.components.switchbot.switch.switchbot.SwitchbotRelaySwitch2PM", + update=AsyncMock(return_value=None), + **{mock_method: AsyncMock(side_effect=exception)}, + ): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + with pytest.raises(HomeAssistantError, match=error_message): + await hass.services.async_call( + SWITCH_DOMAIN, + service, + {ATTR_ENTITY_ID: entry_id}, + blocking=True, + ) From d65e7048239cc719ac971d3e0b81abc08905028e Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Tue, 16 Sep 2025 10:33:46 -0400 Subject: [PATCH 14/16] Add usage_prediction integration (#151206) Co-authored-by: J. Nick Koston Co-authored-by: J. 
Nick Koston --- CODEOWNERS | 2 + .../components/default_config/manifest.json | 1 + .../components/usage_prediction/__init__.py | 89 +++++ .../usage_prediction/common_control.py | 241 ++++++++++++ .../components/usage_prediction/const.py | 13 + .../components/usage_prediction/manifest.json | 10 + .../components/usage_prediction/models.py | 24 ++ .../components/usage_prediction/strings.json | 3 + script/hassfest/quality_scale.py | 1 + tests/components/usage_prediction/__init__.py | 1 + .../usage_prediction/test_common_control.py | 366 ++++++++++++++++++ .../components/usage_prediction/test_init.py | 63 +++ .../usage_prediction/test_websocket.py | 115 ++++++ 13 files changed, 929 insertions(+) create mode 100644 homeassistant/components/usage_prediction/__init__.py create mode 100644 homeassistant/components/usage_prediction/common_control.py create mode 100644 homeassistant/components/usage_prediction/const.py create mode 100644 homeassistant/components/usage_prediction/manifest.json create mode 100644 homeassistant/components/usage_prediction/models.py create mode 100644 homeassistant/components/usage_prediction/strings.json create mode 100644 tests/components/usage_prediction/__init__.py create mode 100644 tests/components/usage_prediction/test_common_control.py create mode 100644 tests/components/usage_prediction/test_init.py create mode 100644 tests/components/usage_prediction/test_websocket.py diff --git a/CODEOWNERS b/CODEOWNERS index 67436a81addb86..620388ebc95bfa 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1677,6 +1677,8 @@ build.json @home-assistant/supervisor /tests/components/uptime_kuma/ @tr4nt0r /homeassistant/components/uptimerobot/ @ludeeus @chemelli74 /tests/components/uptimerobot/ @ludeeus @chemelli74 +/homeassistant/components/usage_prediction/ @home-assistant/core +/tests/components/usage_prediction/ @home-assistant/core /homeassistant/components/usb/ @bdraco /tests/components/usb/ @bdraco /homeassistant/components/usgs_earthquakes_feed/ @exxamalte diff --git a/homeassistant/components/default_config/manifest.json b/homeassistant/components/default_config/manifest.json index 8299fe43f0966e..3d845066251e90 100644 --- a/homeassistant/components/default_config/manifest.json +++ b/homeassistant/components/default_config/manifest.json @@ -19,6 +19,7 @@ "ssdp", "stream", "sun", + "usage_prediction", "usb", "webhook", "zeroconf" diff --git a/homeassistant/components/usage_prediction/__init__.py b/homeassistant/components/usage_prediction/__init__.py new file mode 100644 index 00000000000000..0388591c323ce7 --- /dev/null +++ b/homeassistant/components/usage_prediction/__init__.py @@ -0,0 +1,89 @@ +"""The usage prediction integration.""" + +from __future__ import annotations + +import asyncio +from datetime import timedelta +from typing import Any + +from homeassistant.components import websocket_api +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.typing import ConfigType +from homeassistant.util import dt as dt_util + +from . 
import common_control +from .const import DATA_CACHE, DOMAIN +from .models import EntityUsageDataCache, EntityUsagePredictions + +CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) + +CACHE_DURATION = timedelta(hours=24) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the usage prediction integration.""" + websocket_api.async_register_command(hass, ws_common_control) + hass.data[DATA_CACHE] = {} + return True + + +@websocket_api.websocket_command({"type": f"{DOMAIN}/common_control"}) +@websocket_api.async_response +async def ws_common_control( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Handle usage prediction common control WebSocket API.""" + result = await get_cached_common_control(hass, connection.user.id) + time_category = common_control.time_category(dt_util.now().hour) + connection.send_result( + msg["id"], + { + "entities": getattr(result, time_category), + }, + ) + + +async def get_cached_common_control( + hass: HomeAssistant, user_id: str +) -> EntityUsagePredictions: + """Get cached common control predictions or fetch new ones. + + Returns cached data if it's less than 24 hours old, + otherwise fetches new data and caches it. + """ + # Create a unique storage key for this user + storage_key = user_id + + cached_data = hass.data[DATA_CACHE].get(storage_key) + + if isinstance(cached_data, asyncio.Task): + # If there's an ongoing task to fetch data, await its result + return await cached_data + + # Check if cache is valid (less than 24 hours old) + if cached_data is not None: + if (dt_util.utcnow() - cached_data.timestamp) < CACHE_DURATION: + # Cache is still valid, return the cached predictions + return cached_data.predictions + + # Create task fetching data + task = hass.async_create_task( + common_control.async_predict_common_control(hass, user_id) + ) + hass.data[DATA_CACHE][storage_key] = task + + try: + predictions = await task + except Exception: + # If the task fails, remove it from cache to allow retries + hass.data[DATA_CACHE].pop(storage_key) + raise + + hass.data[DATA_CACHE][storage_key] = EntityUsageDataCache( + predictions=predictions, + ) + + return predictions diff --git a/homeassistant/components/usage_prediction/common_control.py b/homeassistant/components/usage_prediction/common_control.py new file mode 100644 index 00000000000000..4d51b2b655fd4c --- /dev/null +++ b/homeassistant/components/usage_prediction/common_control.py @@ -0,0 +1,241 @@ +"""Code to generate common control usage patterns.""" + +from __future__ import annotations + +from collections import Counter +from collections.abc import Callable +from datetime import datetime, timedelta +from functools import cache +import logging +from typing import Any, Literal, cast + +from sqlalchemy import select +from sqlalchemy.orm import Session + +from homeassistant.components.recorder import get_instance +from homeassistant.components.recorder.db_schema import EventData, Events, EventTypes +from homeassistant.components.recorder.models import uuid_hex_to_bytes_or_none +from homeassistant.components.recorder.util import session_scope +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util +from homeassistant.util.json import json_loads_object + +from .models import EntityUsagePredictions + +_LOGGER = logging.getLogger(__name__) + +# Time categories for usage patterns +TIME_CATEGORIES = ["morning", "afternoon", "evening", "night"] + 
+RESULTS_TO_INCLUDE = 8 + +# List of domains for which we want to track usage +ALLOWED_DOMAINS = { + # Entity platforms + Platform.AIR_QUALITY, + Platform.ALARM_CONTROL_PANEL, + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.CALENDAR, + Platform.CAMERA, + Platform.CLIMATE, + Platform.COVER, + Platform.DATE, + Platform.DATETIME, + Platform.FAN, + Platform.HUMIDIFIER, + Platform.IMAGE, + Platform.LAWN_MOWER, + Platform.LIGHT, + Platform.LOCK, + Platform.MEDIA_PLAYER, + Platform.NUMBER, + Platform.SCENE, + Platform.SELECT, + Platform.SENSOR, + Platform.SIREN, + Platform.SWITCH, + Platform.TEXT, + Platform.TIME, + Platform.TODO, + Platform.UPDATE, + Platform.VACUUM, + Platform.VALVE, + Platform.WAKE_WORD, + Platform.WATER_HEATER, + Platform.WEATHER, + # Helpers with own domain + "counter", + "group", + "input_boolean", + "input_button", + "input_datetime", + "input_number", + "input_select", + "input_text", + "schedule", + "timer", +} + + +@cache +def time_category(hour: int) -> Literal["morning", "afternoon", "evening", "night"]: + """Determine the time category for a given hour.""" + if 6 <= hour < 12: + return "morning" + if 12 <= hour < 18: + return "afternoon" + if 18 <= hour < 22: + return "evening" + return "night" + + +async def async_predict_common_control( + hass: HomeAssistant, user_id: str +) -> EntityUsagePredictions: + """Generate a list of commonly used entities for a user. + + Args: + hass: Home Assistant instance + user_id: User ID to filter events by. + + Returns: + Dictionary with time categories as keys and lists of most common entity IDs as values + """ + # Get the recorder instance to ensure it's ready + recorder = get_instance(hass) + + # Execute the database operation in the recorder's executor + return await recorder.async_add_executor_job( + _fetch_with_session, hass, _fetch_and_process_data, user_id + ) + + +def _fetch_and_process_data(session: Session, user_id: str) -> EntityUsagePredictions: + """Fetch and process service call events from the database.""" + # Prepare a dictionary to track results + results: dict[str, Counter[str]] = { + time_cat: Counter() for time_cat in TIME_CATEGORIES + } + + # Keep track of contexts that we processed so that we will only process + # the first service call in a context, and not subsequent calls. 
+ context_processed: set[bytes] = set() + thirty_days_ago_ts = (dt_util.utcnow() - timedelta(days=30)).timestamp() + user_id_bytes = uuid_hex_to_bytes_or_none(user_id) + if not user_id_bytes: + raise ValueError("Invalid user_id format") + + # Build the main query for events with their data + query = ( + select( + Events.context_id_bin, + Events.time_fired_ts, + EventData.shared_data, + ) + .select_from(Events) + .outerjoin(EventData, Events.data_id == EventData.data_id) + .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id) + .where(Events.time_fired_ts >= thirty_days_ago_ts) + .where(Events.context_user_id_bin == user_id_bytes) + .where(EventTypes.event_type == "call_service") + .order_by(Events.time_fired_ts) + ) + + # Execute the query + context_id: bytes + time_fired_ts: float + shared_data: str | None + local_time_zone = dt_util.get_default_time_zone() + for context_id, time_fired_ts, shared_data in ( + session.connection().execute(query).all() + ): + # Skip if we have already processed an event that was part of this context + if context_id in context_processed: + continue + + # Mark this context as processed + context_processed.add(context_id) + + # Parse the event data + if not shared_data: + continue + + try: + event_data = json_loads_object(shared_data) + except (ValueError, TypeError) as err: + _LOGGER.debug("Failed to parse event data: %s", err) + continue + + # Empty event data, skipping + if not event_data: + continue + + service_data = cast(dict[str, Any] | None, event_data.get("service_data")) + + # No service data found, skipping + if not service_data: + continue + + entity_ids: str | list[str] | None + if (target := service_data.get("target")) and ( + target_entity_ids := target.get("entity_id") + ): + entity_ids = target_entity_ids + else: + entity_ids = service_data.get("entity_id") + + # No entity IDs found, skip this event + if entity_ids is None: + continue + + if not isinstance(entity_ids, list): + entity_ids = [entity_ids] + + # Filter out entity IDs that are not in allowed domains + entity_ids = [ + entity_id + for entity_id in entity_ids + if entity_id.split(".")[0] in ALLOWED_DOMAINS + ] + + if not entity_ids: + continue + + # Convert timestamp to datetime and determine time category + if time_fired_ts: + # Convert to local time for time category determination + period = time_category( + datetime.fromtimestamp(time_fired_ts, local_time_zone).hour + ) + + # Count entity usage + for entity_id in entity_ids: + results[period][entity_id] += 1 + + return EntityUsagePredictions( + morning=[ + ent_id for (ent_id, _) in results["morning"].most_common(RESULTS_TO_INCLUDE) + ], + afternoon=[ + ent_id + for (ent_id, _) in results["afternoon"].most_common(RESULTS_TO_INCLUDE) + ], + evening=[ + ent_id for (ent_id, _) in results["evening"].most_common(RESULTS_TO_INCLUDE) + ], + night=[ + ent_id for (ent_id, _) in results["night"].most_common(RESULTS_TO_INCLUDE) + ], + ) + + +def _fetch_with_session( + hass: HomeAssistant, + fetch_func: Callable[[Session], EntityUsagePredictions], + *args: object, +) -> EntityUsagePredictions: + """Execute a fetch function with a database session.""" + with session_scope(hass=hass, read_only=True) as session: + return fetch_func(session, *args) diff --git a/homeassistant/components/usage_prediction/const.py b/homeassistant/components/usage_prediction/const.py new file mode 100644 index 00000000000000..65aeb1773fead2 --- /dev/null +++ b/homeassistant/components/usage_prediction/const.py @@ -0,0 +1,13 @@ +"""Constants for the 
usage prediction integration.""" + +import asyncio + +from homeassistant.util.hass_dict import HassKey + +from .models import EntityUsageDataCache, EntityUsagePredictions + +DOMAIN = "usage_prediction" + +DATA_CACHE: HassKey[ + dict[str, asyncio.Task[EntityUsagePredictions] | EntityUsageDataCache] +] = HassKey("usage_prediction") diff --git a/homeassistant/components/usage_prediction/manifest.json b/homeassistant/components/usage_prediction/manifest.json new file mode 100644 index 00000000000000..a1f4d4e7cf25d2 --- /dev/null +++ b/homeassistant/components/usage_prediction/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "usage_prediction", + "name": "Usage Prediction", + "codeowners": ["@home-assistant/core"], + "dependencies": ["http", "recorder"], + "documentation": "https://www.home-assistant.io/integrations/usage_prediction", + "integration_type": "system", + "iot_class": "calculated", + "quality_scale": "internal" +} diff --git a/homeassistant/components/usage_prediction/models.py b/homeassistant/components/usage_prediction/models.py new file mode 100644 index 00000000000000..53f976f89e4af0 --- /dev/null +++ b/homeassistant/components/usage_prediction/models.py @@ -0,0 +1,24 @@ +"""Models for the usage prediction integration.""" + +from dataclasses import dataclass, field +from datetime import datetime + +from homeassistant.util import dt as dt_util + + +@dataclass +class EntityUsagePredictions: + """Prediction which entities are likely to be used in each time category.""" + + morning: list[str] = field(default_factory=list) + afternoon: list[str] = field(default_factory=list) + evening: list[str] = field(default_factory=list) + night: list[str] = field(default_factory=list) + + +@dataclass +class EntityUsageDataCache: + """Data model for entity usage prediction.""" + + predictions: EntityUsagePredictions + timestamp: datetime = field(default_factory=dt_util.utcnow) diff --git a/homeassistant/components/usage_prediction/strings.json b/homeassistant/components/usage_prediction/strings.json new file mode 100644 index 00000000000000..56ab70d236052a --- /dev/null +++ b/homeassistant/components/usage_prediction/strings.json @@ -0,0 +1,3 @@ +{ + "title": "Usage Prediction" +} diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 978cea6f627c61..dcb45c70f56e46 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -2243,6 +2243,7 @@ class Rule: "tag", "timer", "trace", + "usage_prediction", "webhook", "websocket_api", "zone", diff --git a/tests/components/usage_prediction/__init__.py b/tests/components/usage_prediction/__init__.py new file mode 100644 index 00000000000000..124766b0c396a6 --- /dev/null +++ b/tests/components/usage_prediction/__init__.py @@ -0,0 +1 @@ +"""Tests for the usage_prediction integration.""" diff --git a/tests/components/usage_prediction/test_common_control.py b/tests/components/usage_prediction/test_common_control.py new file mode 100644 index 00000000000000..75beeadb9d5132 --- /dev/null +++ b/tests/components/usage_prediction/test_common_control.py @@ -0,0 +1,366 @@ +"""Test the common control usage prediction.""" + +from __future__ import annotations + +from unittest.mock import patch +import uuid + +from freezegun import freeze_time +import pytest + +from homeassistant.components.usage_prediction.common_control import ( + async_predict_common_control, + time_category, +) +from homeassistant.components.usage_prediction.models import EntityUsagePredictions +from homeassistant.const import 
EVENT_CALL_SERVICE +from homeassistant.core import Context, HomeAssistant + +from tests.components.recorder.common import async_wait_recording_done + + +def test_time_category() -> None: + """Test the time category calculation logic.""" + for hour in range(6): + assert time_category(hour) == "night", hour + for hour in range(7, 12): + assert time_category(hour) == "morning", hour + for hour in range(13, 18): + assert time_category(hour) == "afternoon", hour + for hour in range(19, 22): + assert time_category(hour) == "evening", hour + + +@pytest.mark.usefixtures("recorder_mock") +async def test_empty_database(hass: HomeAssistant) -> None: + """Test function with empty database returns empty results.""" + user_id = str(uuid.uuid4()) + + # Call the function with empty database + results = await async_predict_common_control(hass, user_id) + + # Should return empty lists for all time categories + assert results == EntityUsagePredictions( + morning=[], + afternoon=[], + evening=[], + night=[], + ) + + +@pytest.mark.usefixtures("recorder_mock") +async def test_invalid_user_id(hass: HomeAssistant) -> None: + """Test function with invalid user ID returns empty results.""" + # Invalid user ID format (not a valid UUID) + with pytest.raises(ValueError, match=r"Invalid user_id format"): + await async_predict_common_control(hass, "invalid-user-id") + + +@pytest.mark.usefixtures("recorder_mock") +async def test_with_service_calls(hass: HomeAssistant) -> None: + """Test function with actual service call events in database.""" + user_id = str(uuid.uuid4()) + + # Create service call events at different times of day + # Morning events - use separate service calls to get around context deduplication + with freeze_time("2023-07-01 07:00:00+00:00"): # Morning + hass.bus.async_fire( + EVENT_CALL_SERVICE, + { + "domain": "light", + "service": "turn_on", + "service_data": {"entity_id": ["light.living_room", "light.kitchen"]}, + }, + context=Context(user_id=user_id), + ) + await hass.async_block_till_done() + + # Afternoon events + with freeze_time("2023-07-01 14:00:00+00:00"): # Afternoon + hass.bus.async_fire( + EVENT_CALL_SERVICE, + { + "domain": "climate", + "service": "set_temperature", + "service_data": {"entity_id": "climate.thermostat"}, + }, + context=Context(user_id=user_id), + ) + await hass.async_block_till_done() + + # Evening events + with freeze_time("2023-07-01 19:00:00+00:00"): # Evening + hass.bus.async_fire( + EVENT_CALL_SERVICE, + { + "domain": "light", + "service": "turn_off", + "service_data": {"entity_id": "light.bedroom"}, + }, + context=Context(user_id=user_id), + ) + await hass.async_block_till_done() + + # Night events + with freeze_time("2023-07-01 23:00:00+00:00"): # Night + hass.bus.async_fire( + EVENT_CALL_SERVICE, + { + "domain": "lock", + "service": "lock", + "service_data": {"entity_id": "lock.front_door"}, + }, + context=Context(user_id=user_id), + ) + await hass.async_block_till_done() + + # Wait for events to be recorded + await async_wait_recording_done(hass) + + # Get predictions - make sure we're still in a reasonable timeframe + with freeze_time("2023-07-02 10:00:00+00:00"): # Next day, so events are recent + results = await async_predict_common_control(hass, user_id) + + # Verify results contain the expected entities in the correct time periods + assert results == EntityUsagePredictions( + morning=["climate.thermostat"], + afternoon=["light.bedroom", "lock.front_door"], + evening=[], + night=["light.living_room", "light.kitchen"], + ) + + 
+@pytest.mark.usefixtures("recorder_mock") +async def test_multiple_entities_in_one_call(hass: HomeAssistant) -> None: + """Test handling of service calls with multiple entity IDs.""" + user_id = str(uuid.uuid4()) + + with freeze_time("2023-07-01 10:00:00+00:00"): # Morning + hass.bus.async_fire( + EVENT_CALL_SERVICE, + { + "domain": "light", + "service": "turn_on", + "service_data": { + "entity_id": [ + "light.living_room", + "light.kitchen", + "light.hallway", + "not_allowed.domain", + ] + }, + }, + context=Context(user_id=user_id), + ) + await hass.async_block_till_done() + + await async_wait_recording_done(hass) + + with freeze_time("2023-07-02 10:00:00+00:00"): # Next day, so events are recent + results = await async_predict_common_control(hass, user_id) + + # All three lights should be counted (10:00 UTC = 02:00 local = night) + assert results.night == ["light.living_room", "light.kitchen", "light.hallway"] + assert results.morning == [] + assert results.afternoon == [] + assert results.evening == [] + + +@pytest.mark.usefixtures("recorder_mock") +async def test_context_deduplication(hass: HomeAssistant) -> None: + """Test that multiple events with the same context are deduplicated.""" + user_id = str(uuid.uuid4()) + context = Context(user_id=user_id) + + with freeze_time("2023-07-01 10:00:00+00:00"): # Morning + # Fire multiple events with the same context + hass.bus.async_fire( + EVENT_CALL_SERVICE, + { + "domain": "light", + "service": "turn_on", + "service_data": {"entity_id": "light.living_room"}, + }, + context=context, + ) + await hass.async_block_till_done() + + hass.bus.async_fire( + EVENT_CALL_SERVICE, + { + "domain": "switch", + "service": "turn_on", + "service_data": {"entity_id": "switch.coffee_maker"}, + }, + context=context, # Same context + ) + await hass.async_block_till_done() + + await async_wait_recording_done(hass) + + with freeze_time("2023-07-02 10:00:00+00:00"): # Next day, so events are recent + results = await async_predict_common_control(hass, user_id) + + # Only the first event should be processed (10:00 UTC = 02:00 local = night) + assert results == EntityUsagePredictions( + morning=[], + afternoon=[], + evening=[], + night=["light.living_room"], + ) + + +@pytest.mark.usefixtures("recorder_mock") +async def test_old_events_excluded(hass: HomeAssistant) -> None: + """Test that events older than 30 days are excluded.""" + user_id = str(uuid.uuid4()) + + # Create an old event (35 days ago) + with freeze_time("2023-05-27 10:00:00+00:00"): # 35 days before July 1st + hass.bus.async_fire( + EVENT_CALL_SERVICE, + { + "domain": "light", + "service": "turn_on", + "service_data": {"entity_id": "light.old_event"}, + }, + context=Context(user_id=user_id), + ) + await hass.async_block_till_done() + + # Create a recent event (5 days ago) + with freeze_time("2023-06-26 10:00:00+00:00"): # 5 days before July 1st + hass.bus.async_fire( + EVENT_CALL_SERVICE, + { + "domain": "light", + "service": "turn_on", + "service_data": {"entity_id": "light.recent_event"}, + }, + context=Context(user_id=user_id), + ) + await hass.async_block_till_done() + + await async_wait_recording_done(hass) + + # Query with current time + with freeze_time("2023-07-01 10:00:00+00:00"): + results = await async_predict_common_control(hass, user_id) + + # Only recent event should be included (10:00 UTC = 02:00 local = night) + assert results == EntityUsagePredictions( + morning=[], + afternoon=[], + evening=[], + night=["light.recent_event"], + ) + + +@pytest.mark.usefixtures("recorder_mock") +async 
def test_entities_limit(hass: HomeAssistant) -> None: + """Test that only top entities are returned per time category.""" + user_id = str(uuid.uuid4()) + + # Create more than 5 different entities in morning + with freeze_time("2023-07-01 08:00:00+00:00"): + # Create entities with different frequencies + entities_with_counts = [ + ("light.most_used", 10), + ("light.second", 8), + ("light.third", 6), + ("light.fourth", 4), + ("light.fifth", 2), + ("light.sixth", 1), + ("light.seventh", 1), + ] + + for entity_id, count in entities_with_counts: + for _ in range(count): + # Use different context for each call + hass.bus.async_fire( + EVENT_CALL_SERVICE, + { + "domain": "light", + "service": "toggle", + "service_data": {"entity_id": entity_id}, + }, + context=Context(user_id=user_id), + ) + await hass.async_block_till_done() + + await async_wait_recording_done(hass) + + with ( + freeze_time("2023-07-02 10:00:00+00:00"), + patch( + "homeassistant.components.usage_prediction.common_control.RESULTS_TO_INCLUDE", + 5, + ), + ): # Next day, so events are recent + results = await async_predict_common_control(hass, user_id) + + # Should be the top 5 most used (08:00 UTC = 00:00 local = night) + assert results.night == [ + "light.most_used", + "light.second", + "light.third", + "light.fourth", + "light.fifth", + ] + assert results.morning == [] + assert results.afternoon == [] + assert results.evening == [] + + +@pytest.mark.usefixtures("recorder_mock") +async def test_different_users_separated(hass: HomeAssistant) -> None: + """Test that events from different users are properly separated.""" + user_id_1 = str(uuid.uuid4()) + user_id_2 = str(uuid.uuid4()) + + with freeze_time("2023-07-01 10:00:00+00:00"): + # User 1 events + hass.bus.async_fire( + EVENT_CALL_SERVICE, + { + "domain": "light", + "service": "turn_on", + "service_data": {"entity_id": "light.user1_light"}, + }, + context=Context(user_id=user_id_1), + ) + await hass.async_block_till_done() + + # User 2 events + hass.bus.async_fire( + EVENT_CALL_SERVICE, + { + "domain": "light", + "service": "turn_on", + "service_data": {"entity_id": "light.user2_light"}, + }, + context=Context(user_id=user_id_2), + ) + await hass.async_block_till_done() + + await async_wait_recording_done(hass) + + # Get results for each user + with freeze_time("2023-07-02 10:00:00+00:00"): # Next day, so events are recent + results_user1 = await async_predict_common_control(hass, user_id_1) + results_user2 = await async_predict_common_control(hass, user_id_2) + + # Each user should only see their own entities (10:00 UTC = 02:00 local = night) + assert results_user1 == EntityUsagePredictions( + morning=[], + afternoon=[], + evening=[], + night=["light.user1_light"], + ) + + assert results_user2 == EntityUsagePredictions( + morning=[], + afternoon=[], + evening=[], + night=["light.user2_light"], + ) diff --git a/tests/components/usage_prediction/test_init.py b/tests/components/usage_prediction/test_init.py new file mode 100644 index 00000000000000..44c1ba32b55f56 --- /dev/null +++ b/tests/components/usage_prediction/test_init.py @@ -0,0 +1,63 @@ +"""Test usage_prediction integration.""" + +import asyncio +from unittest.mock import patch + +import pytest + +from homeassistant.components.usage_prediction import get_cached_common_control +from homeassistant.components.usage_prediction.models import EntityUsagePredictions +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + + +@pytest.mark.usefixtures("recorder_mock") +async def 
test_usage_prediction_caching(hass: HomeAssistant) -> None: + """Test that usage prediction results are cached for 24 hours.""" + + assert await async_setup_component(hass, "usage_prediction", {}) + + finish_event = asyncio.Event() + + async def mock_common_control_error(*args) -> EntityUsagePredictions: + await finish_event.wait() + raise Exception("Boom") # noqa: TRY002 + + with patch( + "homeassistant.components.usage_prediction.common_control.async_predict_common_control", + mock_common_control_error, + ): + # First call, should trigger prediction + task1 = asyncio.create_task(get_cached_common_control(hass, "user_1")) + task2 = asyncio.create_task(get_cached_common_control(hass, "user_1")) + await asyncio.sleep(0) + finish_event.set() + with pytest.raises(Exception, match="Boom"): + await task2 + with pytest.raises(Exception, match="Boom"): + await task1 + + finish_event.clear() + results = EntityUsagePredictions( + morning=["light.kitchen"], + afternoon=["climate.thermostat"], + evening=["light.bedroom"], + night=["lock.front_door"], + ) + + # The exception is not cached, we hit the method again. + async def mock_common_control(*args) -> EntityUsagePredictions: + await finish_event.wait() + return results + + with patch( + "homeassistant.components.usage_prediction.common_control.async_predict_common_control", + mock_common_control, + ): + # First call, should trigger prediction + task1 = asyncio.create_task(get_cached_common_control(hass, "user_1")) + task2 = asyncio.create_task(get_cached_common_control(hass, "user_1")) + await asyncio.sleep(0) + finish_event.set() + assert await task2 is results + assert await task1 is results diff --git a/tests/components/usage_prediction/test_websocket.py b/tests/components/usage_prediction/test_websocket.py new file mode 100644 index 00000000000000..d20999ed67b09b --- /dev/null +++ b/tests/components/usage_prediction/test_websocket.py @@ -0,0 +1,115 @@ +"""Test usage_prediction WebSocket API.""" + +from collections.abc import Generator +from copy import deepcopy +from datetime import datetime, timedelta +from unittest.mock import Mock, patch + +from freezegun import freeze_time +import pytest + +from homeassistant.components.usage_prediction.models import EntityUsagePredictions +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util + +from tests.common import MockUser +from tests.typing import WebSocketGenerator + +NOW = datetime(2026, 8, 26, 15, 0, 0, tzinfo=dt_util.UTC) + + +@pytest.fixture +def mock_predict_common_control() -> Generator[Mock]: + """Return a mock result for common control.""" + with patch( + "homeassistant.components.usage_prediction.common_control.async_predict_common_control", + return_value=EntityUsagePredictions( + morning=["light.kitchen"], + afternoon=["climate.thermostat"], + evening=["light.bedroom"], + night=["lock.front_door"], + ), + ) as mock_predict: + yield mock_predict + + +@pytest.mark.usefixtures("recorder_mock") +async def test_common_control( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + hass_admin_user: MockUser, + mock_predict_common_control: Mock, +) -> None: + """Test usage_prediction common control WebSocket command.""" + assert await async_setup_component(hass, "usage_prediction", {}) + + client = await hass_ws_client(hass) + + with freeze_time(NOW): + await client.send_json({"id": 1, "type": "usage_prediction/common_control"}) + msg = await client.receive_json() + + assert msg["id"] == 1 + 
assert msg["type"] == "result" + assert msg["success"] is True + assert msg["result"] == { + "entities": [ + "light.kitchen", + ] + } + assert mock_predict_common_control.call_count == 1 + assert mock_predict_common_control.mock_calls[0][1][1] == hass_admin_user.id + + +@pytest.mark.usefixtures("recorder_mock") +async def test_caching_behavior( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_predict_common_control: Mock, +) -> None: + """Test that results are cached for 24 hours.""" + assert await async_setup_component(hass, "usage_prediction", {}) + + client = await hass_ws_client(hass) + + # First call should fetch from database + with freeze_time(NOW): + await client.send_json({"id": 1, "type": "usage_prediction/common_control"}) + msg = await client.receive_json() + + assert msg["success"] is True + assert msg["result"] == { + "entities": [ + "light.kitchen", + ] + } + assert mock_predict_common_control.call_count == 1 + + new_result = deepcopy(mock_predict_common_control.return_value) + new_result.morning.append("light.bla") + mock_predict_common_control.return_value = new_result + + # Second call within 24 hours should use cache + with freeze_time(NOW + timedelta(hours=23)): + await client.send_json({"id": 2, "type": "usage_prediction/common_control"}) + msg = await client.receive_json() + + assert msg["success"] is True + assert msg["result"] == { + "entities": [ + "light.kitchen", + ] + } + # Should still be 1 (no new database call) + assert mock_predict_common_control.call_count == 1 + + # Third call after 24 hours should fetch from database again + with freeze_time(NOW + timedelta(hours=25)): + await client.send_json({"id": 3, "type": "usage_prediction/common_control"}) + msg = await client.receive_json() + + assert msg["success"] is True + assert msg["result"] == {"entities": ["light.kitchen", "light.bla"]} + # Should now be 2 (new database call) + assert mock_predict_common_control.call_count == 2 From 6e4258c8a9fd1a19bf117ab7d75e2485b5237e8e Mon Sep 17 00:00:00 2001 From: Tom Matheussen Date: Tue, 16 Sep 2025 17:24:15 +0200 Subject: [PATCH 15/16] Add Satel Integra config flow (#138946) Co-authored-by: Shay Levy --- CODEOWNERS | 2 + .../components/satel_integra/__init__.py | 218 ++++--- .../satel_integra/alarm_control_panel.py | 51 +- .../components/satel_integra/binary_sensor.py | 98 +-- .../components/satel_integra/config_flow.py | 496 +++++++++++++++ .../components/satel_integra/const.py | 38 ++ .../components/satel_integra/manifest.json | 6 +- .../components/satel_integra/strings.json | 210 +++++++ .../components/satel_integra/switch.py | 60 +- homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 5 +- requirements_test_all.txt | 3 + tests/components/satel_integra/__init__.py | 1 + tests/components/satel_integra/conftest.py | 49 ++ .../satel_integra/test_config_flow.py | 593 ++++++++++++++++++ 15 files changed, 1655 insertions(+), 176 deletions(-) create mode 100644 homeassistant/components/satel_integra/config_flow.py create mode 100644 homeassistant/components/satel_integra/const.py create mode 100644 homeassistant/components/satel_integra/strings.json create mode 100644 tests/components/satel_integra/__init__.py create mode 100644 tests/components/satel_integra/conftest.py create mode 100644 tests/components/satel_integra/test_config_flow.py diff --git a/CODEOWNERS b/CODEOWNERS index 620388ebc95bfa..b484721b20900c 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1350,6 +1350,8 @@ build.json @home-assistant/supervisor 
/tests/components/samsungtv/ @chemelli74 @epenet /homeassistant/components/sanix/ @tomaszsluszniak /tests/components/sanix/ @tomaszsluszniak +/homeassistant/components/satel_integra/ @Tommatheussen +/tests/components/satel_integra/ @Tommatheussen /homeassistant/components/scene/ @home-assistant/core /tests/components/scene/ @home-assistant/core /homeassistant/components/schedule/ @home-assistant/core diff --git a/homeassistant/components/satel_integra/__init__.py b/homeassistant/components/satel_integra/__init__.py index 466faf27b12a44..bf387cff96c094 100644 --- a/homeassistant/components/satel_integra/__init__.py +++ b/homeassistant/components/satel_integra/__init__.py @@ -1,59 +1,67 @@ """Support for Satel Integra devices.""" -import collections import logging from satel_integra.satel_integra import AsyncSatel import voluptuous as vol -from homeassistant.const import CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_STOP, Platform -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.discovery import async_load_platform +from homeassistant.config_entries import SOURCE_IMPORT +from homeassistant.const import ( + CONF_CODE, + CONF_HOST, + CONF_NAME, + CONF_PORT, + EVENT_HOMEASSISTANT_STOP, + Platform, +) +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv, issue_registry as ir from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.typing import ConfigType -DEFAULT_ALARM_NAME = "satel_integra" -DEFAULT_PORT = 7094 -DEFAULT_CONF_ARM_HOME_MODE = 1 -DEFAULT_DEVICE_PARTITION = 1 -DEFAULT_ZONE_TYPE = "motion" +from .const import ( + CONF_ARM_HOME_MODE, + CONF_DEVICE_PARTITIONS, + CONF_OUTPUT_NUMBER, + CONF_OUTPUTS, + CONF_PARTITION_NUMBER, + CONF_SWITCHABLE_OUTPUT_NUMBER, + CONF_SWITCHABLE_OUTPUTS, + CONF_ZONE_NUMBER, + CONF_ZONE_TYPE, + CONF_ZONES, + DEFAULT_CONF_ARM_HOME_MODE, + DEFAULT_PORT, + DEFAULT_ZONE_TYPE, + DOMAIN, + SIGNAL_OUTPUTS_UPDATED, + SIGNAL_PANEL_MESSAGE, + SIGNAL_ZONES_UPDATED, + SUBENTRY_TYPE_OUTPUT, + SUBENTRY_TYPE_PARTITION, + SUBENTRY_TYPE_SWITCHABLE_OUTPUT, + SUBENTRY_TYPE_ZONE, + ZONES, + SatelConfigEntry, +) _LOGGER = logging.getLogger(__name__) -DOMAIN = "satel_integra" - -DATA_SATEL = "satel_integra" - -CONF_DEVICE_CODE = "code" -CONF_DEVICE_PARTITIONS = "partitions" -CONF_ARM_HOME_MODE = "arm_home_mode" -CONF_ZONE_NAME = "name" -CONF_ZONE_TYPE = "type" -CONF_ZONES = "zones" -CONF_OUTPUTS = "outputs" -CONF_SWITCHABLE_OUTPUTS = "switchable_outputs" - -ZONES = "zones" +PLATFORMS = [Platform.ALARM_CONTROL_PANEL, Platform.BINARY_SENSOR, Platform.SWITCH] -SIGNAL_PANEL_MESSAGE = "satel_integra.panel_message" -SIGNAL_PANEL_ARM_AWAY = "satel_integra.panel_arm_away" -SIGNAL_PANEL_ARM_HOME = "satel_integra.panel_arm_home" -SIGNAL_PANEL_DISARM = "satel_integra.panel_disarm" - -SIGNAL_ZONES_UPDATED = "satel_integra.zones_updated" -SIGNAL_OUTPUTS_UPDATED = "satel_integra.outputs_updated" ZONE_SCHEMA = vol.Schema( { - vol.Required(CONF_ZONE_NAME): cv.string, + vol.Required(CONF_NAME): cv.string, vol.Optional(CONF_ZONE_TYPE, default=DEFAULT_ZONE_TYPE): cv.string, } ) -EDITABLE_OUTPUT_SCHEMA = vol.Schema({vol.Required(CONF_ZONE_NAME): cv.string}) +EDITABLE_OUTPUT_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string}) PARTITION_SCHEMA = vol.Schema( { - 
vol.Required(CONF_ZONE_NAME): cv.string, + vol.Required(CONF_NAME): cv.string, vol.Optional(CONF_ARM_HOME_MODE, default=DEFAULT_CONF_ARM_HOME_MODE): vol.In( [1, 2, 3] ), @@ -63,7 +71,7 @@ def is_alarm_code_necessary(value): """Check if alarm code must be configured.""" - if value.get(CONF_SWITCHABLE_OUTPUTS) and CONF_DEVICE_CODE not in value: + if value.get(CONF_SWITCHABLE_OUTPUTS) and CONF_CODE not in value: raise vol.Invalid("You need to specify alarm code to use switchable_outputs") return value @@ -75,7 +83,7 @@ def is_alarm_code_necessary(value): { vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, - vol.Optional(CONF_DEVICE_CODE): cv.string, + vol.Optional(CONF_CODE): cv.string, vol.Optional(CONF_DEVICE_PARTITIONS, default={}): { vol.Coerce(int): PARTITION_SCHEMA }, @@ -92,64 +100,106 @@ def is_alarm_code_necessary(value): ) -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the Satel Integra component.""" - conf = config[DOMAIN] +async def async_setup(hass: HomeAssistant, hass_config: ConfigType) -> bool: + """Set up Satel Integra from YAML.""" + + if config := hass_config.get(DOMAIN): + hass.async_create_task(_async_import(hass, config)) + + return True + - zones = conf.get(CONF_ZONES) - outputs = conf.get(CONF_OUTPUTS) - switchable_outputs = conf.get(CONF_SWITCHABLE_OUTPUTS) - host = conf.get(CONF_HOST) - port = conf.get(CONF_PORT) - partitions = conf.get(CONF_DEVICE_PARTITIONS) +async def _async_import(hass: HomeAssistant, config: ConfigType) -> None: + """Process YAML import.""" - monitored_outputs = collections.OrderedDict( - list(outputs.items()) + list(switchable_outputs.items()) + if not hass.config_entries.async_entries(DOMAIN): + # Start import flow + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config + ) + + if result.get("type") == FlowResultType.ABORT: + ir.async_create_issue( + hass, + DOMAIN, + "deprecated_yaml_import_issue_cannot_connect", + breaks_in_ha_version="2026.4.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_yaml_import_issue_cannot_connect", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Satel Integra", + }, + ) + return + + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2026.4.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Satel Integra", + }, ) - controller = AsyncSatel(host, port, hass.loop, zones, monitored_outputs, partitions) - hass.data[DATA_SATEL] = controller +async def async_setup_entry(hass: HomeAssistant, entry: SatelConfigEntry) -> bool: + """Set up Satel Integra from a config entry.""" + + host = entry.data[CONF_HOST] + port = entry.data[CONF_PORT] + + # Make sure we initialize the Satel controller with the configured entries to monitor + partitions = [ + subentry.data[CONF_PARTITION_NUMBER] + for subentry in entry.subentries.values() + if subentry.subentry_type == SUBENTRY_TYPE_PARTITION + ] + + zones = [ + subentry.data[CONF_ZONE_NUMBER] + for subentry in entry.subentries.values() + if subentry.subentry_type == SUBENTRY_TYPE_ZONE + ] + + outputs = [ + subentry.data[CONF_OUTPUT_NUMBER] + for subentry in entry.subentries.values() + if subentry.subentry_type == SUBENTRY_TYPE_OUTPUT + ] + + switchable_outputs = [ + 
subentry.data[CONF_SWITCHABLE_OUTPUT_NUMBER] + for subentry in entry.subentries.values() + if subentry.subentry_type == SUBENTRY_TYPE_SWITCHABLE_OUTPUT + ] + + monitored_outputs = outputs + switchable_outputs + + controller = AsyncSatel(host, port, hass.loop, zones, monitored_outputs, partitions) result = await controller.connect() if not result: - return False + raise ConfigEntryNotReady("Controller failed to connect") + + entry.runtime_data = controller @callback def _close(*_): controller.close() - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close) - - _LOGGER.debug("Arm home config: %s, mode: %s ", conf, conf.get(CONF_ARM_HOME_MODE)) + entry.async_on_unload(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close)) - hass.async_create_task( - async_load_platform(hass, Platform.ALARM_CONTROL_PANEL, DOMAIN, conf, config) - ) - - hass.async_create_task( - async_load_platform( - hass, - Platform.BINARY_SENSOR, - DOMAIN, - {CONF_ZONES: zones, CONF_OUTPUTS: outputs}, - config, - ) - ) - - hass.async_create_task( - async_load_platform( - hass, - Platform.SWITCH, - DOMAIN, - { - CONF_SWITCHABLE_OUTPUTS: switchable_outputs, - CONF_DEVICE_CODE: conf.get(CONF_DEVICE_CODE), - }, - config, - ) - ) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @callback def alarm_status_update_callback(): @@ -179,3 +229,13 @@ def outputs_update_callback(status): ) return True + + +async def async_unload_entry(hass: HomeAssistant, entry: SatelConfigEntry) -> bool: + """Unloading the Satel platforms.""" + + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + controller = entry.runtime_data + controller.close() + + return unload_ok diff --git a/homeassistant/components/satel_integra/alarm_control_panel.py b/homeassistant/components/satel_integra/alarm_control_panel.py index 41b2d0d561bb72..b00741e1971219 100644 --- a/homeassistant/components/satel_integra/alarm_control_panel.py +++ b/homeassistant/components/satel_integra/alarm_control_panel.py @@ -14,46 +14,49 @@ AlarmControlPanelState, CodeFormat, ) +from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from . 
import ( +from .const import ( CONF_ARM_HOME_MODE, - CONF_DEVICE_PARTITIONS, - CONF_ZONE_NAME, - DATA_SATEL, + CONF_PARTITION_NUMBER, SIGNAL_PANEL_MESSAGE, + SUBENTRY_TYPE_PARTITION, + SatelConfigEntry, ) _LOGGER = logging.getLogger(__name__) -async def async_setup_platform( +async def async_setup_entry( hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, + config_entry: SatelConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up for Satel Integra alarm panels.""" - if not discovery_info: - return - configured_partitions = discovery_info[CONF_DEVICE_PARTITIONS] - controller = hass.data[DATA_SATEL] + controller = config_entry.runtime_data - devices = [] + partition_subentries = filter( + lambda entry: entry.subentry_type == SUBENTRY_TYPE_PARTITION, + config_entry.subentries.values(), + ) - for partition_num, device_config_data in configured_partitions.items(): - zone_name = device_config_data[CONF_ZONE_NAME] - arm_home_mode = device_config_data.get(CONF_ARM_HOME_MODE) - device = SatelIntegraAlarmPanel( - controller, zone_name, arm_home_mode, partition_num - ) - devices.append(device) + for subentry in partition_subentries: + partition_num = subentry.data[CONF_PARTITION_NUMBER] + zone_name = subentry.data[CONF_NAME] + arm_home_mode = subentry.data[CONF_ARM_HOME_MODE] - async_add_entities(devices) + async_add_entities( + [ + SatelIntegraAlarmPanel( + controller, zone_name, arm_home_mode, partition_num + ) + ], + config_subentry_id=subentry.subentry_id, + ) class SatelIntegraAlarmPanel(AlarmControlPanelEntity): @@ -66,7 +69,7 @@ class SatelIntegraAlarmPanel(AlarmControlPanelEntity): | AlarmControlPanelEntityFeature.ARM_AWAY ) - def __init__(self, controller, name, arm_home_mode, partition_id): + def __init__(self, controller, name, arm_home_mode, partition_id) -> None: """Initialize the alarm panel.""" self._attr_name = name self._attr_unique_id = f"satel_alarm_panel_{partition_id}" diff --git a/homeassistant/components/satel_integra/binary_sensor.py b/homeassistant/components/satel_integra/binary_sensor.py index 8ff549406350ee..7cea005cd5ecdc 100644 --- a/homeassistant/components/satel_integra/binary_sensor.py +++ b/homeassistant/components/satel_integra/binary_sensor.py @@ -6,61 +6,79 @@ BinarySensorDeviceClass, BinarySensorEntity, ) +from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from . 
import (
-    CONF_OUTPUTS,
-    CONF_ZONE_NAME,
+from .const import (
+    CONF_OUTPUT_NUMBER,
+    CONF_ZONE_NUMBER,
     CONF_ZONE_TYPE,
-    CONF_ZONES,
-    DATA_SATEL,
     SIGNAL_OUTPUTS_UPDATED,
     SIGNAL_ZONES_UPDATED,
+    SUBENTRY_TYPE_OUTPUT,
+    SUBENTRY_TYPE_ZONE,
+    SatelConfigEntry,
 )
 
 
-async def async_setup_platform(
+async def async_setup_entry(
     hass: HomeAssistant,
-    config: ConfigType,
-    async_add_entities: AddEntitiesCallback,
-    discovery_info: DiscoveryInfoType | None = None,
+    config_entry: SatelConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up the Satel Integra binary sensor devices."""
-    if not discovery_info:
-        return
 
-    configured_zones = discovery_info[CONF_ZONES]
-    controller = hass.data[DATA_SATEL]
-
-    devices = []
-
-    for zone_num, device_config_data in configured_zones.items():
-        zone_type = device_config_data[CONF_ZONE_TYPE]
-        zone_name = device_config_data[CONF_ZONE_NAME]
-        device = SatelIntegraBinarySensor(
-            controller, zone_num, zone_name, zone_type, CONF_ZONES, SIGNAL_ZONES_UPDATED
+    controller = config_entry.runtime_data
+
+    zone_subentries = filter(
+        lambda entry: entry.subentry_type == SUBENTRY_TYPE_ZONE,
+        config_entry.subentries.values(),
+    )
+
+    for subentry in zone_subentries:
+        zone_num = subentry.data[CONF_ZONE_NUMBER]
+        zone_type = subentry.data[CONF_ZONE_TYPE]
+        zone_name = subentry.data[CONF_NAME]
+
+        async_add_entities(
+            [
+                SatelIntegraBinarySensor(
+                    controller,
+                    zone_num,
+                    zone_name,
+                    zone_type,
+                    SUBENTRY_TYPE_ZONE,
+                    SIGNAL_ZONES_UPDATED,
+                )
+            ],
+            config_subentry_id=subentry.subentry_id,
         )
-        devices.append(device)
-
-    configured_outputs = discovery_info[CONF_OUTPUTS]
-
-    for zone_num, device_config_data in configured_outputs.items():
-        zone_type = device_config_data[CONF_ZONE_TYPE]
-        zone_name = device_config_data[CONF_ZONE_NAME]
-        device = SatelIntegraBinarySensor(
-            controller,
-            zone_num,
-            zone_name,
-            zone_type,
-            CONF_OUTPUTS,
-            SIGNAL_OUTPUTS_UPDATED,
-        )
-        devices.append(device)
 
-    async_add_entities(devices)
+    output_subentries = filter(
+        lambda entry: entry.subentry_type == SUBENTRY_TYPE_OUTPUT,
+        config_entry.subentries.values(),
+    )
+
+    for subentry in output_subentries:
+        output_num = subentry.data[CONF_OUTPUT_NUMBER]
+        output_type = subentry.data[CONF_ZONE_TYPE]
+        output_name = subentry.data[CONF_NAME]
+
+        async_add_entities(
+            [
+                SatelIntegraBinarySensor(
+                    controller,
+                    output_num,
+                    output_name,
+                    output_type,
+                    SUBENTRY_TYPE_OUTPUT,
+                    SIGNAL_OUTPUTS_UPDATED,
+                )
+            ],
+            config_subentry_id=subentry.subentry_id,
+        )
 
 
 class SatelIntegraBinarySensor(BinarySensorEntity):
diff --git a/homeassistant/components/satel_integra/config_flow.py b/homeassistant/components/satel_integra/config_flow.py
new file mode 100644
index 00000000000000..d5427488fc74ad
--- /dev/null
+++ b/homeassistant/components/satel_integra/config_flow.py
@@ -0,0 +1,496 @@
+"""Config flow for Satel Integra."""
+
+from __future__ import annotations
+
+import logging
+from typing import Any
+
+from satel_integra.satel_integra import AsyncSatel
+import voluptuous as vol
+
+from homeassistant.components.binary_sensor import BinarySensorDeviceClass
+from homeassistant.config_entries import (
+    ConfigEntry,
+    ConfigFlow,
+    ConfigFlowResult,
+    ConfigSubentryData,
+    ConfigSubentryFlow,
+    OptionsFlowWithReload,
+    SubentryFlowResult,
+)
+from homeassistant.const import CONF_CODE, CONF_HOST, CONF_NAME, CONF_PORT
+from homeassistant.core import callback
+from homeassistant.helpers import config_validation as cv, selector
+
+from .const import (
+    
CONF_ARM_HOME_MODE, + CONF_DEVICE_PARTITIONS, + CONF_OUTPUT_NUMBER, + CONF_OUTPUTS, + CONF_PARTITION_NUMBER, + CONF_SWITCHABLE_OUTPUT_NUMBER, + CONF_SWITCHABLE_OUTPUTS, + CONF_ZONE_NUMBER, + CONF_ZONE_TYPE, + CONF_ZONES, + DEFAULT_CONF_ARM_HOME_MODE, + DEFAULT_PORT, + DOMAIN, + SUBENTRY_TYPE_OUTPUT, + SUBENTRY_TYPE_PARTITION, + SUBENTRY_TYPE_SWITCHABLE_OUTPUT, + SUBENTRY_TYPE_ZONE, + SatelConfigEntry, +) + +_LOGGER = logging.getLogger(__package__) + +CONNECTION_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port, + vol.Optional(CONF_CODE): cv.string, + } +) + +CODE_SCHEMA = vol.Schema( + { + vol.Optional(CONF_CODE): cv.string, + } +) + +PARTITION_SCHEMA = vol.Schema( + { + vol.Required(CONF_NAME): cv.string, + vol.Required(CONF_ARM_HOME_MODE, default=DEFAULT_CONF_ARM_HOME_MODE): vol.In( + [1, 2, 3] + ), + } +) + +ZONE_AND_OUTPUT_SCHEMA = vol.Schema( + { + vol.Required(CONF_NAME): cv.string, + vol.Required( + CONF_ZONE_TYPE, default=BinarySensorDeviceClass.MOTION + ): selector.SelectSelector( + selector.SelectSelectorConfig( + options=[cls.value for cls in BinarySensorDeviceClass], + mode=selector.SelectSelectorMode.DROPDOWN, + translation_key="binary_sensor_device_class", + sort=True, + ), + ), + } +) + +SWITCHABLE_OUTPUT_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string}) + + +class SatelConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a Satel Integra config flow.""" + + VERSION = 1 + + @staticmethod + @callback + def async_get_options_flow( + config_entry: SatelConfigEntry, + ) -> SatelOptionsFlow: + """Create the options flow.""" + return SatelOptionsFlow() + + @classmethod + @callback + def async_get_supported_subentry_types( + cls, config_entry: ConfigEntry + ) -> dict[str, type[ConfigSubentryFlow]]: + """Return subentries supported by this integration.""" + return { + SUBENTRY_TYPE_PARTITION: PartitionSubentryFlowHandler, + SUBENTRY_TYPE_ZONE: ZoneSubentryFlowHandler, + SUBENTRY_TYPE_OUTPUT: OutputSubentryFlowHandler, + SUBENTRY_TYPE_SWITCHABLE_OUTPUT: SwitchableOutputSubentryFlowHandler, + } + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initialized by the user.""" + errors: dict[str, str] = {} + + if user_input is not None: + valid = await self.test_connection( + user_input[CONF_HOST], user_input[CONF_PORT] + ) + + if valid: + return self.async_create_entry( + title=user_input[CONF_HOST], + data={ + CONF_HOST: user_input[CONF_HOST], + CONF_PORT: user_input[CONF_PORT], + }, + options={CONF_CODE: user_input.get(CONF_CODE)}, + ) + + errors["base"] = "cannot_connect" + + return self.async_show_form( + step_id="user", data_schema=CONNECTION_SCHEMA, errors=errors + ) + + async def async_step_import( + self, import_config: dict[str, Any] + ) -> ConfigFlowResult: + """Handle a flow initialized by import.""" + + valid = await self.test_connection( + import_config[CONF_HOST], import_config.get(CONF_PORT, DEFAULT_PORT) + ) + + if valid: + subentries: list[ConfigSubentryData] = [] + + for partition_number, partition_data in import_config.get( + CONF_DEVICE_PARTITIONS, {} + ).items(): + subentries.append( + { + "subentry_type": SUBENTRY_TYPE_PARTITION, + "title": partition_data[CONF_NAME], + "unique_id": f"{SUBENTRY_TYPE_PARTITION}_{partition_number}", + "data": { + CONF_NAME: partition_data[CONF_NAME], + CONF_ARM_HOME_MODE: partition_data.get( + CONF_ARM_HOME_MODE, DEFAULT_CONF_ARM_HOME_MODE + ), + CONF_PARTITION_NUMBER: partition_number, + }, + } + 
) + + for zone_number, zone_data in import_config.get(CONF_ZONES, {}).items(): + subentries.append( + { + "subentry_type": SUBENTRY_TYPE_ZONE, + "title": zone_data[CONF_NAME], + "unique_id": f"{SUBENTRY_TYPE_ZONE}_{zone_number}", + "data": { + CONF_NAME: zone_data[CONF_NAME], + CONF_ZONE_NUMBER: zone_number, + CONF_ZONE_TYPE: zone_data.get( + CONF_ZONE_TYPE, BinarySensorDeviceClass.MOTION + ), + }, + } + ) + + for output_number, output_data in import_config.get( + CONF_OUTPUTS, {} + ).items(): + subentries.append( + { + "subentry_type": SUBENTRY_TYPE_OUTPUT, + "title": output_data[CONF_NAME], + "unique_id": f"{SUBENTRY_TYPE_OUTPUT}_{output_number}", + "data": { + CONF_NAME: output_data[CONF_NAME], + CONF_OUTPUT_NUMBER: output_number, + CONF_ZONE_TYPE: output_data.get( + CONF_ZONE_TYPE, BinarySensorDeviceClass.MOTION + ), + }, + } + ) + + for switchable_output_number, switchable_output_data in import_config.get( + CONF_SWITCHABLE_OUTPUTS, {} + ).items(): + subentries.append( + { + "subentry_type": SUBENTRY_TYPE_SWITCHABLE_OUTPUT, + "title": switchable_output_data[CONF_NAME], + "unique_id": f"{SUBENTRY_TYPE_SWITCHABLE_OUTPUT}_{switchable_output_number}", + "data": { + CONF_NAME: switchable_output_data[CONF_NAME], + CONF_SWITCHABLE_OUTPUT_NUMBER: switchable_output_number, + }, + } + ) + + return self.async_create_entry( + title=import_config[CONF_HOST], + data={ + CONF_HOST: import_config[CONF_HOST], + CONF_PORT: import_config.get(CONF_PORT, DEFAULT_PORT), + }, + options={CONF_CODE: import_config.get(CONF_CODE)}, + subentries=subentries, + ) + + return self.async_abort(reason="cannot_connect") + + async def test_connection(self, host: str, port: int) -> bool: + """Test a connection to the Satel alarm.""" + controller = AsyncSatel(host, port, self.hass.loop) + + result = await controller.connect() + + # Make sure we close the connection again + controller.close() + + return result + + +class SatelOptionsFlow(OptionsFlowWithReload): + """Handle Satel options flow.""" + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Init step.""" + if user_input is not None: + return self.async_create_entry(data={CONF_CODE: user_input.get(CONF_CODE)}) + + return self.async_show_form( + step_id="init", + data_schema=self.add_suggested_values_to_schema( + CODE_SCHEMA, self.config_entry.options + ), + ) + + +class PartitionSubentryFlowHandler(ConfigSubentryFlow): + """Handle subentry flow for adding and modifying a partition.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """User flow to add new partition.""" + errors: dict[str, str] = {} + + if user_input is not None: + unique_id = f"{SUBENTRY_TYPE_PARTITION}_{user_input[CONF_PARTITION_NUMBER]}" + + for existing_subentry in self._get_entry().subentries.values(): + if existing_subentry.unique_id == unique_id: + errors[CONF_PARTITION_NUMBER] = "already_configured" + + if not errors: + return self.async_create_entry( + title=user_input[CONF_NAME], data=user_input, unique_id=unique_id + ) + + return self.async_show_form( + step_id="user", + errors=errors, + data_schema=vol.Schema( + { + vol.Required(CONF_PARTITION_NUMBER): vol.All( + vol.Coerce(int), vol.Range(min=1) + ), + } + ).extend(PARTITION_SCHEMA.schema), + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Reconfigure existing partition.""" + subconfig_entry = self._get_reconfigure_subentry() + + if user_input is not 
None: + return self.async_update_and_abort( + self._get_entry(), + subconfig_entry, + title=user_input[CONF_NAME], + data_updates=user_input, + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=self.add_suggested_values_to_schema( + PARTITION_SCHEMA, + subconfig_entry.data, + ), + description_placeholders={ + CONF_PARTITION_NUMBER: subconfig_entry.data[CONF_PARTITION_NUMBER] + }, + ) + + +class ZoneSubentryFlowHandler(ConfigSubentryFlow): + """Handle subentry flow for adding and modifying a zone.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """User flow to add new zone.""" + errors: dict[str, str] = {} + + if user_input is not None: + unique_id = f"{SUBENTRY_TYPE_ZONE}_{user_input[CONF_ZONE_NUMBER]}" + + for existing_subentry in self._get_entry().subentries.values(): + if existing_subentry.unique_id == unique_id: + errors[CONF_ZONE_NUMBER] = "already_configured" + + if not errors: + return self.async_create_entry( + title=user_input[CONF_NAME], data=user_input, unique_id=unique_id + ) + + return self.async_show_form( + step_id="user", + errors=errors, + data_schema=vol.Schema( + { + vol.Required(CONF_ZONE_NUMBER): vol.All( + vol.Coerce(int), vol.Range(min=1) + ), + } + ).extend(ZONE_AND_OUTPUT_SCHEMA.schema), + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Reconfigure existing zone.""" + subconfig_entry = self._get_reconfigure_subentry() + + if user_input is not None: + return self.async_update_and_abort( + self._get_entry(), + subconfig_entry, + title=user_input[CONF_NAME], + data_updates=user_input, + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=self.add_suggested_values_to_schema( + ZONE_AND_OUTPUT_SCHEMA, subconfig_entry.data + ), + description_placeholders={ + CONF_ZONE_NUMBER: subconfig_entry.data[CONF_ZONE_NUMBER] + }, + ) + + +class OutputSubentryFlowHandler(ConfigSubentryFlow): + """Handle subentry flow for adding and modifying a output.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """User flow to add new output.""" + errors: dict[str, str] = {} + + if user_input is not None: + unique_id = f"{SUBENTRY_TYPE_OUTPUT}_{user_input[CONF_OUTPUT_NUMBER]}" + + for existing_subentry in self._get_entry().subentries.values(): + if existing_subentry.unique_id == unique_id: + errors[CONF_OUTPUT_NUMBER] = "already_configured" + + if not errors: + return self.async_create_entry( + title=user_input[CONF_NAME], data=user_input, unique_id=unique_id + ) + + return self.async_show_form( + step_id="user", + errors=errors, + data_schema=vol.Schema( + { + vol.Required(CONF_OUTPUT_NUMBER): vol.All( + vol.Coerce(int), vol.Range(min=1) + ), + } + ).extend(ZONE_AND_OUTPUT_SCHEMA.schema), + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Reconfigure existing output.""" + subconfig_entry = self._get_reconfigure_subentry() + + if user_input is not None: + return self.async_update_and_abort( + self._get_entry(), + subconfig_entry, + title=user_input[CONF_NAME], + data_updates=user_input, + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=self.add_suggested_values_to_schema( + ZONE_AND_OUTPUT_SCHEMA, subconfig_entry.data + ), + description_placeholders={ + CONF_OUTPUT_NUMBER: subconfig_entry.data[CONF_OUTPUT_NUMBER] + }, + ) + + +class 
SwitchableOutputSubentryFlowHandler(ConfigSubentryFlow): + """Handle subentry flow for adding and modifying a switchable output.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """User flow to add new switchable output.""" + errors: dict[str, str] = {} + + if user_input is not None: + unique_id = f"{SUBENTRY_TYPE_SWITCHABLE_OUTPUT}_{user_input[CONF_SWITCHABLE_OUTPUT_NUMBER]}" + + for existing_subentry in self._get_entry().subentries.values(): + if existing_subentry.unique_id == unique_id: + errors[CONF_SWITCHABLE_OUTPUT_NUMBER] = "already_configured" + + if not errors: + return self.async_create_entry( + title=user_input[CONF_NAME], data=user_input, unique_id=unique_id + ) + + return self.async_show_form( + step_id="user", + errors=errors, + data_schema=vol.Schema( + { + vol.Required(CONF_SWITCHABLE_OUTPUT_NUMBER): vol.All( + vol.Coerce(int), vol.Range(min=1) + ), + } + ).extend(SWITCHABLE_OUTPUT_SCHEMA.schema), + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Reconfigure existing switchable output.""" + subconfig_entry = self._get_reconfigure_subentry() + + if user_input is not None: + return self.async_update_and_abort( + self._get_entry(), + subconfig_entry, + title=user_input[CONF_NAME], + data_updates=user_input, + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=self.add_suggested_values_to_schema( + SWITCHABLE_OUTPUT_SCHEMA, subconfig_entry.data + ), + description_placeholders={ + CONF_SWITCHABLE_OUTPUT_NUMBER: subconfig_entry.data[ + CONF_SWITCHABLE_OUTPUT_NUMBER + ] + }, + ) diff --git a/homeassistant/components/satel_integra/const.py b/homeassistant/components/satel_integra/const.py new file mode 100644 index 00000000000000..822fbe7594b23d --- /dev/null +++ b/homeassistant/components/satel_integra/const.py @@ -0,0 +1,38 @@ +"""Constants for the Satel Integra integration.""" + +from satel_integra.satel_integra import AsyncSatel + +from homeassistant.config_entries import ConfigEntry + +DEFAULT_CONF_ARM_HOME_MODE = 1 +DEFAULT_PORT = 7094 +DEFAULT_ZONE_TYPE = "motion" + +DOMAIN = "satel_integra" + +SUBENTRY_TYPE_PARTITION = "partition" +SUBENTRY_TYPE_ZONE = "zone" +SUBENTRY_TYPE_OUTPUT = "output" +SUBENTRY_TYPE_SWITCHABLE_OUTPUT = "switchable_output" + +CONF_PARTITION_NUMBER = "partition_number" +CONF_ZONE_NUMBER = "zone_number" +CONF_OUTPUT_NUMBER = "output_number" +CONF_SWITCHABLE_OUTPUT_NUMBER = "switchable_output_number" + +CONF_DEVICE_PARTITIONS = "partitions" +CONF_ARM_HOME_MODE = "arm_home_mode" +CONF_ZONE_TYPE = "type" +CONF_ZONES = "zones" +CONF_OUTPUTS = "outputs" +CONF_SWITCHABLE_OUTPUTS = "switchable_outputs" + +ZONES = "zones" + + +SIGNAL_PANEL_MESSAGE = "satel_integra.panel_message" + +SIGNAL_ZONES_UPDATED = "satel_integra.zones_updated" +SIGNAL_OUTPUTS_UPDATED = "satel_integra.outputs_updated" + +type SatelConfigEntry = ConfigEntry[AsyncSatel] diff --git a/homeassistant/components/satel_integra/manifest.json b/homeassistant/components/satel_integra/manifest.json index a90ea1db5a59ab..71691b67981c92 100644 --- a/homeassistant/components/satel_integra/manifest.json +++ b/homeassistant/components/satel_integra/manifest.json @@ -1,10 +1,12 @@ { "domain": "satel_integra", "name": "Satel Integra", - "codeowners": [], + "codeowners": ["@Tommatheussen"], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/satel_integra", "iot_class": "local_push", "loggers": ["satel_integra"], 
"quality_scale": "legacy", - "requirements": ["satel-integra==0.3.7"] + "requirements": ["satel-integra==0.3.7"], + "single_config_entry": true } diff --git a/homeassistant/components/satel_integra/strings.json b/homeassistant/components/satel_integra/strings.json new file mode 100644 index 00000000000000..1d6655145b5624 --- /dev/null +++ b/homeassistant/components/satel_integra/strings.json @@ -0,0 +1,210 @@ +{ + "common": { + "code_input_description": "Code to toggle switchable outputs", + "code": "Access code" + }, + "config": { + "step": { + "user": { + "data": { + "host": "[%key:common::config_flow::data::host%]", + "port": "[%key:common::config_flow::data::port%]", + "code": "[%key:component::satel_integra::common::code%]" + }, + "data_description": { + "host": "The IP address of the alarm panel", + "port": "The port of the alarm panel", + "code": "[%key:component::satel_integra::common::code_input_description%]" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + } + }, + "config_subentries": { + "partition": { + "initiate_flow": { + "user": "Add partition" + }, + "step": { + "user": { + "title": "Configure partition", + "data": { + "partition_number": "Partition number", + "name": "[%key:common::config_flow::data::name%]", + "arm_home_mode": "Arm home mode" + }, + "data_description": { + "partition_number": "Enter partition number to configure", + "name": "The name to give to the alarm panel", + "arm_home_mode": "The mode in which the partition is armed when 'arm home' is used. For more information on what the differences are between them, please refer to Satel Integra manual." + } + }, + "reconfigure": { + "title": "Reconfigure partition {partition_number}", + "data": { + "name": "[%key:common::config_flow::data::name%]", + "arm_home_mode": "[%key:component::satel_integra::config_subentries::partition::step::user::data::arm_home_mode%]" + }, + "data_description": { + "arm_home_mode": "[%key:component::satel_integra::config_subentries::partition::step::user::data_description::arm_home_mode%]" + } + } + }, + "error": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "zone": { + "initiate_flow": { + "user": "Add zone" + }, + "step": { + "user": { + "title": "Configure zone", + "data": { + "zone_number": "Zone number", + "name": "[%key:common::config_flow::data::name%]", + "type": "Zone type" + }, + "data_description": { + "zone_number": "Enter zone number to configure", + "name": "The name to give to the sensor", + "type": "Choose the device class you would like the sensor to show as" + } + }, + "reconfigure": { + "title": "Reconfigure zone {zone_number}", + "data": { + "name": "[%key:common::config_flow::data::name%]", + "type": "[%key:component::satel_integra::config_subentries::zone::step::user::data::type%]" + }, + "data_description": { + "name": "[%key:component::satel_integra::config_subentries::zone::step::user::data_description::name%]", + "type": "[%key:component::satel_integra::config_subentries::zone::step::user::data_description::type%]" + } + } + }, + "error": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "output": { + "initiate_flow": { + "user": "Add output" + }, + "step": { + "user": { + "title": "Configure output", + "data": { + "output_number": "Output number", + "name": "[%key:common::config_flow::data::name%]", + "type": "Output type" + }, + "data_description": { + "output_number": "Enter output number to 
configure",
+            "name": "[%key:component::satel_integra::config_subentries::zone::step::user::data_description::name%]",
+            "type": "[%key:component::satel_integra::config_subentries::zone::step::user::data_description::type%]"
+          }
+        },
+        "reconfigure": {
+          "title": "Reconfigure output {output_number}",
+          "data": {
+            "name": "[%key:common::config_flow::data::name%]",
+            "type": "[%key:component::satel_integra::config_subentries::output::step::user::data::type%]"
+          },
+          "data_description": {
+            "name": "[%key:component::satel_integra::config_subentries::zone::step::user::data_description::name%]",
+            "type": "[%key:component::satel_integra::config_subentries::zone::step::user::data_description::type%]"
+          }
+        }
+      },
+      "error": {
+        "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
+      }
+    },
+    "switchable_output": {
+      "initiate_flow": {
+        "user": "Add switchable output"
+      },
+      "step": {
+        "user": {
+          "title": "Configure switchable output",
+          "data": {
+            "switchable_output_number": "Switchable output number",
+            "name": "[%key:common::config_flow::data::name%]"
+          },
+          "data_description": {
+            "switchable_output_number": "Enter switchable output number to configure",
+            "name": "The name to give to the switch"
+          }
+        },
+        "reconfigure": {
+          "title": "Reconfigure switchable output {switchable_output_number}",
+          "data": {
+            "name": "[%key:common::config_flow::data::name%]"
+          },
+          "data_description": {
+            "name": "[%key:component::satel_integra::config_subentries::switchable_output::step::user::data_description::name%]"
+          }
+        }
+      },
+      "error": {
+        "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
+      }
+    }
+  },
+  "options": {
+    "step": {
+      "init": {
+        "data": {
+          "code": "[%key:component::satel_integra::common::code%]"
+        },
+        "data_description": {
+          "code": "[%key:component::satel_integra::common::code_input_description%]"
+        }
+      }
+    }
+  },
+  "issues": {
+    "deprecated_yaml_import_issue_cannot_connect": {
+      "title": "YAML import failed due to a connection error",
+      "description": "Configuring {integration_title} using YAML is being removed but there was a connection error importing your existing configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the `{domain}` YAML configuration from your configuration.yaml file and add the {integration_title} integration manually."
+ } + }, + "selector": { + "binary_sensor_device_class": { + "options": { + "battery": "[%key:component::binary_sensor::entity_component::battery::name%]", + "battery_charging": "[%key:component::binary_sensor::entity_component::battery_charging::name%]", + "carbon_monoxide": "[%key:component::binary_sensor::entity_component::carbon_monoxide::name%]", + "cold": "[%key:component::binary_sensor::entity_component::cold::name%]", + "connectivity": "[%key:component::binary_sensor::entity_component::connectivity::name%]", + "door": "[%key:component::binary_sensor::entity_component::door::name%]", + "garage_door": "[%key:component::binary_sensor::entity_component::garage_door::name%]", + "gas": "[%key:component::binary_sensor::entity_component::gas::name%]", + "heat": "[%key:component::binary_sensor::entity_component::heat::name%]", + "light": "[%key:component::binary_sensor::entity_component::light::name%]", + "lock": "[%key:component::binary_sensor::entity_component::lock::name%]", + "moisture": "[%key:component::binary_sensor::entity_component::moisture::name%]", + "motion": "[%key:component::binary_sensor::entity_component::motion::name%]", + "moving": "[%key:component::binary_sensor::entity_component::moving::name%]", + "occupancy": "[%key:component::binary_sensor::entity_component::occupancy::name%]", + "opening": "[%key:component::binary_sensor::entity_component::opening::name%]", + "plug": "[%key:component::binary_sensor::entity_component::plug::name%]", + "power": "[%key:component::binary_sensor::entity_component::power::name%]", + "presence": "[%key:component::binary_sensor::entity_component::presence::name%]", + "problem": "[%key:component::binary_sensor::entity_component::problem::name%]", + "running": "[%key:component::binary_sensor::entity_component::running::name%]", + "safety": "[%key:component::binary_sensor::entity_component::safety::name%]", + "smoke": "[%key:component::binary_sensor::entity_component::smoke::name%]", + "sound": "[%key:component::binary_sensor::entity_component::sound::name%]", + "tamper": "[%key:component::binary_sensor::entity_component::tamper::name%]", + "update": "[%key:component::binary_sensor::entity_component::update::name%]", + "vibration": "[%key:component::binary_sensor::entity_component::vibration::name%]", + "window": "[%key:component::binary_sensor::entity_component::window::name%]" + } + } + } +} diff --git a/homeassistant/components/satel_integra/switch.py b/homeassistant/components/satel_integra/switch.py index 9135b58bc50e25..85139069ce649e 100644 --- a/homeassistant/components/satel_integra/switch.py +++ b/homeassistant/components/satel_integra/switch.py @@ -6,48 +6,50 @@ from typing import Any from homeassistant.components.switch import SwitchEntity +from homeassistant.const import CONF_CODE, CONF_NAME from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -from . 
import ( - CONF_DEVICE_CODE, - CONF_SWITCHABLE_OUTPUTS, - CONF_ZONE_NAME, - DATA_SATEL, +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .const import ( + CONF_SWITCHABLE_OUTPUT_NUMBER, SIGNAL_OUTPUTS_UPDATED, + SUBENTRY_TYPE_SWITCHABLE_OUTPUT, + SatelConfigEntry, ) _LOGGER = logging.getLogger(__name__) -DEPENDENCIES = ["satel_integra"] - -async def async_setup_platform( +async def async_setup_entry( hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, + config_entry: SatelConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up the Satel Integra switch devices.""" - if not discovery_info: - return - - configured_zones = discovery_info[CONF_SWITCHABLE_OUTPUTS] - controller = hass.data[DATA_SATEL] - devices = [] - - for zone_num, device_config_data in configured_zones.items(): - zone_name = device_config_data[CONF_ZONE_NAME] - - device = SatelIntegraSwitch( - controller, zone_num, zone_name, discovery_info[CONF_DEVICE_CODE] + controller = config_entry.runtime_data + + switchable_output_subentries = filter( + lambda entry: entry.subentry_type == SUBENTRY_TYPE_SWITCHABLE_OUTPUT, + config_entry.subentries.values(), + ) + + for subentry in switchable_output_subentries: + switchable_output_num = subentry.data[CONF_SWITCHABLE_OUTPUT_NUMBER] + switchable_output_name = subentry.data[CONF_NAME] + + async_add_entities( + [ + SatelIntegraSwitch( + controller, + switchable_output_num, + switchable_output_name, + config_entry.options.get(CONF_CODE), + ), + ], + config_subentry_id=subentry.subentry_id, ) - devices.append(device) - - async_add_entities(devices) class SatelIntegraSwitch(SwitchEntity): diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index e82915e03a13dc..e99cd50afa9c2b 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -559,6 +559,7 @@ "sabnzbd", "samsungtv", "sanix", + "satel_integra", "schlage", "scrape", "screenlogic", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 9831102742341b..6e95c970404f4c 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -5728,8 +5728,9 @@ "satel_integra": { "name": "Satel Integra", "integration_type": "hub", - "config_flow": false, - "iot_class": "local_push" + "config_flow": true, + "iot_class": "local_push", + "single_config_entry": true }, "schlage": { "name": "Schlage", diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 759cf0f794b1a6..1aeb9f2991a639 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2272,6 +2272,9 @@ samsungtvws[async,encrypted]==2.7.2 # homeassistant.components.sanix sanix==1.0.6 +# homeassistant.components.satel_integra +satel-integra==0.3.7 + # homeassistant.components.screenlogic screenlogicpy==0.10.2 diff --git a/tests/components/satel_integra/__init__.py b/tests/components/satel_integra/__init__.py new file mode 100644 index 00000000000000..561eec238afbba --- /dev/null +++ b/tests/components/satel_integra/__init__.py @@ -0,0 +1 @@ +"""The tests for Satel Integra integration.""" diff --git a/tests/components/satel_integra/conftest.py b/tests/components/satel_integra/conftest.py new file mode 100644 index 00000000000000..e91a79b96b501d --- /dev/null +++ b/tests/components/satel_integra/conftest.py @@ -0,0 +1,49 @@ +"""Satel Integra 
tests configuration.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.satel_integra.const import DEFAULT_PORT, DOMAIN +from homeassistant.const import CONF_HOST, CONF_PORT + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override integration setup.""" + with patch( + "homeassistant.components.satel_integra.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_satel() -> Generator[AsyncMock]: + """Override the satel test.""" + with ( + patch( + "homeassistant.components.satel_integra.AsyncSatel", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.satel_integra.config_flow.AsyncSatel", + new=mock_client, + ), + ): + client = mock_client.return_value + + yield client + + +@pytest.fixture(name="config_entry") +def mock_config_entry() -> MockConfigEntry: + """Mock satel configuration entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="192.168.0.2", + data={CONF_HOST: "192.168.0.2", CONF_PORT: DEFAULT_PORT}, + ) diff --git a/tests/components/satel_integra/test_config_flow.py b/tests/components/satel_integra/test_config_flow.py new file mode 100644 index 00000000000000..db493a3dade4e6 --- /dev/null +++ b/tests/components/satel_integra/test_config_flow.py @@ -0,0 +1,593 @@ +"""Test the satel integra config flow.""" + +from typing import Any +from unittest.mock import AsyncMock + +import pytest + +from homeassistant.components.binary_sensor import BinarySensorDeviceClass +from homeassistant.components.satel_integra.const import ( + CONF_ARM_HOME_MODE, + CONF_DEVICE_PARTITIONS, + CONF_OUTPUT_NUMBER, + CONF_OUTPUTS, + CONF_PARTITION_NUMBER, + CONF_SWITCHABLE_OUTPUT_NUMBER, + CONF_SWITCHABLE_OUTPUTS, + CONF_ZONE_NUMBER, + CONF_ZONE_TYPE, + CONF_ZONES, + DEFAULT_PORT, + DOMAIN, + SUBENTRY_TYPE_OUTPUT, + SUBENTRY_TYPE_PARTITION, + SUBENTRY_TYPE_SWITCHABLE_OUTPUT, + SUBENTRY_TYPE_ZONE, +) +from homeassistant.config_entries import ( + SOURCE_IMPORT, + SOURCE_RECONFIGURE, + SOURCE_USER, + ConfigSubentry, + ConfigSubentryData, +) +from homeassistant.const import CONF_CODE, CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + +CONST_HOST = "192.168.0.2" +CONST_PORT = 7095 +CONST_CODE = "1234" + + +@pytest.mark.parametrize( + ("user_input", "entry_data", "entry_options"), + [ + ( + {CONF_HOST: CONST_HOST, CONF_PORT: CONST_PORT, CONF_CODE: CONST_CODE}, + {CONF_HOST: CONST_HOST, CONF_PORT: CONST_PORT}, + {CONF_CODE: CONST_CODE}, + ), + ( + { + CONF_HOST: CONST_HOST, + }, + {CONF_HOST: CONST_HOST, CONF_PORT: DEFAULT_PORT}, + {CONF_CODE: None}, + ), + ], +) +async def test_setup_flow( + hass: HomeAssistant, + mock_satel: AsyncMock, + mock_setup_entry: AsyncMock, + user_input: dict[str, Any], + entry_data: dict[str, Any], + entry_options: dict[str, Any], +) -> None: + """Test the setup flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == CONST_HOST + assert result["data"] == 
entry_data + assert result["options"] == entry_options + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_setup_connection_failed( + hass: HomeAssistant, mock_satel: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test the setup flow when connection fails.""" + user_input = {CONF_HOST: CONST_HOST, CONF_PORT: CONST_PORT, CONF_CODE: CONST_CODE} + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + mock_satel.connect.return_value = False + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + mock_satel.connect.return_value = True + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("import_input", "entry_data", "entry_options"), + [ + ( + { + CONF_HOST: CONST_HOST, + CONF_PORT: CONST_PORT, + CONF_CODE: CONST_CODE, + CONF_DEVICE_PARTITIONS: { + "1": {CONF_NAME: "Partition Import 1", CONF_ARM_HOME_MODE: 1} + }, + CONF_ZONES: { + "1": {CONF_NAME: "Zone Import 1", CONF_ZONE_TYPE: "motion"}, + "2": {CONF_NAME: "Zone Import 2", CONF_ZONE_TYPE: "door"}, + }, + CONF_OUTPUTS: { + "1": {CONF_NAME: "Output Import 1", CONF_ZONE_TYPE: "light"}, + "2": {CONF_NAME: "Output Import 2", CONF_ZONE_TYPE: "safety"}, + }, + CONF_SWITCHABLE_OUTPUTS: { + "1": {CONF_NAME: "Switchable output Import 1"}, + "2": {CONF_NAME: "Switchable output Import 2"}, + }, + }, + {CONF_HOST: CONST_HOST, CONF_PORT: CONST_PORT}, + {CONF_CODE: CONST_CODE}, + ) + ], +) +async def test_import_flow( + hass: HomeAssistant, + mock_satel: AsyncMock, + mock_setup_entry: AsyncMock, + import_input: dict[str, Any], + entry_data: dict[str, Any], + entry_options: dict[str, Any], +) -> None: + """Test the import flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=import_input + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == CONST_HOST + assert result["data"] == entry_data + assert result["options"] == entry_options + + assert len(result["subentries"]) == 7 + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_import_flow_connection_failure( + hass: HomeAssistant, mock_satel: AsyncMock +) -> None: + """Test the import flow.""" + + mock_satel.connect.return_value = False + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_HOST: CONST_HOST, CONF_PORT: CONST_PORT, CONF_CODE: CONST_CODE}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +@pytest.mark.parametrize( + ("user_input", "entry_options"), + [ + ( + {CONF_CODE: CONST_CODE}, + {CONF_CODE: CONST_CODE}, + ), + ({}, {CONF_CODE: None}), + ], +) +async def test_options_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + user_input: dict[str, Any], + entry_options: dict[str, Any], +) -> None: + """Test general options flow.""" + + entry = MockConfigEntry(domain=DOMAIN) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + 
result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert entry.options == entry_options + + +@pytest.mark.parametrize( + ("subentry_type", "user_input", "subentry"), + [ + ( + SUBENTRY_TYPE_PARTITION, + {CONF_NAME: "Home", CONF_PARTITION_NUMBER: 1, CONF_ARM_HOME_MODE: 1}, + { + "data": { + CONF_NAME: "Home", + CONF_ARM_HOME_MODE: 1, + CONF_PARTITION_NUMBER: 1, + }, + "subentry_type": SUBENTRY_TYPE_PARTITION, + "title": "Home", + "unique_id": "partition_1", + }, + ), + ( + SUBENTRY_TYPE_ZONE, + { + CONF_NAME: "Backdoor", + CONF_ZONE_TYPE: BinarySensorDeviceClass.DOOR, + CONF_ZONE_NUMBER: 2, + }, + { + "data": { + CONF_NAME: "Backdoor", + CONF_ZONE_TYPE: BinarySensorDeviceClass.DOOR, + CONF_ZONE_NUMBER: 2, + }, + "subentry_type": SUBENTRY_TYPE_ZONE, + "title": "Backdoor", + "unique_id": "zone_2", + }, + ), + ( + SUBENTRY_TYPE_OUTPUT, + { + CONF_NAME: "Power outage", + CONF_ZONE_TYPE: BinarySensorDeviceClass.SAFETY, + CONF_OUTPUT_NUMBER: 1, + }, + { + "data": { + CONF_NAME: "Power outage", + CONF_ZONE_TYPE: BinarySensorDeviceClass.SAFETY, + CONF_OUTPUT_NUMBER: 1, + }, + "subentry_type": SUBENTRY_TYPE_OUTPUT, + "title": "Power outage", + "unique_id": "output_1", + }, + ), + ( + SUBENTRY_TYPE_SWITCHABLE_OUTPUT, + { + CONF_NAME: "Gate", + CONF_SWITCHABLE_OUTPUT_NUMBER: 3, + }, + { + "data": { + CONF_NAME: "Gate", + CONF_SWITCHABLE_OUTPUT_NUMBER: 3, + }, + "subentry_type": SUBENTRY_TYPE_SWITCHABLE_OUTPUT, + "title": "Gate", + "unique_id": "switchable_output_3", + }, + ), + ], +) +async def test_subentry_creation( + hass: HomeAssistant, + mock_satel: AsyncMock, + config_entry: MockConfigEntry, + subentry_type: str, + user_input: dict[str, Any], + subentry: dict[str, Any], +) -> None: + """Test partitions options flow.""" + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.subentries.async_init( + (config_entry.entry_id, subentry_type), + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input, + ) + + assert len(config_entry.subentries) == 1 + + subentry_id = list(config_entry.subentries)[0] + + subentry["subentry_id"] = subentry_id + assert config_entry.subentries == {subentry_id: ConfigSubentry(**subentry)} + + +@pytest.mark.parametrize( + ( + "user_input", + "default_subentry_info", + "subentry", + "updated_subentry", + ), + [ + ( + {CONF_NAME: "New Home", CONF_ARM_HOME_MODE: 3}, + { + "subentry_id": "ABCD", + "subentry_type": SUBENTRY_TYPE_PARTITION, + "unique_id": "partition_1", + }, + ConfigSubentryData( + data={ + CONF_NAME: "Home", + CONF_ARM_HOME_MODE: 1, + CONF_PARTITION_NUMBER: 1, + }, + title="Home", + ), + ConfigSubentryData( + data={ + CONF_NAME: "New Home", + CONF_ARM_HOME_MODE: 3, + CONF_PARTITION_NUMBER: 1, + }, + title="New Home", + ), + ), + ( + {CONF_NAME: "Backdoor", CONF_ZONE_TYPE: BinarySensorDeviceClass.DOOR}, + { + "subentry_id": "ABCD", + "subentry_type": SUBENTRY_TYPE_ZONE, + "unique_id": "zone_1", + }, + ConfigSubentryData( + data={ + CONF_NAME: "Zone 1", + CONF_ZONE_TYPE: BinarySensorDeviceClass.MOTION, + CONF_ZONE_NUMBER: 1, + }, + title="Zone 1", + ), + ConfigSubentryData( + data={ + CONF_NAME: "Backdoor", + CONF_ZONE_TYPE: BinarySensorDeviceClass.DOOR, + CONF_ZONE_NUMBER: 
1, + }, + title="Backdoor", + ), + ), + ( + { + CONF_NAME: "Alarm Triggered", + CONF_ZONE_TYPE: BinarySensorDeviceClass.PROBLEM, + }, + { + "subentry_id": "ABCD", + "subentry_type": SUBENTRY_TYPE_OUTPUT, + "unique_id": "output_1", + }, + ConfigSubentryData( + data={ + CONF_NAME: "Output 1", + CONF_ZONE_TYPE: BinarySensorDeviceClass.SAFETY, + CONF_OUTPUT_NUMBER: 1, + }, + title="Output 1", + ), + ConfigSubentryData( + data={ + CONF_NAME: "Alarm Triggered", + CONF_ZONE_TYPE: BinarySensorDeviceClass.PROBLEM, + CONF_OUTPUT_NUMBER: 1, + }, + title="Alarm Triggered", + ), + ), + ( + {CONF_NAME: "Gate Lock"}, + { + "subentry_id": "ABCD", + "subentry_type": SUBENTRY_TYPE_SWITCHABLE_OUTPUT, + "unique_id": "switchable_output_1", + }, + ConfigSubentryData( + data={ + CONF_NAME: "Switchable Output 1", + CONF_SWITCHABLE_OUTPUT_NUMBER: 1, + }, + title="Switchable Output 1", + ), + ConfigSubentryData( + data={ + CONF_NAME: "Gate Lock", + CONF_SWITCHABLE_OUTPUT_NUMBER: 1, + }, + title="Gate Lock", + ), + ), + ], +) +async def test_subentry_reconfigure( + hass: HomeAssistant, + mock_satel: AsyncMock, + mock_setup_entry: AsyncMock, + config_entry: MockConfigEntry, + user_input: dict[str, Any], + default_subentry_info: dict[str, Any], + subentry: ConfigSubentryData, + updated_subentry: ConfigSubentryData, +) -> None: + """Test subentry reconfiguration.""" + + config_entry.add_to_hass(hass) + config_entry.subentries = { + default_subentry_info["subentry_id"]: ConfigSubentry( + **default_subentry_info, **subentry + ) + } + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.subentries.async_init( + (config_entry.entry_id, default_subentry_info["subentry_type"]), + context={ + "source": SOURCE_RECONFIGURE, + "subentry_id": default_subentry_info["subentry_id"], + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert len(config_entry.subentries) == 1 + + assert config_entry.subentries == { + default_subentry_info["subentry_id"]: ConfigSubentry( + **default_subentry_info, **updated_subentry + ) + } + + +@pytest.mark.parametrize( + ("subentry", "user_input", "error_field"), + [ + ( + { + "subentry_type": SUBENTRY_TYPE_PARTITION, + "unique_id": "partition_1", + "title": "Home", + }, + { + CONF_NAME: "Home", + CONF_ARM_HOME_MODE: 1, + CONF_PARTITION_NUMBER: 1, + }, + CONF_PARTITION_NUMBER, + ), + ( + { + "subentry_type": SUBENTRY_TYPE_ZONE, + "unique_id": "zone_1", + "title": "Zone 1", + }, + { + CONF_NAME: "Zone 1", + CONF_ZONE_TYPE: BinarySensorDeviceClass.MOTION, + CONF_ZONE_NUMBER: 1, + }, + CONF_ZONE_NUMBER, + ), + ( + { + "subentry_type": SUBENTRY_TYPE_OUTPUT, + "unique_id": "output_1", + "title": "Output 1", + }, + { + CONF_NAME: "Output 1", + CONF_ZONE_TYPE: BinarySensorDeviceClass.SAFETY, + CONF_OUTPUT_NUMBER: 1, + }, + CONF_OUTPUT_NUMBER, + ), + ( + { + "subentry_type": SUBENTRY_TYPE_SWITCHABLE_OUTPUT, + "unique_id": "switchable_output_1", + "title": "Switchable Output 1", + }, + { + CONF_NAME: "Switchable Output 1", + CONF_SWITCHABLE_OUTPUT_NUMBER: 1, + }, + CONF_SWITCHABLE_OUTPUT_NUMBER, + ), + ], +) +async def test_cannot_create_same_subentry( + hass: HomeAssistant, + mock_satel: AsyncMock, + mock_setup_entry: AsyncMock, + config_entry: 
MockConfigEntry,
+    subentry: dict[str, Any],
+    user_input: dict[str, Any],
+    error_field: str,
+) -> None:
+    """Test that a duplicate subentry cannot be created."""
+    config_entry.add_to_hass(hass)
+    config_entry.subentries = {
+        "ABCD": ConfigSubentry(**subentry, **ConfigSubentryData({"data": user_input}))
+    }
+
+    assert await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
+
+    result = await hass.config_entries.subentries.async_init(
+        (config_entry.entry_id, subentry["subentry_type"]),
+        context={"source": SOURCE_USER},
+    )
+
+    assert result["type"] is FlowResultType.FORM
+    assert result["step_id"] == "user"
+
+    result = await hass.config_entries.subentries.async_configure(
+        result["flow_id"],
+        user_input,
+    )
+
+    assert result["type"] is FlowResultType.FORM
+    assert result["errors"] == {error_field: "already_configured"}
+    assert len(config_entry.subentries) == 1
+
+
+async def test_one_config_allowed(
+    hass: HomeAssistant, config_entry: MockConfigEntry
+) -> None:
+    """Test that only one Satel Integra configuration is allowed."""
+    config_entry.add_to_hass(hass)
+
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={"source": SOURCE_USER}
+    )
+
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "single_instance_allowed"

From 9ee9bb368db652d8da39b24116d670ac26d3c4e6 Mon Sep 17 00:00:00 2001
From: Mike Degatano
Date: Tue, 16 Sep 2025 11:24:48 -0400
Subject: [PATCH 16/16] Move Supervisor created persistent notifications into repairs (#152066)

---
 homeassistant/components/hassio/const.py     |  21 +++
 homeassistant/components/hassio/issues.py    |  35 ++++-
 homeassistant/components/hassio/repairs.py   |  16 +-
 homeassistant/components/hassio/strings.json |   8 +
 tests/components/hassio/conftest.py          |   1 +
 tests/components/hassio/test_issues.py       | 154 +++++++++++++++++++
 6 files changed, 217 insertions(+), 18 deletions(-)

diff --git a/homeassistant/components/hassio/const.py b/homeassistant/components/hassio/const.py
index a639833c3811ae..1653c33e5ecb26 100644
--- a/homeassistant/components/hassio/const.py
+++ b/homeassistant/components/hassio/const.py
@@ -112,11 +112,14 @@
 PLACEHOLDER_KEY_ADDON_URL = "addon_url"
 PLACEHOLDER_KEY_REFERENCE = "reference"
 PLACEHOLDER_KEY_COMPONENTS = "components"
+PLACEHOLDER_KEY_FREE_SPACE = "free_space"
 
 ISSUE_KEY_ADDON_BOOT_FAIL = "issue_addon_boot_fail"
 ISSUE_KEY_SYSTEM_DOCKER_CONFIG = "issue_system_docker_config"
 ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING = "issue_addon_detached_addon_missing"
 ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED = "issue_addon_detached_addon_removed"
+ISSUE_KEY_ADDON_PWNED = "issue_addon_pwned"
+ISSUE_KEY_SYSTEM_FREE_SPACE = "issue_system_free_space"
 
 CORE_CONTAINER = "homeassistant"
 SUPERVISOR_CONTAINER = "hassio_supervisor"
@@ -137,6 +140,24 @@
 REQUEST_REFRESH_DELAY = 10
 
+HELP_URLS = {
+    "help_url": "https://www.home-assistant.io/help/",
+    "community_url": "https://community.home-assistant.io/",
+}
+
+EXTRA_PLACEHOLDERS = {
+    "issue_mount_mount_failed": {
+        "storage_url": "/config/storage",
+    },
+    ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED: HELP_URLS,
+    ISSUE_KEY_SYSTEM_FREE_SPACE: {
+        "more_info_free_space": "https://www.home-assistant.io/more-info/free-space",
+    },
+    ISSUE_KEY_ADDON_PWNED: {
+        "more_info_pwned": "https://www.home-assistant.io/more-info/pwned-passwords",
+    },
+}
+
 
 class SupervisorEntityModel(StrEnum):
     """Supervisor entity model."""
diff --git a/homeassistant/components/hassio/issues.py b/homeassistant/components/hassio/issues.py
index 0486dc1f85f9fd..df1ca87fe0b6d9 
100644 --- a/homeassistant/components/hassio/issues.py +++ b/homeassistant/components/hassio/issues.py @@ -41,17 +41,21 @@ EVENT_SUPERVISOR_EVENT, EVENT_SUPERVISOR_UPDATE, EVENT_SUPPORTED_CHANGED, + EXTRA_PLACEHOLDERS, ISSUE_KEY_ADDON_BOOT_FAIL, ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING, ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED, + ISSUE_KEY_ADDON_PWNED, ISSUE_KEY_SYSTEM_DOCKER_CONFIG, + ISSUE_KEY_SYSTEM_FREE_SPACE, PLACEHOLDER_KEY_ADDON, PLACEHOLDER_KEY_ADDON_URL, + PLACEHOLDER_KEY_FREE_SPACE, PLACEHOLDER_KEY_REFERENCE, REQUEST_REFRESH_DELAY, UPDATE_KEY_SUPERVISOR, ) -from .coordinator import get_addons_info +from .coordinator import get_addons_info, get_host_info from .handler import HassIO, get_supervisor_client ISSUE_KEY_UNHEALTHY = "unhealthy" @@ -78,6 +82,8 @@ ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING, ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED, "issue_system_disk_lifetime", + ISSUE_KEY_SYSTEM_FREE_SPACE, + ISSUE_KEY_ADDON_PWNED, } _LOGGER = logging.getLogger(__name__) @@ -241,11 +247,17 @@ def issues(self) -> set[Issue]: def add_issue(self, issue: Issue) -> None: """Add or update an issue in the list. Create or update a repair if necessary.""" if issue.key in ISSUE_KEYS_FOR_REPAIRS: - placeholders: dict[str, str] | None = None + placeholders: dict[str, str] = {} + if not issue.suggestions and issue.key in EXTRA_PLACEHOLDERS: + placeholders |= EXTRA_PLACEHOLDERS[issue.key] + if issue.reference: - placeholders = {PLACEHOLDER_KEY_REFERENCE: issue.reference} + placeholders[PLACEHOLDER_KEY_REFERENCE] = issue.reference - if issue.key == ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING: + if issue.key in { + ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING, + ISSUE_KEY_ADDON_PWNED, + }: placeholders[PLACEHOLDER_KEY_ADDON_URL] = ( f"/hassio/addon/{issue.reference}" ) @@ -257,6 +269,19 @@ def add_issue(self, issue: Issue) -> None: else: placeholders[PLACEHOLDER_KEY_ADDON] = issue.reference + elif issue.key == ISSUE_KEY_SYSTEM_FREE_SPACE: + host_info = get_host_info(self._hass) + if ( + host_info + and "data" in host_info + and "disk_free" in host_info["data"] + ): + placeholders[PLACEHOLDER_KEY_FREE_SPACE] = str( + host_info["data"]["disk_free"] + ) + else: + placeholders[PLACEHOLDER_KEY_FREE_SPACE] = "<2" + async_create_issue( self._hass, DOMAIN, @@ -264,7 +289,7 @@ def add_issue(self, issue: Issue) -> None: is_fixable=bool(issue.suggestions), severity=IssueSeverity.WARNING, translation_key=issue.key, - translation_placeholders=placeholders, + translation_placeholders=placeholders or None, ) self._issues[issue.uuid] = issue diff --git a/homeassistant/components/hassio/repairs.py b/homeassistant/components/hassio/repairs.py index 0e8122c08b995d..ff32e2cbab9768 100644 --- a/homeassistant/components/hassio/repairs.py +++ b/homeassistant/components/hassio/repairs.py @@ -16,8 +16,10 @@ from . 
import get_addons_info, get_issues_info from .const import ( + EXTRA_PLACEHOLDERS, ISSUE_KEY_ADDON_BOOT_FAIL, ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED, + ISSUE_KEY_ADDON_PWNED, ISSUE_KEY_SYSTEM_DOCKER_CONFIG, PLACEHOLDER_KEY_ADDON, PLACEHOLDER_KEY_COMPONENTS, @@ -26,11 +28,6 @@ from .handler import get_supervisor_client from .issues import Issue, Suggestion -HELP_URLS = { - "help_url": "https://www.home-assistant.io/help/", - "community_url": "https://community.home-assistant.io/", -} - SUGGESTION_CONFIRMATION_REQUIRED = { "addon_execute_remove", "system_adopt_data_disk", @@ -38,14 +35,6 @@ } -EXTRA_PLACEHOLDERS = { - "issue_mount_mount_failed": { - "storage_url": "/config/storage", - }, - ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED: HELP_URLS, -} - - class SupervisorIssueRepairFlow(RepairsFlow): """Handler for an issue fixing flow.""" @@ -219,6 +208,7 @@ async def async_create_fix_flow( if issue and issue.key in { ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED, ISSUE_KEY_ADDON_BOOT_FAIL, + ISSUE_KEY_ADDON_PWNED, }: return AddonIssueRepairFlow(hass, issue_id) diff --git a/homeassistant/components/hassio/strings.json b/homeassistant/components/hassio/strings.json index 96855097b8b3f2..b6f3d90f3ef879 100644 --- a/homeassistant/components/hassio/strings.json +++ b/homeassistant/components/hassio/strings.json @@ -52,6 +52,10 @@ } } }, + "issue_addon_pwned": { + "title": "Insecure secrets detected in add-on configuration", + "description": "Add-on {addon} uses secrets/passwords in its configuration which are detected as not secure. See [pwned passwords and secrets]({more_info_pwned}) for more information on this issue." + }, "issue_mount_mount_failed": { "title": "Network storage device failed", "fix_flow": { @@ -119,6 +123,10 @@ "title": "Disk lifetime exceeding 90%", "description": "The data disk has exceeded 90% of its expected lifespan. The disk may soon malfunction which can lead to data loss. You should replace it soon and migrate your data." }, + "issue_system_free_space": { + "title": "Data disk is running low on free space", + "description": "The data disk has only {free_space}GB free space left. This may cause issues with system stability and interfere with functionality such as backups and updates. See [clear up storage]({more_info_free_space}) for tips on how to free up space." + }, "unhealthy": { "title": "Unhealthy system - {reason}", "description": "System is currently unhealthy due to {reason}. For troubleshooting information, select Learn more." 
diff --git a/tests/components/hassio/conftest.py b/tests/components/hassio/conftest.py index a71ee370b32314..476062ab6afd93 100644 --- a/tests/components/hassio/conftest.py +++ b/tests/components/hassio/conftest.py @@ -108,6 +108,7 @@ def all_setup_requests( "chassis": "vm", "operating_system": "Debian GNU/Linux 10 (buster)", "kernel": "4.19.0-6-amd64", + "disk_free": 1.6, }, }, }, diff --git a/tests/components/hassio/test_issues.py b/tests/components/hassio/test_issues.py index ddcbe5708c66fb..20473ff4041639 100644 --- a/tests/components/hassio/test_issues.py +++ b/tests/components/hassio/test_issues.py @@ -950,3 +950,157 @@ async def test_supervisor_issues_disk_lifetime( fixable=False, placeholders=None, ) + + +@pytest.mark.usefixtures("all_setup_requests") +async def test_supervisor_issues_free_space( + hass: HomeAssistant, + supervisor_client: AsyncMock, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test supervisor issue for too little free space remaining.""" + mock_resolution_info(supervisor_client) + + result = await async_setup_component(hass, "hassio", {}) + assert result + + client = await hass_ws_client(hass) + + await client.send_json( + { + "id": 1, + "type": "supervisor/event", + "data": { + "event": "issue_changed", + "data": { + "uuid": (issue_uuid := uuid4().hex), + "type": "free_space", + "context": "system", + "reference": None, + }, + }, + } + ) + msg = await client.receive_json() + assert msg["success"] + await hass.async_block_till_done() + + await client.send_json({"id": 2, "type": "repairs/list_issues"}) + msg = await client.receive_json() + assert msg["success"] + assert len(msg["result"]["issues"]) == 1 + assert_issue_repair_in_list( + msg["result"]["issues"], + uuid=issue_uuid, + context="system", + type_="free_space", + fixable=False, + placeholders={ + "more_info_free_space": "https://www.home-assistant.io/more-info/free-space", + "free_space": "1.6", + }, + ) + + +async def test_supervisor_issues_free_space_host_info_fail( + hass: HomeAssistant, + supervisor_client: AsyncMock, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test supervisor issue for too little free space remaining without host info.""" + mock_resolution_info(supervisor_client) + + result = await async_setup_component(hass, "hassio", {}) + assert result + + client = await hass_ws_client(hass) + + await client.send_json( + { + "id": 1, + "type": "supervisor/event", + "data": { + "event": "issue_changed", + "data": { + "uuid": (issue_uuid := uuid4().hex), + "type": "free_space", + "context": "system", + "reference": None, + }, + }, + } + ) + msg = await client.receive_json() + assert msg["success"] + await hass.async_block_till_done() + + await client.send_json({"id": 2, "type": "repairs/list_issues"}) + msg = await client.receive_json() + assert msg["success"] + assert len(msg["result"]["issues"]) == 1 + assert_issue_repair_in_list( + msg["result"]["issues"], + uuid=issue_uuid, + context="system", + type_="free_space", + fixable=False, + placeholders={ + "more_info_free_space": "https://www.home-assistant.io/more-info/free-space", + "free_space": "<2", + }, + ) + + +@pytest.mark.parametrize( + "all_setup_requests", [{"include_addons": True}], indirect=True +) +@pytest.mark.usefixtures("all_setup_requests") +async def test_supervisor_issues_addon_pwned( + hass: HomeAssistant, + supervisor_client: AsyncMock, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test supervisor issue for pwned secret in an addon.""" + mock_resolution_info(supervisor_client) + + result = await 
async_setup_component(hass, "hassio", {}) + assert result + + client = await hass_ws_client(hass) + + await client.send_json( + { + "id": 1, + "type": "supervisor/event", + "data": { + "event": "issue_changed", + "data": { + "uuid": (issue_uuid := uuid4().hex), + "type": "pwned", + "context": "addon", + "reference": "test", + }, + }, + } + ) + msg = await client.receive_json() + assert msg["success"] + await hass.async_block_till_done() + + await client.send_json({"id": 2, "type": "repairs/list_issues"}) + msg = await client.receive_json() + assert msg["success"] + assert len(msg["result"]["issues"]) == 1 + assert_issue_repair_in_list( + msg["result"]["issues"], + uuid=issue_uuid, + context="addon", + type_="pwned", + fixable=False, + placeholders={ + "reference": "test", + "addon": "test", + "addon_url": "/hassio/addon/test", + "more_info_pwned": "https://www.home-assistant.io/more-info/pwned-passwords", + }, + )