diff --git a/.strict-typing b/.strict-typing
index ce06d00c697928..bf5b90b00917dd 100644
--- a/.strict-typing
+++ b/.strict-typing
@@ -460,6 +460,7 @@ homeassistant.components.sensorpush_cloud.*
 homeassistant.components.sensoterra.*
 homeassistant.components.senz.*
 homeassistant.components.sfr_box.*
+homeassistant.components.sftp_storage.*
 homeassistant.components.shell_command.*
 homeassistant.components.shelly.*
 homeassistant.components.shopping_list.*
diff --git a/CODEOWNERS b/CODEOWNERS
index d467439cae7a7b..133700b75a405d 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -154,10 +154,10 @@ build.json @home-assistant/supervisor
 /tests/components/arve/ @ikalnyi
 /homeassistant/components/aseko_pool_live/ @milanmeu
 /tests/components/aseko_pool_live/ @milanmeu
-/homeassistant/components/assist_pipeline/ @balloob @synesthesiam
-/tests/components/assist_pipeline/ @balloob @synesthesiam
-/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam
-/tests/components/assist_satellite/ @home-assistant/core @synesthesiam
+/homeassistant/components/assist_pipeline/ @balloob @synesthesiam @arturpragacz
+/tests/components/assist_pipeline/ @balloob @synesthesiam @arturpragacz
+/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam @arturpragacz
+/tests/components/assist_satellite/ @home-assistant/core @synesthesiam @arturpragacz
 /homeassistant/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
 /tests/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
 /homeassistant/components/atag/ @MatsNL
@@ -298,8 +298,8 @@ build.json @home-assistant/supervisor
 /tests/components/configurator/ @home-assistant/core
 /homeassistant/components/control4/ @lawtancool
 /tests/components/control4/ @lawtancool
-/homeassistant/components/conversation/ @home-assistant/core @synesthesiam
-/tests/components/conversation/ @home-assistant/core @synesthesiam
+/homeassistant/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
+/tests/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
 /homeassistant/components/cookidoo/ @miaucl
 /tests/components/cookidoo/ @miaucl
 /homeassistant/components/coolmaster/ @OnFreund
@@ -751,8 +751,8 @@ build.json @home-assistant/supervisor
 /tests/components/integration/ @dgomes
 /homeassistant/components/intellifire/ @jeeftor
 /tests/components/intellifire/ @jeeftor
-/homeassistant/components/intent/ @home-assistant/core @synesthesiam
-/tests/components/intent/ @home-assistant/core @synesthesiam
+/homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
+/tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
 /homeassistant/components/intesishome/ @jnimmo
 /homeassistant/components/iometer/ @MaestroOnICe
 /tests/components/iometer/ @MaestroOnICe
@@ -1394,6 +1394,8 @@ build.json @home-assistant/supervisor
 /tests/components/seventeentrack/ @shaiu
 /homeassistant/components/sfr_box/ @epenet
 /tests/components/sfr_box/ @epenet
+/homeassistant/components/sftp_storage/ @maretodoric
+/tests/components/sftp_storage/ @maretodoric
 /homeassistant/components/sharkiq/ @JeffResc @funkybunch
 /tests/components/sharkiq/ @JeffResc @funkybunch
 /homeassistant/components/shell_command/ @home-assistant/core
diff --git a/homeassistant/components/analytics/analytics.py b/homeassistant/components/analytics/analytics.py
index b1641e8dd48c61..60d810e198f363 100644
--- a/homeassistant/components/analytics/analytics.py
+++ b/homeassistant/components/analytics/analytics.py
@@ -24,7 +24,12 @@
     get_instance as
get_recorder_instance, ) from homeassistant.config_entries import SOURCE_IGNORE -from homeassistant.const import ATTR_DOMAIN, BASE_PLATFORMS, __version__ as HA_VERSION +from homeassistant.const import ( + ATTR_ASSUMED_STATE, + ATTR_DOMAIN, + BASE_PLATFORMS, + __version__ as HA_VERSION, +) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -389,66 +394,117 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]: async def async_devices_payload(hass: HomeAssistant) -> dict: - """Return the devices payload.""" - devices: list[dict[str, Any]] = [] + """Return detailed information about entities and devices.""" + integrations_info: dict[str, dict[str, Any]] = {} + dev_reg = dr.async_get(hass) - # Devices that need via device info set - new_indexes: dict[str, int] = {} - via_devices: dict[str, str] = {} - seen_integrations = set() + # We need to refer to other devices, for example in `via_device` field. + # We don't however send the original device ids outside of Home Assistant, + # instead we refer to devices by (integration_domain, index_in_integration_device_list). + device_id_mapping: dict[str, tuple[str, int]] = {} - for device in dev_reg.devices.values(): - if not device.primary_config_entry: + for device_entry in dev_reg.devices.values(): + if not device_entry.primary_config_entry: continue - config_entry = hass.config_entries.async_get_entry(device.primary_config_entry) + config_entry = hass.config_entries.async_get_entry( + device_entry.primary_config_entry + ) if config_entry is None: continue - seen_integrations.add(config_entry.domain) + integration_domain = config_entry.domain + integration_info = integrations_info.setdefault( + integration_domain, {"devices": [], "entities": []} + ) + + devices_info = integration_info["devices"] + + device_id_mapping[device_entry.id] = (integration_domain, len(devices_info)) - new_indexes[device.id] = len(devices) - devices.append( + devices_info.append( { - "integration": config_entry.domain, - "manufacturer": device.manufacturer, - "model_id": device.model_id, - "model": device.model, - "sw_version": device.sw_version, - "hw_version": device.hw_version, - "has_configuration_url": device.configuration_url is not None, - "via_device": None, - "entry_type": device.entry_type.value if device.entry_type else None, + "entities": [], + "entry_type": device_entry.entry_type, + "has_configuration_url": device_entry.configuration_url is not None, + "hw_version": device_entry.hw_version, + "manufacturer": device_entry.manufacturer, + "model": device_entry.model, + "model_id": device_entry.model_id, + "sw_version": device_entry.sw_version, + "via_device": device_entry.via_device_id, } ) - if device.via_device_id: - via_devices[device.id] = device.via_device_id + # Fill out via_device with new device ids + for integration_info in integrations_info.values(): + for device_info in integration_info["devices"]: + if device_info["via_device"] is None: + continue + device_info["via_device"] = device_id_mapping.get(device_info["via_device"]) - for from_device, via_device in via_devices.items(): - if via_device not in new_indexes: - continue - devices[new_indexes[from_device]]["via_device"] = new_indexes[via_device] + ent_reg = er.async_get(hass) + + for entity_entry in ent_reg.entities.values(): + integration_domain = entity_entry.platform + integration_info = integrations_info.setdefault( + 
integration_domain, {"devices": [], "entities": []} + ) + + devices_info = integration_info["devices"] + entities_info = integration_info["entities"] + + entity_state = hass.states.get(entity_entry.entity_id) + + entity_info = { + # LIMITATION: `assumed_state` can be overridden by users; + # we should replace it with the original value in the future. + # It is also not present, if entity is not in the state machine, + # which can happen for disabled entities. + "assumed_state": entity_state.attributes.get(ATTR_ASSUMED_STATE, False) + if entity_state is not None + else None, + "capabilities": entity_entry.capabilities, + "domain": entity_entry.domain, + "entity_category": entity_entry.entity_category, + "has_entity_name": entity_entry.has_entity_name, + "original_device_class": entity_entry.original_device_class, + # LIMITATION: `unit_of_measurement` can be overridden by users; + # we should replace it with the original value in the future. + "unit_of_measurement": entity_entry.unit_of_measurement, + } + + if ( + ((device_id := entity_entry.device_id) is not None) + and ((new_device_id := device_id_mapping.get(device_id)) is not None) + and (new_device_id[0] == integration_domain) + ): + device_info = devices_info[new_device_id[1]] + device_info["entities"].append(entity_info) + else: + entities_info.append(entity_info) integrations = { domain: integration for domain, integration in ( - await async_get_integrations(hass, seen_integrations) + await async_get_integrations(hass, integrations_info.keys()) ).items() if isinstance(integration, Integration) } - for device_info in devices: - if integration := integrations.get(device_info["integration"]): - device_info["is_custom_integration"] = not integration.is_built_in + for domain, integration_info in integrations_info.items(): + if integration := integrations.get(domain): + integration_info["is_custom_integration"] = not integration.is_built_in # Include version for custom integrations if not integration.is_built_in and integration.version: - device_info["custom_integration_version"] = str(integration.version) + integration_info["custom_integration_version"] = str( + integration.version + ) return { "version": "home-assistant:1", "home_assistant": HA_VERSION, - "devices": devices, + "integrations": integrations_info, } diff --git a/homeassistant/components/androidtv_remote/config_flow.py b/homeassistant/components/androidtv_remote/config_flow.py index 0a236c7c9ef6cd..cddc9e8ff7c1c7 100644 --- a/homeassistant/components/androidtv_remote/config_flow.py +++ b/homeassistant/components/androidtv_remote/config_flow.py @@ -66,9 +66,14 @@ async def async_step_user( if user_input is not None: self.host = user_input[CONF_HOST] api = create_api(self.hass, self.host, enable_ime=False) + await api.async_generate_cert_if_missing() try: - await api.async_generate_cert_if_missing() self.name, self.mac = await api.async_get_name_and_mac() + except CannotConnect: + # Likely invalid IP address or device is network unreachable. Stay + # in the user step allowing the user to enter a different host. + errors["base"] = "cannot_connect" + else: await self.async_set_unique_id(format_mac(self.mac)) if self.source == SOURCE_RECONFIGURE: self._abort_if_unique_id_mismatch() @@ -81,11 +86,10 @@ async def async_step_user( }, ) self._abort_if_unique_id_configured(updates={CONF_HOST: self.host}) - return await self._async_start_pair() - except (CannotConnect, ConnectionClosed): - # Likely invalid IP address or device is network unreachable. 
Stay - # in the user step allowing the user to enter a different host. - errors["base"] = "cannot_connect" + try: + return await self._async_start_pair() + except (CannotConnect, ConnectionClosed): + errors["base"] = "cannot_connect" else: user_input = {} default_host = user_input.get(CONF_HOST, vol.UNDEFINED) @@ -112,22 +116,9 @@ async def async_step_pair( """Handle the pair step.""" errors: dict[str, str] = {} if user_input is not None: + pin = user_input["pin"] try: - pin = user_input["pin"] await self.api.async_finish_pairing(pin) - if self.source == SOURCE_REAUTH: - return self.async_update_reload_and_abort( - self._get_reauth_entry(), reload_even_if_entry_is_unchanged=True - ) - - return self.async_create_entry( - title=self.name, - data={ - CONF_HOST: self.host, - CONF_NAME: self.name, - CONF_MAC: self.mac, - }, - ) except InvalidAuth: # Invalid PIN. Stay in the pair step allowing the user to enter # a different PIN. @@ -145,6 +136,20 @@ async def async_step_pair( # them to enter a new IP address but we cannot do that for the zeroconf # flow. Simpler to abort for both flows. return self.async_abort(reason="cannot_connect") + else: + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), reload_even_if_entry_is_unchanged=True + ) + + return self.async_create_entry( + title=self.name, + data={ + CONF_HOST: self.host, + CONF_NAME: self.name, + CONF_MAC: self.mac, + }, + ) return self.async_show_form( step_id="pair", data_schema=STEP_PAIR_DATA_SCHEMA, diff --git a/homeassistant/components/androidtv_remote/entity.py b/homeassistant/components/androidtv_remote/entity.py index 7a1e2d6bf069b8..a006118afff330 100644 --- a/homeassistant/components/androidtv_remote/entity.py +++ b/homeassistant/components/androidtv_remote/entity.py @@ -6,7 +6,7 @@ from androidtvremote2 import AndroidTVRemote, ConnectionClosed -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME +from homeassistant.const import CONF_MAC, CONF_NAME from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo @@ -28,8 +28,6 @@ def __init__( ) -> None: """Initialize the entity.""" self._api = api - self._host = config_entry.data[CONF_HOST] - self._name = config_entry.data[CONF_NAME] self._apps: dict[str, Any] = config_entry.options.get(CONF_APPS, {}) self._attr_unique_id = config_entry.unique_id self._attr_is_on = api.is_on @@ -39,7 +37,7 @@ def __init__( self._attr_device_info = DeviceInfo( connections={(CONNECTION_NETWORK_MAC, config_entry.data[CONF_MAC])}, identifiers={(DOMAIN, config_entry.unique_id)}, - name=self._name, + name=config_entry.data[CONF_NAME], manufacturer=device_info["manufacturer"], model=device_info["model"], ) diff --git a/homeassistant/components/androidtv_remote/media_player.py b/homeassistant/components/androidtv_remote/media_player.py index e4f653cbcf123a..371c97cc33eda2 100644 --- a/homeassistant/components/androidtv_remote/media_player.py +++ b/homeassistant/components/androidtv_remote/media_player.py @@ -175,7 +175,11 @@ async def async_play_media( """Play a piece of media.""" if media_type == MediaType.CHANNEL: if not media_id.isnumeric(): - raise ValueError(f"Channel must be numeric: {media_id}") + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="invalid_channel", + translation_placeholders={"media_id": media_id}, + ) if self._channel_set_task: self._channel_set_task.cancel() 
self._channel_set_task = asyncio.create_task( @@ -188,7 +192,11 @@ async def async_play_media( self._send_launch_app_command(media_id) return - raise ValueError(f"Invalid media type: {media_type}") + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="invalid_media_type", + translation_placeholders={"media_type": media_type}, + ) async def async_browse_media( self, diff --git a/homeassistant/components/androidtv_remote/strings.json b/homeassistant/components/androidtv_remote/strings.json index b1a220e2a32ca1..0014958717a6eb 100644 --- a/homeassistant/components/androidtv_remote/strings.json +++ b/homeassistant/components/androidtv_remote/strings.json @@ -85,6 +85,12 @@ "exceptions": { "connection_closed": { "message": "Connection to the Android TV device is closed" + }, + "invalid_channel": { + "message": "Channel must be numeric: {media_id}" + }, + "invalid_media_type": { + "message": "Invalid media type: {media_type}" } } } diff --git a/homeassistant/components/assist_pipeline/manifest.json b/homeassistant/components/assist_pipeline/manifest.json index 3a59d8f87f169c..9bdb221e615a04 100644 --- a/homeassistant/components/assist_pipeline/manifest.json +++ b/homeassistant/components/assist_pipeline/manifest.json @@ -2,7 +2,7 @@ "domain": "assist_pipeline", "name": "Assist pipeline", "after_dependencies": ["repairs"], - "codeowners": ["@balloob", "@synesthesiam"], + "codeowners": ["@balloob", "@synesthesiam", "@arturpragacz"], "dependencies": ["conversation", "stt", "tts", "wake_word"], "documentation": "https://www.home-assistant.io/integrations/assist_pipeline", "integration_type": "system", diff --git a/homeassistant/components/assist_satellite/manifest.json b/homeassistant/components/assist_satellite/manifest.json index b5636e0286da17..5164df9d808d40 100644 --- a/homeassistant/components/assist_satellite/manifest.json +++ b/homeassistant/components/assist_satellite/manifest.json @@ -1,7 +1,7 @@ { "domain": "assist_satellite", "name": "Assist Satellite", - "codeowners": ["@home-assistant/core", "@synesthesiam"], + "codeowners": ["@home-assistant/core", "@synesthesiam", "@arturpragacz"], "dependencies": ["assist_pipeline", "http", "stt", "tts"], "documentation": "https://www.home-assistant.io/integrations/assist_satellite", "integration_type": "entity", diff --git a/homeassistant/components/bmw_connected_drive/manifest.json b/homeassistant/components/bmw_connected_drive/manifest.json index 81928a59a52bc9..327b47bbea29f7 100644 --- a/homeassistant/components/bmw_connected_drive/manifest.json +++ b/homeassistant/components/bmw_connected_drive/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive", "iot_class": "cloud_polling", "loggers": ["bimmer_connected"], - "requirements": ["bimmer-connected[china]==0.17.2"] + "requirements": ["bimmer-connected[china]==0.17.3"] } diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index 4b056ead2c29bf..938889955e9601 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -35,7 +35,7 @@ ) from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity from hassil.trie import Trie -from hassil.util import merge_dict +from hassil.util import merge_dict, remove_punctuation from home_assistant_intents import ( ErrorKey, FuzzyConfig, @@ -327,12 +327,10 @@ async def async_recognize_intent( if self._exposed_names_trie is not 
None: # Filter by input string - text_lower = user_input.text.strip().lower() + text = remove_punctuation(user_input.text).strip().lower() slot_lists["name"] = TextSlotList( name="name", - values=[ - result[2] for result in self._exposed_names_trie.find(text_lower) - ], + values=[result[2] for result in self._exposed_names_trie.find(text)], ) start = time.monotonic() @@ -1263,7 +1261,7 @@ async def _make_slot_lists(self) -> dict[str, SlotList]: name_list = TextSlotList.from_tuples(exposed_entity_names, allow_template=False) for name_value in name_list.values: assert isinstance(name_value.text_in, TextChunk) - name_text = name_value.text_in.text.strip().lower() + name_text = remove_punctuation(name_value.text_in.text).strip().lower() self._exposed_names_trie.insert(name_text, name_value) self._slot_lists = { diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index d09fecb52c1f4d..36db24ce5453d3 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -1,7 +1,7 @@ { "domain": "conversation", "name": "Conversation", - "codeowners": ["@home-assistant/core", "@synesthesiam"], + "codeowners": ["@home-assistant/core", "@synesthesiam", "@arturpragacz"], "dependencies": ["http", "intent"], "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", diff --git a/homeassistant/components/ecowitt/sensor.py b/homeassistant/components/ecowitt/sensor.py index ccaaeaae3de391..6620f61961fff5 100644 --- a/homeassistant/components/ecowitt/sensor.py +++ b/homeassistant/components/ecowitt/sensor.py @@ -218,6 +218,12 @@ native_unit_of_measurement=PERCENTAGE, state_class=SensorStateClass.MEASUREMENT, ), + EcoWittSensorTypes.SOIL_MOISTURE: SensorEntityDescription( + key="SOIL_MOISTURE", + device_class=SensorDeviceClass.MOISTURE, + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), } diff --git a/homeassistant/components/homematic/strings.json b/homeassistant/components/homematic/strings.json index 78159189db8d76..3ce4c1f544d86e 100644 --- a/homeassistant/components/homematic/strings.json +++ b/homeassistant/components/homematic/strings.json @@ -42,7 +42,7 @@ }, "set_device_value": { "name": "Set device value", - "description": "Sets a device property on RPC XML interface.", + "description": "Controls a device manually. Equivalent to setValue-method from XML-RPC.", "fields": { "address": { "name": "Address", @@ -80,11 +80,11 @@ "fields": { "interface": { "name": "Interface", - "description": "Select the given interface into install mode." + "description": "The interface to set into install mode." }, "mode": { "name": "[%key:common::config_flow::data::mode%]", - "description": "1= Normal mode / 2= Remove exists old links." + "description": "1= Normal mode / 2= Remove existing old links." }, "time": { "name": "Time", @@ -98,11 +98,11 @@ }, "put_paramset": { "name": "Put paramset", - "description": "Calls to putParamset in the RPC XML interface.", + "description": "Manually changes a device’s paramset. Equivalent to putParamset-method from XML-RPC.", "fields": { "interface": { "name": "Interface", - "description": "The interfaces name from the config." + "description": "The interface's name from the config." 
}, "address": { "name": "Address", diff --git a/homeassistant/components/hue/config_flow.py b/homeassistant/components/hue/config_flow.py index bec443526138c8..3328b5ab659413 100644 --- a/homeassistant/components/hue/config_flow.py +++ b/homeassistant/components/hue/config_flow.py @@ -9,7 +9,9 @@ import aiohttp from aiohue import LinkButtonNotPressed, create_app_key from aiohue.discovery import DiscoveredHueBridge, discover_bridge, discover_nupnp +from aiohue.errors import AiohueException from aiohue.util import normalize_bridge_id +from aiohue.v2 import HueBridgeV2 import slugify as unicode_slug import voluptuous as vol @@ -40,6 +42,9 @@ HUE_IGNORED_BRIDGE_NAMES = ["Home Assistant Bridge", "Espalexa"] HUE_MANUAL_BRIDGE_ID = "manual" +BSB002_MODEL_ID = "BSB002" +BSB003_MODEL_ID = "BSB003" + class HueFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a Hue config flow.""" @@ -74,7 +79,14 @@ async def _get_bridge( """Return a DiscoveredHueBridge object.""" try: bridge = await discover_bridge( - host, websession=aiohttp_client.async_get_clientsession(self.hass) + host, + websession=aiohttp_client.async_get_clientsession( + # NOTE: we disable SSL verification for now due to the fact that the (BSB003) + # Hue bridge uses a certificate from a on-bridge root authority. + # We need to specifically handle this case in a follow-up update. + self.hass, + verify_ssl=False, + ), ) except aiohttp.ClientError as err: LOGGER.warning( @@ -110,7 +122,9 @@ async def async_step_init( try: async with asyncio.timeout(5): bridges = await discover_nupnp( - websession=aiohttp_client.async_get_clientsession(self.hass) + websession=aiohttp_client.async_get_clientsession( + self.hass, verify_ssl=False + ) ) except TimeoutError: bridges = [] @@ -178,7 +192,9 @@ async def async_step_link( app_key = await create_app_key( bridge.host, f"home-assistant#{device_name}", - websession=aiohttp_client.async_get_clientsession(self.hass), + websession=aiohttp_client.async_get_clientsession( + self.hass, verify_ssl=False + ), ) except LinkButtonNotPressed: errors["base"] = "register_failed" @@ -228,7 +244,6 @@ async def async_step_zeroconf( self._abort_if_unique_id_configured( updates={CONF_HOST: discovery_info.host}, reload_on_update=True ) - # we need to query the other capabilities too bridge = await self._get_bridge( discovery_info.host, discovery_info.properties["bridgeid"] @@ -236,6 +251,14 @@ async def async_step_zeroconf( if bridge is None: return self.async_abort(reason="cannot_connect") self.bridge = bridge + if ( + bridge.supports_v2 + and discovery_info.properties.get("modelid") == BSB003_MODEL_ID + ): + # try to handle migration of BSB002 --> BSB003 + if await self._check_migrated_bridge(bridge): + return self.async_abort(reason="migrated_bridge") + return await self.async_step_link() async def async_step_homekit( @@ -272,6 +295,55 @@ async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResu self.bridge = bridge return await self.async_step_link() + async def _check_migrated_bridge(self, bridge: DiscoveredHueBridge) -> bool: + """Check if the discovered bridge is a migrated bridge.""" + # Try to handle migration of BSB002 --> BSB003. + # Once we detect a BSB003 bridge on the network which has not yet been + # configured in HA (otherwise we would have had a unique id match), + # we check if we have any existing (BSB002) entries and if we can connect to the + # new bridge with our previously stored api key. + # If that succeeds, we migrate the entry to the new bridge. 
+ for conf_entry in self.hass.config_entries.async_entries( + DOMAIN, include_ignore=False, include_disabled=False + ): + if conf_entry.data[CONF_API_VERSION] != 2: + continue + if conf_entry.data[CONF_HOST] == bridge.host: + continue + # found an existing (BSB002) bridge entry, + # check if we can connect to the new BSB003 bridge using the old credentials + api = HueBridgeV2(bridge.host, conf_entry.data[CONF_API_KEY]) + try: + await api.fetch_full_state() + except (AiohueException, aiohttp.ClientError): + continue + old_bridge_id = conf_entry.unique_id + assert old_bridge_id is not None + # found a matching entry, migrate it + self.hass.config_entries.async_update_entry( + conf_entry, + data={ + **conf_entry.data, + CONF_HOST: bridge.host, + }, + unique_id=bridge.id, + ) + # also update the bridge device + dev_reg = dr.async_get(self.hass) + if bridge_device := dev_reg.async_get_device( + identifiers={(DOMAIN, old_bridge_id)} + ): + dev_reg.async_update_device( + bridge_device.id, + # overwrite identifiers with new bridge id + new_identifiers={(DOMAIN, bridge.id)}, + # overwrite mac addresses with empty set to drop the old (incorrect) addresses + # this will be auto corrected once the integration is loaded + new_connections=set(), + ) + return True + return False + class HueV1OptionsFlowHandler(OptionsFlow): """Handle Hue options for V1 implementation.""" diff --git a/homeassistant/components/imeon_inverter/manifest.json b/homeassistant/components/imeon_inverter/manifest.json index a9a37f3fd9c7cc..837b7351241750 100644 --- a/homeassistant/components/imeon_inverter/manifest.json +++ b/homeassistant/components/imeon_inverter/manifest.json @@ -7,7 +7,7 @@ "integration_type": "device", "iot_class": "local_polling", "quality_scale": "bronze", - "requirements": ["imeon_inverter_api==0.3.14"], + "requirements": ["imeon_inverter_api==0.3.16"], "ssdp": [ { "manufacturer": "IMEON", diff --git a/homeassistant/components/intent/manifest.json b/homeassistant/components/intent/manifest.json index 90f7a34e624bb2..9bb580dd842f8e 100644 --- a/homeassistant/components/intent/manifest.json +++ b/homeassistant/components/intent/manifest.json @@ -1,7 +1,7 @@ { "domain": "intent", "name": "Intent", - "codeowners": ["@home-assistant/core", "@synesthesiam"], + "codeowners": ["@home-assistant/core", "@synesthesiam", "@arturpragacz"], "config_flow": false, "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/intent", diff --git a/homeassistant/components/matter/sensor.py b/homeassistant/components/matter/sensor.py index d8e55b7b1ff9da..37e21d5cb75cf9 100644 --- a/homeassistant/components/matter/sensor.py +++ b/homeassistant/components/matter/sensor.py @@ -1385,4 +1385,16 @@ def _update_from_device(self) -> None: clusters.ValveConfigurationAndControl.Attributes.AutoCloseTime, ), ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="ServiceAreaEstimatedEndTime", + translation_key="estimated_end_time", + device_class=SensorDeviceClass.TIMESTAMP, + state_class=None, + device_to_ha=(lambda x: dt_util.utc_from_timestamp(x) if x > 0 else None), + ), + entity_class=MatterSensor, + required_attributes=(clusters.ServiceArea.Attributes.EstimatedEndTime,), + ), ] diff --git a/homeassistant/components/modbus/entity.py b/homeassistant/components/modbus/entity.py index 38622c4c197440..8667bc17a796a5 100644 --- a/homeassistant/components/modbus/entity.py +++ b/homeassistant/components/modbus/entity.py @@ -62,7 +62,6 @@ 
CONF_VIRTUAL_COUNT, CONF_WRITE_TYPE, CONF_ZERO_SUPPRESS, - SIGNAL_START_ENTITY, SIGNAL_STOP_ENTITY, DataType, ) @@ -143,7 +142,6 @@ def async_disable(self) -> None: self._cancel_call() self._cancel_call = None self._attr_available = False - self.async_write_ha_state() async def async_await_connection(self, _now: Any) -> None: """Wait for first connect.""" @@ -162,11 +160,6 @@ async def async_base_added_to_hass(self) -> None: self.async_on_remove( async_dispatcher_connect(self.hass, SIGNAL_STOP_ENTITY, self.async_disable) ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, SIGNAL_START_ENTITY, self.async_local_update - ) - ) class BaseStructPlatform(BasePlatform, RestoreEntity): diff --git a/homeassistant/components/overkiz/climate/atlantic_electrical_heater_with_adjustable_temperature_setpoint.py b/homeassistant/components/overkiz/climate/atlantic_electrical_heater_with_adjustable_temperature_setpoint.py index 041571f7b5f279..709d93bb2b434e 100644 --- a/homeassistant/components/overkiz/climate/atlantic_electrical_heater_with_adjustable_temperature_setpoint.py +++ b/homeassistant/components/overkiz/climate/atlantic_electrical_heater_with_adjustable_temperature_setpoint.py @@ -52,6 +52,7 @@ OverkizCommandParam.OFF: HVACMode.OFF, OverkizCommandParam.AUTO: HVACMode.AUTO, OverkizCommandParam.BASIC: HVACMode.HEAT, + OverkizCommandParam.MANUAL: HVACMode.HEAT, OverkizCommandParam.STANDBY: HVACMode.OFF, OverkizCommandParam.EXTERNAL: HVACMode.AUTO, OverkizCommandParam.INTERNAL: HVACMode.AUTO, diff --git a/homeassistant/components/sftp_storage/__init__.py b/homeassistant/components/sftp_storage/__init__.py new file mode 100644 index 00000000000000..9b095c2decfd94 --- /dev/null +++ b/homeassistant/components/sftp_storage/__init__.py @@ -0,0 +1,155 @@ +"""Integration for SFTP Storage.""" + +from __future__ import annotations + +import contextlib +from dataclasses import dataclass, field +import errno +import logging +from pathlib import Path + +from homeassistant.components.backup import BackupAgentError +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError + +from .client import BackupAgentClient +from .const import ( + CONF_BACKUP_LOCATION, + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_PRIVATE_KEY_FILE, + CONF_USERNAME, + DATA_BACKUP_AGENT_LISTENERS, + DOMAIN, + LOGGER, +) + +type SFTPConfigEntry = ConfigEntry[SFTPConfigEntryData] + + +@dataclass(kw_only=True) +class SFTPConfigEntryData: + """Dataclass holding all config entry data for an SFTP Storage entry.""" + + host: str + port: int + username: str + password: str | None = field(repr=False) + private_key_file: str | None + backup_location: str + + +async def async_setup_entry(hass: HomeAssistant, entry: SFTPConfigEntry) -> bool: + """Set up SFTP Storage from a config entry.""" + + cfg = SFTPConfigEntryData( + host=entry.data[CONF_HOST], + port=entry.data[CONF_PORT], + username=entry.data[CONF_USERNAME], + password=entry.data.get(CONF_PASSWORD), + private_key_file=entry.data.get(CONF_PRIVATE_KEY_FILE, []), + backup_location=entry.data[CONF_BACKUP_LOCATION], + ) + entry.runtime_data = cfg + + # Establish a connection during setup. + # This will raise exception if there is something wrong with either + # SSH server or config. 
+ try: + client = BackupAgentClient(entry, hass) + await client.open() + except BackupAgentError as e: + raise ConfigEntryError from e + + # Notify backup listeners + def _async_notify_backup_listeners() -> None: + for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []): + listener() + + entry.async_on_unload(entry.async_on_state_change(_async_notify_backup_listeners)) + + return True + + +async def async_remove_entry(hass: HomeAssistant, entry: SFTPConfigEntry) -> None: + """Remove an SFTP Storage config entry.""" + + def remove_files(entry: SFTPConfigEntry) -> None: + pkey = Path(entry.data[CONF_PRIVATE_KEY_FILE]) + + if pkey.exists(): + LOGGER.debug( + "Removing private key (%s) for %s integration for host %s@%s", + pkey, + DOMAIN, + entry.data[CONF_USERNAME], + entry.data[CONF_HOST], + ) + try: + pkey.unlink() + except OSError as e: + LOGGER.warning( + "Failed to remove private key %s for %s integration for host %s@%s. %s", + pkey.name, + DOMAIN, + entry.data[CONF_USERNAME], + entry.data[CONF_HOST], + str(e), + ) + + try: + pkey.parent.rmdir() + except OSError as e: + if e.errno == errno.ENOTEMPTY: # Directory not empty + if LOGGER.isEnabledFor(logging.DEBUG): + leftover_files = [] + # If we get an exception while gathering leftover files, make sure to log plain message. + with contextlib.suppress(OSError): + leftover_files = [f.name for f in pkey.parent.iterdir()] + + LOGGER.debug( + "Storage directory for %s integration is not empty (%s)%s", + DOMAIN, + str(pkey.parent), + f", files: {', '.join(leftover_files)}" + if leftover_files + else "", + ) + else: + LOGGER.warning( + "Error occurred while removing directory %s for integration %s: %s at host %s@%s", + str(pkey.parent), + DOMAIN, + str(e), + entry.data[CONF_USERNAME], + entry.data[CONF_HOST], + ) + else: + LOGGER.debug( + "Removed storage directory for %s integration", + DOMAIN, + entry.data[CONF_USERNAME], + entry.data[CONF_HOST], + ) + + if bool(entry.data.get(CONF_PRIVATE_KEY_FILE)): + LOGGER.debug( + "Cleaning up after %s integration for host %s@%s", + DOMAIN, + entry.data[CONF_USERNAME], + entry.data[CONF_HOST], + ) + await hass.async_add_executor_job(remove_files, entry) + + +async def async_unload_entry(hass: HomeAssistant, entry: SFTPConfigEntry) -> bool: + """Unload SFTP Storage config entry.""" + LOGGER.debug( + "Unloading %s integration for host %s@%s", + DOMAIN, + entry.data[CONF_USERNAME], + entry.data[CONF_HOST], + ) + return True diff --git a/homeassistant/components/sftp_storage/backup.py b/homeassistant/components/sftp_storage/backup.py new file mode 100644 index 00000000000000..4859f2d2f2afb5 --- /dev/null +++ b/homeassistant/components/sftp_storage/backup.py @@ -0,0 +1,153 @@ +"""Backup platform for the SFTP Storage integration.""" + +from __future__ import annotations + +from collections.abc import AsyncIterator, Callable, Coroutine +from typing import Any + +from asyncssh.sftp import SFTPError + +from homeassistant.components.backup import ( + AgentBackup, + BackupAgent, + BackupAgentError, + BackupNotFound, +) +from homeassistant.core import HomeAssistant, callback + +from . 
import SFTPConfigEntry +from .client import BackupAgentClient +from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN, LOGGER + + +async def async_get_backup_agents( + hass: HomeAssistant, +) -> list[BackupAgent]: + """Register the backup agents.""" + entries: list[SFTPConfigEntry] = hass.config_entries.async_loaded_entries(DOMAIN) + return [SFTPBackupAgent(hass, entry) for entry in entries] + + +@callback +def async_register_backup_agents_listener( + hass: HomeAssistant, + *, + listener: Callable[[], None], + **kwargs: Any, +) -> Callable[[], None]: + """Register a listener to be called when agents are added or removed.""" + hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener) + + @callback + def remove_listener() -> None: + """Remove the listener.""" + hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener) + if not hass.data[DATA_BACKUP_AGENT_LISTENERS]: + del hass.data[DATA_BACKUP_AGENT_LISTENERS] + + return remove_listener + + +class SFTPBackupAgent(BackupAgent): + """SFTP Backup Storage agent.""" + + domain = DOMAIN + + def __init__(self, hass: HomeAssistant, entry: SFTPConfigEntry) -> None: + """Initialize the SFTPBackupAgent backup sync agent.""" + super().__init__() + self._entry: SFTPConfigEntry = entry + self._hass: HomeAssistant = hass + self.name: str = entry.title + self.unique_id: str = entry.entry_id + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file from SFTP.""" + LOGGER.debug( + "Establishing SFTP connection to remote host in order to download backup id: %s", + backup_id, + ) + try: + # Will raise BackupAgentError if failure to authenticate or SFTP Permissions + async with BackupAgentClient(self._entry, self._hass) as client: + return await client.iter_file(backup_id) + except FileNotFoundError as e: + raise BackupNotFound( + f"Unable to initiate download of backup id: {backup_id}. 
{e}" + ) from e + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup.""" + LOGGER.debug("Received request to upload backup: %s", backup) + iterator = await open_stream() + + LOGGER.debug( + "Establishing SFTP connection to remote host in order to upload backup" + ) + + # Will raise BackupAgentError if failure to authenticate or SFTP Permissions + async with BackupAgentClient(self._entry, self._hass) as client: + LOGGER.debug("Uploading backup: %s", backup.backup_id) + await client.async_upload_backup(iterator, backup) + LOGGER.debug("Successfully uploaded backup id: %s", backup.backup_id) + + async def async_delete_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> None: + """Delete a backup file from SFTP Storage.""" + LOGGER.debug("Received request to delete backup id: %s", backup_id) + + try: + LOGGER.debug( + "Establishing SFTP connection to remote host in order to delete backup" + ) + # Will raise BackupAgentError if failure to authenticate or SFTP Permissions + async with BackupAgentClient(self._entry, self._hass) as client: + await client.async_delete_backup(backup_id) + except FileNotFoundError as err: + raise BackupNotFound(str(err)) from err + except SFTPError as err: + raise BackupAgentError( + f"Failed to delete backup id: {backup_id}: {err}" + ) from err + + LOGGER.debug("Successfully removed backup id: %s", backup_id) + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups stored on SFTP Storage.""" + + # Will raise BackupAgentError if failure to authenticate or SFTP Permissions + async with BackupAgentClient(self._entry, self._hass) as client: + try: + return await client.async_list_backups() + except SFTPError as err: + raise BackupAgentError( + f"Remote server error while attempting to list backups: {err}" + ) from err + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup: + """Return a backup.""" + backups = await self.async_list_backups() + + for backup in backups: + if backup.backup_id == backup_id: + LOGGER.debug("Returning backup id: %s. %s", backup_id, backup) + return backup + + raise BackupNotFound(f"Backup id: {backup_id} not found") diff --git a/homeassistant/components/sftp_storage/client.py b/homeassistant/components/sftp_storage/client.py new file mode 100644 index 00000000000000..246862f8551013 --- /dev/null +++ b/homeassistant/components/sftp_storage/client.py @@ -0,0 +1,311 @@ +"""Client for SFTP Storage integration.""" + +from __future__ import annotations + +from collections.abc import AsyncIterator +from dataclasses import dataclass +import json +from types import TracebackType +from typing import TYPE_CHECKING, Self + +from asyncssh import ( + SFTPClient, + SFTPClientFile, + SSHClientConnection, + SSHClientConnectionOptions, + connect, +) +from asyncssh.misc import PermissionDenied +from asyncssh.sftp import SFTPNoSuchFile, SFTPPermissionDenied + +from homeassistant.components.backup import ( + AgentBackup, + BackupAgentError, + suggested_filename, +) +from homeassistant.core import HomeAssistant + +from .const import BUF_SIZE, LOGGER + +if TYPE_CHECKING: + from . 
import SFTPConfigEntry, SFTPConfigEntryData + + +def get_client_options(cfg: SFTPConfigEntryData) -> SSHClientConnectionOptions: + """Use this function with `hass.async_add_executor_job` to asynchronously get `SSHClientConnectionOptions`.""" + + return SSHClientConnectionOptions( + known_hosts=None, + username=cfg.username, + password=cfg.password, + client_keys=cfg.private_key_file, + ) + + +class AsyncFileIterator: + """Returns iterator of remote file located in SFTP Server. + + This exists in order to properly close remote file after operation is completed + and to avoid premature closing of file and session if `BackupAgentClient` is used + as context manager. + """ + + _client: BackupAgentClient + _fileobj: SFTPClientFile + + def __init__( + self, + cfg: SFTPConfigEntry, + hass: HomeAssistant, + file_path: str, + buffer_size: int = BUF_SIZE, + ) -> None: + """Initialize `AsyncFileIterator`.""" + self.cfg: SFTPConfigEntry = cfg + self.hass: HomeAssistant = hass + self.file_path: str = file_path + self.buffer_size = buffer_size + self._initialized: bool = False + LOGGER.debug("Opening file: %s in Async File Iterator", file_path) + + async def _initialize(self) -> None: + """Load file object.""" + self._client: BackupAgentClient = await BackupAgentClient( + self.cfg, self.hass + ).open() + self._fileobj: SFTPClientFile = await self._client.sftp.open( + self.file_path, "rb" + ) + + self._initialized = True + + def __aiter__(self) -> AsyncIterator[bytes]: + """Return self as iterator.""" + return self + + async def __anext__(self) -> bytes: + """Return next bytes as provided in buffer size.""" + if not self._initialized: + await self._initialize() + + chunk: bytes = await self._fileobj.read(self.buffer_size) + if not chunk: + try: + await self._fileobj.close() + await self._client.close() + finally: + raise StopAsyncIteration + return chunk + + +@dataclass(kw_only=True) +class BackupMetadata: + """Represent single backup file metadata.""" + + file_path: str + metadata: dict[str, str | dict[str, list[str]]] + metadata_file: str + + +class BackupAgentClient: + """Helper class that manages SSH and SFTP Server connections.""" + + sftp: SFTPClient + + def __init__(self, config: SFTPConfigEntry, hass: HomeAssistant) -> None: + """Initialize `BackupAgentClient`.""" + self.cfg: SFTPConfigEntry = config + self.hass: HomeAssistant = hass + self._ssh: SSHClientConnection | None = None + LOGGER.debug("Initialized with config: %s", self.cfg.runtime_data) + + async def __aenter__(self) -> Self: + """Async context manager entrypoint.""" + + return await self.open() # type: ignore[return-value] # mypy will otherwise raise an error + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + traceback: TracebackType | None, + ) -> None: + """Async Context Manager exit routine.""" + if self.sftp: + self.sftp.exit() + await self.sftp.wait_closed() + + if self._ssh: + self._ssh.close() + + await self._ssh.wait_closed() + + async def _load_metadata(self, backup_id: str) -> BackupMetadata: + """Return `BackupMetadata` object`. + + Raises: + ------ + `FileNotFoundError` -- if metadata file is not found. + + """ + + # Test for metadata file existence. 
+ metadata_file = ( + f"{self.cfg.runtime_data.backup_location}/.{backup_id}.metadata.json" + ) + if not await self.sftp.exists(metadata_file): + raise FileNotFoundError( + f"Metadata file not found at remote location: {metadata_file}" + ) + + async with self.sftp.open(metadata_file, "r") as f: + return BackupMetadata( + **json.loads(await f.read()), metadata_file=metadata_file + ) + + async def async_delete_backup(self, backup_id: str) -> None: + """Delete backup archive. + + Raises: + ------ + `FileNotFoundError` -- if either metadata file or archive is not found. + + """ + + metadata: BackupMetadata = await self._load_metadata(backup_id) + + # If for whatever reason, archive does not exist but metadata file does, + # remove the metadata file. + if not await self.sftp.exists(metadata.file_path): + await self.sftp.unlink(metadata.metadata_file) + raise FileNotFoundError( + f"File at provided remote location: {metadata.file_path} does not exist." + ) + + LOGGER.debug("Removing file at path: %s", metadata.file_path) + await self.sftp.unlink(metadata.file_path) + LOGGER.debug("Removing metadata at path: %s", metadata.metadata_file) + await self.sftp.unlink(metadata.metadata_file) + + async def async_list_backups(self) -> list[AgentBackup]: + """Iterate through a list of metadata files and return a list of `AgentBackup` objects.""" + + backups: list[AgentBackup] = [] + + for file in await self.list_backup_location(): + LOGGER.debug( + "Evaluating metadata file at remote location: %s@%s:%s", + self.cfg.runtime_data.username, + self.cfg.runtime_data.host, + file, + ) + + try: + async with self.sftp.open(file, "r") as rfile: + metadata = BackupMetadata( + **json.loads(await rfile.read()), metadata_file=file + ) + backups.append(AgentBackup.from_dict(metadata.metadata)) + except (json.JSONDecodeError, TypeError) as e: + LOGGER.error( + "Failed to load backup metadata from file: %s. %s", file, str(e) + ) + continue + + return backups + + async def async_upload_backup( + self, + iterator: AsyncIterator[bytes], + backup: AgentBackup, + ) -> None: + """Accept `iterator` as bytes iterator and write backup archive to SFTP Server.""" + + file_path = ( + f"{self.cfg.runtime_data.backup_location}/{suggested_filename(backup)}" + ) + async with self.sftp.open(file_path, "wb") as f: + async for b in iterator: + await f.write(b) + + LOGGER.debug("Writing backup metadata") + metadata: dict[str, str | dict[str, list[str]]] = { + "file_path": file_path, + "metadata": backup.as_dict(), + } + async with self.sftp.open( + f"{self.cfg.runtime_data.backup_location}/.{backup.backup_id}.metadata.json", + "w", + ) as f: + await f.write(json.dumps(metadata)) + + async def close(self) -> None: + """Close the `BackupAgentClient` context manager.""" + await self.__aexit__(None, None, None) + + async def iter_file(self, backup_id: str) -> AsyncFileIterator: + """Return Async File Iterator object. + + `SFTPClientFile` object (that would be returned with `sftp.open`) is not an iterator. + So we return custom made class - `AsyncFileIterator` that would allow iteration on file object. + + Raises: + ------ + - `FileNotFoundError` -- if metadata or backup archive is not found. 
+ + """ + + metadata: BackupMetadata = await self._load_metadata(backup_id) + if not await self.sftp.exists(metadata.file_path): + raise FileNotFoundError("Backup archive not found on remote location.") + return AsyncFileIterator(self.cfg, self.hass, metadata.file_path, BUF_SIZE) + + async def list_backup_location(self) -> list[str]: + """Return a list of `*.metadata.json` files located in backup location.""" + files = [] + LOGGER.debug( + "Changing directory to: `%s`", self.cfg.runtime_data.backup_location + ) + await self.sftp.chdir(self.cfg.runtime_data.backup_location) + + for file in await self.sftp.listdir(): + LOGGER.debug( + "Checking if file: `%s/%s` is metadata file", + self.cfg.runtime_data.backup_location, + file, + ) + if file.endswith(".metadata.json"): + LOGGER.debug("Found metadata file: `%s`", file) + files.append(f"{self.cfg.runtime_data.backup_location}/{file}") + return files + + async def open(self) -> BackupAgentClient: + """Return initialized `BackupAgentClient`. + + This is to avoid calling `__aenter__` dunder method. + """ + + # Configure SSH Client Connection + try: + self._ssh = await connect( + host=self.cfg.runtime_data.host, + port=self.cfg.runtime_data.port, + options=await self.hass.async_add_executor_job( + get_client_options, self.cfg.runtime_data + ), + ) + except (OSError, PermissionDenied) as e: + raise BackupAgentError( + "Failure while attempting to establish SSH connection. Please check SSH credentials and if changed, re-install the integration" + ) from e + + # Configure SFTP Client Connection + try: + self.sftp = await self._ssh.start_sftp_client() + await self.sftp.chdir(self.cfg.runtime_data.backup_location) + except (SFTPNoSuchFile, SFTPPermissionDenied) as e: + raise BackupAgentError( + "Failed to create SFTP client. Re-installing integration might be required" + ) from e + + return self diff --git a/homeassistant/components/sftp_storage/config_flow.py b/homeassistant/components/sftp_storage/config_flow.py new file mode 100644 index 00000000000000..3168810edab49c --- /dev/null +++ b/homeassistant/components/sftp_storage/config_flow.py @@ -0,0 +1,236 @@ +"""Config flow to configure the SFTP Storage integration.""" + +from __future__ import annotations + +from contextlib import suppress +from pathlib import Path +import shutil +from typing import Any, cast + +from asyncssh import KeyImportError, SSHClientConnectionOptions, connect +from asyncssh.misc import PermissionDenied +from asyncssh.sftp import SFTPNoSuchFile, SFTPPermissionDenied +import voluptuous as vol + +from homeassistant.components.file_upload import process_uploaded_file +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.core import HomeAssistant +from homeassistant.helpers.selector import ( + FileSelector, + FileSelectorConfig, + TextSelector, + TextSelectorConfig, + TextSelectorType, +) +from homeassistant.helpers.storage import STORAGE_DIR +from homeassistant.util.ulid import ulid + +from . 
import SFTPConfigEntryData +from .client import get_client_options +from .const import ( + CONF_BACKUP_LOCATION, + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_PRIVATE_KEY_FILE, + CONF_USERNAME, + DEFAULT_PKEY_NAME, + DOMAIN, + LOGGER, +) + +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + vol.Required(CONF_PORT, default=22): int, + vol.Required(CONF_USERNAME): str, + vol.Optional(CONF_PASSWORD): TextSelector( + config=TextSelectorConfig(type=TextSelectorType.PASSWORD) + ), + vol.Optional(CONF_PRIVATE_KEY_FILE): FileSelector( + FileSelectorConfig(accept="*") + ), + vol.Required(CONF_BACKUP_LOCATION): str, + } +) + + +class SFTPStorageException(Exception): + """Base exception for SFTP Storage integration.""" + + +class SFTPStorageInvalidPrivateKey(SFTPStorageException): + """Exception raised during config flow - when user provided invalid private key file.""" + + +class SFTPStorageMissingPasswordOrPkey(SFTPStorageException): + """Exception raised during config flow - when user did not provide password or private key file.""" + + +class SFTPFlowHandler(ConfigFlow, domain=DOMAIN): + """Handle an SFTP Storage config flow.""" + + def __init__(self) -> None: + """Initialize SFTP Storage Flow Handler.""" + self._client_keys: list = [] + + async def _validate_auth_and_save_keyfile( + self, user_input: dict[str, Any] + ) -> dict[str, Any]: + """Validate authentication input and persist uploaded key file. + + Ensures that at least one of password or private key is provided. When a + private key is supplied, the uploaded file is saved to Home Assistant's + config storage and `user_input[CONF_PRIVATE_KEY_FILE]` is replaced with + the stored path. + + Returns: the possibly updated `user_input`. + + Raises: + - SFTPStorageMissingPasswordOrPkey: Neither password nor private key provided + - SFTPStorageInvalidPrivateKey: The provided private key has an invalid format + """ + + # If neither password nor private key is provided, error out; + # we need at least one to perform authentication. + if not (user_input.get(CONF_PASSWORD) or user_input.get(CONF_PRIVATE_KEY_FILE)): + raise SFTPStorageMissingPasswordOrPkey + + if key_file := user_input.get(CONF_PRIVATE_KEY_FILE): + client_key = await save_uploaded_pkey_file(self.hass, cast(str, key_file)) + + LOGGER.debug("Saved client key: %s", client_key) + user_input[CONF_PRIVATE_KEY_FILE] = client_key + + return user_input + + async def async_step_user( + self, + user_input: dict[str, Any] | None = None, + step_id: str = "user", + ) -> ConfigFlowResult: + """Handle a flow initiated by the user.""" + errors: dict[str, str] = {} + placeholders: dict[str, str] = {} + + if user_input is not None: + LOGGER.debug("Source: %s", self.source) + + self._async_abort_entries_match( + { + CONF_HOST: user_input[CONF_HOST], + CONF_PORT: user_input[CONF_PORT], + CONF_BACKUP_LOCATION: user_input[CONF_BACKUP_LOCATION], + } + ) + + try: + # Validate auth input and save uploaded key file if provided + user_input = await self._validate_auth_and_save_keyfile(user_input) + + # Create a session using your credentials + user_config = SFTPConfigEntryData( + host=user_input[CONF_HOST], + port=user_input[CONF_PORT], + username=user_input[CONF_USERNAME], + password=user_input.get(CONF_PASSWORD), + private_key_file=user_input.get(CONF_PRIVATE_KEY_FILE), + backup_location=user_input[CONF_BACKUP_LOCATION], + ) + + placeholders["backup_location"] = user_config.backup_location + + # Raises: + # - OSError, if host or port are not correct. 
+ # - SFTPStorageInvalidPrivateKey, if private key is not valid format. + # - asyncssh.misc.PermissionDenied, if credentials are not correct. + # - SFTPStorageMissingPasswordOrPkey, if password and private key are not provided. + # - asyncssh.sftp.SFTPNoSuchFile, if directory does not exist. + # - asyncssh.sftp.SFTPPermissionDenied, if we don't have access to said directory + async with ( + connect( + host=user_config.host, + port=user_config.port, + options=await self.hass.async_add_executor_job( + get_client_options, user_config + ), + ) as ssh, + ssh.start_sftp_client() as sftp, + ): + await sftp.chdir(user_config.backup_location) + await sftp.listdir() + + LOGGER.debug( + "Will register SFTP Storage agent with user@host %s@%s", + user_config.host, + user_config.username, + ) + + except OSError as e: + LOGGER.exception(e) + placeholders["error_message"] = str(e) + errors["base"] = "os_error" + except SFTPStorageInvalidPrivateKey: + errors["base"] = "invalid_key" + except PermissionDenied as e: + placeholders["error_message"] = str(e) + errors["base"] = "permission_denied" + except SFTPStorageMissingPasswordOrPkey: + errors["base"] = "key_or_password_needed" + except SFTPNoSuchFile: + errors["base"] = "sftp_no_such_file" + except SFTPPermissionDenied: + errors["base"] = "sftp_permission_denied" + except Exception as e: # noqa: BLE001 + LOGGER.exception(e) + placeholders["error_message"] = str(e) + placeholders["exception"] = type(e).__name__ + errors["base"] = "unknown" + else: + return self.async_create_entry( + title=f"{user_config.username}@{user_config.host}", + data=user_input, + ) + finally: + # We remove the saved private key file if any error occurred. + if errors and bool(user_input.get(CONF_PRIVATE_KEY_FILE)): + keyfile = Path(user_input[CONF_PRIVATE_KEY_FILE]) + keyfile.unlink(missing_ok=True) + with suppress(OSError): + keyfile.parent.rmdir() + + if user_input: + user_input.pop(CONF_PRIVATE_KEY_FILE, None) + + return self.async_show_form( + step_id=step_id, + data_schema=self.add_suggested_values_to_schema(DATA_SCHEMA, user_input), + description_placeholders=placeholders, + errors=errors, + ) + + +async def save_uploaded_pkey_file(hass: HomeAssistant, uploaded_file_id: str) -> str: + """Validate the uploaded private key and move it to the storage directory. + + Return a string representing a path to private key file. + Raises SFTPStorageInvalidPrivateKey if the file is invalid. 
+ """ + + def _process_upload() -> str: + with process_uploaded_file(hass, uploaded_file_id) as file_path: + try: + # Initializing this will verify if private key is in correct format + SSHClientConnectionOptions(client_keys=[file_path]) + except KeyImportError as err: + LOGGER.debug(err) + raise SFTPStorageInvalidPrivateKey from err + + dest_path = Path(hass.config.path(STORAGE_DIR, DOMAIN)) + dest_file = dest_path / f".{ulid()}_{DEFAULT_PKEY_NAME}" + + # Create parent directory + dest_file.parent.mkdir(exist_ok=True) + return str(shutil.move(file_path, dest_file)) + + return await hass.async_add_executor_job(_process_upload) diff --git a/homeassistant/components/sftp_storage/const.py b/homeassistant/components/sftp_storage/const.py new file mode 100644 index 00000000000000..aa582760be839f --- /dev/null +++ b/homeassistant/components/sftp_storage/const.py @@ -0,0 +1,27 @@ +"""Constants for the SFTP Storage integration.""" + +from __future__ import annotations + +from collections.abc import Callable +import logging +from typing import Final + +from homeassistant.util.hass_dict import HassKey + +DOMAIN: Final = "sftp_storage" + +LOGGER = logging.getLogger(__package__) + +CONF_HOST: Final = "host" +CONF_PORT: Final = "port" +CONF_USERNAME: Final = "username" +CONF_PASSWORD: Final = "password" +CONF_PRIVATE_KEY_FILE: Final = "private_key_file" +CONF_BACKUP_LOCATION: Final = "backup_location" + +BUF_SIZE = 2**20 * 4 # 4MB + +DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey( + f"{DOMAIN}.backup_agent_listeners" +) +DEFAULT_PKEY_NAME: str = "sftp_storage_pkey" diff --git a/homeassistant/components/sftp_storage/manifest.json b/homeassistant/components/sftp_storage/manifest.json new file mode 100644 index 00000000000000..c206bd13811f60 --- /dev/null +++ b/homeassistant/components/sftp_storage/manifest.json @@ -0,0 +1,13 @@ +{ + "domain": "sftp_storage", + "name": "SFTP Storage", + "after_dependencies": ["backup"], + "codeowners": ["@maretodoric"], + "config_flow": true, + "dependencies": ["file_upload"], + "documentation": "https://www.home-assistant.io/integrations/sftp_storage", + "integration_type": "service", + "iot_class": "local_polling", + "quality_scale": "silver", + "requirements": ["asyncssh==2.21.0"] +} diff --git a/homeassistant/components/sftp_storage/quality_scale.yaml b/homeassistant/components/sftp_storage/quality_scale.yaml new file mode 100644 index 00000000000000..1d34426be02622 --- /dev/null +++ b/homeassistant/components/sftp_storage/quality_scale.yaml @@ -0,0 +1,140 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: No actions. + appropriate-polling: + status: exempt + comment: No polling. + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: No actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: No entities. + entity-unique-id: + status: exempt + comment: No entities. + has-entity-name: + status: exempt + comment: No entities. + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: No configuration options. + docs-installation-parameters: done + entity-unavailable: + status: exempt + comment: No entities. 
+ integration-owner: done + log-when-unavailable: + status: exempt + comment: No entities. + parallel-updates: + status: exempt + comment: No actions and no entities. + reauthentication-flow: + status: exempt + comment: | + This backup storage integration uses static SFTP credentials that do not expire + or require token refresh. Authentication failures indicate configuration issues + that should be resolved by reconfiguring the integration. + test-coverage: done + # Gold + devices: + status: exempt + comment: | + This integration connects to a single service. + diagnostics: + status: exempt + comment: | + There is no data to diagnose. + discovery-update-info: + status: exempt + comment: | + This integration is a cloud service and does not support discovery. + discovery: + status: exempt + comment: | + This integration is a cloud service and does not support discovery. + docs-data-update: + status: exempt + comment: | + This integration does not poll or push. + docs-examples: + status: exempt + comment: | + This integration only serves backup. + docs-known-limitations: done + docs-supported-devices: + status: exempt + comment: | + This integration is a cloud service. + docs-supported-functions: + status: exempt + comment: | + This integration does not have entities. + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: + status: exempt + comment: | + This integration connects to a single service. + entity-category: + status: exempt + comment: | + This integration does not have entities. + entity-device-class: + status: exempt + comment: | + This integration does not have entities. + entity-disabled-by-default: + status: exempt + comment: | + This integration does not have entities. + entity-translations: + status: exempt + comment: | + This integration does not have entities. + exception-translations: done + icon-translations: + status: exempt + comment: | + This integration does not have entities. + reconfiguration-flow: + status: exempt + comment: | + This backup storage integration's configuration consists of static SFTP + connection parameters (host, port, credentials, backup path). Changes to + these parameters effectively create a connection to a different backup + location, which should be configured as a separate integration instance. + repair-issues: + status: exempt + comment: | + This integration provides backup storage functionality only. Connection + failures are handled through config entry setup errors and do not require + persistent repair issues. Users can resolve authentication or connectivity + problems by reconfiguring the integration through the config flow. + stale-devices: + status: exempt + comment: | + This integration connects to a single service. 
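The test-before-configure behavior marked done above corresponds to the connectivity check in the config flow earlier in this diff: connect over SSH, start an SFTP client, change into the backup location, and list it. Below is a minimal standalone sketch of that sequence, illustrative only; it assumes password authentication for brevity, and the host, credentials, and path are placeholders, not values from the integration.

import asyncssh


async def check_backup_location(
    host: str, port: int, username: str, password: str, backup_location: str
) -> list[str]:
    """Connect over SSH, enter the backup directory, and return its listing."""
    async with asyncssh.connect(
        host,
        port=port,
        username=username,
        password=password,
        known_hosts=None,  # sketch only: skip host key verification
    ) as conn:
        async with conn.start_sftp_client() as sftp:
            await sftp.chdir(backup_location)
            return await sftp.listdir()


# Usage with placeholder values: asyncio.run(check_backup_location("192.0.2.10", 22, "backup", "secret", "backups"))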
+ # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/sftp_storage/strings.json b/homeassistant/components/sftp_storage/strings.json new file mode 100644 index 00000000000000..da328bfd854c33 --- /dev/null +++ b/homeassistant/components/sftp_storage/strings.json @@ -0,0 +1,37 @@ +{ + "config": { + "step": { + "user": { + "description": "Set up SFTP Storage", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "port": "[%key:common::config_flow::data::port%]", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]", + "private_key_file": "Private key file", + "backup_location": "Remote path" + }, + "data_description": { + "host": "Hostname or IP address of the SSH/SFTP server to connect to.", + "port": "Port of your SSH/SFTP server. This is usually 22.", + "username": "Username to authenticate with.", + "password": "Password to authenticate with. Provide either this or a private key file.", + "private_key_file": "Upload the private key file used for authentication. Provide either this or a password.", + "backup_location": "Remote path where backups will be uploaded." + } + } + }, + "error": { + "invalid_key": "Invalid key uploaded. Make sure the key uses a supported SSH key algorithm.", + "key_or_password_needed": "Configure either a password or a private key file for SFTP Storage.", + "os_error": "{error_message}. Check that the host and port are correct.", + "permission_denied": "{error_message}", + "sftp_no_such_file": "Could not access directory {backup_location}. Make sure the directory exists.", + "sftp_permission_denied": "Permission denied for directory {backup_location}.", + "unknown": "Unexpected exception ({exception}) occurred during the config flow. {error_message}" + }, + "abort": { + "already_configured": "Integration is already configured. A host with the same address, port, and backup location already exists."
+ } + } +} diff --git a/homeassistant/components/sharkiq/__init__.py b/homeassistant/components/sharkiq/__init__.py index e560bb77b5788f..b87f52ba7b11c7 100644 --- a/homeassistant/components/sharkiq/__init__.py +++ b/homeassistant/components/sharkiq/__init__.py @@ -3,6 +3,7 @@ import asyncio from contextlib import suppress +import aiohttp from sharkiq import ( AylaApi, SharkIqAuthError, @@ -15,7 +16,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.aiohttp_client import async_create_clientsession from .const import ( API_TIMEOUT, @@ -56,10 +57,15 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b data={**config_entry.data, CONF_REGION: SHARKIQ_REGION_DEFAULT}, ) + new_websession = async_create_clientsession( + hass, + cookie_jar=aiohttp.CookieJar(unsafe=True, quote_cookie=False), + ) + ayla_api = get_ayla_api( username=config_entry.data[CONF_USERNAME], password=config_entry.data[CONF_PASSWORD], - websession=async_get_clientsession(hass), + websession=new_websession, europe=(config_entry.data[CONF_REGION] == SHARKIQ_REGION_EUROPE), ) @@ -94,7 +100,7 @@ async def async_disconnect_or_timeout(coordinator: SharkIqUpdateCoordinator): await coordinator.ayla_api.async_sign_out() -async def async_update_options(hass, config_entry): +async def async_update_options(hass: HomeAssistant, config_entry): """Update options.""" await hass.config_entries.async_reload(config_entry.entry_id) diff --git a/homeassistant/components/sharkiq/config_flow.py b/homeassistant/components/sharkiq/config_flow.py index 87367fcf093ea4..7174c6347879aa 100644 --- a/homeassistant/components/sharkiq/config_flow.py +++ b/homeassistant/components/sharkiq/config_flow.py @@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import selector -from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.aiohttp_client import async_create_clientsession from .const import ( DOMAIN, @@ -44,15 +44,19 @@ async def _validate_input( hass: HomeAssistant, data: Mapping[str, Any] ) -> dict[str, str]: """Validate the user input allows us to connect.""" + new_websession = async_create_clientsession( + hass, + cookie_jar=aiohttp.CookieJar(unsafe=True, quote_cookie=False), + ) ayla_api = get_ayla_api( username=data[CONF_USERNAME], password=data[CONF_PASSWORD], - websession=async_get_clientsession(hass), + websession=new_websession, europe=(data[CONF_REGION] == SHARKIQ_REGION_EUROPE), ) try: - async with asyncio.timeout(10): + async with asyncio.timeout(15): LOGGER.debug("Initialize connection to Ayla networks API") await ayla_api.async_sign_in() except (TimeoutError, aiohttp.ClientError, TypeError) as error: diff --git a/homeassistant/components/sharkiq/manifest.json b/homeassistant/components/sharkiq/manifest.json index c29fc582462c43..793f65483ea2b7 100644 --- a/homeassistant/components/sharkiq/manifest.json +++ b/homeassistant/components/sharkiq/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/sharkiq", "iot_class": "cloud_polling", "loggers": ["sharkiq"], - "requirements": ["sharkiq==1.1.1"] + "requirements": ["sharkiq==1.4.0"] } diff --git a/homeassistant/generated/config_flows.py 
b/homeassistant/generated/config_flows.py index 61654f0c3d1da9..114e8230596c23 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -570,6 +570,7 @@ "senz", "seventeentrack", "sfr_box", + "sftp_storage", "sharkiq", "shelly", "shopping_list", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 4e243fb686f359..7effcc500bb147 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -5872,6 +5872,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "sftp_storage": { + "name": "SFTP Storage", + "integration_type": "service", + "config_flow": true, + "iot_class": "local_polling" + }, "sharkiq": { "name": "Shark IQ", "integration_type": "hub", diff --git a/mypy.ini b/mypy.ini index 41ab0f88a10ab5..5787bb8de8405d 100644 --- a/mypy.ini +++ b/mypy.ini @@ -4356,6 +4356,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.sftp_storage.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.shell_command.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 65d5e1f483ee2f..de55634a3eeb80 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -550,6 +550,9 @@ asyncpysupla==0.0.5 # homeassistant.components.sleepiq asyncsleepiq==1.6.0 +# homeassistant.components.sftp_storage +asyncssh==2.21.0 + # homeassistant.components.aten_pe # atenpdu==0.3.2 @@ -618,7 +621,7 @@ beautifulsoup4==4.13.3 # beewi-smartclim==0.0.10 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.17.2 +bimmer-connected[china]==0.17.3 # homeassistant.components.bizkaibus bizkaibus==0.1.1 @@ -1241,7 +1244,7 @@ igloohome-api==0.1.1 ihcsdk==2.8.5 # homeassistant.components.imeon_inverter -imeon_inverter_api==0.3.14 +imeon_inverter_api==0.3.16 # homeassistant.components.imgw_pib imgw_pib==1.5.4 @@ -2771,7 +2774,7 @@ sentry-sdk==1.45.1 sfrbox-api==0.0.12 # homeassistant.components.sharkiq -sharkiq==1.1.1 +sharkiq==1.4.0 # homeassistant.components.aquostv sharp_aquos_rc==0.3.2 diff --git a/requirements_test.txt b/requirements_test.txt index bb2596d1f7fadb..83dde6fc8f0b5c 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -35,19 +35,19 @@ requests-mock==1.12.1 respx==0.22.0 syrupy==4.9.1 tqdm==4.67.1 -types-aiofiles==24.1.0.20250809 +types-aiofiles==24.1.0.20250822 types-atomicwrites==1.4.5.1 types-croniter==6.0.0.20250809 types-caldav==1.3.0.20250516 types-chardet==0.1.5 types-decorator==5.2.0.20250324 types-pexpect==4.9.0.20250809 -types-protobuf==6.30.2.20250809 -types-psutil==7.0.0.20250801 -types-pyserial==3.5.0.20250809 -types-python-dateutil==2.9.0.20250809 +types-protobuf==6.30.2.20250822 +types-psutil==7.0.0.20250822 +types-pyserial==3.5.0.20250822 +types-python-dateutil==2.9.0.20250822 types-python-slugify==8.0.2.20240310 types-pytz==2025.2.0.20250809 -types-PyYAML==6.0.12.20250809 +types-PyYAML==6.0.12.20250822 types-requests==2.32.4.20250809 types-xmltodict==0.13.0.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 82766490811076..ff0eaae509571e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -508,6 +508,9 @@ asyncarve==0.1.1 # 
homeassistant.components.sleepiq asyncsleepiq==1.6.0 +# homeassistant.components.sftp_storage +asyncssh==2.21.0 + # homeassistant.components.aurora auroranoaa==0.0.5 @@ -555,7 +558,7 @@ base36==0.1.1 beautifulsoup4==4.13.3 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.17.2 +bimmer-connected[china]==0.17.3 # homeassistant.components.eq3btsmart # homeassistant.components.esphome @@ -1075,7 +1078,7 @@ ifaddr==0.2.0 igloohome-api==0.1.1 # homeassistant.components.imeon_inverter -imeon_inverter_api==0.3.14 +imeon_inverter_api==0.3.16 # homeassistant.components.imgw_pib imgw_pib==1.5.4 @@ -2293,7 +2296,7 @@ sentry-sdk==1.45.1 sfrbox-api==0.0.12 # homeassistant.components.sharkiq -sharkiq==1.1.1 +sharkiq==1.4.0 # homeassistant.components.simplefin simplefin4py==0.0.18 diff --git a/tests/components/analytics/test_analytics.py b/tests/components/analytics/test_analytics.py index 51579177e7e0e8..30bd2c6d7230bb 100644 --- a/tests/components/analytics/test_analytics.py +++ b/tests/components/analytics/test_analytics.py @@ -23,10 +23,13 @@ ATTR_STATISTICS, ATTR_USAGE, ) +from homeassistant.components.number import NumberDeviceClass +from homeassistant.components.sensor import SensorDeviceClass from homeassistant.config_entries import ConfigEntryDisabler, ConfigEntryState +from homeassistant.const import ATTR_ASSUMED_STATE, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.loader import IntegrationNotFound from homeassistant.setup import async_setup_component @@ -976,25 +979,22 @@ async def test_submitting_legacy_integrations( @pytest.mark.usefixtures("enable_custom_integrations") -async def test_devices_payload( +async def test_devices_payload_no_entities( hass: HomeAssistant, hass_client: ClientSessionGenerator, device_registry: dr.DeviceRegistry, ) -> None: - """Test devices payload.""" + """Test devices payload with no entities.""" assert await async_setup_component(hass, "analytics", {}) assert await async_devices_payload(hass) == { "version": "home-assistant:1", "home_assistant": MOCK_VERSION, - "devices": [], + "integrations": {}, } mock_config_entry = MockConfigEntry(domain="hue") mock_config_entry.add_to_hass(hass) - mock_custom_config_entry = MockConfigEntry(domain="test") - mock_custom_config_entry.add_to_hass(hass) - # Normal device with all fields device_registry.async_get_or_create( config_entry_id=mock_config_entry.entry_id, @@ -1019,10 +1019,8 @@ async def test_devices_payload( ) # Device without model_id - no_model_id_config_entry = MockConfigEntry(domain="no_model_id") - no_model_id_config_entry.add_to_hass(hass) device_registry.async_get_or_create( - config_entry_id=no_model_id_config_entry.entry_id, + config_entry_id=mock_config_entry.entry_id, identifiers={("device", "4")}, manufacturer="test-manufacturer", ) @@ -1044,6 +1042,8 @@ async def test_devices_payload( ) # Device from custom integration + mock_custom_config_entry = MockConfigEntry(domain="test") + mock_custom_config_entry.add_to_hass(hass) device_registry.async_get_or_create( config_entry_id=mock_custom_config_entry.entry_id, identifiers={("device", "7")}, @@ -1051,86 +1051,262 @@ async def test_devices_payload( model_id="test-model-id7", ) - assert await async_devices_payload(hass) == { + client = await hass_client() + response = await client.get("/api/analytics/devices") + 
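# The expected payload below groups devices per integration domain, gives each device its own entity list, and expresses via_device as a [domain, index] reference into that integration's device list (for example ["hue", 0]) instead of a device registry id. +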
assert response.status == HTTPStatus.OK + assert await response.json() == { "version": "home-assistant:1", "home_assistant": MOCK_VERSION, - "devices": [ - { - "manufacturer": "test-manufacturer", - "model_id": "test-model-id", - "model": "test-model", - "sw_version": "test-sw-version", - "hw_version": "test-hw-version", - "integration": "hue", - "is_custom_integration": False, - "has_configuration_url": True, - "via_device": None, - "entry_type": None, - }, - { - "manufacturer": "test-manufacturer", - "model_id": "test-model-id", - "model": None, - "sw_version": None, - "hw_version": None, - "integration": "hue", - "is_custom_integration": False, - "has_configuration_url": False, - "via_device": None, - "entry_type": "service", - }, - { - "manufacturer": "test-manufacturer", - "model_id": None, - "model": None, - "sw_version": None, - "hw_version": None, - "integration": "no_model_id", - "has_configuration_url": False, - "via_device": None, - "entry_type": None, - }, - { - "manufacturer": None, - "model_id": "test-model-id", - "model": None, - "sw_version": None, - "hw_version": None, - "integration": "hue", - "is_custom_integration": False, - "has_configuration_url": False, - "via_device": None, - "entry_type": None, - }, - { - "manufacturer": "test-manufacturer6", - "model_id": "test-model-id6", - "model": None, - "sw_version": None, - "hw_version": None, - "integration": "hue", + "integrations": { + "hue": { + "devices": [ + { + "entities": [], + "entry_type": None, + "has_configuration_url": True, + "hw_version": "test-hw-version", + "manufacturer": "test-manufacturer", + "model": "test-model", + "model_id": "test-model-id", + "sw_version": "test-sw-version", + "via_device": None, + }, + { + "entities": [], + "entry_type": "service", + "has_configuration_url": False, + "hw_version": None, + "manufacturer": "test-manufacturer", + "model": None, + "model_id": "test-model-id", + "sw_version": None, + "via_device": None, + }, + { + "entities": [], + "entry_type": None, + "has_configuration_url": False, + "hw_version": None, + "manufacturer": "test-manufacturer", + "model": None, + "model_id": None, + "sw_version": None, + "via_device": None, + }, + { + "entities": [], + "entry_type": None, + "has_configuration_url": False, + "hw_version": None, + "manufacturer": None, + "model": None, + "model_id": "test-model-id", + "sw_version": None, + "via_device": None, + }, + { + "entities": [], + "entry_type": None, + "has_configuration_url": False, + "hw_version": None, + "manufacturer": "test-manufacturer6", + "model": None, + "model_id": "test-model-id6", + "sw_version": None, + "via_device": ["hue", 0], + }, + ], + "entities": [], "is_custom_integration": False, - "has_configuration_url": False, - "via_device": 0, - "entry_type": None, }, - { - "entry_type": None, - "has_configuration_url": False, - "hw_version": None, - "integration": "test", - "manufacturer": "test-manufacturer7", - "model": None, - "model_id": "test-model-id7", - "sw_version": None, - "via_device": None, + "test": { + "devices": [ + { + "entities": [], + "entry_type": None, + "has_configuration_url": False, + "hw_version": None, + "manufacturer": "test-manufacturer7", + "model": None, + "model_id": "test-model-id7", + "sw_version": None, + "via_device": None, + }, + ], + "entities": [], "is_custom_integration": True, "custom_integration_version": "1.2.3", }, - ], + }, } + +async def test_devices_payload_with_entities( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + device_registry: dr.DeviceRegistry, + 
entity_registry: er.EntityRegistry, +) -> None: + """Test devices payload with entities.""" + assert await async_setup_component(hass, "analytics", {}) + + mock_config_entry = MockConfigEntry(domain="hue") + mock_config_entry.add_to_hass(hass) + + device_entry = device_registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + identifiers={("device", "1")}, + manufacturer="test-manufacturer", + model_id="test-model-id", + ) + device_entry_2 = device_registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + identifiers={("device", "2")}, + manufacturer="test-manufacturer", + model_id="test-model-id", + ) + + # First device + + # Entity with capabilities + entity_registry.async_get_or_create( + domain="light", + platform="hue", + unique_id="1", + capabilities={"min_color_temp_kelvin": 2000, "max_color_temp_kelvin": 6535}, + device_id=device_entry.id, + has_entity_name=True, + ) + # Entity with category and device class + entity_registry.async_get_or_create( + domain="number", + platform="hue", + unique_id="1", + device_id=device_entry.id, + entity_category=EntityCategory.CONFIG, + has_entity_name=True, + original_device_class=NumberDeviceClass.TEMPERATURE, + ) + hass.states.async_set("number.hue_1", "2") + # Helper entity with assumed state + entity_registry.async_get_or_create( + domain="light", + platform="template", + unique_id="1", + device_id=device_entry.id, + has_entity_name=True, + ) + hass.states.async_set("light.template_1", "on", {ATTR_ASSUMED_STATE: True}) + + # Second device + entity_registry.async_get_or_create( + domain="light", + platform="hue", + unique_id="2", + device_id=device_entry_2.id, + ) + + # Entity without device with unit of measurement and state class + entity_registry.async_get_or_create( + domain="sensor", + platform="hue", + unique_id="1", + capabilities={"state_class": "measurement"}, + original_device_class=SensorDeviceClass.TEMPERATURE, + unit_of_measurement="°C", + ) + client = await hass_client() response = await client.get("/api/analytics/devices") assert response.status == HTTPStatus.OK - assert await response.json() == await async_devices_payload(hass) + assert await response.json() == { + "version": "home-assistant:1", + "home_assistant": MOCK_VERSION, + "integrations": { + "hue": { + "devices": [ + { + "entities": [ + { + "assumed_state": None, + "capabilities": { + "min_color_temp_kelvin": 2000, + "max_color_temp_kelvin": 6535, + }, + "domain": "light", + "entity_category": None, + "has_entity_name": True, + "original_device_class": None, + "unit_of_measurement": None, + }, + { + "assumed_state": False, + "capabilities": None, + "domain": "number", + "entity_category": "config", + "has_entity_name": True, + "original_device_class": "temperature", + "unit_of_measurement": None, + }, + ], + "entry_type": None, + "has_configuration_url": False, + "hw_version": None, + "manufacturer": "test-manufacturer", + "model": None, + "model_id": "test-model-id", + "sw_version": None, + "via_device": None, + }, + { + "entities": [ + { + "assumed_state": None, + "capabilities": None, + "domain": "light", + "entity_category": None, + "has_entity_name": False, + "original_device_class": None, + "unit_of_measurement": None, + }, + ], + "entry_type": None, + "has_configuration_url": False, + "hw_version": None, + "manufacturer": "test-manufacturer", + "model": None, + "model_id": "test-model-id", + "sw_version": None, + "via_device": None, + }, + ], + "entities": [ + { + "assumed_state": None, + "capabilities": {"state_class": 
"measurement"}, + "domain": "sensor", + "entity_category": None, + "has_entity_name": False, + "original_device_class": "temperature", + "unit_of_measurement": "°C", + }, + ], + "is_custom_integration": False, + }, + "template": { + "devices": [], + "entities": [ + { + "assumed_state": True, + "capabilities": None, + "domain": "light", + "entity_category": None, + "has_entity_name": True, + "original_device_class": None, + "unit_of_measurement": None, + }, + ], + "is_custom_integration": False, + }, + }, + } diff --git a/tests/components/androidtv_remote/test_config_flow.py b/tests/components/androidtv_remote/test_config_flow.py index 9652ac0c3a92f9..41c1d95830c467 100644 --- a/tests/components/androidtv_remote/test_config_flow.py +++ b/tests/components/androidtv_remote/test_config_flow.py @@ -102,6 +102,10 @@ async def test_user_flow_cannot_connect( assert not result["errors"] host = "1.2.3.4" + name = "My Android TV" + mac = "1A:2B:3C:4D:5E:6F" + unique_id = "1a:2b:3c:4d:5e:6f" + pin = "123456" mock_api.async_generate_cert_if_missing = AsyncMock(return_value=True) mock_api.async_get_name_and_mac = AsyncMock(side_effect=CannotConnect()) @@ -119,9 +123,87 @@ async def test_user_flow_cannot_connect( mock_api.async_get_name_and_mac.assert_called() mock_api.async_start_pairing.assert_not_called() + # End in CREATE_ENTRY to test that its able to recover + mock_api.async_get_name_and_mac = AsyncMock(return_value=(name, mac)) + mock_api.async_start_pairing = AsyncMock(return_value=None) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"host": host} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "pair" + assert "pin" in result["data_schema"].schema + assert not result["errors"] + + mock_api.async_finish_pairing = AsyncMock(return_value=None) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"pin": pin} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == name + assert result["data"] == {"host": host, "name": name, "mac": mac} + assert result["context"]["unique_id"] == unique_id + await hass.async_block_till_done() assert len(mock_unload_entry.mock_calls) == 0 - assert len(mock_setup_entry.mock_calls) == 0 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_user_flow_start_pair_cannot_connect( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_unload_entry: AsyncMock, + mock_api: MagicMock, +) -> None: + """Test async_start_pairing raises CannotConnect in the user flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert "host" in result["data_schema"].schema + assert not result["errors"] + + host = "1.2.3.4" + name = "My Android TV" + mac = "1A:2B:3C:4D:5E:6F" + + mock_api.async_generate_cert_if_missing = AsyncMock(return_value=True) + mock_api.async_get_name_and_mac = AsyncMock(return_value=(name, mac)) + mock_api.async_start_pairing = AsyncMock(side_effect=CannotConnect()) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"host": host} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert "host" in result["data_schema"].schema + assert result["errors"] == {"base": "cannot_connect"} + + mock_api.async_generate_cert_if_missing.assert_called() + mock_api.async_get_name_and_mac.assert_called() + 
mock_api.async_start_pairing.assert_called() + + pin = "123456" + mock_api.async_start_pairing = AsyncMock(return_value=None) + mock_api.async_finish_pairing = AsyncMock(return_value=None) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"host": host} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "pair" + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"pin": pin} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + await hass.async_block_till_done() + assert len(mock_setup_entry.mock_calls) == 1 async def test_user_flow_pairing_invalid_auth( @@ -146,6 +228,7 @@ async def test_user_flow_pairing_invalid_auth( host = "1.2.3.4" name = "My Android TV" mac = "1A:2B:3C:4D:5E:6F" + unique_id = "1a:2b:3c:4d:5e:6f" pin = "123456" mock_api.async_get_name_and_mac = AsyncMock(return_value=(name, mac)) @@ -164,7 +247,7 @@ async def test_user_flow_pairing_invalid_auth( mock_api.async_generate_cert_if_missing.assert_called() mock_api.async_start_pairing.assert_called() - mock_api.async_finish_pairing = AsyncMock(side_effect=InvalidAuth()) + mock_api.async_finish_pairing = AsyncMock(side_effect=[InvalidAuth(), None]) result = await hass.config_entries.flow.async_configure( result["flow_id"], {"pin": pin} @@ -181,9 +264,19 @@ async def test_user_flow_pairing_invalid_auth( assert mock_api.async_start_pairing.call_count == 1 assert mock_api.async_finish_pairing.call_count == 1 + # End in CREATE_ENTRY to test that its able to recover + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"pin": pin} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == name + assert result["data"] == {"host": host, "name": name, "mac": mac} + assert result["context"]["unique_id"] == unique_id + + assert mock_api.async_finish_pairing.call_count == 2 await hass.async_block_till_done() assert len(mock_unload_entry.mock_calls) == 0 - assert len(mock_setup_entry.mock_calls) == 0 + assert len(mock_setup_entry.mock_calls) == 1 async def test_user_flow_pairing_connection_closed( @@ -208,6 +301,7 @@ async def test_user_flow_pairing_connection_closed( host = "1.2.3.4" name = "My Android TV" mac = "1A:2B:3C:4D:5E:6F" + unique_id = "1a:2b:3c:4d:5e:6f" pin = "123456" mock_api.async_get_name_and_mac = AsyncMock(return_value=(name, mac)) @@ -243,9 +337,19 @@ async def test_user_flow_pairing_connection_closed( assert mock_api.async_start_pairing.call_count == 2 assert mock_api.async_finish_pairing.call_count == 1 + # End in CREATE_ENTRY to test that its able to recover + mock_api.async_finish_pairing = AsyncMock(return_value=None) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"pin": pin} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == name + assert result["data"] == {"host": host, "name": name, "mac": mac} + assert result["context"]["unique_id"] == unique_id + await hass.async_block_till_done() assert len(mock_unload_entry.mock_calls) == 0 - assert len(mock_setup_entry.mock_calls) == 0 + assert len(mock_setup_entry.mock_calls) == 1 async def test_user_flow_pairing_connection_closed_followed_by_cannot_connect( @@ -568,6 +672,7 @@ async def test_zeroconf_flow_pairing_invalid_auth( host = "1.2.3.4" name = "My Android TV" mac = "1A:2B:3C:4D:5E:6F" + unique_id = "1a:2b:3c:4d:5e:6f" pin = "123456" result = await hass.config_entries.flow.async_init( @@ -602,7 +707,7 @@ async def 
test_zeroconf_flow_pairing_invalid_auth( mock_api.async_generate_cert_if_missing.assert_called() mock_api.async_start_pairing.assert_called() - mock_api.async_finish_pairing = AsyncMock(side_effect=InvalidAuth()) + mock_api.async_finish_pairing = AsyncMock(side_effect=[InvalidAuth(), None]) result = await hass.config_entries.flow.async_configure( result["flow_id"], {"pin": pin} @@ -619,9 +724,18 @@ async def test_zeroconf_flow_pairing_invalid_auth( assert mock_api.async_start_pairing.call_count == 1 assert mock_api.async_finish_pairing.call_count == 1 + # End in CREATE_ENTRY to test that its able to recover + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"pin": pin} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == name + assert result["data"] == {"host": host, "name": name, "mac": mac} + assert result["context"]["unique_id"] == unique_id + await hass.async_block_till_done() assert len(mock_unload_entry.mock_calls) == 0 - assert len(mock_setup_entry.mock_calls) == 0 + assert len(mock_setup_entry.mock_calls) == 1 async def test_zeroconf_flow_already_configured_host_changed_reloads_entry( @@ -1123,7 +1237,12 @@ async def test_reconfigure_flow_cannot_connect( assert result["step_id"] == "reconfigure" mock_api.async_generate_cert_if_missing = AsyncMock(return_value=True) - mock_api.async_get_name_and_mac = AsyncMock(side_effect=CannotConnect()) + mock_api.async_get_name_and_mac = AsyncMock( + side_effect=[ + CannotConnect(), + (mock_config_entry.data["name"], mock_config_entry.data["mac"]), + ] + ) new_host = "4.3.2.1" result = await hass.config_entries.flow.async_configure( @@ -1136,6 +1255,16 @@ async def test_reconfigure_flow_cannot_connect( assert mock_config_entry.data["host"] == "1.2.3.4" assert len(mock_setup_entry.mock_calls) == 0 + # End in CREATE_ENTRY to test that its able to recover + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"host": new_host} + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert mock_config_entry.data["host"] == new_host + assert len(mock_setup_entry.mock_calls) == 1 + async def test_reconfigure_flow_unique_id_mismatch( hass: HomeAssistant, diff --git a/tests/components/androidtv_remote/test_media_player.py b/tests/components/androidtv_remote/test_media_player.py index 2af8aeb2f56552..ba885759979880 100644 --- a/tests/components/androidtv_remote/test_media_player.py +++ b/tests/components/androidtv_remote/test_media_player.py @@ -291,7 +291,7 @@ async def test_media_player_play_media( ) mock_api.send_launch_app_command.assert_called_with("tv.twitch.android.app") - with pytest.raises(ValueError): + with pytest.raises(HomeAssistantError, match="Channel must be numeric: abc"): await hass.services.async_call( "media_player", "play_media", @@ -303,7 +303,7 @@ async def test_media_player_play_media( blocking=True, ) - with pytest.raises(ValueError): + with pytest.raises(HomeAssistantError, match="Invalid media type: music"): await hass.services.async_call( "media_player", "play_media", diff --git a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr index b87da22a332eef..06e90c878af214 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr @@ -138,6 +138,7 @@ 'state': 'LOW', }), ]), + 'urgent_check_control_messages': 
None, }), 'climate': dict({ 'activity': 'INACTIVE', @@ -193,6 +194,24 @@ 'state': 'OK', }), ]), + 'next_service_by_distance': dict({ + 'due_date': '2024-12-01T00:00:00+00:00', + 'due_distance': list([ + 50000, + 'km', + ]), + 'service_type': 'BRAKE_FLUID', + 'state': 'OK', + }), + 'next_service_by_time': dict({ + 'due_date': '2024-12-01T00:00:00+00:00', + 'due_distance': list([ + 50000, + 'km', + ]), + 'service_type': 'BRAKE_FLUID', + 'state': 'OK', + }), }), 'data': dict({ 'attributes': dict({ @@ -1053,6 +1072,7 @@ 'state': 'LOW', }), ]), + 'urgent_check_control_messages': None, }), 'climate': dict({ 'activity': 'HEATING', @@ -1108,6 +1128,24 @@ 'state': 'OK', }), ]), + 'next_service_by_distance': dict({ + 'due_date': '2024-12-01T00:00:00+00:00', + 'due_distance': list([ + 50000, + 'km', + ]), + 'service_type': 'BRAKE_FLUID', + 'state': 'OK', + }), + 'next_service_by_time': dict({ + 'due_date': '2024-12-01T00:00:00+00:00', + 'due_distance': list([ + 50000, + 'km', + ]), + 'service_type': 'BRAKE_FLUID', + 'state': 'OK', + }), }), 'data': dict({ 'attributes': dict({ @@ -1858,6 +1896,7 @@ 'state': 'LOW', }), ]), + 'urgent_check_control_messages': None, }), 'climate': dict({ 'activity': 'INACTIVE', @@ -1922,6 +1961,24 @@ 'state': 'OK', }), ]), + 'next_service_by_distance': dict({ + 'due_date': '2024-12-01T00:00:00+00:00', + 'due_distance': list([ + 50000, + 'km', + ]), + 'service_type': 'BRAKE_FLUID', + 'state': 'OK', + }), + 'next_service_by_time': dict({ + 'due_date': '2024-12-01T00:00:00+00:00', + 'due_distance': list([ + 50000, + 'km', + ]), + 'service_type': 'BRAKE_FLUID', + 'state': 'OK', + }), }), 'data': dict({ 'attributes': dict({ @@ -2621,6 +2678,7 @@ 'has_check_control_messages': False, 'messages': list([ ]), + 'urgent_check_control_messages': None, }), 'climate': dict({ 'activity': 'UNKNOWN', @@ -2658,6 +2716,16 @@ 'state': 'OK', }), ]), + 'next_service_by_distance': None, + 'next_service_by_time': dict({ + 'due_date': '2022-10-01T00:00:00+00:00', + 'due_distance': list([ + None, + None, + ]), + 'service_type': 'BRAKE_FLUID', + 'state': 'OK', + }), }), 'data': dict({ 'attributes': dict({ @@ -4991,6 +5059,7 @@ 'has_check_control_messages': False, 'messages': list([ ]), + 'urgent_check_control_messages': None, }), 'climate': dict({ 'activity': 'UNKNOWN', @@ -5028,6 +5097,16 @@ 'state': 'OK', }), ]), + 'next_service_by_distance': None, + 'next_service_by_time': dict({ + 'due_date': '2022-10-01T00:00:00+00:00', + 'due_distance': list([ + None, + None, + ]), + 'service_type': 'BRAKE_FLUID', + 'state': 'OK', + }), }), 'data': dict({ 'attributes': dict({ diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 7c5e897d86c670..a90cd1b55c188c 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -231,6 +231,29 @@ async def test_conversation_agent(hass: HomeAssistant) -> None: ) +@pytest.mark.usefixtures("init_components") +async def test_punctuation(hass: HomeAssistant) -> None: + """Test punctuation is handled properly.""" + hass.states.async_set( + "light.test_light", + "off", + attributes={ATTR_FRIENDLY_NAME: "Test light"}, + ) + expose_entity(hass, "light.test_light", True) + + calls = async_mock_service(hass, "light", "turn_on") + result = await conversation.async_converse( + hass, "Turn?? 
on,, test;; light!!!", None, Context(), None + ) + + assert len(calls) == 1 + assert calls[0].data["entity_id"][0] == "light.test_light" + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.intent is not None + assert result.response.intent.slots["name"]["value"] == "test light" + assert result.response.intent.slots["name"]["text"] == "test light" + + async def test_expose_flag_automatically_set( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/hue/test_config_flow.py b/tests/components/hue/test_config_flow.py index e4bdda422d1c82..bc63343f9bef9b 100644 --- a/tests/components/hue/test_config_flow.py +++ b/tests/components/hue/test_config_flow.py @@ -4,7 +4,7 @@ from unittest.mock import Mock, patch from aiohue.discovery import URL_NUPNP -from aiohue.errors import LinkButtonNotPressed +from aiohue.errors import AiohueException, LinkButtonNotPressed import pytest import voluptuous as vol @@ -732,3 +732,216 @@ async def test_bridge_connection_failed( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" + + +async def test_bsb003_bridge_discovery( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + device_registry: dr.DeviceRegistry, +) -> None: + """Test a bridge being discovered.""" + entry = MockConfigEntry( + domain=const.DOMAIN, + data={"host": "192.168.1.217", "api_version": 2, "api_key": "abc"}, + unique_id="bsb002_00000", + ) + entry.add_to_hass(hass) + device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(const.DOMAIN, "bsb002_00000")}, + connections={(dr.CONNECTION_NETWORK_MAC, "AA:BB:CC:DD:EE:FF")}, + ) + create_mock_api_discovery( + aioclient_mock, + [("192.168.1.217", "bsb002_00000"), ("192.168.1.218", "bsb003_00000")], + ) + disc_bridge = get_discovered_bridge( + bridge_id="bsb003_00000", host="192.168.1.218", supports_v2=True + ) + + with ( + patch( + "homeassistant.components.hue.config_flow.discover_bridge", + return_value=disc_bridge, + ), + patch( + "homeassistant.components.hue.config_flow.HueBridgeV2", + autospec=True, + ) as mock_bridge, + ): + mock_bridge.return_value.fetch_full_state.return_value = {} + result = await hass.config_entries.flow.async_init( + const.DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.218"), + ip_addresses=[ip_address("192.168.1.218")], + port=443, + hostname="Philips-hue.local", + type="_hue._tcp.local.", + name="Philips Hue - ABCABC._hue._tcp.local.", + properties={ + "bridgeid": "bsb003_00000", + "modelid": "BSB003", + }, + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "migrated_bridge" + + migrated_device = device_registry.async_get(device.id) + + assert migrated_device is not None + assert len(migrated_device.identifiers) == 1 + assert list(migrated_device.identifiers)[0] == (const.DOMAIN, "bsb003_00000") + # The tests don't add new connection, but that will happen + # outside of the config flow + assert len(migrated_device.connections) == 0 + assert entry.data["host"] == "192.168.1.218" + + +async def test_bsb003_bridge_discovery_old_version( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + device_registry: dr.DeviceRegistry, +) -> None: + """Test a bridge being discovered.""" + entry = MockConfigEntry( + domain=const.DOMAIN, + data={"host": "192.168.1.217", "api_version": 1, "api_key": "abc"}, + unique_id="bsb002_00000", + ) + 
entry.add_to_hass(hass) + disc_bridge = get_discovered_bridge( + bridge_id="bsb003_00000", host="192.168.1.218", supports_v2=True + ) + + with patch( + "homeassistant.components.hue.config_flow.discover_bridge", + return_value=disc_bridge, + ): + result = await hass.config_entries.flow.async_init( + const.DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.218"), + ip_addresses=[ip_address("192.168.1.218")], + port=443, + hostname="Philips-hue.local", + type="_hue._tcp.local.", + name="Philips Hue - ABCABC._hue._tcp.local.", + properties={ + "bridgeid": "bsb003_00000", + "modelid": "BSB003", + }, + ), + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "link" + + +async def test_bsb003_bridge_discovery_same_host( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + device_registry: dr.DeviceRegistry, +) -> None: + """Test a bridge being discovered.""" + entry = MockConfigEntry( + domain=const.DOMAIN, + data={"host": "192.168.1.217", "api_version": 2, "api_key": "abc"}, + unique_id="bsb002_00000", + ) + entry.add_to_hass(hass) + create_mock_api_discovery( + aioclient_mock, + [("192.168.1.217", "bsb003_00000")], + ) + disc_bridge = get_discovered_bridge( + bridge_id="bsb003_00000", host="192.168.1.217", supports_v2=True + ) + + with ( + patch( + "homeassistant.components.hue.config_flow.discover_bridge", + return_value=disc_bridge, + ), + patch( + "homeassistant.components.hue.config_flow.HueBridgeV2", + autospec=True, + ), + ): + result = await hass.config_entries.flow.async_init( + const.DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.217"), + ip_addresses=[ip_address("192.168.1.217")], + port=443, + hostname="Philips-hue.local", + type="_hue._tcp.local.", + name="Philips Hue - ABCABC._hue._tcp.local.", + properties={ + "bridgeid": "bsb003_00000", + "modelid": "BSB003", + }, + ), + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "link" + + +@pytest.mark.parametrize("exception", [AiohueException, ClientError]) +async def test_bsb003_bridge_discovery_cannot_connect( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + device_registry: dr.DeviceRegistry, + exception: Exception, +) -> None: + """Test a bridge being discovered.""" + entry = MockConfigEntry( + domain=const.DOMAIN, + data={"host": "192.168.1.217", "api_version": 2, "api_key": "abc"}, + unique_id="bsb002_00000", + ) + entry.add_to_hass(hass) + create_mock_api_discovery( + aioclient_mock, + [("192.168.1.217", "bsb003_00000")], + ) + disc_bridge = get_discovered_bridge( + bridge_id="bsb003_00000", host="192.168.1.217", supports_v2=True + ) + + with ( + patch( + "homeassistant.components.hue.config_flow.discover_bridge", + return_value=disc_bridge, + ), + patch( + "homeassistant.components.hue.config_flow.HueBridgeV2", + autospec=True, + ) as mock_bridge, + ): + mock_bridge.return_value.fetch_full_state.side_effect = exception + result = await hass.config_entries.flow.async_init( + const.DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.217"), + ip_addresses=[ip_address("192.168.1.217")], + port=443, + hostname="Philips-hue.local", + type="_hue._tcp.local.", + name="Philips Hue - ABCABC._hue._tcp.local.", + properties={ + "bridgeid": "bsb003_00000", + "modelid": "BSB003", + }, + ), + ) + + assert result["type"] is 
FlowResultType.FORM + assert result["step_id"] == "link" diff --git a/tests/components/matter/fixtures/nodes/vacuum_cleaner.json b/tests/components/matter/fixtures/nodes/vacuum_cleaner.json index 8f900616799a0b..69f2e9bff8644b 100644 --- a/tests/components/matter/fixtures/nodes/vacuum_cleaner.json +++ b/tests/components/matter/fixtures/nodes/vacuum_cleaner.json @@ -357,7 +357,7 @@ ], "1/336/2": [], "1/336/3": 7, - "1/336/4": null, + "1/336/4": 1756501200, "1/336/5": [], "1/336/65532": 6, "1/336/65533": 1, diff --git a/tests/components/matter/snapshots/test_sensor.ambr b/tests/components/matter/snapshots/test_sensor.ambr index ca789919cf5087..a2ac33ae9bdfe8 100644 --- a/tests/components/matter/snapshots/test_sensor.ambr +++ b/tests/components/matter/snapshots/test_sensor.ambr @@ -6905,6 +6905,55 @@ 'state': '28.3', }) # --- +# name: test_sensors[vacuum_cleaner][sensor.mock_vacuum_estimated_end_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_vacuum_estimated_end_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Estimated end time', + 'platform': 'matter', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'estimated_end_time', + 'unique_id': '00000000000004D2-0000000000000042-MatterNodeDevice-1-ServiceAreaEstimatedEndTime-336-4', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[vacuum_cleaner][sensor.mock_vacuum_estimated_end_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Mock Vacuum Estimated end time', + }), + 'context': , + 'entity_id': 'sensor.mock_vacuum_estimated_end_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2025-08-29T21:00:00+00:00', + }) +# --- # name: test_sensors[vacuum_cleaner][sensor.mock_vacuum_operational_state-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/matter/test_sensor.py b/tests/components/matter/test_sensor.py index 883a976284ecb7..2254c021c6aec7 100644 --- a/tests/components/matter/test_sensor.py +++ b/tests/components/matter/test_sensor.py @@ -583,3 +583,23 @@ async def test_pump( state = hass.states.get("sensor.mock_pump_rotation_speed") assert state assert state.state == "500" + + +@pytest.mark.parametrize("node_fixture", ["vacuum_cleaner"]) +async def test_vacuum_actions( + hass: HomeAssistant, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test vacuum sensors.""" + # EstimatedEndTime + state = hass.states.get("sensor.mock_vacuum_estimated_end_time") + assert state + assert state.state == "2025-08-29T21:00:00+00:00" + + set_node_attribute(matter_node, 1, 336, 4, 1756502000) + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get("sensor.mock_vacuum_estimated_end_time") + assert state + assert state.state == "2025-08-29T21:13:20+00:00" diff --git a/tests/components/sftp_storage/__init__.py b/tests/components/sftp_storage/__init__.py new file mode 100644 index 00000000000000..c1739571bce715 --- /dev/null +++ b/tests/components/sftp_storage/__init__.py @@ -0,0 +1 @@ +"""Tests SFTP 
Storage integration.""" diff --git a/tests/components/sftp_storage/asyncssh_mock.py b/tests/components/sftp_storage/asyncssh_mock.py new file mode 100644 index 00000000000000..829ca44d4c238d --- /dev/null +++ b/tests/components/sftp_storage/asyncssh_mock.py @@ -0,0 +1,139 @@ +"""Mock classes for asyncssh module.""" + +from __future__ import annotations + +import json +from typing import Self +from unittest.mock import AsyncMock + +from asyncssh.misc import async_context_manager + + +class SSHClientConnectionMock: + """Class that mocks SSH Client connection.""" + + def __init__(self, *args, **kwargs) -> None: + """Initialize SSHClientConnectionMock.""" + self._sftp: SFTPClientMock = SFTPClientMock() + + async def __aenter__(self) -> Self: + """Allow SSHClientConnectionMock to be used as an async context manager.""" + return self + + async def __aexit__(self, *args) -> None: + """Allow SSHClientConnectionMock to be used as an async context manager.""" + self.close() + + def close(self): + """Mock `close` from `SSHClientConnection`.""" + return + + def mock_setup_backup(self, metadata: dict, with_bad: bool = False) -> str: + """Setup mocks to properly return a backup. + + Return: Backup ID (slug) + """ + + slug = metadata["metadata"]["backup_id"] + side_effect = [ + json.dumps(metadata), # from async_list_backups + json.dumps(metadata), # from iter_file -> _load_metadata + b"backup data", # from AsyncFileIterator + b"", + ] + self._sftp._mock_listdir.return_value = [f"{slug}.metadata.json"] + + if with_bad: + side_effect.insert(0, "invalid") + self._sftp._mock_listdir.return_value = [ + "invalid.metadata.json", + f"{slug}.metadata.json", + ] + + self._sftp._mock_open._mock_read.side_effect = side_effect + return slug + + @async_context_manager + async def start_sftp_client(self, *args, **kwargs) -> SFTPClientMock: + """Return mocked SFTP Client.""" + return self._sftp + + async def wait_closed(self): + """Mock `wait_closed` from `SFTPClient`.""" + return + + +class SFTPClientMock: + """Class that mocks SFTP Client connection.""" + + def __init__(self, *args, **kwargs) -> None: + """Initialize `SFTPClientMock`.""" + self._mock_chdir = AsyncMock() + self._mock_listdir = AsyncMock() + self._mock_exists = AsyncMock(return_value=True) + self._mock_unlink = AsyncMock() + self._mock_open = SFTPOpenMock() + + async def __aenter__(self) -> Self: + """Allow SFTPClientMock to be used as an async context manager.""" + return self + + async def __aexit__(self, *args) -> None: + """Allow SFTPClientMock to be used as an async context manager.""" + self.exit() + + async def chdir(self, *args) -> None: + """Mock `chdir` method from SFTPClient.""" + await self._mock_chdir(*args) + + async def listdir(self, *args) -> list[str]: + """Mock `listdir` method from SFTPClient.""" + result = await self._mock_listdir(*args) + return result if result is not None else [] + + @async_context_manager + async def open(self, *args, **kwargs) -> SFTPOpenMock: + """Mock open a remote file.""" + return self._mock_open + + async def exists(self, *args) -> bool: + """Mock `exists` method from SFTPClient.""" + return await self._mock_exists(*args) + + async def unlink(self, *args) -> None: + """Mock `unlink` method from SFTPClient.""" + await self._mock_unlink(*args) + + def exit(self): + """Mandatory method for quitting SFTP Client.""" + return + + async def wait_closed(self): + """Mock `wait_closed` from `SFTPClient`.""" + return + + +class SFTPOpenMock: + """Mocked remote file.""" + + def __init__(self) -> None: + """Initialize 
arguments for mocked responses.""" + self._mock_read = AsyncMock(return_value=b"") + self._mock_write = AsyncMock() + self.close = AsyncMock(return_value=None) + + async def __aenter__(self): + """Allow SFTPOpenMock to be used as an async context manager.""" + return self + + async def __aexit__(self, *args) -> None: + """Allow SFTPOpenMock to be used as an async context manager.""" + + async def read(self, *args, **kwargs) -> bytes: + """Read remote file - mocked response from `self._mock_read`.""" + return await self._mock_read(*args, **kwargs) + + async def write(self, content, *args, **kwargs) -> int: + """Mock write to remote file.""" + await self._mock_write(content, *args, **kwargs) + return len(content) diff --git a/tests/components/sftp_storage/conftest.py b/tests/components/sftp_storage/conftest.py new file mode 100644 index 00000000000000..0a5a4b484a5574 --- /dev/null +++ b/tests/components/sftp_storage/conftest.py @@ -0,0 +1,155 @@ +"""PyTest fixtures and test helpers.""" + +from collections.abc import Awaitable, Callable, Generator +from contextlib import contextmanager, suppress +from pathlib import Path +from unittest.mock import patch + +from asyncssh import generate_private_key +import pytest + +from homeassistant.components.backup import DOMAIN as BACKUP_DOMAIN, AgentBackup +from homeassistant.components.sftp_storage import SFTPConfigEntryData +from homeassistant.components.sftp_storage.const import ( + CONF_BACKUP_LOCATION, + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_PRIVATE_KEY_FILE, + CONF_USERNAME, + DEFAULT_PKEY_NAME, + DOMAIN, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.storage import STORAGE_DIR +from homeassistant.setup import async_setup_component +from homeassistant.util.ulid import ulid + +from .asyncssh_mock import SSHClientConnectionMock, async_context_manager + +from tests.common import MockConfigEntry + +type ComponentSetup = Callable[[], Awaitable[None]] + +BACKUP_METADATA = { + "file_path": "backup_location/backup.tar", + "metadata": { + "addons": [{"name": "Test", "slug": "test", "version": "1.0.0"}], + "backup_id": "test-backup", + "date": "2025-01-01T01:23:45.687000+01:00", + "database_included": True, + "extra_metadata": { + "instance_id": 1, + "with_automatic_settings": False, + "supervisor.backup_request_date": "2025-01-01T01:23:45.687000+01:00", + }, + "folders": [], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0", + "name": "Test", + "protected": True, + "size": 1234, + }, +} +TEST_AGENT_BACKUP = AgentBackup.from_dict(BACKUP_METADATA["metadata"]) + +CONFIG_ENTRY_TITLE = "testsshuser@127.0.0.1" +PRIVATE_KEY_FILE_UUID = "0123456789abcdef0123456789abcdef" +USER_INPUT = { + CONF_HOST: "127.0.0.1", + CONF_PORT: 22, + CONF_USERNAME: "username", + CONF_PASSWORD: "password", + CONF_PRIVATE_KEY_FILE: PRIVATE_KEY_FILE_UUID, + CONF_BACKUP_LOCATION: "backup_location", +} +TEST_AGENT_ID = ulid() + + +@contextmanager +def private_key_file(hass: HomeAssistant) -> Generator[str]: + """Fixture that create private key file in integration storage directory.""" + + # Create private key file and parent directory. + key_dest_path = Path(hass.config.path(STORAGE_DIR, DOMAIN)) + dest_file = key_dest_path / f".{ulid()}_{DEFAULT_PKEY_NAME}" + dest_file.parent.mkdir(parents=True, exist_ok=True) + + # Write to file only once. 
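+ # (the destination file name embeds a fresh ulid, so this check is only a safeguard)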
+ if not dest_file.exists(): + dest_file.write_bytes( + generate_private_key("ssh-rsa").export_private_key("pkcs8-pem") + ) + + yield str(dest_file) + + if dest_file.exists(): + dest_file.unlink(missing_ok=True) + with suppress(OSError): + dest_file.parent.rmdir() + + +@pytest.fixture(name="setup_integration") +async def mock_setup_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_ssh_connection: SSHClientConnectionMock, +) -> ComponentSetup: + """Fixture for setting up the component manually.""" + config_entry.add_to_hass(hass) + + async def func(config_entry: MockConfigEntry = config_entry) -> None: + assert await async_setup_component(hass, BACKUP_DOMAIN, {}) + await hass.config_entries.async_setup(config_entry.entry_id) + + return func + + +@pytest.fixture(name="config_entry") +def mock_config_entry(hass: HomeAssistant) -> Generator[MockConfigEntry]: + """Fixture for MockConfigEntry.""" + + # pylint: disable-next=contextmanager-generator-missing-cleanup + with private_key_file(hass) as private_key: + config_entry = MockConfigEntry( + domain=DOMAIN, + entry_id=TEST_AGENT_ID, + unique_id=TEST_AGENT_ID, + title=CONFIG_ENTRY_TITLE, + data={ + CONF_HOST: "127.0.0.1", + CONF_PORT: 22, + CONF_USERNAME: "username", + CONF_PASSWORD: "password", + CONF_PRIVATE_KEY_FILE: str(private_key), + CONF_BACKUP_LOCATION: "backup_location", + }, + ) + + config_entry.runtime_data = SFTPConfigEntryData(**config_entry.data) + yield config_entry + + +@pytest.fixture +def mock_ssh_connection(): + """Mock `SSHClientConnection` globally.""" + mock = SSHClientConnectionMock() + + # We decorate from same decorator from asyncssh + # It makes the callable an awaitable and context manager. + @async_context_manager + async def mock_connect(*args, **kwargs): + """Mock the asyncssh.connect function to return our mock directly.""" + return mock + + with ( + patch( + "homeassistant.components.sftp_storage.client.connect", + side_effect=mock_connect, + ), + patch( + "homeassistant.components.sftp_storage.config_flow.connect", + side_effect=mock_connect, + ), + ): + yield mock diff --git a/tests/components/sftp_storage/test_backup.py b/tests/components/sftp_storage/test_backup.py new file mode 100644 index 00000000000000..52cdcd49df15ba --- /dev/null +++ b/tests/components/sftp_storage/test_backup.py @@ -0,0 +1,418 @@ +"""Test the Backup SFTP Location platform.""" + +from io import StringIO +import json +from typing import Any +from unittest.mock import MagicMock, patch + +from asyncssh.sftp import SFTPError +import pytest + +from homeassistant.components.sftp_storage.backup import ( + async_register_backup_agents_listener, +) +from homeassistant.components.sftp_storage.const import ( + DATA_BACKUP_AGENT_LISTENERS, + DOMAIN, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from .asyncssh_mock import SSHClientConnectionMock +from .conftest import ( + BACKUP_METADATA, + CONFIG_ENTRY_TITLE, + TEST_AGENT_BACKUP, + TEST_AGENT_ID, + ComponentSetup, +) + +from tests.typing import ClientSessionGenerator, WebSocketGenerator + + +@pytest.fixture(autouse=True) +async def mock_setup_integration( + setup_integration: ComponentSetup, +) -> None: + """Set up the integration automatically for backup tests.""" + await setup_integration() + + +def generate_result(metadata: dict) -> dict: + """Generates an expected result from metadata.""" + + expected_result: dict = metadata["metadata"].copy() + expected_result["agents"] = { + f"{DOMAIN}.{TEST_AGENT_ID}": 
{
+            "protected": expected_result.pop("protected"),
+            "size": expected_result.pop("size"),
+        }
+    }
+    expected_result.update(
+        {
+            "failed_addons": [],
+            "failed_agent_ids": [],
+            "failed_folders": [],
+            "with_automatic_settings": None,
+        }
+    )
+    return expected_result
+
+
+async def test_agents_info(
+    hass: HomeAssistant,
+    hass_ws_client: WebSocketGenerator,
+) -> None:
+    """Test backup agent info."""
+    client = await hass_ws_client(hass)
+
+    await client.send_json_auto_id({"type": "backup/agents/info"})
+    response = await client.receive_json()
+
+    assert response["success"]
+    assert response["result"] == {
+        "agents": [
+            {"agent_id": "backup.local", "name": "local"},
+            {"agent_id": f"{DOMAIN}.{TEST_AGENT_ID}", "name": CONFIG_ENTRY_TITLE},
+        ],
+    }
+
+    config_entry = hass.config_entries.async_entries(DOMAIN)[0]
+    await hass.config_entries.async_unload(config_entry.entry_id)
+
+    await client.send_json_auto_id({"type": "backup/agents/info"})
+    response = await client.receive_json()
+
+    assert response["success"]
+    assert config_entry.state is ConfigEntryState.NOT_LOADED
+    assert response["result"] == {
+        "agents": [{"agent_id": "backup.local", "name": "local"}]
+    }
+
+
+async def test_agents_list_backups(
+    hass: HomeAssistant,
+    hass_ws_client: WebSocketGenerator,
+    mock_ssh_connection: SSHClientConnectionMock,
+) -> None:
+    """Test agent list backups."""
+    mock_ssh_connection.mock_setup_backup(BACKUP_METADATA)
+    expected_result = generate_result(BACKUP_METADATA)
+
+    client = await hass_ws_client(hass)
+    await client.send_json_auto_id({"type": "backup/info"})
+    response = await client.receive_json()
+
+    assert response["success"]
+    assert response["result"]["agent_errors"] == {}
+    assert response["result"]["backups"] == [expected_result]
+
+
+async def test_agents_list_backups_fail(
+    hass: HomeAssistant,
+    hass_ws_client: WebSocketGenerator,
+    mock_ssh_connection: SSHClientConnectionMock,
+) -> None:
+    """Test agent list backups fails."""
+    mock_ssh_connection.mock_setup_backup(BACKUP_METADATA)
+    mock_ssh_connection._sftp._mock_open._mock_read.side_effect = SFTPError(
+        2, "Error message"
+    )
+
+    client = await hass_ws_client(hass)
+    await client.send_json_auto_id({"type": "backup/info"})
+    response = await client.receive_json()
+
+    assert response["success"]
+    assert response["result"]["backups"] == []
+    assert response["result"]["agent_errors"] == {
+        f"{DOMAIN}.{TEST_AGENT_ID}": "Remote server error while attempting to list backups: Error message"
+    }
+
+
+async def test_agents_list_backups_include_bad_metadata(
+    hass: HomeAssistant,
+    hass_ws_client: WebSocketGenerator,
+    mock_ssh_connection: SSHClientConnectionMock,
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test agent list backups skips backups with invalid metadata."""
+    mock_ssh_connection.mock_setup_backup(BACKUP_METADATA, with_bad=True)
+    expected_result = generate_result(BACKUP_METADATA)
+
+    client = await hass_ws_client(hass)
+    await client.send_json_auto_id({"type": "backup/info"})
+    response = await client.receive_json()
+
+    assert response["success"]
+    assert response["result"]["agent_errors"] == {}
+    assert response["result"]["backups"] == [expected_result]
+    # Read is called twice: once for the bad metadata file and once for the good one
+    assert mock_ssh_connection._sftp._mock_open._mock_read.call_count == 2
+    assert (
+        "Failed to load backup metadata from file: backup_location/invalid.metadata.json. 
Expecting value: line 1 column 1 (char 0)" + in caplog.messages + ) + + +@pytest.mark.parametrize( + ("backup_id", "expected_result"), + [ + (TEST_AGENT_BACKUP.backup_id, generate_result(BACKUP_METADATA)), + ("12345", None), + ], + ids=["found", "not_found"], +) +async def test_agents_get_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + backup_id: str, + expected_result: dict[str, Any] | None, + mock_ssh_connection: SSHClientConnectionMock, +) -> None: + """Test agent get backup.""" + mock_ssh_connection.mock_setup_backup(BACKUP_METADATA) + + client = await hass_ws_client(hass) + await client.send_json_auto_id({"type": "backup/details", "backup_id": backup_id}) + response = await client.receive_json() + + assert response["success"] + assert response["result"]["backup"] == expected_result + + +async def test_agents_download( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_ssh_connection: SSHClientConnectionMock, +) -> None: + """Test agent download backup.""" + client = await hass_client() + mock_ssh_connection.mock_setup_backup(BACKUP_METADATA) + + resp = await client.get( + f"/api/backup/download/{TEST_AGENT_BACKUP.backup_id}?agent_id={DOMAIN}.{TEST_AGENT_ID}" + ) + assert resp.status == 200 + assert await resp.content.read() == b"backup data" + mock_ssh_connection._sftp._mock_open.close.assert_awaited() + + +async def test_agents_download_fail( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_ssh_connection: SSHClientConnectionMock, +) -> None: + """Test agent download backup fails.""" + mock_ssh_connection.mock_setup_backup(BACKUP_METADATA) + + # This will cause `FileNotFoundError` exception in `BackupAgentClient.iter_file() method.` + mock_ssh_connection._sftp._mock_exists.side_effect = [True, False] + client = await hass_client() + resp = await client.get( + f"/api/backup/download/{TEST_AGENT_BACKUP.backup_id}?agent_id={DOMAIN}.{TEST_AGENT_ID}" + ) + assert resp.status == 404 + + # This will raise `RuntimeError` causing Internal Server Error, mimicking that the SFTP setup failed. 
+ mock_ssh_connection._sftp = None + resp = await client.get( + f"/api/backup/download/{TEST_AGENT_BACKUP.backup_id}?agent_id={DOMAIN}.{TEST_AGENT_ID}" + ) + assert resp.status == 500 + content = await resp.content.read() + assert b"Internal Server Error" in content + + +async def test_agents_download_metadata_not_found( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_ssh_connection: SSHClientConnectionMock, +) -> None: + """Test agent download backup raises error if not found.""" + mock_ssh_connection.mock_setup_backup(BACKUP_METADATA) + + mock_ssh_connection._sftp._mock_exists.return_value = False + client = await hass_client() + resp = await client.get( + f"/api/backup/download/{TEST_AGENT_BACKUP.backup_id}?agent_id={DOMAIN}.{TEST_AGENT_ID}" + ) + assert resp.status == 404 + content = await resp.content.read() + assert content.decode() == "" + + +async def test_agents_upload( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, + mock_ssh_connection: SSHClientConnectionMock, +) -> None: + """Test agent upload backup.""" + client = await hass_client() + + with ( + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=TEST_AGENT_BACKUP, + ), + ): + resp = await client.post( + f"/api/backup/upload?agent_id={DOMAIN}.{TEST_AGENT_ID}", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert f"Uploading backup: {TEST_AGENT_BACKUP.backup_id}" in caplog.text + assert ( + f"Successfully uploaded backup id: {TEST_AGENT_BACKUP.backup_id}" in caplog.text + ) + # Called write 2 times + # 1. When writing backup file + # 2. When writing metadata file + assert mock_ssh_connection._sftp._mock_open._mock_write.call_count == 2 + + # This is 'backup file' + assert ( + b"test" + in mock_ssh_connection._sftp._mock_open._mock_write.call_args_list[0].args + ) + + # This is backup metadata + uploaded_metadata = json.loads( + mock_ssh_connection._sftp._mock_open._mock_write.call_args_list[1].args[0] + )["metadata"] + assert uploaded_metadata == BACKUP_METADATA["metadata"] + + +async def test_agents_upload_fail( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, + mock_ssh_connection: SSHClientConnectionMock, +) -> None: + """Test agent upload backup fails.""" + client = await hass_client() + mock_ssh_connection._sftp._mock_open._mock_write.side_effect = SFTPError( + 2, "Error message" + ) + + with ( + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=TEST_AGENT_BACKUP, + ), + ): + resp = await client.post( + f"/api/backup/upload?agent_id={DOMAIN}.{TEST_AGENT_ID}", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert ( + f"Unexpected error for {DOMAIN}.{TEST_AGENT_ID}: Error message" + in caplog.messages + ) + + +async def test_agents_delete( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_ssh_connection: SSHClientConnectionMock, +) -> None: + """Test agent delete backup.""" + mock_ssh_connection.mock_setup_backup(BACKUP_METADATA) + + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": TEST_AGENT_BACKUP.backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == {"agent_errors": {}} + + # Called 2 times, to remove metadata and backup file. 
+ assert mock_ssh_connection._sftp._mock_unlink.call_count == 2 + + +@pytest.mark.parametrize( + ("exists_side_effect", "expected_result"), + [ + ( + [True, False], + {"agent_errors": {}}, + ), # First `True` is to confirm the metadata file exists + ( + SFTPError(0, "manual"), + { + "agent_errors": { + f"{DOMAIN}.{TEST_AGENT_ID}": f"Failed to delete backup id: {TEST_AGENT_BACKUP.backup_id}: manual" + } + }, + ), + ], + ids=["file_not_found_exc", "sftp_error_exc"], +) +async def test_agents_delete_fail( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_ssh_connection: SSHClientConnectionMock, + exists_side_effect: bool | Exception, + expected_result: dict[str, dict[str, str]], +) -> None: + """Test agent delete backup fails.""" + mock_ssh_connection.mock_setup_backup(BACKUP_METADATA) + mock_ssh_connection._sftp._mock_exists.side_effect = exists_side_effect + + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": TEST_AGENT_BACKUP.backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == expected_result + + +async def test_agents_delete_not_found( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_ssh_connection: SSHClientConnectionMock, +) -> None: + """Test agent delete backup not found.""" + mock_ssh_connection.mock_setup_backup(BACKUP_METADATA) + + client = await hass_ws_client(hass) + backup_id = "1234" + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == {"agent_errors": {}} + + +async def test_listeners_get_cleaned_up(hass: HomeAssistant) -> None: + """Test listener gets cleaned up.""" + listener = MagicMock() + remove_listener = async_register_backup_agents_listener(hass, listener=listener) + + hass.data[DATA_BACKUP_AGENT_LISTENERS] = [ + listener + ] # make sure it's the last listener + remove_listener() + + assert DATA_BACKUP_AGENT_LISTENERS not in hass.data diff --git a/tests/components/sftp_storage/test_config_flow.py b/tests/components/sftp_storage/test_config_flow.py new file mode 100644 index 00000000000000..3974b5aaa6c075 --- /dev/null +++ b/tests/components/sftp_storage/test_config_flow.py @@ -0,0 +1,192 @@ +"""Tests config_flow.""" + +from collections.abc import Awaitable, Callable +from tempfile import NamedTemporaryFile +from unittest.mock import patch + +from asyncssh import KeyImportError, generate_private_key +from asyncssh.misc import PermissionDenied +from asyncssh.sftp import SFTPNoSuchFile, SFTPPermissionDenied +import pytest + +from homeassistant.components.sftp_storage.config_flow import ( + SFTPStorageInvalidPrivateKey, + SFTPStorageMissingPasswordOrPkey, +) +from homeassistant.components.sftp_storage.const import ( + CONF_HOST, + CONF_PASSWORD, + CONF_PRIVATE_KEY_FILE, + CONF_USERNAME, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import USER_INPUT, SSHClientConnectionMock + +from tests.common import MockConfigEntry + +type ComponentSetup = Callable[[], Awaitable[None]] + + +@pytest.fixture +def mock_process_uploaded_file(): + """Mocks ability to process uploaded private key.""" + with ( + patch( + "homeassistant.components.sftp_storage.config_flow.process_uploaded_file" + ) as mock_process_uploaded_file, + 
patch("shutil.move") as mock_shutil_move, + NamedTemporaryFile() as f, + ): + pkey = generate_private_key("ssh-rsa") + f.write(pkey.export_private_key("pkcs8-pem")) + f.flush() + mock_process_uploaded_file.return_value.__enter__.return_value = f.name + mock_shutil_move.return_value = f.name + yield + + +@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("mock_process_uploaded_file") +@pytest.mark.usefixtures("mock_ssh_connection") +async def test_backup_sftp_full_flow( + hass: HomeAssistant, +) -> None: + """Test the full backup_sftp config flow with valid user input.""" + + user_input = USER_INPUT.copy() + # Start the configuration flow + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + # The first step should be the "user" form. + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input + ) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + # Verify that a new config entry is created. + assert result["type"] is FlowResultType.CREATE_ENTRY + expected_title = f"{user_input[CONF_USERNAME]}@{user_input[CONF_HOST]}" + assert result["title"] == expected_title + + # Make sure to match the `private_key_file` from entry + user_input[CONF_PRIVATE_KEY_FILE] = result["data"][CONF_PRIVATE_KEY_FILE] + + assert result["data"] == user_input + + +@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("mock_process_uploaded_file") +@pytest.mark.usefixtures("mock_ssh_connection") +async def test_already_configured( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test successful failure of already added config entry.""" + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], USER_INPUT + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("exception_type", "error_base"), + [ + (OSError, "os_error"), + (SFTPStorageInvalidPrivateKey, "invalid_key"), + (PermissionDenied, "permission_denied"), + (SFTPStorageMissingPasswordOrPkey, "key_or_password_needed"), + (SFTPNoSuchFile, "sftp_no_such_file"), + (SFTPPermissionDenied, "sftp_permission_denied"), + (Exception, "unknown"), + ], +) +@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("mock_process_uploaded_file") +async def test_config_flow_exceptions( + exception_type: Exception, + error_base: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_ssh_connection: SSHClientConnectionMock, +) -> None: + """Test successful failure of already added config entry.""" + + mock_ssh_connection._sftp._mock_chdir.side_effect = exception_type("Error message.") + + # config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], USER_INPUT + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] and result["errors"]["base"] == error_base + + # Recover from the error + mock_ssh_connection._sftp._mock_chdir.side_effect = None + + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, 
context={"source": SOURCE_USER} + ) + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], USER_INPUT + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("mock_process_uploaded_file") +async def test_config_entry_error(hass: HomeAssistant) -> None: + """Test config flow with raised `KeyImportError`.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["step_id"] == "user" + + with ( + patch( + "homeassistant.components.sftp_storage.config_flow.SSHClientConnectionOptions", + side_effect=KeyImportError("Invalid key"), + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], USER_INPUT + ) + assert "errors" in result and result["errors"]["base"] == "invalid_key" + + user_input = USER_INPUT.copy() + user_input[CONF_PASSWORD] = "" + del user_input[CONF_PRIVATE_KEY_FILE] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input + ) + assert "errors" in result and result["errors"]["base"] == "key_or_password_needed" diff --git a/tests/components/sftp_storage/test_init.py b/tests/components/sftp_storage/test_init.py new file mode 100644 index 00000000000000..7f366facb6549a --- /dev/null +++ b/tests/components/sftp_storage/test_init.py @@ -0,0 +1,193 @@ +"""Tests for SFTP Storage.""" + +from pathlib import Path +from unittest.mock import patch + +from asyncssh.sftp import SFTPPermissionDenied +import pytest + +from homeassistant.components.sftp_storage import SFTPConfigEntryData +from homeassistant.components.sftp_storage.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.util.ulid import ulid + +from .asyncssh_mock import SSHClientConnectionMock +from .conftest import ( + CONF_BACKUP_LOCATION, + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_PRIVATE_KEY_FILE, + CONF_USERNAME, + USER_INPUT, + ComponentSetup, + private_key_file, +) + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("mock_ssh_connection") +async def test_setup_and_unload( + hass: HomeAssistant, + setup_integration: ComponentSetup, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test successful setup and unload.""" + + # Patch the `exists` function of Path so that we can also + # test the `homeassistant.components.sftp_storage.client.get_client_keys()` function + with ( + patch( + "homeassistant.components.sftp_storage.client.SSHClientConnectionOptions" + ), + patch("pathlib.Path.exists", return_value=True), + ): + await setup_integration() + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + assert entries[0].state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(entries[0].entry_id) + + assert entries[0].state is ConfigEntryState.NOT_LOADED + assert ( + f"Unloading {DOMAIN} integration for host {entries[0].data[CONF_USERNAME]}@{entries[0].data[CONF_HOST]}" + in caplog.messages + ) + + +async def test_setup_error( + mock_ssh_connection: SSHClientConnectionMock, + hass: HomeAssistant, + setup_integration: ComponentSetup, +) -> None: + """Test setup error.""" + mock_ssh_connection._sftp._mock_chdir.side_effect = SFTPPermissionDenied( + "Error message" + ) + await setup_integration() + + entries = 
hass.config_entries.async_entries(DOMAIN)
+    assert len(entries) == 1
+    assert entries[0].state is ConfigEntryState.SETUP_ERROR
+
+
+async def test_setup_unexpected_error(
+    hass: HomeAssistant,
+    setup_integration: ComponentSetup,
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test setup with an unexpected connection error."""
+    with patch(
+        "homeassistant.components.sftp_storage.client.connect",
+        side_effect=OSError("Error message"),
+    ):
+        await setup_integration()
+
+    entries = hass.config_entries.async_entries(DOMAIN)
+    assert len(entries) == 1
+    assert entries[0].state is ConfigEntryState.SETUP_ERROR
+    assert (
+        "Failure while attempting to establish SSH connection. Please check SSH credentials and if changed, re-install the integration"
+        in caplog.text
+    )
+
+
+async def test_async_remove_entry(
+    hass: HomeAssistant,
+    setup_integration: ComponentSetup,
+) -> None:
+    """Test async_remove_entry."""
+    # Set up the default config entry
+    await setup_integration()
+
+    # Set up an additional config entry
+    agent_id = ulid()
+    with private_key_file(hass) as private_key:
+        new_config_entry = MockConfigEntry(
+            domain=DOMAIN,
+            entry_id=agent_id,
+            unique_id=agent_id,
+            title="another@192.168.0.100",
+            data={
+                CONF_HOST: "127.0.0.1",
+                CONF_PORT: 22,
+                CONF_USERNAME: "another",
+                CONF_PASSWORD: "password",
+                CONF_PRIVATE_KEY_FILE: str(private_key),
+                CONF_BACKUP_LOCATION: "backup_location",
+            },
+        )
+        new_config_entry.add_to_hass(hass)
+        await setup_integration(new_config_entry)
+        entries = hass.config_entries.async_entries(DOMAIN)
+        assert len(entries) == 2
+
+        config_entry = entries[0]
+        private_key = Path(config_entry.data[CONF_PRIVATE_KEY_FILE])
+        new_private_key = Path(new_config_entry.data[CONF_PRIVATE_KEY_FILE])
+
+        # Make sure the private keys from both config entries exist
+        assert private_key.parent == new_private_key.parent
+        assert private_key.exists()
+        assert new_private_key.exists()
+
+        # Remove the first config entry; the second entry's private key and the
+        # integration storage directory remain in place
+        assert await hass.config_entries.async_remove(config_entry.entry_id)
+        assert not private_key.exists()
+        assert new_private_key.exists()
+        assert new_private_key.parent.exists()
+        assert len(hass.config_entries.async_entries(DOMAIN)) == 1
+
+        # Remove the second config entry, ensuring all files and the integration
+        # storage directory are removed.
+ assert await hass.config_entries.async_remove(new_config_entry.entry_id) + assert not new_private_key.exists() + assert not new_private_key.parent.exists() + + assert hass.config_entries.async_entries(DOMAIN) == [] + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.parametrize( + ("patch_target", "expected_logs"), + [ + ( + "os.unlink", + [ + "Failed to remove private key", + f"Storage directory for {DOMAIN} integration is not empty", + ], + ), + ("os.rmdir", ["Error occurred while removing directory"]), + ], +) +async def test_async_remove_entry_errors( + patch_target: str, + expected_logs: list[str], + hass: HomeAssistant, + setup_integration: ComponentSetup, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test async_remove_entry.""" + # Setup default config entry + await setup_integration() + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + + config_entry = entries[0] + + with patch(patch_target, side_effect=OSError(13, "Permission denied")): + await hass.config_entries.async_remove(config_entry.entry_id) + for logline in expected_logs: + assert logline in caplog.text + + +async def test_config_entry_data_password_hidden() -> None: + """Test hiding password in `SFTPConfigEntryData` string representation.""" + user_input = USER_INPUT.copy() + entry_data = SFTPConfigEntryData(**user_input) + assert "password=" not in str(entry_data) diff --git a/tests/components/sharkiq/test_config_flow.py b/tests/components/sharkiq/test_config_flow.py index 22a77678c0d701..f96b2f31e0b161 100644 --- a/tests/components/sharkiq/test_config_flow.py +++ b/tests/components/sharkiq/test_config_flow.py @@ -47,6 +47,7 @@ async def test_form(hass: HomeAssistant) -> None: with ( patch("sharkiq.AylaApi.async_sign_in", return_value=True), + patch("sharkiq.AylaApi.async_set_cookie"), patch( "homeassistant.components.sharkiq.async_setup_entry", return_value=True, @@ -84,7 +85,10 @@ async def test_form_error(hass: HomeAssistant, exc: Exception, base_error: str) DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with patch.object(AylaApi, "async_sign_in", side_effect=exc): + with ( + patch.object(AylaApi, "async_sign_in", side_effect=exc), + patch("sharkiq.AylaApi.async_set_cookie"), + ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], CONFIG, @@ -101,7 +105,10 @@ async def test_reauth_success(hass: HomeAssistant) -> None: result = await mock_config.start_reauth_flow(hass) - with patch("sharkiq.AylaApi.async_sign_in", return_value=True): + with ( + patch("sharkiq.AylaApi.async_sign_in", return_value=True), + patch("sharkiq.AylaApi.async_set_cookie"), + ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=CONFIG ) @@ -132,7 +139,10 @@ async def test_reauth( result = await mock_config.start_reauth_flow(hass) - with patch("sharkiq.AylaApi.async_sign_in", side_effect=side_effect): + with ( + patch("sharkiq.AylaApi.async_sign_in", side_effect=side_effect), + patch("sharkiq.AylaApi.async_set_cookie"), + ): result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=CONFIG ) diff --git a/tests/components/sharkiq/test_vacuum.py b/tests/components/sharkiq/test_vacuum.py index bfb2176026b733..5b5339ec7a2133 100644 --- a/tests/components/sharkiq/test_vacuum.py +++ b/tests/components/sharkiq/test_vacuum.py @@ -80,6 +80,9 @@ class MockAyla(AylaApi): async def async_sign_in(self): """Instead of signing in, just return.""" + async def async_set_cookie(self): + """Instead 
of getting cookies, just return.""" + async def async_refresh_auth(self): """Instead of refreshing auth, just return."""