diff --git a/.core_files.yaml b/.core_files.yaml index ab763b77086be6..62a787df0fd96e 100644 --- a/.core_files.yaml +++ b/.core_files.yaml @@ -34,6 +34,7 @@ base_platforms: &base_platforms - homeassistant/components/humidifier/** - homeassistant/components/image/** - homeassistant/components/image_processing/** + - homeassistant/components/infrared/** - homeassistant/components/lawn_mower/** - homeassistant/components/light/** - homeassistant/components/lock/** diff --git a/.strict-typing b/.strict-typing index 202649745468ba..fee39a8060eaf4 100644 --- a/.strict-typing +++ b/.strict-typing @@ -289,6 +289,7 @@ homeassistant.components.imgw_pib.* homeassistant.components.immich.* homeassistant.components.incomfort.* homeassistant.components.inels.* +homeassistant.components.infrared.* homeassistant.components.input_button.* homeassistant.components.input_select.* homeassistant.components.input_text.* diff --git a/CODEOWNERS b/CODEOWNERS index 87f8e595460f90..2c7fa05db850a2 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -794,6 +794,8 @@ build.json @home-assistant/supervisor /tests/components/inels/ @epdevlab /homeassistant/components/influxdb/ @mdegat01 @Robbie1221 /tests/components/influxdb/ @mdegat01 @Robbie1221 +/homeassistant/components/infrared/ @home-assistant/core +/tests/components/infrared/ @home-assistant/core /homeassistant/components/inkbird/ @bdraco /tests/components/inkbird/ @bdraco /homeassistant/components/input_boolean/ @home-assistant/core diff --git a/codecov.yml b/codecov.yml index 9cb9084ed61f67..d4bd8b7fcb721c 100644 --- a/codecov.yml +++ b/codecov.yml @@ -10,6 +10,7 @@ coverage: target: auto threshold: 1 paths: + - homeassistant/components/*/backup.py - homeassistant/components/*/config_flow.py - homeassistant/components/*/device_action.py - homeassistant/components/*/device_condition.py @@ -28,6 +29,7 @@ coverage: target: 100 threshold: 0 paths: + - homeassistant/components/*/backup.py - homeassistant/components/*/config_flow.py - 
homeassistant/components/*/device_action.py - homeassistant/components/*/device_condition.py diff --git a/homeassistant/components/alexa_devices/services.py b/homeassistant/components/alexa_devices/services.py index fb0fda0b84346e..06beb5258f3ed8 100644 --- a/homeassistant/components/alexa_devices/services.py +++ b/homeassistant/components/alexa_devices/services.py @@ -16,9 +16,6 @@ ATTR_TEXT_COMMAND = "text_command" ATTR_SOUND = "sound" ATTR_INFO_SKILL = "info_skill" -SERVICE_TEXT_COMMAND = "send_text_command" -SERVICE_SOUND_NOTIFICATION = "send_sound" -SERVICE_INFO_SKILL = "send_info_skill" SCHEMA_SOUND_SERVICE = vol.Schema( { @@ -128,17 +125,17 @@ def async_setup_services(hass: HomeAssistant) -> None: """Set up the services for the Amazon Devices integration.""" for service_name, method, schema in ( ( - SERVICE_SOUND_NOTIFICATION, + "send_sound", async_send_sound_notification, SCHEMA_SOUND_SERVICE, ), ( - SERVICE_TEXT_COMMAND, + "send_text_command", async_send_text_command, SCHEMA_CUSTOM_COMMAND, ), ( - SERVICE_INFO_SKILL, + "send_info_skill", async_send_info_skill, SCHEMA_INFO_SKILL, ), diff --git a/homeassistant/components/amcrest/camera.py b/homeassistant/components/amcrest/camera.py index 0bf02b604f1dbb..5c3655e8d3115c 100644 --- a/homeassistant/components/amcrest/camera.py +++ b/homeassistant/components/amcrest/camera.py @@ -49,18 +49,6 @@ STREAM_SOURCE_LIST = ["snapshot", "mjpeg", "rtsp"] -_SRV_EN_REC = "enable_recording" -_SRV_DS_REC = "disable_recording" -_SRV_EN_AUD = "enable_audio" -_SRV_DS_AUD = "disable_audio" -_SRV_EN_MOT_REC = "enable_motion_recording" -_SRV_DS_MOT_REC = "disable_motion_recording" -_SRV_GOTO = "goto_preset" -_SRV_CBW = "set_color_bw" -_SRV_TOUR_ON = "start_tour" -_SRV_TOUR_OFF = "stop_tour" - -_SRV_PTZ_CTRL = "ptz_control" _ATTR_PTZ_TT = "travel_time" _ATTR_PTZ_MOV = "movement" _MOV = [ @@ -103,17 +91,17 @@ ) CAMERA_SERVICES = { - _SRV_EN_REC: (_SRV_SCHEMA, "async_enable_recording", ()), - _SRV_DS_REC: (_SRV_SCHEMA, 
"async_disable_recording", ()), - _SRV_EN_AUD: (_SRV_SCHEMA, "async_enable_audio", ()), - _SRV_DS_AUD: (_SRV_SCHEMA, "async_disable_audio", ()), - _SRV_EN_MOT_REC: (_SRV_SCHEMA, "async_enable_motion_recording", ()), - _SRV_DS_MOT_REC: (_SRV_SCHEMA, "async_disable_motion_recording", ()), - _SRV_GOTO: (_SRV_GOTO_SCHEMA, "async_goto_preset", (_ATTR_PRESET,)), - _SRV_CBW: (_SRV_CBW_SCHEMA, "async_set_color_bw", (_ATTR_COLOR_BW,)), - _SRV_TOUR_ON: (_SRV_SCHEMA, "async_start_tour", ()), - _SRV_TOUR_OFF: (_SRV_SCHEMA, "async_stop_tour", ()), - _SRV_PTZ_CTRL: ( + "enable_recording": (_SRV_SCHEMA, "async_enable_recording", ()), + "disable_recording": (_SRV_SCHEMA, "async_disable_recording", ()), + "enable_audio": (_SRV_SCHEMA, "async_enable_audio", ()), + "disable_audio": (_SRV_SCHEMA, "async_disable_audio", ()), + "enable_motion_recording": (_SRV_SCHEMA, "async_enable_motion_recording", ()), + "disable_motion_recording": (_SRV_SCHEMA, "async_disable_motion_recording", ()), + "goto_preset": (_SRV_GOTO_SCHEMA, "async_goto_preset", (_ATTR_PRESET,)), + "set_color_bw": (_SRV_CBW_SCHEMA, "async_set_color_bw", (_ATTR_COLOR_BW,)), + "start_tour": (_SRV_SCHEMA, "async_start_tour", ()), + "stop_tour": (_SRV_SCHEMA, "async_stop_tour", ()), + "ptz_control": ( _SRV_PTZ_SCHEMA, "async_ptz_control", (_ATTR_PTZ_MOV, _ATTR_PTZ_TT), diff --git a/homeassistant/components/backup/strings.json b/homeassistant/components/backup/strings.json index 2562c704ee0b24..c61122d43113eb 100644 --- a/homeassistant/components/backup/strings.json +++ b/homeassistant/components/backup/strings.json @@ -43,11 +43,11 @@ "title": "The backup location {agent_id} is unavailable" }, "automatic_backup_failed_addons": { - "description": "Add-ons {failed_addons} could not be included in automatic backup. Please check the Supervisor logs for more information. 
Another attempt will be made at the next scheduled time if a backup schedule is configured.", - "title": "Not all add-ons could be included in automatic backup" + "description": "Apps {failed_addons} could not be included in automatic backup. Please check the Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured.", + "title": "Not all apps could be included in automatic backup" }, "automatic_backup_failed_agents_addons_folders": { - "description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Add-ons which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the Core and Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured.", + "description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Apps which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the Core and Supervisor logs for more information. 
Another attempt will be made at the next scheduled time if a backup schedule is configured.", "title": "Automatic backup was created with errors" }, "automatic_backup_failed_create": { diff --git a/homeassistant/components/history_stats/__init__.py b/homeassistant/components/history_stats/__init__.py index 5b5fccfbb989b5..762d36c0210520 100644 --- a/homeassistant/components/history_stats/__init__.py +++ b/homeassistant/components/history_stats/__init__.py @@ -16,7 +16,14 @@ ) from homeassistant.helpers.template import Template -from .const import CONF_DURATION, CONF_END, CONF_START, PLATFORMS +from .const import ( + CONF_DURATION, + CONF_END, + CONF_MIN_STATE_DURATION, + CONF_START, + PLATFORMS, + SECTION_ADVANCED_SETTINGS, +) from .coordinator import HistoryStatsUpdateCoordinator from .data import HistoryStats @@ -36,8 +43,14 @@ async def async_setup_entry( end: str | None = entry.options.get(CONF_END) duration: timedelta | None = None + min_state_duration: timedelta if duration_dict := entry.options.get(CONF_DURATION): duration = timedelta(**duration_dict) + advanced_settings = entry.options.get(SECTION_ADVANCED_SETTINGS, {}) + if min_state_duration_dict := advanced_settings.get(CONF_MIN_STATE_DURATION): + min_state_duration = timedelta(**min_state_duration_dict) + else: + min_state_duration = timedelta(0) history_stats = HistoryStats( hass, @@ -46,6 +59,7 @@ async def async_setup_entry( Template(start, hass) if start else None, Template(end, hass) if end else None, duration, + min_state_duration, ) coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, entry, entry.title) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/history_stats/config_flow.py b/homeassistant/components/history_stats/config_flow.py index 593092728b01cf..fc48e3c8e74023 100644 --- a/homeassistant/components/history_stats/config_flow.py +++ b/homeassistant/components/history_stats/config_flow.py @@ -12,6 +12,7 @@ from 
homeassistant.components.sensor import CONF_STATE_CLASS, SensorStateClass from homeassistant.const import CONF_ENTITY_ID, CONF_NAME, CONF_STATE, CONF_TYPE from homeassistant.core import HomeAssistant, callback +from homeassistant.data_entry_flow import section from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.schema_config_entry_flow import ( SchemaCommonFlowHandler, @@ -37,6 +38,7 @@ from .const import ( CONF_DURATION, CONF_END, + CONF_MIN_STATE_DURATION, CONF_PERIOD_KEYS, CONF_START, CONF_TYPE_KEYS, @@ -44,6 +46,7 @@ CONF_TYPE_TIME, DEFAULT_NAME, DOMAIN, + SECTION_ADVANCED_SETTINGS, ) from .coordinator import HistoryStatsUpdateCoordinator from .data import HistoryStats @@ -139,7 +142,7 @@ def _get_options_schema_with_entity_id(entity_id: str, type: str) -> vol.Schema: vol.Optional(CONF_START): TemplateSelector(), vol.Optional(CONF_END): TemplateSelector(), vol.Optional(CONF_DURATION): DurationSelector( - DurationSelectorConfig(enable_day=True, allow_negative=False) + DurationSelectorConfig(enable_day=True, allow_negative=False), ), vol.Optional(CONF_STATE_CLASS): SelectSelector( SelectSelectorConfig( @@ -148,6 +151,18 @@ def _get_options_schema_with_entity_id(entity_id: str, type: str) -> vol.Schema: mode=SelectSelectorMode.DROPDOWN, ), ), + vol.Optional(SECTION_ADVANCED_SETTINGS): section( + vol.Schema( + { + vol.Optional(CONF_MIN_STATE_DURATION): DurationSelector( + DurationSelectorConfig( + enable_day=True, allow_negative=False + ) + ), + } + ), + {"collapsed": True}, + ), } ) @@ -275,6 +290,8 @@ def async_preview_updated( start = validated_data.get(CONF_START) end = validated_data.get(CONF_END) duration = validated_data.get(CONF_DURATION) + advanced_settings = validated_data.get(SECTION_ADVANCED_SETTINGS, {}) + min_state_duration = advanced_settings.get(CONF_MIN_STATE_DURATION) state_class = validated_data.get(CONF_STATE_CLASS) history_stats = HistoryStats( @@ -284,6 +301,7 @@ def async_preview_updated( Template(start, hass) 
if start else None, Template(end, hass) if end else None, timedelta(**duration) if duration else None, + timedelta(**min_state_duration) if min_state_duration else timedelta(0), True, ) coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, None, name, True) diff --git a/homeassistant/components/history_stats/const.py b/homeassistant/components/history_stats/const.py index 9e89ca1827ce80..d608f56f6d8aec 100644 --- a/homeassistant/components/history_stats/const.py +++ b/homeassistant/components/history_stats/const.py @@ -8,6 +8,7 @@ CONF_START = "start" CONF_END = "end" CONF_DURATION = "duration" +CONF_MIN_STATE_DURATION = "min_state_duration" CONF_PERIOD_KEYS = [CONF_START, CONF_END, CONF_DURATION] CONF_TYPE_TIME = "time" @@ -16,3 +17,5 @@ CONF_TYPE_KEYS = [CONF_TYPE_TIME, CONF_TYPE_RATIO, CONF_TYPE_COUNT] DEFAULT_NAME = "unnamed statistics" + +SECTION_ADVANCED_SETTINGS = "advanced_settings" diff --git a/homeassistant/components/history_stats/data.py b/homeassistant/components/history_stats/data.py index 569483df687c29..9a88812342ede8 100644 --- a/homeassistant/components/history_stats/data.py +++ b/homeassistant/components/history_stats/data.py @@ -47,6 +47,7 @@ def __init__( start: Template | None, end: Template | None, duration: datetime.timedelta | None, + min_state_duration: datetime.timedelta, preview: bool = False, ) -> None: """Init the history stats manager.""" @@ -58,6 +59,7 @@ def __init__( self._has_recorder_data = False self._entity_states = set(entity_states) self._duration = duration + self._min_state_duration = min_state_duration.total_seconds() self._start = start self._end = end self._preview = preview @@ -243,18 +245,38 @@ def _async_compute_seconds_and_changes( ) break - if previous_state_matches: - elapsed += state_change_timestamp - last_state_change_timestamp - elif current_state_matches: - match_count += 1 + if not previous_state_matches and current_state_matches: + # We are entering a matching state. 
+ # This marks the start of a new candidate block that may later + # qualify if it lasts at least min_state_duration. + last_state_change_timestamp = max( + start_timestamp, state_change_timestamp + ) + elif previous_state_matches and not current_state_matches: + # We are leaving a matching state. + # This closes the current matching block and allows to + # evaluate its total duration. + block_duration = state_change_timestamp - last_state_change_timestamp + if block_duration >= self._min_state_duration: + # The block lasted long enough so we increment match count + # and accumulate its duration. + elapsed += block_duration + match_count += 1 previous_state_matches = current_state_matches - last_state_change_timestamp = max(start_timestamp, state_change_timestamp) # Count time elapsed between last history state and end of measure if previous_state_matches: + # We are still inside a matching block at the end of the + # measurement window. This block has not been closed by a + # transition, so we evaluate it up to measure_end. measure_end = min(end_timestamp, now_timestamp) - elapsed += measure_end - last_state_change_timestamp + last_state_duration = max(0, measure_end - last_state_change_timestamp) + if last_state_duration >= self._min_state_duration: + # The open block lasted long enough so we increment match count + # and accumulate its duration. 
+ elapsed += last_state_duration + match_count += 1 # Save value in seconds seconds_matched = elapsed diff --git a/homeassistant/components/history_stats/sensor.py b/homeassistant/components/history_stats/sensor.py index 98616b3e3759ce..367f9892ca2be4 100644 --- a/homeassistant/components/history_stats/sensor.py +++ b/homeassistant/components/history_stats/sensor.py @@ -42,6 +42,7 @@ from .const import ( CONF_DURATION, CONF_END, + CONF_MIN_STATE_DURATION, CONF_PERIOD_KEYS, CONF_START, CONF_TYPE_COUNT, @@ -63,6 +64,8 @@ } ICON = "mdi:chart-line" +DEFAULT_MIN_STATE_DURATION = datetime.timedelta(0) + def exactly_two_period_keys[_T: dict[str, Any]](conf: _T) -> _T: """Ensure exactly 2 of CONF_PERIOD_KEYS are provided.""" @@ -91,6 +94,9 @@ def no_ratio_total[_T: dict[str, Any]](conf: _T) -> _T: vol.Optional(CONF_START): cv.template, vol.Optional(CONF_END): cv.template, vol.Optional(CONF_DURATION): cv.time_period, + vol.Optional( + CONF_MIN_STATE_DURATION, default=DEFAULT_MIN_STATE_DURATION + ): cv.time_period, vol.Optional(CONF_TYPE, default=CONF_TYPE_TIME): vol.In(CONF_TYPE_KEYS), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, @@ -120,6 +126,7 @@ async def async_setup_platform( start: Template | None = config.get(CONF_START) end: Template | None = config.get(CONF_END) duration: datetime.timedelta | None = config.get(CONF_DURATION) + min_state_duration: datetime.timedelta = config[CONF_MIN_STATE_DURATION] sensor_type: str = config[CONF_TYPE] name: str = config[CONF_NAME] unique_id: str | None = config.get(CONF_UNIQUE_ID) @@ -127,7 +134,9 @@ async def async_setup_platform( CONF_STATE_CLASS, SensorStateClass.MEASUREMENT ) - history_stats = HistoryStats(hass, entity_id, entity_states, start, end, duration) + history_stats = HistoryStats( + hass, entity_id, entity_states, start, end, duration, min_state_duration + ) coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, None, name) await 
coordinator.async_refresh() if not coordinator.last_update_success: diff --git a/homeassistant/components/history_stats/strings.json b/homeassistant/components/history_stats/strings.json index 304ca6e8eb5369..584456484fc444 100644 --- a/homeassistant/components/history_stats/strings.json +++ b/homeassistant/components/history_stats/strings.json @@ -19,14 +19,23 @@ }, "data_description": { "duration": "Duration of the measure.", - "end": "When to stop the measure (timestamp or datetime). Can be a template", + "end": "When to stop the measure (timestamp or datetime). Can be a template.", "entity_id": "[%key:component::history_stats::config::step::user::data_description::entity_id%]", "start": "When to start the measure (timestamp or datetime). Can be a template.", "state": "[%key:component::history_stats::config::step::user::data_description::state%]", "state_class": "The state class for statistics calculation.", "type": "[%key:component::history_stats::config::step::user::data_description::type%]" }, - "description": "Read the documentation for further details on how to configure the history stats sensor using these options." + "description": "Read the documentation for further details on how to configure the history stats sensor using these options.", + "sections": { + "advanced_settings": { + "data": { "min_state_duration": "Minimum state duration" }, + "data_description": { + "min_state_duration": "The minimum state duration to account for the statistics. Default is 0 seconds." + }, + "name": "Advanced settings" + } + } }, "state": { "data": { @@ -82,7 +91,18 @@ "state_class": "The state class for statistics calculation. 
Changing the state class will require statistics to be reset.", "type": "[%key:component::history_stats::config::step::user::data_description::type%]" }, - "description": "[%key:component::history_stats::config::step::options::description%]" + "description": "[%key:component::history_stats::config::step::options::description%]", + "sections": { + "advanced_settings": { + "data": { + "min_state_duration": "[%key:component::history_stats::config::step::options::sections::advanced_settings::data::min_state_duration%]" + }, + "data_description": { + "min_state_duration": "[%key:component::history_stats::config::step::options::sections::advanced_settings::data_description::min_state_duration%]" + }, + "name": "[%key:component::history_stats::config::step::options::sections::advanced_settings::name%]" + } + } } } }, diff --git a/homeassistant/components/infrared/__init__.py b/homeassistant/components/infrared/__init__.py new file mode 100644 index 00000000000000..6411fe9599a660 --- /dev/null +++ b/homeassistant/components/infrared/__init__.py @@ -0,0 +1,153 @@ +"""Provides functionality to interact with infrared devices.""" + +from __future__ import annotations + +from abc import abstractmethod +from datetime import timedelta +import logging +from typing import final + +from infrared_protocols import Command as InfraredCommand + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import Context, HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.entity_component import EntityComponent +from homeassistant.helpers.restore_state import RestoreEntity +from homeassistant.helpers.typing import ConfigType +from homeassistant.util import dt as dt_util +from homeassistant.util.hass_dict import HassKey + 
+from .const import DOMAIN + +__all__ = [ + "DOMAIN", + "InfraredEntity", + "InfraredEntityDescription", + "async_get_emitters", + "async_send_command", +] + +_LOGGER = logging.getLogger(__name__) + +DATA_COMPONENT: HassKey[EntityComponent[InfraredEntity]] = HassKey(DOMAIN) +ENTITY_ID_FORMAT = DOMAIN + ".{}" +PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA +PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE +SCAN_INTERVAL = timedelta(seconds=30) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the infrared domain.""" + component = hass.data[DATA_COMPONENT] = EntityComponent[InfraredEntity]( + _LOGGER, DOMAIN, hass, SCAN_INTERVAL + ) + await component.async_setup(config) + + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up a config entry.""" + return await hass.data[DATA_COMPONENT].async_setup_entry(entry) + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.data[DATA_COMPONENT].async_unload_entry(entry) + + +@callback +def async_get_emitters(hass: HomeAssistant) -> list[InfraredEntity]: + """Get all infrared emitters.""" + component = hass.data.get(DATA_COMPONENT) + if component is None: + return [] + + return list(component.entities) + + +async def async_send_command( + hass: HomeAssistant, + entity_id_or_uuid: str, + command: InfraredCommand, + context: Context | None = None, +) -> None: + """Send an IR command to the specified infrared entity. + + Raises: + HomeAssistantError: If the infrared entity is not found. 
+ """ + component = hass.data.get(DATA_COMPONENT) + if component is None: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="component_not_loaded", + ) + + ent_reg = er.async_get(hass) + entity_id = er.async_validate_entity_id(ent_reg, entity_id_or_uuid) + entity = component.get_entity(entity_id) + if entity is None: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="entity_not_found", + translation_placeholders={"entity_id": entity_id}, + ) + + if context is not None: + entity.async_set_context(context) + + await entity.async_send_command_internal(command) + + +class InfraredEntityDescription(EntityDescription, frozen_or_thawed=True): + """Describes infrared entities.""" + + +class InfraredEntity(RestoreEntity): + """Base class for infrared transmitter entities.""" + + entity_description: InfraredEntityDescription + _attr_should_poll = False + _attr_state: None = None + + __last_command_sent: str | None = None + + @property + @final + def state(self) -> str | None: + """Return the entity state.""" + return self.__last_command_sent + + @final + async def async_send_command_internal(self, command: InfraredCommand) -> None: + """Send an IR command and update state. + + Should not be overridden, handles setting last sent timestamp. + """ + await self.async_send_command(command) + self.__last_command_sent = dt_util.utcnow().isoformat(timespec="milliseconds") + self.async_write_ha_state() + + @final + async def async_internal_added_to_hass(self) -> None: + """Call when the infrared entity is added to hass.""" + await super().async_internal_added_to_hass() + state = await self.async_get_last_state() + if state is not None and state.state not in (STATE_UNAVAILABLE, None): + self.__last_command_sent = state.state + + @abstractmethod + async def async_send_command(self, command: InfraredCommand) -> None: + """Send an IR command. + + Args: + command: The IR command to send. 
+ + Raises: + HomeAssistantError: If transmission fails. + """ diff --git a/homeassistant/components/infrared/const.py b/homeassistant/components/infrared/const.py new file mode 100644 index 00000000000000..2240607f52a8ec --- /dev/null +++ b/homeassistant/components/infrared/const.py @@ -0,0 +1,5 @@ +"""Constants for the Infrared integration.""" + +from typing import Final + +DOMAIN: Final = "infrared" diff --git a/homeassistant/components/infrared/icons.json b/homeassistant/components/infrared/icons.json new file mode 100644 index 00000000000000..3a12eb7d0b5025 --- /dev/null +++ b/homeassistant/components/infrared/icons.json @@ -0,0 +1,7 @@ +{ + "entity_component": { + "_": { + "default": "mdi:led-on" + } + } +} diff --git a/homeassistant/components/infrared/manifest.json b/homeassistant/components/infrared/manifest.json new file mode 100644 index 00000000000000..49cf9ad98df38a --- /dev/null +++ b/homeassistant/components/infrared/manifest.json @@ -0,0 +1,9 @@ +{ + "domain": "infrared", + "name": "Infrared", + "codeowners": ["@home-assistant/core"], + "documentation": "https://www.home-assistant.io/integrations/infrared", + "integration_type": "entity", + "quality_scale": "internal", + "requirements": ["infrared-protocols==1.0.0"] +} diff --git a/homeassistant/components/infrared/strings.json b/homeassistant/components/infrared/strings.json new file mode 100644 index 00000000000000..c4cf75cf1f3cb9 --- /dev/null +++ b/homeassistant/components/infrared/strings.json @@ -0,0 +1,10 @@ +{ + "exceptions": { + "component_not_loaded": { + "message": "Infrared component not loaded" + }, + "entity_not_found": { + "message": "Infrared entity `{entity_id}` not found" + } + } +} diff --git a/homeassistant/components/kitchen_sink/__init__.py b/homeassistant/components/kitchen_sink/__init__.py index 5fc498cc94d49d..6bf5896dd70300 100644 --- a/homeassistant/components/kitchen_sink/__init__.py +++ b/homeassistant/components/kitchen_sink/__init__.py @@ -56,7 +56,9 @@ 
COMPONENTS_WITH_DEMO_PLATFORM = [ Platform.BUTTON, + Platform.FAN, Platform.IMAGE, + Platform.INFRARED, Platform.LAWN_MOWER, Platform.LOCK, Platform.NOTIFY, @@ -131,6 +133,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # Notify backup listeners hass.async_create_task(_notify_backup_listeners(hass), eager_start=False) + # Reload config entry when subentries are added/removed/updated + entry.async_on_unload(entry.add_update_listener(_async_update_listener)) + # Subscribe to labs feature updates for kitchen_sink preview repair entry.async_on_unload( async_subscribe_preview_feature( @@ -147,6 +152,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True +async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Reload config entry on update (e.g. subentry added/removed).""" + await hass.config_entries.async_reload(entry.entry_id) + + async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload config entry.""" # Notify backup listeners diff --git a/homeassistant/components/kitchen_sink/config_flow.py b/homeassistant/components/kitchen_sink/config_flow.py index 27a10738f483fc..434d54dc1e5825 100644 --- a/homeassistant/components/kitchen_sink/config_flow.py +++ b/homeassistant/components/kitchen_sink/config_flow.py @@ -8,18 +8,23 @@ import voluptuous as vol from homeassistant import data_entry_flow +from homeassistant.components.infrared import ( + DOMAIN as INFRARED_DOMAIN, + async_get_emitters, +) from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, ConfigSubentryFlow, - OptionsFlowWithReload, + OptionsFlow, SubentryFlowResult, ) from homeassistant.core import callback from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.selector import EntitySelector, EntitySelectorConfig -from . 
import DOMAIN +from .const import CONF_INFRARED_ENTITY_ID, DOMAIN CONF_BOOLEAN = "bool" CONF_INT = "int" @@ -44,7 +49,10 @@ def async_get_supported_subentry_types( cls, config_entry: ConfigEntry ) -> dict[str, type[ConfigSubentryFlow]]: """Return subentries supported by this handler.""" - return {"entity": SubentryFlowHandler} + return { + "entity": SubentryFlowHandler, + "infrared_fan": InfraredFanSubentryFlowHandler, + } async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Set the config entry up from yaml.""" @@ -65,7 +73,7 @@ async def async_step_reauth_confirm( return self.async_abort(reason="reauth_successful") -class OptionsFlowHandler(OptionsFlowWithReload): +class OptionsFlowHandler(OptionsFlow): """Handle options.""" async def async_step_init( @@ -146,7 +154,7 @@ async def async_step_reconfigure_sensor( """Reconfigure a sensor.""" if user_input is not None: title = user_input.pop("name") - return self.async_update_reload_and_abort( + return self.async_update_and_abort( self._get_entry(), self._get_reconfigure_subentry(), data=user_input, @@ -162,3 +170,35 @@ async def async_step_reconfigure_sensor( } ), ) + + +class InfraredFanSubentryFlowHandler(ConfigSubentryFlow): + """Handle infrared fan subentry flow.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """User flow to add an infrared fan.""" + + entities = async_get_emitters(self.hass) + if not entities: + return self.async_abort(reason="no_emitters") + + if user_input is not None: + title = user_input.pop("name") + return self.async_create_entry(data=user_input, title=title) + + return self.async_show_form( + step_id="user", + data_schema=vol.Schema( + { + vol.Required("name"): str, + vol.Required(CONF_INFRARED_ENTITY_ID): EntitySelector( + EntitySelectorConfig( + domain=INFRARED_DOMAIN, + include_entities=[entity.entity_id for entity in entities], + ) + ), + } + ), + ) diff --git 
a/homeassistant/components/kitchen_sink/const.py b/homeassistant/components/kitchen_sink/const.py index e6edaca46ce277..bce291bd5d661e 100644 --- a/homeassistant/components/kitchen_sink/const.py +++ b/homeassistant/components/kitchen_sink/const.py @@ -7,6 +7,7 @@ from homeassistant.util.hass_dict import HassKey DOMAIN = "kitchen_sink" +CONF_INFRARED_ENTITY_ID = "infrared_entity_id" DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey( f"{DOMAIN}.backup_agent_listeners" ) diff --git a/homeassistant/components/kitchen_sink/fan.py b/homeassistant/components/kitchen_sink/fan.py new file mode 100644 index 00000000000000..db02da6930c27b --- /dev/null +++ b/homeassistant/components/kitchen_sink/fan.py @@ -0,0 +1,150 @@ +"""Demo platform that offers a fake infrared fan entity.""" + +from __future__ import annotations + +from typing import Any + +import infrared_protocols + +from homeassistant.components.fan import FanEntity, FanEntityFeature +from homeassistant.components.infrared import async_send_command +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.event import async_track_state_change_event + +from .const import CONF_INFRARED_ENTITY_ID, DOMAIN + +PARALLEL_UPDATES = 0 + +DUMMY_FAN_ADDRESS = 0x1234 +DUMMY_CMD_POWER_ON = 0x01 +DUMMY_CMD_POWER_OFF = 0x02 +DUMMY_CMD_SPEED_LOW = 0x03 +DUMMY_CMD_SPEED_MEDIUM = 0x04 +DUMMY_CMD_SPEED_HIGH = 0x05 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the demo infrared fan platform.""" + for subentry_id, subentry in config_entry.subentries.items(): + if subentry.subentry_type != 
"infrared_fan": + continue + async_add_entities( + [ + DemoInfraredFan( + subentry_id=subentry_id, + device_name=subentry.title, + infrared_entity_id=subentry.data[CONF_INFRARED_ENTITY_ID], + ) + ], + config_subentry_id=subentry_id, + ) + + +class DemoInfraredFan(FanEntity): + """Representation of a demo infrared fan entity.""" + + _attr_has_entity_name = True + _attr_name = None + _attr_should_poll = False + _attr_assumed_state = True + _attr_speed_count = 3 + _attr_supported_features = ( + FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) + + def __init__( + self, + subentry_id: str, + device_name: str, + infrared_entity_id: str, + ) -> None: + """Initialize the demo infrared fan entity.""" + self._infrared_entity_id = infrared_entity_id + self._attr_unique_id = subentry_id + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, subentry_id)}, + name=device_name, + ) + self._attr_percentage = 0 + + async def async_added_to_hass(self) -> None: + """Subscribe to infrared entity state changes.""" + await super().async_added_to_hass() + + @callback + def _async_ir_state_changed(event: Event[EventStateChangedData]) -> None: + """Handle infrared entity state changes.""" + new_state = event.data["new_state"] + self._attr_available = ( + new_state is not None and new_state.state != STATE_UNAVAILABLE + ) + self.async_write_ha_state() + + self.async_on_remove( + async_track_state_change_event( + self.hass, [self._infrared_entity_id], _async_ir_state_changed + ) + ) + + # Set initial availability based on current infrared entity state + ir_state = self.hass.states.get(self._infrared_entity_id) + self._attr_available = ( + ir_state is not None and ir_state.state != STATE_UNAVAILABLE + ) + + async def _send_command(self, command_code: int) -> None: + """Send an IR command using the NEC protocol.""" + command = infrared_protocols.NECCommand( + address=DUMMY_FAN_ADDRESS, + command=command_code, + modulation=38000, + ) + await 
async_send_command( + self.hass, self._infrared_entity_id, command, context=self._context + ) + + async def async_turn_on( + self, + percentage: int | None = None, + preset_mode: str | None = None, + **kwargs: Any, + ) -> None: + """Turn on the fan.""" + if percentage is not None: + await self.async_set_percentage(percentage) + return + await self._send_command(DUMMY_CMD_POWER_ON) + self._attr_percentage = 33 + self.async_write_ha_state() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the fan.""" + await self._send_command(DUMMY_CMD_POWER_OFF) + self._attr_percentage = 0 + self.async_write_ha_state() + + async def async_set_percentage(self, percentage: int) -> None: + """Set the speed percentage of the fan.""" + if percentage == 0: + await self.async_turn_off() + return + + if percentage <= 33: + await self._send_command(DUMMY_CMD_SPEED_LOW) + elif percentage <= 66: + await self._send_command(DUMMY_CMD_SPEED_MEDIUM) + else: + await self._send_command(DUMMY_CMD_SPEED_HIGH) + + self._attr_percentage = percentage + self.async_write_ha_state() diff --git a/homeassistant/components/kitchen_sink/infrared.py b/homeassistant/components/kitchen_sink/infrared.py new file mode 100644 index 00000000000000..4f93c9be0c59ee --- /dev/null +++ b/homeassistant/components/kitchen_sink/infrared.py @@ -0,0 +1,65 @@ +"""Demo platform that offers a fake infrared entity.""" + +from __future__ import annotations + +import infrared_protocols + +from homeassistant.components import persistent_notification +from homeassistant.components.infrared import InfraredEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . 
import DOMAIN + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the demo infrared platform.""" + async_add_entities( + [ + DemoInfrared( + unique_id="ir_transmitter", + device_name="IR Blaster", + entity_name="Infrared Transmitter", + ), + ] + ) + + +class DemoInfrared(InfraredEntity): + """Representation of a demo infrared entity.""" + + _attr_has_entity_name = True + _attr_should_poll = False + + def __init__( + self, + unique_id: str, + device_name: str, + entity_name: str, + ) -> None: + """Initialize the demo infrared entity.""" + self._attr_unique_id = unique_id + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, unique_id)}, + name=device_name, + ) + self._attr_name = entity_name + + async def async_send_command(self, command: infrared_protocols.Command) -> None: + """Send an IR command.""" + timings = [ + interval + for timing in command.get_raw_timings() + for interval in (timing.high_us, -timing.low_us) + ] + persistent_notification.async_create( + self.hass, str(timings), title="Infrared Command" + ) diff --git a/homeassistant/components/kitchen_sink/sensor.py b/homeassistant/components/kitchen_sink/sensor.py index 04cb833f0df844..15f73b781bc446 100644 --- a/homeassistant/components/kitchen_sink/sensor.py +++ b/homeassistant/components/kitchen_sink/sensor.py @@ -101,6 +101,8 @@ async def async_setup_entry( ) for subentry_id, subentry in config_entry.subentries.items(): + if subentry.subentry_type != "entity": + continue async_add_entities( [ DemoSensor( diff --git a/homeassistant/components/kitchen_sink/strings.json b/homeassistant/components/kitchen_sink/strings.json index 107bd1f509b0fa..15305d711b26a1 100644 --- a/homeassistant/components/kitchen_sink/strings.json +++ b/homeassistant/components/kitchen_sink/strings.json @@ -32,6 +32,24 @@ "description": "Reconfigure the sensor" } } + }, + "infrared_fan": 
{ + "abort": { + "no_emitters": "No infrared transmitter entities found. Please set up an infrared device first." + }, + "entry_type": "Infrared fan", + "initiate_flow": { + "user": "Add infrared fan" + }, + "step": { + "user": { + "data": { + "infrared_entity_id": "Infrared transmitter", + "name": "[%key:common::config_flow::data::name%]" + }, + "description": "Select an infrared transmitter to control the fan." + } + } } }, "device": { diff --git a/homeassistant/components/portainer/entity.py b/homeassistant/components/portainer/entity.py index ca3d5bfb40020d..e0bc7ea12ea803 100644 --- a/homeassistant/components/portainer/entity.py +++ b/homeassistant/components/portainer/entity.py @@ -91,7 +91,7 @@ def __init__( # else it's the endpoint via_device=( DOMAIN, - f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_{device_info.stack.name}" + f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_stack_{device_info.stack.id}" if device_info.stack else f"{coordinator.config_entry.entry_id}_{self.endpoint_id}", ), @@ -135,7 +135,7 @@ def __init__( identifiers={ ( DOMAIN, - f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_{self.device_name}", + f"{coordinator.config_entry.entry_id}_{self.endpoint_id}_stack_{self.stack_id}", ) }, manufacturer=DEFAULT_NAME, diff --git a/homeassistant/components/teslemetry/__init__.py b/homeassistant/components/teslemetry/__init__.py index a750a9262b9abc..8eac44d32d84f2 100644 --- a/homeassistant/components/teslemetry/__init__.py +++ b/homeassistant/components/teslemetry/__init__.py @@ -48,6 +48,7 @@ PLATFORMS: Final = [ Platform.BINARY_SENSOR, Platform.BUTTON, + Platform.CALENDAR, Platform.CLIMATE, Platform.COVER, Platform.DEVICE_TRACKER, diff --git a/homeassistant/components/teslemetry/calendar.py b/homeassistant/components/teslemetry/calendar.py new file mode 100644 index 00000000000000..71877344129877 --- /dev/null +++ b/homeassistant/components/teslemetry/calendar.py @@ -0,0 +1,282 @@ +"""Calendar platform for 
Teslemetry integration.""" + +from __future__ import annotations + +from datetime import datetime, timedelta +from typing import Any + +from homeassistant.components.calendar import CalendarEntity, CalendarEvent +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.util import dt as dt_util + +from . import TeslemetryConfigEntry +from .entity import TeslemetryEnergyInfoEntity + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: TeslemetryConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the Teslemetry Calendar platform from a config entry.""" + + entities_to_add: list[CalendarEntity] = [] + + entities_to_add.extend( + TeslemetryTariffSchedule(energy, "tariff_content_v2") + for energy in entry.runtime_data.energysites + if energy.info_coordinator.data.get("tariff_content_v2_seasons") + ) + + entities_to_add.extend( + TeslemetryTariffSchedule(energy, "tariff_content_v2_sell_tariff") + for energy in entry.runtime_data.energysites + if energy.info_coordinator.data.get("tariff_content_v2_sell_tariff_seasons") + ) + + async_add_entities(entities_to_add) + + +def _is_day_in_range(day_of_week: int, from_day: int, to_day: int) -> bool: + """Check if a day of week falls within a range, handling week crossing.""" + if from_day <= to_day: + return from_day <= day_of_week <= to_day + # Week crossing (e.g., Fri=4 to Mon=0) + return day_of_week >= from_day or day_of_week <= to_day + + +def _parse_period_times( + period_def: dict[str, Any], + base_day: datetime, +) -> tuple[datetime, datetime] | None: + """Parse a TOU period definition into start and end times. + + Returns None if the base_day's weekday doesn't match the period's day range. + For periods crossing midnight, end_time will be on the following day. 
+ """ + # DaysOfWeek are from 0-6 (Monday-Sunday) + from_day = period_def.get("fromDayOfWeek", 0) + to_day = period_def.get("toDayOfWeek", 6) + + if not _is_day_in_range(base_day.weekday(), from_day, to_day): + return None + + # Hours are from 0-23, so 24 hours is 0-0 + from_hour = period_def.get("fromHour", 0) + to_hour = period_def.get("toHour", 0) + + # Minutes are from 0-59, so 60 minutes is 0-0 + from_minute = period_def.get("fromMinute", 0) + to_minute = period_def.get("toMinute", 0) + + start_time = base_day.replace( + hour=from_hour, minute=from_minute, second=0, microsecond=0 + ) + end_time = base_day.replace(hour=to_hour, minute=to_minute, second=0, microsecond=0) + + if end_time <= start_time: + end_time += timedelta(days=1) + + return start_time, end_time + + +def _build_event( + key_base: str, + season_name: str, + period_name: str, + price: float | None, + start_time: datetime, + end_time: datetime, +) -> CalendarEvent: + """Build a CalendarEvent for a tariff period.""" + price_str = f"{price:.2f}/kWh" if price is not None else "Unknown Price" + return CalendarEvent( + start=start_time, + end=end_time, + summary=f"{period_name.capitalize().replace('_', ' ')}: {price_str}", + description=( + f"Season: {season_name.capitalize()}\n" + f"Period: {period_name.capitalize().replace('_', ' ')}\n" + f"Price: {price_str}" + ), + uid=f"{key_base}_{season_name}_{period_name}_{start_time.isoformat()}", + ) + + +class TeslemetryTariffSchedule(TeslemetryEnergyInfoEntity, CalendarEntity): + """Energy Site Tariff Schedule Calendar.""" + + def __init__( + self, + data: Any, + key_base: str, + ) -> None: + """Initialize the tariff schedule calendar.""" + self.key_base: str = key_base + self.seasons: dict[str, dict[str, Any]] = {} + self.charges: dict[str, dict[str, Any]] = {} + super().__init__(data, key_base) + + @property + def event(self) -> CalendarEvent | None: + """Return the current active tariff event.""" + now = dt_util.now() + current_season_name = 
self._get_current_season(now) + + if not current_season_name or not self.seasons.get(current_season_name): + return None + + # Time of use (TOU) periods define the tariff schedule within a season + tou_periods = self.seasons[current_season_name].get("tou_periods", {}) + + for period_name, period_group in tou_periods.items(): + for period_def in period_group.get("periods", []): + result = _parse_period_times(period_def, now) + if result is None: + continue + + start_time, end_time = result + + # Check if now falls within this period + if not (start_time <= now < end_time): + # For cross-midnight periods, check yesterday's instance + start_time -= timedelta(days=1) + end_time -= timedelta(days=1) + if not (start_time <= now < end_time): + continue + + price = self._get_price_for_period(current_season_name, period_name) + return _build_event( + self.key_base, + current_season_name, + period_name, + price, + start_time, + end_time, + ) + + return None + + async def async_get_events( + self, + hass: HomeAssistant, + start_date: datetime, + end_date: datetime, + ) -> list[CalendarEvent]: + """Return calendar events (tariff periods) within a datetime range.""" + events: list[CalendarEvent] = [] + + start_date = dt_util.as_local(start_date) + end_date = dt_util.as_local(end_date) + + # Start one day earlier to catch TOU periods that cross midnight + # from the previous evening into the query range. 
+ current_day = dt_util.start_of_local_day(start_date) - timedelta(days=1) + while current_day < end_date: + season_name = self._get_current_season(current_day) + if not season_name or not self.seasons.get(season_name): + current_day += timedelta(days=1) + continue + + tou_periods = self.seasons[season_name].get("tou_periods", {}) + + for period_name, period_group in tou_periods.items(): + for period_def in period_group.get("periods", []): + result = _parse_period_times(period_def, current_day) + if result is None: + continue + + start_time, end_time = result + + if start_time < end_date and end_time > start_date: + price = self._get_price_for_period(season_name, period_name) + events.append( + _build_event( + self.key_base, + season_name, + period_name, + price, + start_time, + end_time, + ) + ) + + current_day += timedelta(days=1) + + events.sort(key=lambda x: x.start) + return events + + def _get_current_season(self, date_to_check: datetime) -> str | None: + """Determine the active season for a given date.""" + local_date = dt_util.as_local(date_to_check) + year = local_date.year + + for season_name, season_data in self.seasons.items(): + if not season_data: + continue + + try: + from_month = season_data["fromMonth"] + from_day = season_data["fromDay"] + to_month = season_data["toMonth"] + to_day = season_data["toDay"] + + # Handle seasons that cross year boundaries + start_year = year + end_year = year + + # Season crosses year boundary (e.g., Oct-Mar) + if from_month > to_month or ( + from_month == to_month and from_day > to_day + ): + if local_date.month > from_month or ( + local_date.month == from_month and local_date.day >= from_day + ): + end_year = year + 1 + else: + start_year = year - 1 + + season_start = local_date.replace( + year=start_year, + month=from_month, + day=from_day, + hour=0, + minute=0, + second=0, + microsecond=0, + ) + season_end = local_date.replace( + year=end_year, + month=to_month, + day=to_day, + hour=0, + minute=0, + second=0, + 
microsecond=0, + ) + timedelta(days=1) + + if season_start <= local_date < season_end: + return season_name + except (KeyError, ValueError): + continue + + return None + + def _get_price_for_period(self, season_name: str, period_name: str) -> float | None: + """Get the price for a specific season and period name.""" + try: + season_charges = self.charges.get(season_name, self.charges.get("ALL", {})) + rates = season_charges.get("rates", {}) + price = rates.get(period_name, rates.get("ALL")) + return float(price) if price is not None else None + except (KeyError, ValueError, TypeError): + return None + + def _async_update_attrs(self) -> None: + """Update the Calendar attributes from coordinator data.""" + self.seasons = self.coordinator.data.get(f"{self.key_base}_seasons", {}) + self.charges = self.coordinator.data.get(f"{self.key_base}_energy_charges", {}) + self._attr_available = bool(self.seasons and self.charges) diff --git a/homeassistant/components/teslemetry/coordinator.py b/homeassistant/components/teslemetry/coordinator.py index 37e37d4478202b..c19886ec0d0902 100644 --- a/homeassistant/components/teslemetry/coordinator.py +++ b/homeassistant/components/teslemetry/coordinator.py @@ -104,6 +104,7 @@ async def _async_update_data(self) -> dict[str, Any]: translation_domain=DOMAIN, translation_key="update_failed", ) from e + return flatten(data) @@ -200,7 +201,11 @@ async def _async_update_data(self) -> dict[str, Any]: translation_domain=DOMAIN, translation_key="update_failed", ) from e - return flatten(data) + + return flatten( + data, + skip_keys=["daily_charges", "demand_charges", "energy_charges", "seasons"], + ) class TeslemetryEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]): diff --git a/homeassistant/components/teslemetry/helpers.py b/homeassistant/components/teslemetry/helpers.py index e8afe8811ec045..cfca3a07805aa9 100644 --- a/homeassistant/components/teslemetry/helpers.py +++ b/homeassistant/components/teslemetry/helpers.py @@ -11,14 
+11,20 @@ from .const import DOMAIN, LOGGER -def flatten(data: dict[str, Any], parent: str | None = None) -> dict[str, Any]: +def flatten( + data: dict[str, Any], + parent: str | None = None, + *, + skip_keys: list[str] | None = None, +) -> dict[str, Any]: """Flatten the data structure.""" result = {} for key, value in data.items(): + skip = skip_keys and key in skip_keys if parent: key = f"{parent}_{key}" - if isinstance(value, dict): - result.update(flatten(value, key)) + if isinstance(value, dict) and not skip: + result.update(flatten(value, key, skip_keys=skip_keys)) else: result[key] = value return result diff --git a/homeassistant/components/teslemetry/strings.json b/homeassistant/components/teslemetry/strings.json index 2900cf3c7dc42e..6041f3d87c4702 100644 --- a/homeassistant/components/teslemetry/strings.json +++ b/homeassistant/components/teslemetry/strings.json @@ -272,6 +272,14 @@ "name": "Wake" } }, + "calendar": { + "tariff_content_v2": { + "name": "Buy tariff" + }, + "tariff_content_v2_sell_tariff": { + "name": "Sell tariff" + } + }, "climate": { "climate_state_cabin_overheat_protection": { "name": "Cabin overheat protection" diff --git a/homeassistant/components/waze_travel_time/__init__.py b/homeassistant/components/waze_travel_time/__init__.py index 093a35177a00f8..4dd901e8bdcc32 100644 --- a/homeassistant/components/waze_travel_time/__init__.py +++ b/homeassistant/components/waze_travel_time/__init__.py @@ -1,6 +1,7 @@ """The waze_travel_time component.""" import asyncio +from datetime import timedelta import logging from pywaze.route_calculator import WazeRouteCalculator @@ -18,6 +19,8 @@ from homeassistant.helpers.location import find_coordinates from homeassistant.helpers.selector import ( BooleanSelector, + DurationSelector, + DurationSelectorConfig, SelectSelector, SelectSelectorConfig, SelectSelectorMode, @@ -35,9 +38,11 @@ CONF_INCL_FILTER, CONF_ORIGIN, CONF_REALTIME, + CONF_TIME_DELTA, CONF_UNITS, CONF_VEHICLE_TYPE, DEFAULT_FILTER, + 
DEFAULT_TIME_DELTA, DEFAULT_VEHICLE_TYPE, DOMAIN, METRIC_UNITS, @@ -95,6 +100,9 @@ multiple=True, ), ), + vol.Optional(CONF_TIME_DELTA): DurationSelector( + DurationSelectorConfig(allow_negative=True, enable_second=False) + ), } ) @@ -130,6 +138,13 @@ async def async_get_travel_times_service(service: ServiceCall) -> ServiceRespons origin = origin_coordinates or service.data[CONF_ORIGIN] destination = destination_coordinates or service.data[CONF_DESTINATION] + time_delta = int( + timedelta( + **service.data.get(CONF_TIME_DELTA, DEFAULT_TIME_DELTA) + ).total_seconds() + / 60 + ) + response = await async_get_travel_times( client=client, origin=origin, @@ -142,6 +157,7 @@ async def async_get_travel_times_service(service: ServiceCall) -> ServiceRespons units=service.data[CONF_UNITS], incl_filters=service.data.get(CONF_INCL_FILTER, DEFAULT_FILTER), excl_filters=service.data.get(CONF_EXCL_FILTER, DEFAULT_FILTER), + time_delta=time_delta, ) return {"routes": [vars(route) for route in response]} @@ -184,4 +200,22 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> config_entry.version, config_entry.minor_version, ) + + if config_entry.version == 2 and config_entry.minor_version == 1: + _LOGGER.debug( + "Migrating from version %s.%s", + config_entry.version, + config_entry.minor_version, + ) + options = dict(config_entry.options) + options[CONF_TIME_DELTA] = DEFAULT_TIME_DELTA + hass.config_entries.async_update_entry( + config_entry, options=options, minor_version=2 + ) + _LOGGER.debug( + "Migration to version %s.%s successful", + config_entry.version, + config_entry.minor_version, + ) + return True diff --git a/homeassistant/components/waze_travel_time/config_flow.py b/homeassistant/components/waze_travel_time/config_flow.py index 6ab6a4b121c8ac..1b97bed0a8847d 100644 --- a/homeassistant/components/waze_travel_time/config_flow.py +++ b/homeassistant/components/waze_travel_time/config_flow.py @@ -17,6 +17,8 @@ from homeassistant.core import 
HomeAssistant, callback from homeassistant.helpers.selector import ( BooleanSelector, + DurationSelector, + DurationSelectorConfig, SelectSelector, SelectSelectorConfig, SelectSelectorMode, @@ -35,11 +37,13 @@ CONF_INCL_FILTER, CONF_ORIGIN, CONF_REALTIME, + CONF_TIME_DELTA, CONF_UNITS, CONF_VEHICLE_TYPE, DEFAULT_FILTER, DEFAULT_NAME, DEFAULT_OPTIONS, + DEFAULT_TIME_DELTA, DOMAIN, IMPERIAL_UNITS, REGIONS, @@ -82,6 +86,12 @@ vol.Optional(CONF_AVOID_TOLL_ROADS): BooleanSelector(), vol.Optional(CONF_AVOID_SUBSCRIPTION_ROADS): BooleanSelector(), vol.Optional(CONF_AVOID_FERRIES): BooleanSelector(), + vol.Optional(CONF_TIME_DELTA): DurationSelector( + DurationSelectorConfig( + allow_negative=True, + enable_second=False, + ) + ), } ) @@ -102,7 +112,9 @@ ) -def default_options(hass: HomeAssistant) -> dict[str, str | bool | list[str]]: +def default_options( + hass: HomeAssistant, +) -> dict[str, str | bool | list[str] | dict[str, int]]: """Get the default options.""" defaults = DEFAULT_OPTIONS.copy() if hass.config.units is US_CUSTOMARY_SYSTEM: @@ -120,6 +132,8 @@ async def async_step_init(self, user_input=None) -> ConfigFlowResult: user_input[CONF_INCL_FILTER] = DEFAULT_FILTER if user_input.get(CONF_EXCL_FILTER) is None: user_input[CONF_EXCL_FILTER] = DEFAULT_FILTER + if user_input.get(CONF_TIME_DELTA) is None: + user_input[CONF_TIME_DELTA] = DEFAULT_TIME_DELTA return self.async_create_entry( title="", data=user_input, @@ -137,6 +151,7 @@ class WazeConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Waze Travel Time.""" VERSION = 2 + MINOR_VERSION = 2 @staticmethod @callback diff --git a/homeassistant/components/waze_travel_time/const.py b/homeassistant/components/waze_travel_time/const.py index 7c77f43574d670..894c8a6c0a8280 100644 --- a/homeassistant/components/waze_travel_time/const.py +++ b/homeassistant/components/waze_travel_time/const.py @@ -15,8 +15,10 @@ CONF_AVOID_TOLL_ROADS = "avoid_toll_roads" CONF_AVOID_SUBSCRIPTION_ROADS = 
"avoid_subscription_roads" CONF_AVOID_FERRIES = "avoid_ferries" +CONF_TIME_DELTA = "time_delta" DEFAULT_NAME = "Waze Travel Time" +DEFAULT_TIME_DELTA = {"minutes": 0} DEFAULT_REALTIME = True DEFAULT_VEHICLE_TYPE = "car" DEFAULT_AVOID_TOLL_ROADS = False @@ -31,7 +33,7 @@ REGIONS = ["us", "na", "eu", "il", "au"] VEHICLE_TYPES = ["car", "taxi", "motorcycle"] -DEFAULT_OPTIONS: dict[str, str | bool | list[str]] = { +DEFAULT_OPTIONS: dict[str, str | bool | list[str] | dict[str, int]] = { CONF_REALTIME: DEFAULT_REALTIME, CONF_VEHICLE_TYPE: DEFAULT_VEHICLE_TYPE, CONF_UNITS: METRIC_UNITS, @@ -40,4 +42,5 @@ CONF_AVOID_TOLL_ROADS: DEFAULT_AVOID_TOLL_ROADS, CONF_INCL_FILTER: DEFAULT_FILTER, CONF_EXCL_FILTER: DEFAULT_FILTER, + CONF_TIME_DELTA: DEFAULT_TIME_DELTA, } diff --git a/homeassistant/components/waze_travel_time/coordinator.py b/homeassistant/components/waze_travel_time/coordinator.py index 23dfea86ed2c3e..0cf4f4ef78359c 100644 --- a/homeassistant/components/waze_travel_time/coordinator.py +++ b/homeassistant/components/waze_travel_time/coordinator.py @@ -25,6 +25,7 @@ CONF_INCL_FILTER, CONF_ORIGIN, CONF_REALTIME, + CONF_TIME_DELTA, CONF_UNITS, CONF_VEHICLE_TYPE, DOMAIN, @@ -51,6 +52,7 @@ async def async_get_travel_times( units: Literal["metric", "imperial"] = "metric", incl_filters: Collection[str] | None = None, excl_filters: Collection[str] | None = None, + time_delta: int = 0, ) -> list[CalcRoutesResponse]: """Get all available routes.""" @@ -74,6 +76,7 @@ async def async_get_travel_times( avoid_ferries=avoid_ferries, real_time=realtime, alternatives=3, + time_delta=time_delta, ) if len(routes) < 1: @@ -204,6 +207,11 @@ async def _async_update_data(self) -> WazeTravelTimeData: CONF_AVOID_SUBSCRIPTION_ROADS ] avoid_ferries = self.config_entry.options[CONF_AVOID_FERRIES] + time_delta = int( + timedelta(**self.config_entry.options[CONF_TIME_DELTA]).total_seconds() + / 60 + ) + routes = await async_get_travel_times( self.client, origin_coordinates, @@ -216,6 +224,7 @@ 
async def _async_update_data(self) -> WazeTravelTimeData: self.config_entry.options[CONF_UNITS], incl_filter, excl_filter, + time_delta, ) if len(routes) < 1: travel_data = WazeTravelTimeData( diff --git a/homeassistant/components/waze_travel_time/services.yaml b/homeassistant/components/waze_travel_time/services.yaml index fd5f2e9adea6a7..6d1faf2904510a 100644 --- a/homeassistant/components/waze_travel_time/services.yaml +++ b/homeassistant/components/waze_travel_time/services.yaml @@ -65,3 +65,7 @@ get_travel_times: selector: text: multiple: true + time_delta: + required: false + selector: + duration: diff --git a/homeassistant/components/waze_travel_time/strings.json b/homeassistant/components/waze_travel_time/strings.json index dcbf2edef6b8ce..55bb7cf995b163 100644 --- a/homeassistant/components/waze_travel_time/strings.json +++ b/homeassistant/components/waze_travel_time/strings.json @@ -29,6 +29,7 @@ "excl_filter": "Exact street name which must NOT be part of the selected route", "incl_filter": "Exact street name which must be part of the selected route", "realtime": "Realtime travel time?", + "time_delta": "Time delta", "units": "Units", "vehicle_type": "Vehicle type" }, @@ -100,6 +101,10 @@ "description": "The region. Controls which Waze server is used.", "name": "[%key:component::waze_travel_time::config::step::user::data::region%]" }, + "time_delta": { + "description": "Time offset from now to calculate the route for. 
Positive values are in the future, negative values are in the past.", + "name": "Time delta" + }, "units": { "description": "Which unit system to use.", "name": "[%key:component::waze_travel_time::options::step::init::data::units%]" diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index 18a3d362f03504..dbaefc4f1cf8c3 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -1,13 +1,13 @@ { "config": { "abort": { - "addon_get_discovery_info_failed": "Failed to get Z-Wave app discovery info.", - "addon_info_failed": "Failed to get Z-Wave app info.", - "addon_install_failed": "Failed to install the Z-Wave app.", - "addon_required": "The Z-Wave migration flow requires the integration to be configured using the Z-Wave Supervisor app. If you are using Z-Wave JS UI, please follow our [migration instructions]({zwave_js_ui_migration}).", - "addon_set_config_failed": "Failed to set Z-Wave configuration.", - "addon_start_failed": "Failed to start the Z-Wave app.", - "addon_stop_failed": "Failed to stop the Z-Wave app.", + "addon_get_discovery_info_failed": "Failed to get Z-Wave JS app discovery info.", + "addon_info_failed": "Failed to get Z-Wave JS app info.", + "addon_install_failed": "Failed to install the Z-Wave JS app.", + "addon_required": "The Z-Wave migration flow requires the integration to be configured using the Z-Wave JS app. 
If you are using Z-Wave JS UI, please follow our [migration instructions]({zwave_js_ui_migration}).", + "addon_set_config_failed": "Failed to set Z-Wave JS app configuration.", + "addon_start_failed": "Failed to start the Z-Wave JS app.", + "addon_stop_failed": "Failed to stop the Z-Wave JS app.", "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "backup_failed": "Failed to back up network.", @@ -17,15 +17,15 @@ "discovery_requires_supervisor": "Discovery requires the Home Assistant Supervisor.", "migration_low_sdk_version": "The SDK version of the old adapter is lower than {ok_sdk_version}. This means it's not possible to migrate the non-volatile memory (NVM) of the old adapter to another adapter.\n\nCheck the documentation on the manufacturer support pages of the old adapter, if it's possible to upgrade the firmware of the old adapter to a version that is built with SDK version {ok_sdk_version} or higher.", "migration_successful": "Migration successful.", - "not_hassio": "ESPHome discovery requires Home Assistant to configure the Z-Wave app.", + "not_hassio": "ESPHome discovery requires Home Assistant to configure the Z-Wave JS app.", "not_zwave_device": "Discovered device is not a Z-Wave device.", - "not_zwave_js_addon": "Discovered app is not the official Z-Wave app.", + "not_zwave_js_addon": "Discovered app is not the official Z-Wave JS app.", "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "reset_failed": "Failed to reset adapter.", "usb_ports_failed": "Failed to get USB devices." }, "error": { - "addon_start_failed": "Failed to start the Z-Wave app. Check the configuration.", + "addon_start_failed": "Failed to start the Z-Wave JS app. 
Check the configuration.", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_ws_url": "Invalid websocket URL", "unknown": "[%key:common::config_flow::error::unknown%]" @@ -65,7 +65,7 @@ "usb_path": "[%key:common::config_flow::data::usb_path%]" }, "description": "Select your Z-Wave adapter", - "title": "Enter the Z-Wave app configuration" + "title": "Enter the Z-Wave JS app configuration" }, "configure_security_keys": { "data": { @@ -84,7 +84,7 @@ "title": "Migrate to a new adapter" }, "hassio_confirm": { - "description": "Do you want to set up the Z-Wave integration with the Z-Wave app?" + "description": "Do you want to set up the Z-Wave integration with the Z-Wave JS app?" }, "install_addon": { "title": "Installing app" @@ -127,9 +127,9 @@ }, "on_supervisor": { "data": { - "use_addon": "Use the Z-Wave Supervisor app" + "use_addon": "Use the Z-Wave JS app" }, - "description": "Do you want to use the Z-Wave Supervisor app?", + "description": "Do you want to use the Z-Wave JS app?", "title": "Select connection method" }, "on_supervisor_reconfigure": { diff --git a/homeassistant/generated/entity_platforms.py b/homeassistant/generated/entity_platforms.py index 7010ffc9be73c1..718c3745be890b 100644 --- a/homeassistant/generated/entity_platforms.py +++ b/homeassistant/generated/entity_platforms.py @@ -29,6 +29,7 @@ class EntityPlatforms(StrEnum): HUMIDIFIER = "humidifier" IMAGE = "image" IMAGE_PROCESSING = "image_processing" + INFRARED = "infrared" LAWN_MOWER = "lawn_mower" LIGHT = "light" LOCK = "lock" diff --git a/mypy.ini b/mypy.ini index b1f029ceae3160..79f7c850ff4dee 100644 --- a/mypy.ini +++ b/mypy.ini @@ -2646,6 +2646,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.infrared.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true 
+warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.input_button.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements.txt b/requirements.txt index d9ce90d3291545..ad9464932e716f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -31,6 +31,7 @@ home-assistant-bluetooth==1.13.1 home-assistant-intents==2026.2.13 httpx==0.28.1 ifaddr==0.2.0 +infrared-protocols==1.0.0 Jinja2==3.1.6 lru-dict==1.3.0 mutagen==1.47.0 diff --git a/requirements_all.txt b/requirements_all.txt index eb335296c0f02e..d2cad36d058cb5 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1315,6 +1315,9 @@ influxdb-client==1.50.0 # homeassistant.components.influxdb influxdb==5.3.1 +# homeassistant.components.infrared +infrared-protocols==1.0.0 + # homeassistant.components.inkbird inkbird-ble==1.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 436e0c9a003ff0..4ffd87f4305641 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1164,6 +1164,9 @@ influxdb-client==1.50.0 # homeassistant.components.influxdb influxdb==5.3.1 +# homeassistant.components.infrared +infrared-protocols==1.0.0 + # homeassistant.components.inkbird inkbird-ble==1.1.1 diff --git a/tests/components/alexa_devices/test_services.py b/tests/components/alexa_devices/test_services.py index db7745ee5b7ae2..42a2ee36c5be1f 100644 --- a/tests/components/alexa_devices/test_services.py +++ b/tests/components/alexa_devices/test_services.py @@ -10,9 +10,6 @@ ATTR_INFO_SKILL, ATTR_SOUND, ATTR_TEXT_COMMAND, - SERVICE_INFO_SKILL, - SERVICE_SOUND_NOTIFICATION, - SERVICE_TEXT_COMMAND, ) from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_DEVICE_ID @@ -35,9 +32,9 @@ async def test_setup_services( await setup_integration(hass, mock_config_entry) assert (services := hass.services.async_services_for_domain(DOMAIN)) - assert SERVICE_TEXT_COMMAND in services - assert 
SERVICE_SOUND_NOTIFICATION in services - assert SERVICE_INFO_SKILL in services + assert "send_text_command" in services + assert "send_sound" in services + assert "send_info_skill" in services async def test_info_skill_service( @@ -58,7 +55,7 @@ async def test_info_skill_service( await hass.services.async_call( DOMAIN, - SERVICE_INFO_SKILL, + "send_info_skill", { ATTR_INFO_SKILL: "tell_joke", ATTR_DEVICE_ID: device_entry.id, @@ -88,7 +85,7 @@ async def test_send_sound_service( await hass.services.async_call( DOMAIN, - SERVICE_SOUND_NOTIFICATION, + "send_sound", { ATTR_SOUND: "bell_02", ATTR_DEVICE_ID: device_entry.id, @@ -118,7 +115,7 @@ async def test_send_text_service( await hass.services.async_call( DOMAIN, - SERVICE_TEXT_COMMAND, + "send_text_command", { ATTR_TEXT_COMMAND: "Play B.B.C. radio on TuneIn", ATTR_DEVICE_ID: device_entry.id, @@ -173,7 +170,7 @@ async def test_invalid_parameters( with pytest.raises(ServiceValidationError) as exc_info: await hass.services.async_call( DOMAIN, - SERVICE_SOUND_NOTIFICATION, + "send_sound", { ATTR_SOUND: sound, ATTR_DEVICE_ID: device_id, @@ -229,7 +226,7 @@ async def test_invalid_info_skillparameters( with pytest.raises(ServiceValidationError) as exc_info: await hass.services.async_call( DOMAIN, - SERVICE_INFO_SKILL, + "send_info_skill", { ATTR_INFO_SKILL: info_skill, ATTR_DEVICE_ID: device_id, @@ -266,7 +263,7 @@ async def test_config_entry_not_loaded( with pytest.raises(ServiceValidationError) as exc_info: await hass.services.async_call( DOMAIN, - SERVICE_SOUND_NOTIFICATION, + "send_sound", { ATTR_SOUND: "bell_02", ATTR_DEVICE_ID: device_entry.id, @@ -300,7 +297,7 @@ async def test_invalid_config_entry( # Call Service await hass.services.async_call( DOMAIN, - SERVICE_SOUND_NOTIFICATION, + "send_sound", { ATTR_SOUND: "bell_02", ATTR_DEVICE_ID: device_entry.id, @@ -332,7 +329,7 @@ async def test_missing_config_entry( with pytest.raises(ServiceValidationError) as exc_info: await hass.services.async_call( DOMAIN, - 
SERVICE_SOUND_NOTIFICATION, + "send_sound", { ATTR_SOUND: "bell_02", ATTR_DEVICE_ID: device_entry.id, diff --git a/tests/components/default_config/test_init.py b/tests/components/default_config/test_init.py index 9bff213bb749c8..8835e943076d92 100644 --- a/tests/components/default_config/test_init.py +++ b/tests/components/default_config/test_init.py @@ -5,6 +5,8 @@ import pytest from homeassistant import bootstrap +from homeassistant.components.default_config import DOMAIN +from homeassistant.components.homeassistant import DOMAIN as HOMEASSISTANT_DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import recorder as recorder_helper from homeassistant.setup import async_setup_component @@ -34,7 +36,7 @@ async def test_setup(hass: HomeAssistant) -> None: recorder_helper.async_initialize_recorder(hass) # default_config needs the homeassistant integration, assert it will be # automatically setup by bootstrap and set it up manually for this test - assert "homeassistant" in bootstrap.CORE_INTEGRATIONS - assert await async_setup_component(hass, "homeassistant", {"foo": "bar"}) + assert HOMEASSISTANT_DOMAIN in bootstrap.CORE_INTEGRATIONS + assert await async_setup_component(hass, HOMEASSISTANT_DOMAIN, {"foo": "bar"}) - assert await async_setup_component(hass, "default_config", {"foo": "bar"}) + assert await async_setup_component(hass, DOMAIN, {"foo": "bar"}) diff --git a/tests/components/diagnostics/__init__.py b/tests/components/diagnostics/__init__.py index d241ca09f4197e..7ed8868b231704 100644 --- a/tests/components/diagnostics/__init__.py +++ b/tests/components/diagnostics/__init__.py @@ -3,6 +3,7 @@ from http import HTTPStatus from typing import cast +from homeassistant.components.diagnostics import DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntry @@ -18,7 +19,7 @@ async def _get_diagnostics_for_config_entry( config_entry: 
ConfigEntry, ) -> JsonObjectType: """Return the diagnostics config entry for the specified domain.""" - assert await async_setup_component(hass, "diagnostics", {}) + assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() client = await hass_client() @@ -46,7 +47,7 @@ async def _get_diagnostics_for_device( device: DeviceEntry, ) -> JsonObjectType: """Return the diagnostics for the specified device.""" - assert await async_setup_component(hass, "diagnostics", {}) + assert await async_setup_component(hass, DOMAIN, {}) client = await hass_client() response = await client.get( diff --git a/tests/components/diagnostics/test_init.py b/tests/components/diagnostics/test_init.py index e27331811e6353..98686432f2e1d4 100644 --- a/tests/components/diagnostics/test_init.py +++ b/tests/components/diagnostics/test_init.py @@ -7,6 +7,7 @@ from freezegun import freeze_time import pytest +from homeassistant.components.diagnostics import DOMAIN from homeassistant.components.websocket_api import TYPE_RESULT from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, issue_registry as ir @@ -45,7 +46,7 @@ async def mock_diagnostics_integration(hass: HomeAssistant) -> None: "integration_without_diagnostics.diagnostics", Mock(), ) - assert await async_setup_component(hass, "diagnostics", {}) + assert await async_setup_component(hass, DOMAIN, {}) async def test_websocket( diff --git a/tests/components/history_stats/test_sensor.py b/tests/components/history_stats/test_sensor.py index fa75e72f4e1e13..8a1b2a4471b10c 100644 --- a/tests/components/history_stats/test_sensor.py +++ b/tests/components/history_stats/test_sensor.py @@ -929,6 +929,26 @@ def _fake_states(*args, **kwargs): "duration": {"hours": 2}, "type": "ratio", }, + { + "platform": "history_stats", + "entity_id": "binary_sensor.state", + "name": "sensor5", + "state": "on", + "start": "{{ utcnow().replace(hour=0, minute=0, second=0) }}", + "duration": 
{"hours": 2}, + "min_state_duration": {"minutes": 5}, + "type": "time", + }, + { + "platform": "history_stats", + "entity_id": "binary_sensor.state", + "name": "sensor6", + "state": "off", + "start": "{{ utcnow().replace(hour=0, minute=0, second=0) }}", + "duration": {"hours": 2}, + "min_state_duration": {"minutes": 20}, + "type": "time", + }, ] }, ) @@ -942,6 +962,8 @@ def _fake_states(*args, **kwargs): assert hass.states.get("sensor.sensor2").state == "0.0" assert hass.states.get("sensor.sensor3").state == "1" assert hass.states.get("sensor.sensor4").state == "0.0" + assert hass.states.get("sensor.sensor5").state == "0.0" + assert hass.states.get("sensor.sensor6").state == "0.0" one_hour_in = start_time + timedelta(minutes=60) with freeze_time(one_hour_in): @@ -952,6 +974,8 @@ def _fake_states(*args, **kwargs): assert hass.states.get("sensor.sensor2").state == "1.0" assert hass.states.get("sensor.sensor3").state == "1" assert hass.states.get("sensor.sensor4").state == "50.0" + assert hass.states.get("sensor.sensor5").state == "1.0" + assert hass.states.get("sensor.sensor6").state == "0.0" turn_off_time = start_time + timedelta(minutes=90) with freeze_time(turn_off_time): @@ -964,6 +988,8 @@ def _fake_states(*args, **kwargs): assert hass.states.get("sensor.sensor2").state == "1.5" assert hass.states.get("sensor.sensor3").state == "1" assert hass.states.get("sensor.sensor4").state == "75.0" + assert hass.states.get("sensor.sensor5").state == "1.5" + assert hass.states.get("sensor.sensor6").state == "0.0" turn_back_on_time = start_time + timedelta(minutes=105) with freeze_time(turn_back_on_time): @@ -974,6 +1000,8 @@ def _fake_states(*args, **kwargs): assert hass.states.get("sensor.sensor2").state == "1.5" assert hass.states.get("sensor.sensor3").state == "1" assert hass.states.get("sensor.sensor4").state == "75.0" + assert hass.states.get("sensor.sensor5").state == "1.5" + assert hass.states.get("sensor.sensor6").state == "0.0" with freeze_time(turn_back_on_time): 
hass.states.async_set("binary_sensor.state", "on") @@ -983,6 +1011,8 @@ def _fake_states(*args, **kwargs): assert hass.states.get("sensor.sensor2").state == "1.5" assert hass.states.get("sensor.sensor3").state == "2" assert hass.states.get("sensor.sensor4").state == "75.0" + assert hass.states.get("sensor.sensor5").state == "1.5" + assert hass.states.get("sensor.sensor6").state == "0.0" end_time = start_time + timedelta(minutes=120) with freeze_time(end_time): @@ -993,6 +1023,8 @@ def _fake_states(*args, **kwargs): assert hass.states.get("sensor.sensor2").state == "1.75" assert hass.states.get("sensor.sensor3").state == "2" assert hass.states.get("sensor.sensor4").state == "87.5" + assert hass.states.get("sensor.sensor5").state == "1.75" + assert hass.states.get("sensor.sensor6").state == "0.0" async def test_start_from_history_then_watch_state_changes_sliding( @@ -2125,3 +2157,429 @@ async def test_device_id( history_stats_entity = entity_registry.async_get("sensor.history_stats") assert history_stats_entity is not None assert history_stats_entity.device_id == source_entity.device_id + + +async def test_async_around_min_state_duration( + recorder_mock: Recorder, + hass: HomeAssistant, +) -> None: + """Test min_state_duration boundary where block becomes valid.""" + await hass.config.async_set_time_zone("UTC") + utcnow = dt_util.utcnow() + start_time = utcnow.replace(hour=0, minute=0, second=0, microsecond=0) + t0 = start_time + timedelta(minutes=9) + t1 = start_time + timedelta(minutes=10) + t2 = start_time + timedelta(minutes=11) + + # Start t0 t1 t2 End + # |---9min--|---1min--|---1min--|---1min--| + # |---on----|---on----|---on----|---on----| + + def _fake_states(*args, **kwargs): + return { + "binary_sensor.state": [ + ha.State( + "binary_sensor.state", + "on", + last_changed=start_time, + last_updated=start_time, + ), + ] + } + + with ( + patch( + "homeassistant.components.recorder.history.state_changes_during_period", + _fake_states, + ), + 
freeze_time(start_time), + ): + await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "history_stats", + "entity_id": "binary_sensor.state", + "name": "sensor1", + "state": "on", + "start": "{{ utcnow().replace(hour=0, minute=0, second=0) }}", + "duration": {"hours": 1}, + "min_state_duration": {"minutes": 10}, + "type": "time", + }, + { + "platform": "history_stats", + "entity_id": "binary_sensor.state", + "name": "sensor2", + "state": "on", + "start": "{{ utcnow().replace(hour=0, minute=0, second=0) }}", + "duration": {"hours": 1}, + "min_state_duration": {"minutes": 10}, + "type": "count", + }, + { + "platform": "history_stats", + "entity_id": "binary_sensor.state", + "name": "sensor3", + "state": "on", + "start": "{{ utcnow().replace(hour=0, minute=0, second=0) }}", + "duration": {"hours": 1}, + "min_state_duration": {"minutes": 10}, + "type": "ratio", + }, + ] + }, + ) + await hass.async_block_till_done() + + for i in range(1, 4): + await async_update_entity(hass, f"sensor.sensor{i}") + await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "0.0" + assert hass.states.get("sensor.sensor2").state == "0" + assert hass.states.get("sensor.sensor3").state == "0.0" + + with freeze_time(t0): + async_fire_time_changed(hass, t0) + await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "0.0" + assert hass.states.get("sensor.sensor2").state == "0" + assert hass.states.get("sensor.sensor3").state == "0.0" + + with freeze_time(t1): + async_fire_time_changed(hass, t1) + await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "0.17" + assert hass.states.get("sensor.sensor2").state == "1" + assert hass.states.get("sensor.sensor3").state == "16.7" + + with freeze_time(t2): + async_fire_time_changed(hass, t2) + await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "0.18" + assert 
hass.states.get("sensor.sensor2").state == "1" + assert hass.states.get("sensor.sensor3").state == "18.3" + + +async def test_async_around_min_state_duration_sliding_window( + recorder_mock: Recorder, + hass: HomeAssistant, +) -> None: + """Test min_state_duration with sliding window where block duration crosses threshold.""" + await hass.config.async_set_time_zone("UTC") + utcnow = dt_util.utcnow() + start_time = utcnow.replace(hour=1, minute=0, second=0, microsecond=0) + t0 = start_time + timedelta(minutes=60) + t1 = start_time + timedelta(minutes=109) + t2 = start_time + timedelta(minutes=110) + end = start_time + timedelta(minutes=111) + + # Start t0 t1 t2 End + # |--60min--|--49min--|---1min--|---1min--| + # |---on----|---off---|---off---|---off---| + + def _fake_states(*args, **kwargs): + return { + "binary_sensor.state": [ + ha.State( + "binary_sensor.state", + "on", + last_changed=start_time, + last_updated=start_time, + ), + ] + } + + with ( + patch( + "homeassistant.components.recorder.history.state_changes_during_period", + _fake_states, + ), + freeze_time(start_time), + ): + await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "history_stats", + "entity_id": "binary_sensor.state", + "name": "sensor1", + "state": "on", + "end": "{{ utcnow() }}", + "duration": {"hours": 1}, + "min_state_duration": {"minutes": 10}, + "type": "time", + }, + { + "platform": "history_stats", + "entity_id": "binary_sensor.state", + "name": "sensor2", + "state": "on", + "end": "{{ utcnow() }}", + "duration": {"hours": 1}, + "min_state_duration": {"minutes": 10}, + "type": "count", + }, + { + "platform": "history_stats", + "entity_id": "binary_sensor.state", + "name": "sensor3", + "state": "on", + "end": "{{ utcnow() }}", + "duration": {"hours": 1}, + "min_state_duration": {"minutes": 10}, + "type": "ratio", + }, + ] + }, + ) + await hass.async_block_till_done() + + for i in range(1, 4): + await async_update_entity(hass, f"sensor.sensor{i}") + 
await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "0.0" + assert hass.states.get("sensor.sensor2").state == "0" + assert hass.states.get("sensor.sensor3").state == "0.0" + + with freeze_time(t0): + hass.states.async_set("binary_sensor.state", "off") + await hass.async_block_till_done() + async_fire_time_changed(hass, t0) + await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "1.0" + assert hass.states.get("sensor.sensor2").state == "1" + assert hass.states.get("sensor.sensor3").state == "100.0" + + with freeze_time(t1): + async_fire_time_changed(hass, t1) + await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "0.18" + assert hass.states.get("sensor.sensor2").state == "1" + assert hass.states.get("sensor.sensor3").state == "18.3" + + with freeze_time(t2): + async_fire_time_changed(hass, t2) + await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "0.17" + assert hass.states.get("sensor.sensor2").state == "1" + assert hass.states.get("sensor.sensor3").state == "16.7" + + with freeze_time(end): + async_fire_time_changed(hass, end) + await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "0.0" + assert hass.states.get("sensor.sensor2").state == "0" + assert hass.states.get("sensor.sensor3").state == "0.0" + + +async def test_measure_multiple_with_min_state_duration( + recorder_mock: Recorder, hass: HomeAssistant +) -> None: + """Test measure for multiple states with min state duration.""" + start_time = dt_util.utcnow() - timedelta(minutes=40) + t0 = start_time + timedelta(minutes=10) + t1 = t0 + timedelta(minutes=10) + t2 = t1 + timedelta(minutes=10) + + # Start t0 t1 t2 End + # |--10min--|--10min--|--10min--|--10min--| + # |---blue--|--orange-|-default-|---blue--| + + def _fake_states(*args, **kwargs): + return { + "input_select.test_id": [ + ha.State( + "input_select.test_id", + "blue", + 
last_changed=start_time, + last_updated=start_time, + ), + ] + } + + with ( + patch( + "homeassistant.components.recorder.history.state_changes_during_period", + _fake_states, + ), + freeze_time(start_time), + ): + await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "history_stats", + "entity_id": "input_select.test_id", + "name": "sensor1", + "state": ["orange", "blue"], + "duration": {"hours": 1}, + "end": "{{ utcnow() }}", + "min_state_duration": {"minutes": 15}, + "type": "time", + }, + { + "platform": "history_stats", + "entity_id": "input_select.test_id", + "name": "sensor2", + "state": ["orange", "blue"], + "duration": {"hours": 1}, + "end": "{{ utcnow() }}", + "min_state_duration": {"minutes": 15}, + "type": "count", + }, + { + "platform": "history_stats", + "entity_id": "input_select.test_id", + "name": "sensor3", + "state": ["orange", "blue"], + "duration": {"hours": 1}, + "end": "{{ utcnow() }}", + "min_state_duration": {"minutes": 15}, + "type": "ratio", + }, + ] + }, + ) + await hass.async_block_till_done() + for i in range(1, 4): + await async_update_entity(hass, f"sensor.sensor{i}") + await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "0.0" + assert hass.states.get("sensor.sensor2").state == "0" + assert hass.states.get("sensor.sensor3").state == "0.0" + + with freeze_time(t0): + hass.states.async_set("input_select.test_id", "orange") + await hass.async_block_till_done() + async_fire_time_changed(hass, t0) + await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "0.0" + assert hass.states.get("sensor.sensor2").state == "0" + assert hass.states.get("sensor.sensor3").state == "0.0" + + with freeze_time(t1): + hass.states.async_set("input_select.test_id", "blue") + await hass.async_block_till_done() + async_fire_time_changed(hass, t1) + await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "0.33" + assert 
hass.states.get("sensor.sensor2").state == "1" + assert hass.states.get("sensor.sensor3").state == "33.3" + + with freeze_time(t2): + hass.states.async_set("input_select.test_id", "blue") + await hass.async_block_till_done() + async_fire_time_changed(hass, t2) + await hass.async_block_till_done() + + assert hass.states.get("sensor.sensor1").state == "0.5" + assert hass.states.get("sensor.sensor2").state == "1" + assert hass.states.get("sensor.sensor3").state == "50.0" + + +async def test_open_block_precision_same_second( + recorder_mock: Recorder, hass: HomeAssistant +) -> None: + """Test open block precision.""" + + await hass.config.async_set_time_zone("UTC") + + base = dt_util.utcnow().replace(microsecond=0) + state_change_time = base + timedelta(microseconds=500) + + def _fake_states(*args, **kwargs): + return { + "binary_sensor.precision": [ + ha.State( + "binary_sensor.precision", + "on", + last_changed=state_change_time, + last_updated=state_change_time, + ), + ] + } + + with ( + patch( + "homeassistant.components.recorder.history.state_changes_during_period", + _fake_states, + ), + freeze_time(base), + ): + await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "history_stats", + "entity_id": "binary_sensor.precision", + "name": "precision_count", + "state": "on", + "start": "{{ utcnow().replace(microsecond=0) }}", + "duration": {"minutes": 5}, + "min_state_duration": {"seconds": 0}, + "type": "count", + }, + { + "platform": "history_stats", + "entity_id": "binary_sensor.precision", + "name": "precision_time", + "state": "on", + "start": "{{ utcnow().replace(microsecond=0) }}", + "duration": {"minutes": 5}, + "min_state_duration": {"seconds": 0}, + "type": "time", + }, + { + "platform": "history_stats", + "entity_id": "binary_sensor.precision", + "name": "precision_ratio", + "state": "on", + "start": "{{ utcnow().replace(microsecond=0) }}", + "duration": {"minutes": 5}, + "min_state_duration": {"seconds": 0}, + "type": "ratio", 
+ }, + ] + }, + ) + await hass.async_block_till_done() + + await async_update_entity(hass, "sensor.precision_count") + await hass.async_block_till_done() + + with freeze_time(base): + async_fire_time_changed(hass, base) + await hass.async_block_till_done() + + assert hass.states.get("sensor.precision_count").state == "1" + assert hass.states.get("sensor.precision_time").state == "0.0" + assert hass.states.get("sensor.precision_ratio").state == "0.0" + + with freeze_time(state_change_time): + async_fire_time_changed(hass, state_change_time) + await hass.async_block_till_done() + + assert hass.states.get("sensor.precision_count").state == "1" + assert hass.states.get("sensor.precision_time").state == "0.0" + assert hass.states.get("sensor.precision_ratio").state == "0.0" diff --git a/tests/components/infrared/__init__.py b/tests/components/infrared/__init__.py new file mode 100644 index 00000000000000..f5712a639f4b24 --- /dev/null +++ b/tests/components/infrared/__init__.py @@ -0,0 +1 @@ +"""Tests for the Infrared integration.""" diff --git a/tests/components/infrared/conftest.py b/tests/components/infrared/conftest.py new file mode 100644 index 00000000000000..b1df1681893cbd --- /dev/null +++ b/tests/components/infrared/conftest.py @@ -0,0 +1,38 @@ +"""Common fixtures for the Infrared tests.""" + +from infrared_protocols import Command as InfraredCommand +import pytest + +from homeassistant.components.infrared import InfraredEntity +from homeassistant.components.infrared.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + + +@pytest.fixture +async def init_integration(hass: HomeAssistant) -> None: + """Set up the Infrared integration for testing.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + +class MockInfraredEntity(InfraredEntity): + """Mock infrared entity for testing.""" + + _attr_has_entity_name = True + _attr_name = "Test IR transmitter" 
+ + def __init__(self, unique_id: str) -> None: + """Initialize mock entity.""" + self._attr_unique_id = unique_id + self.send_command_calls: list[InfraredCommand] = [] + + async def async_send_command(self, command: InfraredCommand) -> None: + """Mock send command.""" + self.send_command_calls.append(command) + + +@pytest.fixture +def mock_infrared_entity() -> MockInfraredEntity: + """Return a mock infrared entity.""" + return MockInfraredEntity("test_ir_transmitter") diff --git a/tests/components/infrared/test_init.py b/tests/components/infrared/test_init.py new file mode 100644 index 00000000000000..d8653db986cef2 --- /dev/null +++ b/tests/components/infrared/test_init.py @@ -0,0 +1,152 @@ +"""Tests for the Infrared integration setup.""" + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from infrared_protocols import NECCommand +import pytest + +from homeassistant.components.infrared import ( + DATA_COMPONENT, + DOMAIN, + async_get_emitters, + async_send_command, +) +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.core import HomeAssistant, State +from homeassistant.exceptions import HomeAssistantError +from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util + +from .conftest import MockInfraredEntity + +from tests.common import mock_restore_cache + + +async def test_get_entities_integration_setup(hass: HomeAssistant) -> None: + """Test getting entities when the integration is not setup.""" + assert async_get_emitters(hass) == [] + + +@pytest.mark.usefixtures("init_integration") +async def test_get_entities_empty(hass: HomeAssistant) -> None: + """Test getting entities when none are registered.""" + assert async_get_emitters(hass) == [] + + +@pytest.mark.usefixtures("init_integration") +async def test_infrared_entity_initial_state( + hass: HomeAssistant, mock_infrared_entity: MockInfraredEntity +) -> None: + """Test infrared entity has no 
state before any command is sent.""" + component = hass.data[DATA_COMPONENT] + await component.async_add_entities([mock_infrared_entity]) + + state = hass.states.get("infrared.test_ir_transmitter") + assert state is not None + assert state.state == STATE_UNKNOWN + + +@pytest.mark.usefixtures("init_integration") +async def test_async_send_command_success( + hass: HomeAssistant, + mock_infrared_entity: MockInfraredEntity, + freezer: FrozenDateTimeFactory, +) -> None: + """Test sending command via async_send_command helper.""" + # Add the mock entity to the component + component = hass.data[DATA_COMPONENT] + await component.async_add_entities([mock_infrared_entity]) + + # Freeze time so we can verify the state update + now = dt_util.utcnow() + freezer.move_to(now) + + command = NECCommand(address=0x04FB, command=0x08F7, modulation=38000) + await async_send_command(hass, mock_infrared_entity.entity_id, command) + + assert len(mock_infrared_entity.send_command_calls) == 1 + assert mock_infrared_entity.send_command_calls[0] is command + + state = hass.states.get("infrared.test_ir_transmitter") + assert state is not None + assert state.state == now.isoformat(timespec="milliseconds") + + +@pytest.mark.usefixtures("init_integration") +async def test_async_send_command_error_does_not_update_state( + hass: HomeAssistant, + mock_infrared_entity: MockInfraredEntity, +) -> None: + """Test that state is not updated when async_send_command raises an error.""" + component = hass.data[DATA_COMPONENT] + await component.async_add_entities([mock_infrared_entity]) + + state = hass.states.get("infrared.test_ir_transmitter") + assert state is not None + assert state.state == STATE_UNKNOWN + + command = NECCommand(address=0x04FB, command=0x08F7, modulation=38000) + + mock_infrared_entity.async_send_command = AsyncMock( + side_effect=HomeAssistantError("Transmission failed") + ) + + with pytest.raises(HomeAssistantError, match="Transmission failed"): + await async_send_command(hass, 
mock_infrared_entity.entity_id, command) + + # Verify state was not updated after the error + state = hass.states.get("infrared.test_ir_transmitter") + assert state is not None + assert state.state == STATE_UNKNOWN + + +@pytest.mark.usefixtures("init_integration") +async def test_async_send_command_entity_not_found(hass: HomeAssistant) -> None: + """Test async_send_command raises error when entity not found.""" + command = NECCommand( + address=0x04FB, command=0x08F7, modulation=38000, repeat_count=1 + ) + + with pytest.raises( + HomeAssistantError, + match="Infrared entity `infrared.nonexistent_entity` not found", + ): + await async_send_command(hass, "infrared.nonexistent_entity", command) + + +async def test_async_send_command_component_not_loaded(hass: HomeAssistant) -> None: + """Test async_send_command raises error when component not loaded.""" + command = NECCommand( + address=0x04FB, command=0x08F7, modulation=38000, repeat_count=1 + ) + + with pytest.raises(HomeAssistantError, match="component_not_loaded"): + await async_send_command(hass, "infrared.some_entity", command) + + +@pytest.mark.parametrize( + ("restored_value", "expected_state"), + [ + ("2026-01-01T12:00:00.000+00:00", "2026-01-01T12:00:00.000+00:00"), + (STATE_UNAVAILABLE, STATE_UNKNOWN), + ], +) +async def test_infrared_entity_state_restore( + hass: HomeAssistant, + mock_infrared_entity: MockInfraredEntity, + restored_value: str, + expected_state: str, +) -> None: + """Test infrared entity state restore.""" + mock_restore_cache(hass, [State("infrared.test_ir_transmitter", restored_value)]) + + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + component = hass.data[DATA_COMPONENT] + await component.async_add_entities([mock_infrared_entity]) + + state = hass.states.get("infrared.test_ir_transmitter") + assert state is not None + assert state.state == expected_state diff --git a/tests/components/kitchen_sink/test_config_flow.py 
b/tests/components/kitchen_sink/test_config_flow.py index bc85edc592d7ca..77733a7f4a0d29 100644 --- a/tests/components/kitchen_sink/test_config_flow.py +++ b/tests/components/kitchen_sink/test_config_flow.py @@ -7,12 +7,15 @@ from homeassistant import config_entries, setup from homeassistant.components.kitchen_sink import DOMAIN +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry +ENTITY_IR_TRANSMITTER = "infrared.ir_blaster_infrared_transmitter" + @pytest.fixture def no_platforms() -> Generator[None]: @@ -24,6 +27,16 @@ def no_platforms() -> Generator[None]: yield +@pytest.fixture +def infrared_only() -> Generator[None]: + """Enable only the infrared platform.""" + with patch( + "homeassistant.components.kitchen_sink.COMPONENTS_WITH_DEMO_PLATFORM", + [Platform.INFRARED], + ): + yield + + async def test_import(hass: HomeAssistant) -> None: """Test that we can import a config entry.""" with patch("homeassistant.components.kitchen_sink.async_setup_entry"): @@ -193,3 +206,57 @@ async def test_subentry_reconfigure_flow(hass: HomeAssistant) -> None: } await hass.async_block_till_done() + + +@pytest.mark.usefixtures("infrared_only") +async def test_infrared_fan_subentry_flow(hass: HomeAssistant) -> None: + """Test infrared fan subentry flow creates an entry.""" + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + await hass.async_block_till_done() + + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + + result = await hass.config_entries.subentries.async_init( + (config_entry.entry_id, "infrared_fan"), + context={"source": config_entries.SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "name": "Living Room 
Fan", + "infrared_entity_id": ENTITY_IR_TRANSMITTER, + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + subentry_id = [ + sid + for sid, s in config_entry.subentries.items() + if s.subentry_type == "infrared_fan" + ][0] + assert config_entry.subentries[subentry_id] == config_entries.ConfigSubentry( + data={"infrared_entity_id": ENTITY_IR_TRANSMITTER}, + subentry_id=subentry_id, + subentry_type="infrared_fan", + title="Living Room Fan", + unique_id=None, + ) + + +@pytest.mark.usefixtures("no_platforms") +async def test_infrared_fan_subentry_flow_no_emitters(hass: HomeAssistant) -> None: + """Test infrared fan subentry flow aborts when no emitters are available.""" + config_entry = MockConfigEntry(domain=DOMAIN) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.subentries.async_init( + (config_entry.entry_id, "infrared_fan"), + context={"source": config_entries.SOURCE_USER}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_emitters" diff --git a/tests/components/kitchen_sink/test_infrared.py b/tests/components/kitchen_sink/test_infrared.py new file mode 100644 index 00000000000000..0783087dc210a5 --- /dev/null +++ b/tests/components/kitchen_sink/test_infrared.py @@ -0,0 +1,55 @@ +"""The tests for the kitchen_sink infrared platform.""" + +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory +import infrared_protocols +import pytest + +from homeassistant.components.infrared import async_send_command +from homeassistant.components.kitchen_sink import DOMAIN +from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util + +ENTITY_IR_TRANSMITTER = "infrared.ir_blaster_infrared_transmitter" + + +@pytest.fixture +async def 
infrared_only() -> None: + """Enable only the infrared platform.""" + with patch( + "homeassistant.components.kitchen_sink.COMPONENTS_WITH_DEMO_PLATFORM", + [Platform.INFRARED], + ): + yield + + +@pytest.fixture(autouse=True) +async def setup_comp(hass: HomeAssistant, infrared_only: None) -> None: + """Set up demo component.""" + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + await hass.async_block_till_done() + + +async def test_send_command( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: + """Test sending an infrared command.""" + state = hass.states.get(ENTITY_IR_TRANSMITTER) + assert state + assert state.state == STATE_UNKNOWN + + now = dt_util.parse_datetime("2021-01-09 12:00:00+00:00") + assert now is not None + freezer.move_to(now) + + command = infrared_protocols.NECCommand( + address=0x04, command=0x08, modulation=38000 + ) + await async_send_command(hass, ENTITY_IR_TRANSMITTER, command) + + state = hass.states.get(ENTITY_IR_TRANSMITTER) + assert state + assert state.state == now.isoformat(timespec="milliseconds") diff --git a/tests/components/portainer/snapshots/test_init.ambr b/tests/components/portainer/snapshots/test_init.ambr new file mode 100644 index 00000000000000..5166906493a6da --- /dev/null +++ b/tests/components/portainer/snapshots/test_init.ambr @@ -0,0 +1,208 @@ +# serializer version: 1 +# name: test_device_registry + list([ + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://127.0.0.1:9000/#!/1/docker/dashboard', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'portainer', + 'portainer_test_entry_123_1', + ), + }), + 'labels': set({ + }), + 'manufacturer': 'Portainer', + 'model': 'Endpoint', + 'model_id': None, + 'name': 'my-environment', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'sw_version': 
None, + 'via_device_id': None, + }), + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://127.0.0.1:9000/#!/1/docker/containers/aa86eacfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'portainer', + 'portainer_test_entry_123_1_funny_chatelet', + ), + }), + 'labels': set({ + }), + 'manufacturer': 'Portainer', + 'model': 'Container', + 'model_id': None, + 'name': 'funny_chatelet', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'sw_version': None, + 'via_device_id': , + }), + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://127.0.0.1:9000/#!/1/docker/containers/dd19facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'portainer', + 'portainer_test_entry_123_1_focused_einstein', + ), + }), + 'labels': set({ + }), + 'manufacturer': 'Portainer', + 'model': 'Container', + 'model_id': None, + 'name': 'focused_einstein', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'sw_version': None, + 'via_device_id': , + }), + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://127.0.0.1:9000/#!/1/docker/containers/ee20facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'portainer', + 'portainer_test_entry_123_1_practical_morse', + ), + }), + 'labels': set({ + }), + 'manufacturer': 'Portainer', + 'model': 'Container', + 'model_id': None, + 
'name': 'practical_morse', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'sw_version': None, + 'via_device_id': , + }), + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://127.0.0.1:9000/#!/1/docker/stacks/webstack', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'portainer', + 'portainer_test_entry_123_1_stack_1', + ), + }), + 'labels': set({ + }), + 'manufacturer': 'Portainer', + 'model': 'Stack', + 'model_id': None, + 'name': 'webstack', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'sw_version': None, + 'via_device_id': , + }), + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://127.0.0.1:9000/#!/1/docker/containers/bb97facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'portainer', + 'portainer_test_entry_123_1_serene_banach', + ), + }), + 'labels': set({ + }), + 'manufacturer': 'Portainer', + 'model': 'Container', + 'model_id': None, + 'name': 'serene_banach', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'sw_version': None, + 'via_device_id': , + }), + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://127.0.0.1:9000/#!/1/docker/containers/cc08facfb3b3ed4cd362c1e88fc89a53908ad05fb3a4103bca3f9b28292d14bf', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'portainer', + 'portainer_test_entry_123_1_stoic_turing', + ), + }), + 'labels': set({ + }), + 'manufacturer': 'Portainer', + 'model': 
'Container', + 'model_id': None, + 'name': 'stoic_turing', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'sw_version': None, + 'via_device_id': , + }), + ]) +# --- diff --git a/tests/components/portainer/test_init.py b/tests/components/portainer/test_init.py index 85a82309739a04..ef1a6caa67c1fc 100644 --- a/tests/components/portainer/test_init.py +++ b/tests/components/portainer/test_init.py @@ -8,6 +8,7 @@ PortainerTimeoutError, ) import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.portainer.const import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -166,3 +167,19 @@ async def test_migration_v3_to_v4( (DOMAIN, f"{entry.entry_id}_1_adguard"), } assert entity_after.unique_id == f"{entry.entry_id}_1_adguard_container" + + +async def test_device_registry( + hass: HomeAssistant, + mock_portainer_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test devices are correctly registered.""" + await setup_integration(hass, mock_config_entry) + + device_entries = dr.async_entries_for_config_entry( + device_registry, mock_config_entry.entry_id + ) + assert device_entries == snapshot diff --git a/tests/components/teslemetry/const.py b/tests/components/teslemetry/const.py index 80c423190ccbea..e2f84b66050c10 100644 --- a/tests/components/teslemetry/const.py +++ b/tests/components/teslemetry/const.py @@ -20,6 +20,10 @@ VEHICLE_DATA_ALT = load_json_object_fixture("vehicle_data_alt.json", DOMAIN) LIVE_STATUS = load_json_object_fixture("live_status.json", DOMAIN) SITE_INFO = load_json_object_fixture("site_info.json", DOMAIN) +SITE_INFO_WEEK_CROSSING = load_json_object_fixture( + "site_info_week_crossing.json", DOMAIN +) +SITE_INFO_MULTI_SEASON = load_json_object_fixture("site_info_multi_season.json", DOMAIN) ENERGY_HISTORY = load_json_object_fixture("energy_history.json", DOMAIN) 
ENERGY_HISTORY_EMPTY = load_json_object_fixture("energy_history_empty.json", DOMAIN) diff --git a/tests/components/teslemetry/fixtures/site_info.json b/tests/components/teslemetry/fixtures/site_info.json index 60958bbabbbd09..43bc7a7bc353ba 100644 --- a/tests/components/teslemetry/fixtures/site_info.json +++ b/tests/components/teslemetry/fixtures/site_info.json @@ -122,6 +122,132 @@ "installation_time_zone": "", "max_site_meter_power_ac": 1000000000, "min_site_meter_power_ac": -1000000000, + "tariff_content_v2": { + "code": "Test", + "name": "Battery Maximiser", + "utility": "Origin", + "daily_charges": [ + { + "name": "Charge" + } + ], + "demand_charges": { + "ALL": { + "rates": { + "ALL": 0 + } + }, + "Summer": {}, + "Winter": {} + }, + "energy_charges": { + "ALL": { + "rates": { + "ALL": 0 + } + }, + "Summer": { + "rates": { + "OFF_PEAK": 0.198, + "ON_PEAK": 0.22 + } + }, + "Winter": {} + }, + "seasons": { + "Summer": { + "fromDay": 1, + "toDay": 31, + "fromMonth": 1, + "toMonth": 12, + "tou_periods": { + "OFF_PEAK": { + "periods": [ + { + "fromDayOfWeek": 0, + "toDayOfWeek": 6, + "fromHour": 21, + "toHour": 16 + } + ] + }, + "ON_PEAK": { + "periods": [ + { + "fromDayOfWeek": 0, + "toDayOfWeek": 6, + "fromHour": 16, + "toHour": 21 + } + ] + } + } + }, + "Winter": {} + }, + "sell_tariff": { + "name": "Battery Maximiser", + "utility": "Origin", + "daily_charges": [ + { + "name": "Charge" + } + ], + "demand_charges": { + "ALL": { + "rates": { + "ALL": 0 + } + }, + "Summer": {}, + "Winter": {} + }, + "energy_charges": { + "ALL": { + "rates": { + "ALL": 0 + } + }, + "Summer": { + "rates": { + "OFF_PEAK": 0.08, + "ON_PEAK": 0.16 + } + }, + "Winter": {} + }, + "seasons": { + "Summer": { + "fromDay": 1, + "toDay": 31, + "fromMonth": 1, + "toMonth": 12, + "tou_periods": { + "OFF_PEAK": { + "periods": [ + { + "toDayOfWeek": 6, + "fromHour": 21, + "toHour": 16 + } + ] + }, + "ON_PEAK": { + "periods": [ + { + "toDayOfWeek": 6, + "fromHour": 16, + "toHour": 21 + } + ] + } + 
} + }, + "Winter": {} + } + }, + "version": 1 + }, "vpp_backup_reserve_percent": 0 } } diff --git a/tests/components/teslemetry/fixtures/site_info_multi_season.json b/tests/components/teslemetry/fixtures/site_info_multi_season.json new file mode 100644 index 00000000000000..ddd94b0205c6bf --- /dev/null +++ b/tests/components/teslemetry/fixtures/site_info_multi_season.json @@ -0,0 +1,274 @@ +{ + "response": { + "id": "1233-abcd", + "site_name": "Site", + "backup_reserve_percent": 0, + "default_real_mode": "self_consumption", + "installation_date": "2022-01-01T00:00:00+00:00", + "user_settings": { + "go_off_grid_test_banner_enabled": false, + "storm_mode_enabled": true, + "powerwall_onboarding_settings_set": true, + "powerwall_tesla_electric_interested_in": false, + "vpp_tour_enabled": true, + "sync_grid_alert_enabled": true, + "breaker_alert_enabled": false + }, + "components": { + "solar": true, + "solar_type": "pv_panel", + "battery": true, + "grid": true, + "backup": true, + "gateway": "teg", + "load_meter": true, + "tou_capable": true, + "storm_mode_capable": true, + "flex_energy_request_capable": false, + "car_charging_data_supported": false, + "off_grid_vehicle_charging_reserve_supported": true, + "vehicle_charging_performance_view_enabled": false, + "vehicle_charging_solar_offset_view_enabled": false, + "battery_solar_offset_view_enabled": true, + "solar_value_enabled": true, + "energy_value_header": "Energy Value", + "energy_value_subheader": "Estimated Value", + "energy_service_self_scheduling_enabled": true, + "show_grid_import_battery_source_cards": true, + "set_islanding_mode_enabled": true, + "wifi_commissioning_enabled": true, + "backup_time_remaining_enabled": true, + "battery_type": "ac_powerwall", + "configurable": true, + "grid_services_enabled": false, + "gateways": [ + { + "device_id": "gateway-id", + "din": "gateway-din", + "serial_number": "CN00000000J50D", + "part_number": "1152100-14-J", + "part_type": 10, + "part_name": "Tesla Backup Gateway 
2", + "is_active": true, + "site_id": "1234-abcd", + "firmware_version": "24.4.0 0fe780c9", + "updated_datetime": "2024-05-14T00:00:00.000Z" + } + ], + "batteries": [ + { + "device_id": "battery-1-id", + "din": "battery-1-din", + "serial_number": "TG000000001DA5", + "part_number": "3012170-10-B", + "part_type": 2, + "part_name": "Powerwall 2", + "nameplate_max_charge_power": 5000, + "nameplate_max_discharge_power": 5000, + "nameplate_energy": 13500 + } + ], + "wall_connectors": [], + "disallow_charge_from_grid_with_solar_installed": true, + "customer_preferred_export_rule": "pv_only", + "net_meter_mode": "battery_ok", + "system_alerts_enabled": true + }, + "version": "23.44.0 eb113390", + "battery_count": 1, + "tou_settings": { + "optimization_strategy": "economics", + "schedule": [] + }, + "nameplate_power": 5000, + "nameplate_energy": 13500, + "installation_time_zone": "", + "max_site_meter_power_ac": 1000000000, + "min_site_meter_power_ac": -1000000000, + "tariff_content_v2": { + "code": "Test", + "name": "Multi Season Tariff", + "utility": "Test Utility", + "daily_charges": [ + { + "name": "Charge" + } + ], + "demand_charges": { + "ALL": { + "rates": { + "ALL": 0 + } + } + }, + "energy_charges": { + "ALL": { + "rates": { + "ALL": 0 + } + }, + "Summer": { + "rates": { + "PEAK": 0.35, + "OFF_PEAK": 0.2 + } + }, + "Winter": { + "rates": { + "PEAK": 0.25, + "OFF_PEAK": 0.12 + } + } + }, + "seasons": { + "Summer": { + "fromDay": 1, + "toDay": 30, + "fromMonth": 4, + "toMonth": 9, + "tou_periods": { + "PEAK": { + "periods": [ + { + "fromDayOfWeek": 0, + "toDayOfWeek": 6, + "fromHour": 16, + "toHour": 21 + } + ] + }, + "OFF_PEAK": { + "periods": [ + { + "fromDayOfWeek": 0, + "toDayOfWeek": 6, + "fromHour": 21, + "toHour": 16 + } + ] + } + } + }, + "Winter": { + "fromDay": 1, + "toDay": 31, + "fromMonth": 10, + "toMonth": 3, + "tou_periods": { + "PEAK": { + "periods": [ + { + "fromDayOfWeek": 0, + "toDayOfWeek": 6, + "fromHour": 17, + "toHour": 20 + } + ] + }, + 
"OFF_PEAK": { + "periods": [ + { + "fromDayOfWeek": 0, + "toDayOfWeek": 6, + "fromHour": 20, + "toHour": 17 + } + ] + } + } + } + }, + "sell_tariff": { + "name": "Multi Season Tariff Sell", + "utility": "Test Utility", + "daily_charges": [], + "demand_charges": { + "ALL": { + "rates": { + "ALL": 0 + } + } + }, + "energy_charges": { + "ALL": { + "rates": { + "ALL": 0 + } + }, + "Summer": { + "rates": { + "PEAK": 0.18, + "OFF_PEAK": 0.08 + } + }, + "Winter": { + "rates": { + "PEAK": 0.12, + "OFF_PEAK": 0.05 + } + } + }, + "seasons": { + "Summer": { + "fromDay": 1, + "toDay": 30, + "fromMonth": 4, + "toMonth": 9, + "tou_periods": { + "PEAK": { + "periods": [ + { + "fromDayOfWeek": 0, + "toDayOfWeek": 6, + "fromHour": 16, + "toHour": 21 + } + ] + }, + "OFF_PEAK": { + "periods": [ + { + "fromDayOfWeek": 0, + "toDayOfWeek": 6, + "fromHour": 21, + "toHour": 16 + } + ] + } + } + }, + "Winter": { + "fromDay": 1, + "toDay": 31, + "fromMonth": 10, + "toMonth": 3, + "tou_periods": { + "PEAK": { + "periods": [ + { + "fromDayOfWeek": 0, + "toDayOfWeek": 6, + "fromHour": 17, + "toHour": 20 + } + ] + }, + "OFF_PEAK": { + "periods": [ + { + "fromDayOfWeek": 0, + "toDayOfWeek": 6, + "fromHour": 20, + "toHour": 17 + } + ] + } + } + } + } + }, + "version": 1 + }, + "vpp_backup_reserve_percent": 0 + } +} diff --git a/tests/components/teslemetry/fixtures/site_info_week_crossing.json b/tests/components/teslemetry/fixtures/site_info_week_crossing.json new file mode 100644 index 00000000000000..f5bdc6a824d970 --- /dev/null +++ b/tests/components/teslemetry/fixtures/site_info_week_crossing.json @@ -0,0 +1,184 @@ +{ + "response": { + "id": "1233-abcd", + "site_name": "Site", + "backup_reserve_percent": 0, + "default_real_mode": "self_consumption", + "installation_date": "2022-01-01T00:00:00+00:00", + "user_settings": { + "go_off_grid_test_banner_enabled": false, + "storm_mode_enabled": true, + "powerwall_onboarding_settings_set": true, + "powerwall_tesla_electric_interested_in": false, + 
"vpp_tour_enabled": true, + "sync_grid_alert_enabled": true, + "breaker_alert_enabled": false + }, + "components": { + "solar": true, + "solar_type": "pv_panel", + "battery": true, + "grid": true, + "backup": true, + "gateway": "teg", + "load_meter": true, + "tou_capable": true, + "storm_mode_capable": true, + "flex_energy_request_capable": false, + "car_charging_data_supported": false, + "off_grid_vehicle_charging_reserve_supported": true, + "vehicle_charging_performance_view_enabled": false, + "vehicle_charging_solar_offset_view_enabled": false, + "battery_solar_offset_view_enabled": true, + "solar_value_enabled": true, + "energy_value_header": "Energy Value", + "energy_value_subheader": "Estimated Value", + "energy_service_self_scheduling_enabled": true, + "show_grid_import_battery_source_cards": true, + "set_islanding_mode_enabled": true, + "wifi_commissioning_enabled": true, + "backup_time_remaining_enabled": true, + "battery_type": "ac_powerwall", + "configurable": true, + "grid_services_enabled": false, + "gateways": [ + { + "device_id": "gateway-id", + "din": "gateway-din", + "serial_number": "CN00000000J50D", + "part_number": "1152100-14-J", + "part_type": 10, + "part_name": "Tesla Backup Gateway 2", + "is_active": true, + "site_id": "1234-abcd", + "firmware_version": "24.4.0 0fe780c9", + "updated_datetime": "2024-05-14T00:00:00.000Z" + } + ], + "batteries": [ + { + "device_id": "battery-1-id", + "din": "battery-1-din", + "serial_number": "TG000000001DA5", + "part_number": "3012170-10-B", + "part_type": 2, + "part_name": "Powerwall 2", + "nameplate_max_charge_power": 5000, + "nameplate_max_discharge_power": 5000, + "nameplate_energy": 13500 + } + ], + "wall_connectors": [], + "disallow_charge_from_grid_with_solar_installed": true, + "customer_preferred_export_rule": "pv_only", + "net_meter_mode": "battery_ok", + "system_alerts_enabled": true + }, + "version": "23.44.0 eb113390", + "battery_count": 1, + "tou_settings": { + "optimization_strategy": 
"economics", + "schedule": [] + }, + "nameplate_power": 5000, + "nameplate_energy": 13500, + "installation_time_zone": "", + "max_site_meter_power_ac": 1000000000, + "min_site_meter_power_ac": -1000000000, + "tariff_content_v2": { + "code": "Test", + "name": "Week Crossing Tariff", + "utility": "Test Utility", + "daily_charges": [ + { + "name": "Charge" + } + ], + "demand_charges": { + "ALL": { + "rates": { + "ALL": 0 + } + } + }, + "energy_charges": { + "ALL": { + "rates": { + "ALL": 0 + } + }, + "Summer": { + "rates": { + "WEEKEND": 0.15 + } + } + }, + "seasons": { + "Summer": { + "fromDay": 1, + "toDay": 31, + "fromMonth": 1, + "toMonth": 12, + "tou_periods": { + "WEEKEND": { + "periods": [ + { + "fromDayOfWeek": 4, + "toDayOfWeek": 0, + "fromHour": 0, + "toHour": 0 + } + ] + } + } + } + }, + "sell_tariff": { + "name": "Week Crossing Tariff Sell", + "utility": "Test Utility", + "daily_charges": [], + "demand_charges": { + "ALL": { + "rates": { + "ALL": 0 + } + } + }, + "energy_charges": { + "ALL": { + "rates": { + "ALL": 0 + } + }, + "Summer": { + "rates": { + "WEEKEND": 0.05 + } + } + }, + "seasons": { + "Summer": { + "fromDay": 1, + "toDay": 31, + "fromMonth": 1, + "toMonth": 12, + "tou_periods": { + "WEEKEND": { + "periods": [ + { + "fromDayOfWeek": 4, + "toDayOfWeek": 0, + "fromHour": 0, + "toHour": 0 + } + ] + } + } + } + } + }, + "version": 1 + }, + "vpp_backup_reserve_percent": 0 + } +} diff --git a/tests/components/teslemetry/snapshots/test_calendar.ambr b/tests/components/teslemetry/snapshots/test_calendar.ambr new file mode 100644 index 00000000000000..28950ef38c9442 --- /dev/null +++ b/tests/components/teslemetry/snapshots/test_calendar.ambr @@ -0,0 +1,977 @@ +# serializer version: 1 +# name: test_calendar[calendar.energy_site_buy_tariff-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': 
None, + 'domain': 'calendar', + 'entity_category': None, + 'entity_id': 'calendar.energy_site_buy_tariff', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'object_id_base': 'Buy tariff', + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Buy tariff', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'tariff_content_v2', + 'unique_id': '123456-tariff_content_v2', + 'unit_of_measurement': None, + }) +# --- +# name: test_calendar[calendar.energy_site_buy_tariff-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_day': False, + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end_time': '2024-01-01 16:00:00', + 'friendly_name': 'Energy Site Buy tariff', + 'location': '', + 'message': 'Off peak: 0.20/kWh', + 'start_time': '2023-12-31 21:00:00', + }), + 'context': , + 'entity_id': 'calendar.energy_site_buy_tariff', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_calendar[calendar.energy_site_sell_tariff-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'calendar', + 'entity_category': None, + 'entity_id': 'calendar.energy_site_sell_tariff', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'object_id_base': 'Sell tariff', + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Sell tariff', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'tariff_content_v2_sell_tariff', + 'unique_id': 
'123456-tariff_content_v2_sell_tariff', + 'unit_of_measurement': None, + }) +# --- +# name: test_calendar[calendar.energy_site_sell_tariff-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_day': False, + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end_time': '2024-01-01 16:00:00', + 'friendly_name': 'Energy Site Sell tariff', + 'location': '', + 'message': 'Off peak: 0.08/kWh', + 'start_time': '2023-12-31 21:00:00', + }), + 'context': , + 'entity_id': 'calendar.energy_site_sell_tariff', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_calendar_events[time_tuple0-calendar.energy_site_buy_tariff][event] + ReadOnlyDict({ + 'all_day': False, + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end_time': '2024-01-01 16:00:00', + 'friendly_name': 'Energy Site Buy tariff', + 'location': '', + 'message': 'Off peak: 0.20/kWh', + 'start_time': '2023-12-31 21:00:00', + }) +# --- +# name: test_calendar_events[time_tuple0-calendar.energy_site_buy_tariff][events] + dict({ + 'calendar.energy_site_buy_tariff': dict({ + 'events': list([ + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2023-12-31T21:00:00-08:00', + 'start': '2023-12-31T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-01T16:00:00-08:00', + 'start': '2023-12-31T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2024-01-01T21:00:00-08:00', + 'start': '2024-01-01T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-02T16:00:00-08:00', + 'start': '2024-01-01T21:00:00-08:00', + 'summary': 'Off peak: 
0.20/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2024-01-02T21:00:00-08:00', + 'start': '2024-01-02T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-03T16:00:00-08:00', + 'start': '2024-01-02T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2024-01-03T21:00:00-08:00', + 'start': '2024-01-03T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-04T16:00:00-08:00', + 'start': '2024-01-03T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2024-01-04T21:00:00-08:00', + 'start': '2024-01-04T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-05T16:00:00-08:00', + 'start': '2024-01-04T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2024-01-05T21:00:00-08:00', + 'start': '2024-01-05T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-06T16:00:00-08:00', + 'start': '2024-01-05T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + ]), + }), + }) +# --- +# name: test_calendar_events[time_tuple0-calendar.energy_site_sell_tariff][event] + ReadOnlyDict({ + 'all_day': False, + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end_time': '2024-01-01 16:00:00', + 'friendly_name': 'Energy Site Sell tariff', + 
'location': '', + 'message': 'Off peak: 0.08/kWh', + 'start_time': '2023-12-31 21:00:00', + }) +# --- +# name: test_calendar_events[time_tuple0-calendar.energy_site_sell_tariff][events] + dict({ + 'calendar.energy_site_sell_tariff': dict({ + 'events': list([ + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2023-12-31T21:00:00-08:00', + 'start': '2023-12-31T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-01T16:00:00-08:00', + 'start': '2023-12-31T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2024-01-01T21:00:00-08:00', + 'start': '2024-01-01T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-02T16:00:00-08:00', + 'start': '2024-01-01T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2024-01-02T21:00:00-08:00', + 'start': '2024-01-02T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-03T16:00:00-08:00', + 'start': '2024-01-02T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2024-01-03T21:00:00-08:00', + 'start': '2024-01-03T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-04T16:00:00-08:00', + 'start': '2024-01-03T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + 
Price: 0.16/kWh + ''', + 'end': '2024-01-04T21:00:00-08:00', + 'start': '2024-01-04T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-05T16:00:00-08:00', + 'start': '2024-01-04T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2024-01-05T21:00:00-08:00', + 'start': '2024-01-05T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-06T16:00:00-08:00', + 'start': '2024-01-05T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + ]), + }), + }) +# --- +# name: test_calendar_events[time_tuple1-calendar.energy_site_buy_tariff][event] + ReadOnlyDict({ + 'all_day': False, + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end_time': '2024-01-01 21:00:00', + 'friendly_name': 'Energy Site Buy tariff', + 'location': '', + 'message': 'On peak: 0.22/kWh', + 'start_time': '2024-01-01 16:00:00', + }) +# --- +# name: test_calendar_events[time_tuple1-calendar.energy_site_buy_tariff][events] + dict({ + 'calendar.energy_site_buy_tariff': dict({ + 'events': list([ + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2023-12-31T21:00:00-08:00', + 'start': '2023-12-31T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-01T16:00:00-08:00', + 'start': '2023-12-31T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2024-01-01T21:00:00-08:00', + 'start': '2024-01-01T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: 
Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-02T16:00:00-08:00', + 'start': '2024-01-01T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2024-01-02T21:00:00-08:00', + 'start': '2024-01-02T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-03T16:00:00-08:00', + 'start': '2024-01-02T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2024-01-03T21:00:00-08:00', + 'start': '2024-01-03T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-04T16:00:00-08:00', + 'start': '2024-01-03T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2024-01-04T21:00:00-08:00', + 'start': '2024-01-04T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-05T16:00:00-08:00', + 'start': '2024-01-04T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2024-01-05T21:00:00-08:00', + 'start': '2024-01-05T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-06T16:00:00-08:00', + 'start': '2024-01-05T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + ]), + }), + }) +# --- +# name: test_calendar_events[time_tuple1-calendar.energy_site_sell_tariff][event] + ReadOnlyDict({ + 'all_day': False, + 
'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end_time': '2024-01-01 21:00:00', + 'friendly_name': 'Energy Site Sell tariff', + 'location': '', + 'message': 'On peak: 0.16/kWh', + 'start_time': '2024-01-01 16:00:00', + }) +# --- +# name: test_calendar_events[time_tuple1-calendar.energy_site_sell_tariff][events] + dict({ + 'calendar.energy_site_sell_tariff': dict({ + 'events': list([ + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2023-12-31T21:00:00-08:00', + 'start': '2023-12-31T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-01T16:00:00-08:00', + 'start': '2023-12-31T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2024-01-01T21:00:00-08:00', + 'start': '2024-01-01T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-02T16:00:00-08:00', + 'start': '2024-01-01T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2024-01-02T21:00:00-08:00', + 'start': '2024-01-02T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-03T16:00:00-08:00', + 'start': '2024-01-02T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2024-01-03T21:00:00-08:00', + 'start': '2024-01-03T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': 
'2024-01-04T16:00:00-08:00', + 'start': '2024-01-03T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2024-01-04T21:00:00-08:00', + 'start': '2024-01-04T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-05T16:00:00-08:00', + 'start': '2024-01-04T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2024-01-05T21:00:00-08:00', + 'start': '2024-01-05T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-06T16:00:00-08:00', + 'start': '2024-01-05T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + ]), + }), + }) +# --- +# name: test_calendar_events[time_tuple2-calendar.energy_site_buy_tariff][event] + ReadOnlyDict({ + 'all_day': False, + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end_time': '2024-01-02 16:00:00', + 'friendly_name': 'Energy Site Buy tariff', + 'location': '', + 'message': 'Off peak: 0.20/kWh', + 'start_time': '2024-01-01 21:00:00', + }) +# --- +# name: test_calendar_events[time_tuple2-calendar.energy_site_buy_tariff][events] + dict({ + 'calendar.energy_site_buy_tariff': dict({ + 'events': list([ + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2023-12-31T21:00:00-08:00', + 'start': '2023-12-31T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-01T16:00:00-08:00', + 'start': '2023-12-31T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + 
Price: 0.22/kWh + ''', + 'end': '2024-01-01T21:00:00-08:00', + 'start': '2024-01-01T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-02T16:00:00-08:00', + 'start': '2024-01-01T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2024-01-02T21:00:00-08:00', + 'start': '2024-01-02T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-03T16:00:00-08:00', + 'start': '2024-01-02T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2024-01-03T21:00:00-08:00', + 'start': '2024-01-03T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-04T16:00:00-08:00', + 'start': '2024-01-03T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2024-01-04T21:00:00-08:00', + 'start': '2024-01-04T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-05T16:00:00-08:00', + 'start': '2024-01-04T21:00:00-08:00', + 'summary': 'Off peak: 0.20/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.22/kWh + ''', + 'end': '2024-01-05T21:00:00-08:00', + 'start': '2024-01-05T16:00:00-08:00', + 'summary': 'On peak: 0.22/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.20/kWh + ''', + 'end': '2024-01-06T16:00:00-08:00', + 'start': '2024-01-05T21:00:00-08:00', + 'summary': 'Off 
peak: 0.20/kWh', + }), + ]), + }), + }) +# --- +# name: test_calendar_events[time_tuple2-calendar.energy_site_sell_tariff][event] + ReadOnlyDict({ + 'all_day': False, + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end_time': '2024-01-02 16:00:00', + 'friendly_name': 'Energy Site Sell tariff', + 'location': '', + 'message': 'Off peak: 0.08/kWh', + 'start_time': '2024-01-01 21:00:00', + }) +# --- +# name: test_calendar_events[time_tuple2-calendar.energy_site_sell_tariff][events] + dict({ + 'calendar.energy_site_sell_tariff': dict({ + 'events': list([ + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2023-12-31T21:00:00-08:00', + 'start': '2023-12-31T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-01T16:00:00-08:00', + 'start': '2023-12-31T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2024-01-01T21:00:00-08:00', + 'start': '2024-01-01T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-02T16:00:00-08:00', + 'start': '2024-01-01T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2024-01-02T21:00:00-08:00', + 'start': '2024-01-02T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-03T16:00:00-08:00', + 'start': '2024-01-02T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2024-01-03T21:00:00-08:00', + 'start': 
'2024-01-03T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-04T16:00:00-08:00', + 'start': '2024-01-03T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2024-01-04T21:00:00-08:00', + 'start': '2024-01-04T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-05T16:00:00-08:00', + 'start': '2024-01-04T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: On peak + Price: 0.16/kWh + ''', + 'end': '2024-01-05T21:00:00-08:00', + 'start': '2024-01-05T16:00:00-08:00', + 'summary': 'On peak: 0.16/kWh', + }), + dict({ + 'description': ''' + Season: Summer + Period: Off peak + Price: 0.08/kWh + ''', + 'end': '2024-01-06T16:00:00-08:00', + 'start': '2024-01-05T21:00:00-08:00', + 'summary': 'Off peak: 0.08/kWh', + }), + ]), + }), + }) +# --- diff --git a/tests/components/teslemetry/snapshots/test_diagnostics.ambr b/tests/components/teslemetry/snapshots/test_diagnostics.ambr index 6b02b2f6d83c8a..50e849e27b7f51 100644 --- a/tests/components/teslemetry/snapshots/test_diagnostics.ambr +++ b/tests/components/teslemetry/snapshots/test_diagnostics.ambr @@ -120,6 +120,136 @@ 'nameplate_energy': 40500, 'nameplate_power': 15000, 'site_name': 'Site', + 'tariff_content_v2_code': 'Test', + 'tariff_content_v2_daily_charges': list([ + dict({ + 'name': 'Charge', + }), + ]), + 'tariff_content_v2_demand_charges': dict({ + 'ALL': dict({ + 'rates': dict({ + 'ALL': 0, + }), + }), + 'Summer': dict({ + }), + 'Winter': dict({ + }), + }), + 'tariff_content_v2_energy_charges': dict({ + 'ALL': dict({ + 'rates': dict({ + 'ALL': 0, + }), + }), + 'Summer': dict({ + 'rates': dict({ + 'OFF_PEAK': 0.198, + 'ON_PEAK': 
0.22, + }), + }), + 'Winter': dict({ + }), + }), + 'tariff_content_v2_name': 'Battery Maximiser', + 'tariff_content_v2_seasons': dict({ + 'Summer': dict({ + 'fromDay': 1, + 'fromMonth': 1, + 'toDay': 31, + 'toMonth': 12, + 'tou_periods': dict({ + 'OFF_PEAK': dict({ + 'periods': list([ + dict({ + 'fromDayOfWeek': 0, + 'fromHour': 21, + 'toDayOfWeek': 6, + 'toHour': 16, + }), + ]), + }), + 'ON_PEAK': dict({ + 'periods': list([ + dict({ + 'fromDayOfWeek': 0, + 'fromHour': 16, + 'toDayOfWeek': 6, + 'toHour': 21, + }), + ]), + }), + }), + }), + 'Winter': dict({ + }), + }), + 'tariff_content_v2_sell_tariff_daily_charges': list([ + dict({ + 'name': 'Charge', + }), + ]), + 'tariff_content_v2_sell_tariff_demand_charges': dict({ + 'ALL': dict({ + 'rates': dict({ + 'ALL': 0, + }), + }), + 'Summer': dict({ + }), + 'Winter': dict({ + }), + }), + 'tariff_content_v2_sell_tariff_energy_charges': dict({ + 'ALL': dict({ + 'rates': dict({ + 'ALL': 0, + }), + }), + 'Summer': dict({ + 'rates': dict({ + 'OFF_PEAK': 0.08, + 'ON_PEAK': 0.16, + }), + }), + 'Winter': dict({ + }), + }), + 'tariff_content_v2_sell_tariff_name': 'Battery Maximiser', + 'tariff_content_v2_sell_tariff_seasons': dict({ + 'Summer': dict({ + 'fromDay': 1, + 'fromMonth': 1, + 'toDay': 31, + 'toMonth': 12, + 'tou_periods': dict({ + 'OFF_PEAK': dict({ + 'periods': list([ + dict({ + 'fromHour': 21, + 'toDayOfWeek': 6, + 'toHour': 16, + }), + ]), + }), + 'ON_PEAK': dict({ + 'periods': list([ + dict({ + 'fromHour': 16, + 'toDayOfWeek': 6, + 'toHour': 21, + }), + ]), + }), + }), + }), + 'Winter': dict({ + }), + }), + 'tariff_content_v2_sell_tariff_utility': 'Origin', + 'tariff_content_v2_utility': 'Origin', + 'tariff_content_v2_version': 1, 'tou_settings_optimization_strategy': 'economics', 'tou_settings_schedule': list([ dict({ diff --git a/tests/components/teslemetry/test_calendar.py b/tests/components/teslemetry/test_calendar.py new file mode 100644 index 00000000000000..c0469c1b8c9129 --- /dev/null +++ 
b/tests/components/teslemetry/test_calendar.py @@ -0,0 +1,401 @@ +"""Test the Teslemetry calendar platform.""" + +from collections.abc import Generator +from copy import deepcopy +from datetime import datetime +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.calendar import ( + DOMAIN as CALENDAR_DOMAIN, + EVENT_END_DATETIME, + EVENT_START_DATETIME, + SERVICE_GET_EVENTS, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.util import dt as dt_util + +from . import assert_entities, setup_platform +from .const import SITE_INFO, SITE_INFO_MULTI_SEASON, SITE_INFO_WEEK_CROSSING + +ENTITY_BUY = "calendar.energy_site_buy_tariff" +ENTITY_SELL = "calendar.energy_site_sell_tariff" + + +@pytest.fixture +def mock_site_info_week_crossing(mock_site_info) -> Generator[AsyncMock]: + """Mock Teslemetry Energy site_info with week-crossing tariff data.""" + with patch( + "tesla_fleet_api.tesla.energysite.EnergySite.site_info", + side_effect=lambda: deepcopy(SITE_INFO_WEEK_CROSSING), + ) as mock: + yield mock + + +@pytest.fixture +def mock_site_info_multi_season(mock_site_info) -> Generator[AsyncMock]: + """Mock Teslemetry Energy site_info with multi-season tariff data.""" + with patch( + "tesla_fleet_api.tesla.energysite.EnergySite.site_info", + side_effect=lambda: deepcopy(SITE_INFO_MULTI_SEASON), + ) as mock: + yield mock + + +@pytest.fixture +def mock_site_info_no_tariff(mock_site_info) -> Generator[AsyncMock]: + """Mock Teslemetry Energy site_info with no tariff data.""" + site_info_no_tariff = deepcopy(SITE_INFO_WEEK_CROSSING) + site_info_no_tariff["response"]["tariff_content_v2"]["seasons"] = {} + site_info_no_tariff["response"]["tariff_content_v2"]["sell_tariff"]["seasons"] = {} + with patch( + 
"tesla_fleet_api.tesla.energysite.EnergySite.site_info", + side_effect=lambda: deepcopy(site_info_no_tariff), + ) as mock: + yield mock + + +@pytest.fixture +def mock_site_info_invalid_season(mock_site_info) -> Generator[AsyncMock]: + """Mock site_info with invalid/empty season data.""" + site_info = deepcopy(SITE_INFO) + # Empty season first (hits _get_current_season empty check), + # then season with missing keys (hits KeyError exception handler) + site_info["response"]["tariff_content_v2"]["seasons"] = { + "Empty": {}, + "Invalid": {"someKey": "value"}, + } + site_info["response"]["tariff_content_v2"]["sell_tariff"]["seasons"] = {} + with patch( + "tesla_fleet_api.tesla.energysite.EnergySite.site_info", + side_effect=lambda: deepcopy(site_info), + ) as mock: + yield mock + + +@pytest.fixture +def mock_site_info_invalid_price(mock_site_info) -> Generator[AsyncMock]: + """Mock site_info with non-numeric price data.""" + site_info = deepcopy(SITE_INFO) + site_info["response"]["tariff_content_v2"]["energy_charges"]["Summer"]["rates"] = { + "OFF_PEAK": "not_a_number", + "ON_PEAK": "not_a_number", + } + site_info["response"]["tariff_content_v2"]["sell_tariff"]["seasons"] = {} + with patch( + "tesla_fleet_api.tesla.energysite.EnergySite.site_info", + side_effect=lambda: deepcopy(site_info), + ) as mock: + yield mock + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_calendar( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + mock_legacy: AsyncMock, +) -> None: + """Tests that the calendar entity is correct.""" + tz = dt_util.get_default_time_zone() + freezer.move_to(datetime(2024, 1, 1, 10, 0, 0, tzinfo=tz)) + + entry = await setup_platform(hass, [Platform.CALENDAR]) + + assert_entities(hass, entry.entry_id, entity_registry, snapshot) + + +@pytest.mark.parametrize( + "entity_id", + [ENTITY_BUY, ENTITY_SELL], +) +@pytest.mark.parametrize( + "time_tuple", + [ + 
(2024, 1, 1, 10, 0, 0), # OFF_PEAK period started yesterday + (2024, 1, 1, 20, 0, 0), # ON_PEAK period starts and ends today + (2024, 1, 1, 22, 0, 0), # OFF_PEAK period ends tomorrow + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_calendar_events( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + mock_legacy: AsyncMock, + entity_id: str, + time_tuple: tuple, +) -> None: + """Tests that the energy tariff calendar entity events are correct.""" + tz = dt_util.get_default_time_zone() + freezer.move_to(datetime(*time_tuple, tzinfo=tz)) + + await setup_platform(hass, [Platform.CALENDAR]) + + state = hass.states.get(entity_id) + assert state + assert state.attributes == snapshot(name="event") + + result = await hass.services.async_call( + CALENDAR_DOMAIN, + SERVICE_GET_EVENTS, + { + ATTR_ENTITY_ID: [entity_id], + EVENT_START_DATETIME: dt_util.parse_datetime("2024-01-01T00:00:00Z"), + EVENT_END_DATETIME: dt_util.parse_datetime("2024-01-07T00:00:00Z"), + }, + blocking=True, + return_response=True, + ) + assert result == snapshot(name="events") + + +@pytest.mark.parametrize( + ("time_tuple", "expected_state", "expected_period"), + [ + # Friday (day 4) - WEEKEND period active (Fri-Mon crossing) + ((2024, 1, 5, 12, 0, 0), "on", "Weekend"), + # Saturday (day 5) - WEEKEND period active + ((2024, 1, 6, 12, 0, 0), "on", "Weekend"), + # Sunday (day 6) - WEEKEND period active + ((2024, 1, 7, 12, 0, 0), "on", "Weekend"), + # Monday (day 0) - WEEKEND period active (end of Fri-Mon range) + ((2024, 1, 8, 12, 0, 0), "on", "Weekend"), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_calendar_week_crossing( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_legacy: AsyncMock, + mock_site_info_week_crossing: AsyncMock, + time_tuple: tuple, + expected_state: str, + expected_period: str, +) -> None: + """Test calendar handles 
week-crossing day ranges correctly.""" + tz = dt_util.get_default_time_zone() + time = datetime(*time_tuple, tzinfo=tz) + freezer.move_to(time) + + await setup_platform(hass, [Platform.CALENDAR]) + + state = hass.states.get(ENTITY_BUY) + assert state + assert state.state == expected_state + assert expected_period in state.attributes["message"] + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_calendar_week_crossing_excluded_day( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_legacy: AsyncMock, + mock_site_info_week_crossing: AsyncMock, +) -> None: + """Test calendar excludes days outside week-crossing range.""" + tz = dt_util.get_default_time_zone() + # Wednesday (day 2) - No period active (outside Fri-Mon range) + freezer.move_to(datetime(2024, 1, 3, 12, 0, 0, tzinfo=tz)) + + await setup_platform(hass, [Platform.CALENDAR]) + + state = hass.states.get(ENTITY_BUY) + assert state + assert state.state == "off" + + +@pytest.mark.parametrize( + ("time_tuple", "expected_season", "expected_buy_price"), + [ + # June 15 at noon - Summer OFF_PEAK (Apr-Sep) + ((2024, 6, 15, 12, 0, 0), "Summer", "0.20"), + # July 1 at 18:00 - Summer PEAK + ((2024, 7, 1, 18, 0, 0), "Summer", "0.35"), + # December 15 at noon - Winter OFF_PEAK (Oct-Mar, crosses year boundary) + ((2024, 12, 15, 12, 0, 0), "Winter", "0.12"), + # January 15 at noon - Winter OFF_PEAK (crosses year boundary) + ((2024, 1, 15, 12, 0, 0), "Winter", "0.12"), + # February 28 at 18:00 - Winter PEAK + ((2024, 2, 28, 18, 0, 0), "Winter", "0.25"), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_calendar_multi_season( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_legacy: AsyncMock, + mock_site_info_multi_season: AsyncMock, + time_tuple: tuple, + expected_season: str, + expected_buy_price: str, +) -> None: + """Test calendar handles multiple seasons and year boundaries correctly.""" + tz = dt_util.get_default_time_zone() 
+ time = datetime(*time_tuple, tzinfo=tz) + freezer.move_to(time) + + await setup_platform(hass, [Platform.CALENDAR]) + + state = hass.states.get(ENTITY_BUY) + assert state + assert state.state == "on" + assert expected_season in state.attributes["description"] + assert expected_buy_price in state.attributes["message"] + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_calendar_no_tariff_data( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_legacy: AsyncMock, + mock_site_info_no_tariff: AsyncMock, +) -> None: + """Test calendar entity is not created when tariff data is missing.""" + tz = dt_util.get_default_time_zone() + freezer.move_to(datetime(2024, 1, 1, 10, 0, 0, tzinfo=tz)) + + await setup_platform(hass, [Platform.CALENDAR]) + + state = hass.states.get(ENTITY_BUY) + assert state is None + state = hass.states.get(ENTITY_SELL) + assert state is None + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_calendar_invalid_season_data( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_legacy: AsyncMock, + mock_site_info_invalid_season: AsyncMock, +) -> None: + """Test calendar handles invalid/empty season data gracefully.""" + tz = dt_util.get_default_time_zone() + freezer.move_to(datetime(2024, 6, 15, 12, 0, 0, tzinfo=tz)) + + await setup_platform(hass, [Platform.CALENDAR]) + + # No valid season found -> event returns None -> state is "off" + state = hass.states.get(ENTITY_BUY) + assert state + assert state.state == "off" + + # async_get_events also returns empty when no valid seasons + result = await hass.services.async_call( + CALENDAR_DOMAIN, + SERVICE_GET_EVENTS, + { + ATTR_ENTITY_ID: [ENTITY_BUY], + EVENT_START_DATETIME: dt_util.parse_datetime("2024-06-15T00:00:00Z"), + EVENT_END_DATETIME: dt_util.parse_datetime("2024-06-17T00:00:00Z"), + }, + blocking=True, + return_response=True, + ) + assert result[ENTITY_BUY]["events"] == [] + + 
+@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_calendar_week_crossing_get_events( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_legacy: AsyncMock, + mock_site_info_week_crossing: AsyncMock, +) -> None: + """Test async_get_events filters by day of week with week-crossing periods.""" + tz = dt_util.get_default_time_zone() + freezer.move_to(datetime(2024, 1, 1, 12, 0, 0, tzinfo=tz)) + + await setup_platform(hass, [Platform.CALENDAR]) + + # Request events for a full week - only Fri-Mon should have events + result = await hass.services.async_call( + CALENDAR_DOMAIN, + SERVICE_GET_EVENTS, + { + ATTR_ENTITY_ID: [ENTITY_BUY], + EVENT_START_DATETIME: dt_util.parse_datetime("2024-01-01T00:00:00Z"), + EVENT_END_DATETIME: dt_util.parse_datetime("2024-01-08T00:00:00Z"), + }, + blocking=True, + return_response=True, + ) + events = result[ENTITY_BUY]["events"] + # 5 events: Sun Dec 31, Mon Jan 1, Fri Jan 5, Sat Jan 6, Sun Jan 7 + # (Dec 31 included due to UTC-to-local shift) - no Tue/Wed/Thu + assert len(events) == 5 + for event in events: + start = dt_util.parse_datetime(event["start"]) + assert start is not None + assert start.weekday() in (0, 4, 5, 6) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_calendar_midnight_crossing_local_start( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_legacy: AsyncMock, +) -> None: + """Test async_get_events includes overnight period when query starts at local midnight.""" + tz = dt_util.get_default_time_zone() + freezer.move_to(datetime(2024, 1, 1, 10, 0, 0, tzinfo=tz)) + + await setup_platform(hass, [Platform.CALENDAR]) + + # Use local-timezone timestamps so UTC-to-local shift does not + # accidentally push the start back to the previous day. 
+ start = datetime(2024, 1, 1, 0, 0, 0, tzinfo=tz) + end = datetime(2024, 1, 2, 0, 0, 0, tzinfo=tz) + + result = await hass.services.async_call( + CALENDAR_DOMAIN, + SERVICE_GET_EVENTS, + { + ATTR_ENTITY_ID: [ENTITY_BUY], + EVENT_START_DATETIME: start, + EVENT_END_DATETIME: end, + }, + blocking=True, + return_response=True, + ) + events = result[ENTITY_BUY]["events"] + + # Expect 2 events on Jan 1: + # 1) OFF_PEAK that started Dec 31 21:00 and ends Jan 1 16:00 + # 2) ON_PEAK from Jan 1 16:00 to Jan 1 21:00 + # The OFF_PEAK starting Jan 1 21:00 (ending Jan 2 16:00) also overlaps, + # so 3 events total. + assert len(events) == 3 + + starts = [dt_util.parse_datetime(e["start"]) for e in events] + assert starts[0].day == 31 # Dec 31 21:00 (previous evening) + assert starts[1].day == 1 # Jan 1 16:00 + assert starts[2].day == 1 # Jan 1 21:00 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_calendar_invalid_price( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_legacy: AsyncMock, + mock_site_info_invalid_price: AsyncMock, +) -> None: + """Test calendar handles non-numeric price data gracefully.""" + tz = dt_util.get_default_time_zone() + freezer.move_to(datetime(2024, 1, 1, 10, 0, 0, tzinfo=tz)) + + await setup_platform(hass, [Platform.CALENDAR]) + + # Period matches but price is invalid -> shows "Unknown Price" + state = hass.states.get(ENTITY_BUY) + assert state + assert state.state == "on" + assert "Unknown Price" in state.attributes["message"] diff --git a/tests/components/waze_travel_time/test_config_flow.py b/tests/components/waze_travel_time/test_config_flow.py index cdefe1bd175ee0..3e7702f11ed273 100644 --- a/tests/components/waze_travel_time/test_config_flow.py +++ b/tests/components/waze_travel_time/test_config_flow.py @@ -13,6 +13,7 @@ CONF_INCL_FILTER, CONF_ORIGIN, CONF_REALTIME, + CONF_TIME_DELTA, CONF_UNITS, CONF_VEHICLE_TYPE, DEFAULT_NAME, @@ -120,6 +121,7 @@ async def test_options(hass: HomeAssistant) -> 
None: CONF_EXCL_FILTER: ["ExcludeThis"], CONF_INCL_FILTER: ["IncludeThis"], CONF_REALTIME: False, + CONF_TIME_DELTA: {"hours": 1, "minutes": 30}, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", }, @@ -133,6 +135,7 @@ async def test_options(hass: HomeAssistant) -> None: CONF_EXCL_FILTER: ["ExcludeThis"], CONF_INCL_FILTER: ["IncludeThis"], CONF_REALTIME: False, + CONF_TIME_DELTA: {"hours": 1, "minutes": 30}, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", } @@ -144,6 +147,7 @@ async def test_options(hass: HomeAssistant) -> None: CONF_EXCL_FILTER: ["ExcludeThis"], CONF_INCL_FILTER: ["IncludeThis"], CONF_REALTIME: False, + CONF_TIME_DELTA: {"hours": 1, "minutes": 30}, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", } @@ -243,6 +247,7 @@ async def test_reset_filters(hass: HomeAssistant) -> None: CONF_EXCL_FILTER: [""], CONF_INCL_FILTER: [""], CONF_REALTIME: False, + CONF_TIME_DELTA: {"minutes": 0}, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", } diff --git a/tests/components/waze_travel_time/test_init.py b/tests/components/waze_travel_time/test_init.py index dae11d58409e0a..2a20b46f476f28 100644 --- a/tests/components/waze_travel_time/test_init.py +++ b/tests/components/waze_travel_time/test_init.py @@ -9,6 +9,7 @@ CONF_EXCL_FILTER, CONF_INCL_FILTER, CONF_REALTIME, + CONF_TIME_DELTA, CONF_UNITS, CONF_VEHICLE_TYPE, DEFAULT_AVOID_FERRIES, @@ -17,6 +18,7 @@ DEFAULT_FILTER, DEFAULT_OPTIONS, DEFAULT_REALTIME, + DEFAULT_TIME_DELTA, DEFAULT_VEHICLE_TYPE, DOMAIN, METRIC_UNITS, @@ -33,8 +35,20 @@ ("data", "options"), [(MOCK_CONFIG, DEFAULT_OPTIONS)], ) +@pytest.mark.parametrize( + ("time_delta", "expected_time_delta"), + [ + pytest.param({"hours": 1, "minutes": 30}, 90, id="positive"), + pytest.param({"hours": -1, "minutes": -30}, -90, id="negative"), + ], +) @pytest.mark.usefixtures("mock_update", "mock_config") -async def test_service_get_travel_times(hass: HomeAssistant) -> None: +async def test_service_get_travel_times( + hass: HomeAssistant, + 
mock_update, + time_delta: dict[str, int], + expected_time_delta: int, +) -> None: """Test service get_travel_times.""" response_data = await hass.services.async_call( "waze_travel_time", @@ -46,6 +60,7 @@ async def test_service_get_travel_times(hass: HomeAssistant) -> None: "region": "us", "units": "imperial", "incl_filter": ["IncludeThis"], + "time_delta": time_delta, }, blocking=True, return_response=True, @@ -60,6 +75,7 @@ async def test_service_get_travel_times(hass: HomeAssistant) -> None: }, ] } + assert mock_update.call_args_list[-1].kwargs["time_delta"] == expected_time_delta @pytest.mark.parametrize( @@ -91,7 +107,7 @@ async def test_service_get_travel_times_empty_response( @pytest.mark.usefixtures("mock_update") async def test_migrate_entry_v1_v2(hass: HomeAssistant) -> None: - """Test successful migration of entry data.""" + """Test successful migration of entry data from v1 to v2.2.""" mock_entry = MockConfigEntry( domain=DOMAIN, version=1, @@ -114,8 +130,10 @@ async def test_migrate_entry_v1_v2(hass: HomeAssistant) -> None: assert updated_entry.state is ConfigEntryState.LOADED assert updated_entry.version == 2 + assert updated_entry.minor_version == 2 assert updated_entry.options[CONF_INCL_FILTER] == DEFAULT_FILTER assert updated_entry.options[CONF_EXCL_FILTER] == DEFAULT_FILTER + assert updated_entry.options[CONF_TIME_DELTA] == DEFAULT_TIME_DELTA mock_entry = MockConfigEntry( domain=DOMAIN, @@ -141,5 +159,39 @@ async def test_migrate_entry_v1_v2(hass: HomeAssistant) -> None: assert updated_entry.state is ConfigEntryState.LOADED assert updated_entry.version == 2 + assert updated_entry.minor_version == 2 assert updated_entry.options[CONF_INCL_FILTER] == ["IncludeThis"] assert updated_entry.options[CONF_EXCL_FILTER] == ["ExcludeThis"] + assert updated_entry.options[CONF_TIME_DELTA] == DEFAULT_TIME_DELTA + + +@pytest.mark.usefixtures("mock_update") +async def test_migrate_entry_v2_1_to_v2_2(hass: HomeAssistant) -> None: + """Test successful migration of 
entry from version 2.1 to 2.2.""" + mock_entry = MockConfigEntry( + domain=DOMAIN, + version=2, + minor_version=1, + data=MOCK_CONFIG, + options={ + CONF_REALTIME: DEFAULT_REALTIME, + CONF_VEHICLE_TYPE: DEFAULT_VEHICLE_TYPE, + CONF_UNITS: METRIC_UNITS, + CONF_AVOID_FERRIES: DEFAULT_AVOID_FERRIES, + CONF_AVOID_SUBSCRIPTION_ROADS: DEFAULT_AVOID_SUBSCRIPTION_ROADS, + CONF_AVOID_TOLL_ROADS: DEFAULT_AVOID_TOLL_ROADS, + CONF_INCL_FILTER: DEFAULT_FILTER, + CONF_EXCL_FILTER: DEFAULT_FILTER, + }, + ) + + mock_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + updated_entry = hass.config_entries.async_get_entry(mock_entry.entry_id) + + assert updated_entry.state is ConfigEntryState.LOADED + assert updated_entry.version == 2 + assert updated_entry.minor_version == 2 + assert updated_entry.options[CONF_TIME_DELTA] == DEFAULT_TIME_DELTA diff --git a/tests/components/waze_travel_time/test_sensor.py b/tests/components/waze_travel_time/test_sensor.py index 0aa99196c48d59..d7b4fc564f2eef 100644 --- a/tests/components/waze_travel_time/test_sensor.py +++ b/tests/components/waze_travel_time/test_sensor.py @@ -11,6 +11,7 @@ CONF_EXCL_FILTER, CONF_INCL_FILTER, CONF_REALTIME, + CONF_TIME_DELTA, CONF_UNITS, CONF_VEHICLE_TYPE, DEFAULT_OPTIONS, @@ -78,6 +79,7 @@ async def test_sensor(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_INCL_FILTER: [""], CONF_EXCL_FILTER: [""], + CONF_TIME_DELTA: {"minutes": 0}, }, ) ], @@ -104,6 +106,7 @@ async def test_imperial(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_INCL_FILTER: ["IncludeThis"], CONF_EXCL_FILTER: [""], + CONF_TIME_DELTA: {"minutes": 0}, }, ) ], @@ -128,6 +131,7 @@ async def test_incl_filter(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_INCL_FILTER: [""], CONF_EXCL_FILTER: ["ExcludeThis"], + CONF_TIME_DELTA: {"minutes": 0}, }, ) ],