Refactor ZHA Entity availability tracking (#36645)
* Refactor ZHA entity availability: entity availability now tracks the underlying ZHA device's availability.
* Update device status without a signal.
* Update tests.
* Fix tests.
* Add tests for restored device availability.
* Guard against an empty last_seen entry and refactor device loading a bit.
Parent: 0146f35687
Commit: 21acdbbbfd
16 changed files with 161 additions and 65 deletions
@@ -66,8 +66,8 @@ from .const import (
 from .helpers import LogMixin

 _LOGGER = logging.getLogger(__name__)
-_CONSIDER_UNAVAILABLE_MAINS = 60 * 60 * 2  # 2 hours
-_CONSIDER_UNAVAILABLE_BATTERY = 60 * 60 * 6  # 6 hours
+CONSIDER_UNAVAILABLE_MAINS = 60 * 60 * 2  # 2 hours
+CONSIDER_UNAVAILABLE_BATTERY = 60 * 60 * 6  # 6 hours
 _UPDATE_ALIVE_INTERVAL = (60, 90)
 _CHECKIN_GRACE_PERIODS = 2
@@ -96,11 +96,6 @@ class ZHADevice(LogMixin):
         self._available_signal = f"{self.name}_{self.ieee}_{SIGNAL_AVAILABLE}"
         self._checkins_missed_count = 0
         self.unsubs = []
-        self.unsubs.append(
-            async_dispatcher_connect(
-                self.hass, self._available_signal, self.async_initialize
-            )
-        )
         self.quirk_applied = isinstance(self._zigpy_device, zigpy.quirks.CustomDevice)
         self.quirk_class = (
             f"{self._zigpy_device.__class__.__module__}."
@@ -108,9 +103,9 @@ class ZHADevice(LogMixin):
         )

         if self.is_mains_powered:
-            self._consider_unavailable_time = _CONSIDER_UNAVAILABLE_MAINS
+            self._consider_unavailable_time = CONSIDER_UNAVAILABLE_MAINS
         else:
-            self._consider_unavailable_time = _CONSIDER_UNAVAILABLE_BATTERY
+            self._consider_unavailable_time = CONSIDER_UNAVAILABLE_BATTERY
         keep_alive_interval = random.randint(*_UPDATE_ALIVE_INTERVAL)
         self.unsubs.append(
             async_track_time_interval(
@@ -343,13 +338,20 @@ class ZHADevice(LogMixin):
         if res is not None:
             self._checkins_missed_count = 0

-    def update_available(self, available):
-        """Set sensor availability."""
-        if self._available != available and available:
-            # Update the state the first time the device comes online
-            async_dispatcher_send(self.hass, self._available_signal, False)
-        async_dispatcher_send(self.hass, f"{self._available_signal}_entity", available)
-        self._available = available
+    def update_available(self, available: bool) -> None:
+        """Update device availability and signal entities."""
+        availability_changed = self.available ^ available
+        self.available = available
+        if availability_changed and available:
+            # reinit channels then signal entities
+            self.hass.async_create_task(self._async_became_available())
+            return
+        async_dispatcher_send(self.hass, f"{self._available_signal}_entity")
+
+    async def _async_became_available(self) -> None:
+        """Update device availability and signal entities."""
+        await self.async_initialize(False)
+        async_dispatcher_send(self.hass, f"{self._available_signal}_entity")

     @property
     def device_info(self):
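The hunk above is the heart of the refactor: the device now owns a single `available` flag, detects changes with an XOR, re-initializes its channels before announcing that it came back, and otherwise just nudges its entities over the dispatcher. Below is an illustrative, self-contained sketch of that control flow; SimpleDispatcher and FakeDevice are stand-ins for the Home Assistant dispatcher helpers and ZHADevice, not the real APIs.

# Sketch only: models the update_available() flow from the hunk above.
from collections import defaultdict


class SimpleDispatcher:
    """Tiny stand-in for homeassistant.helpers.dispatcher."""

    def __init__(self):
        self._listeners = defaultdict(list)

    def connect(self, signal, callback):
        self._listeners[signal].append(callback)

    def send(self, signal):
        for callback in self._listeners[signal]:
            callback()


class FakeDevice:
    """The device owns availability; entities only read it and redraw on signal."""

    def __init__(self, dispatcher, name="device"):
        self.dispatcher = dispatcher
        self.available = False
        self.available_signal = f"{name}_available"

    def update_available(self, available):
        availability_changed = self.available ^ available  # XOR: True only on a flip
        self.available = available
        if availability_changed and available:
            # The real code defers this to a task: re-init channels, then signal.
            print("re-initializing channels before notifying entities")
        self.dispatcher.send(f"{self.available_signal}_entity")


dispatcher = SimpleDispatcher()
device = FakeDevice(dispatcher)
# An entity subscribes once and simply re-renders; it derives availability
# from device.available instead of keeping a private _available flag.
dispatcher.connect(
    f"{device.available_signal}_entity",
    lambda: print("entity sees:", "available" if device.available else "unavailable"),
)
device.update_available(True)   # flip to available -> re-init message + redraw
device.update_available(True)   # no change -> just a redraw
device.update_available(False)  # flip to unavailable -> redraw as unavailable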
@@ -2,6 +2,7 @@

 import asyncio
 import collections
+from datetime import timedelta
 import itertools
 import logging
 import os
@@ -74,7 +75,12 @@ from .const import (
     ZHA_GW_MSG_RAW_INIT,
     RadioType,
 )
-from .device import DeviceStatus, ZHADevice
+from .device import (
+    CONSIDER_UNAVAILABLE_BATTERY,
+    CONSIDER_UNAVAILABLE_MAINS,
+    DeviceStatus,
+    ZHADevice,
+)
 from .group import GroupMember, ZHAGroup
 from .patches import apply_application_controller_patch
 from .registries import GROUP_ENTITY_DOMAINS
@@ -161,11 +167,26 @@ class ZHAGateway:
     @callback
     def async_load_devices(self) -> None:
         """Restore ZHA devices from zigpy application state."""
-        zigpy_devices = self.application_controller.devices.values()
-        for zigpy_device in zigpy_devices:
+        for zigpy_device in self.application_controller.devices.values():
             zha_device = self._async_get_or_create_device(zigpy_device, restored=True)
             if zha_device.nwk == 0x0000:
                 self.coordinator_zha_device = zha_device
+            zha_dev_entry = self.zha_storage.devices.get(str(zigpy_device.ieee))
+            delta_msg = "not known"
+            if zha_dev_entry and zha_dev_entry.last_seen is not None:
+                delta = round(time.time() - zha_dev_entry.last_seen)
+                if zha_device.is_mains_powered:
+                    zha_device.available = delta < CONSIDER_UNAVAILABLE_MAINS
+                else:
+                    zha_device.available = delta < CONSIDER_UNAVAILABLE_BATTERY
+                delta_msg = f"{str(timedelta(seconds=delta))} ago"
+            _LOGGER.debug(
+                "[%s](%s) restored as '%s', last seen: %s",
+                zha_device.nwk,
+                zha_device.name,
+                "available" if zha_device.available else "unavailable",
+                delta_msg,
+            )

     @callback
     def async_load_groups(self) -> None:
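For reference, the restore-time rule the hunk above applies can be stated in a few lines: a restored device starts out available only if its stored last_seen is recent enough for its power source. The sketch below is illustrative, not gateway code; restored_availability is a made-up helper, and last_seen is assumed to be a Unix timestamp as kept in ZHA storage.

import time

CONSIDER_UNAVAILABLE_MAINS = 60 * 60 * 2  # 2 hours, mirrors device.py above
CONSIDER_UNAVAILABLE_BATTERY = 60 * 60 * 6  # 6 hours


def restored_availability(last_seen, is_mains_powered, now=None):
    """Return True if a restored device should start out available."""
    if last_seen is None:
        # Guard against an empty last_seen entry: stay unavailable until heard from.
        return False
    delta = round((now or time.time()) - last_seen)
    threshold = (
        CONSIDER_UNAVAILABLE_MAINS if is_mains_powered else CONSIDER_UNAVAILABLE_BATTERY
    )
    return delta < threshold


# A device last heard from 3 hours ago: still fine on battery, stale on mains.
three_hours_ago = time.time() - 3 * 60 * 60
print(restored_availability(three_hours_ago, is_mains_powered=False))  # True
print(restored_availability(three_hours_ago, is_mains_powered=True))   # False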
@@ -497,8 +518,6 @@ class ZHAGateway:
             # avoid a race condition during new joins
             if device.status is DeviceStatus.INITIALIZED:
                 device.update_available(available)
-            else:
-                device.available = available

     async def async_update_device_storage(self):
         """Update the devices in the store."""
@@ -547,9 +566,9 @@
         )

     async def _async_device_joined(self, zha_device: zha_typing.ZhaDeviceType) -> None:
+        zha_device.available = True
         await zha_device.async_configure()
-        # will cause async_init to fire so don't explicitly call it
-        zha_device.update_available(True)
+        await zha_device.async_initialize(from_cache=False)
         async_dispatcher_send(self._hass, SIGNAL_ADD_ENTITIES)

     async def _async_device_rejoined(self, zha_device):
@@ -560,7 +579,8 @@
         )
         # we don't have to do this on a nwk swap but we don't have a way to tell currently
         await zha_device.async_configure()
-        # will cause async_init to fire so don't explicitly call it
+        # force async_initialize() to fire so don't explicitly call it
+        zha_device.available = False
         zha_device.update_available(True)

     async def async_create_zigpy_group(
@@ -30,6 +30,7 @@ if TYPE_CHECKING:
     import homeassistant.components.zha.core.channels.base as base_channels
     import homeassistant.components.zha.core.device
     import homeassistant.components.zha.core.gateway
     import homeassistant.components.zha.core.group
+    import homeassistant.components.zha.entity
     import homeassistant.components.zha.core.channels
@@ -2,7 +2,6 @@

 import asyncio
 import logging
-import time
 from typing import Any, Awaitable, Dict, List, Optional

 from homeassistant.core import CALLBACK_TYPE, State, callback
@@ -33,7 +32,6 @@ from .core.typing import CALLABLE_T, ChannelType, ZhaDeviceType
 _LOGGER = logging.getLogger(__name__)

 ENTITY_SUFFIX = "entity_suffix"
-RESTART_GRACE_PERIOD = 7200  # 2 hours


 class BaseZhaEntity(LogMixin, entity.Entity):
@@ -48,7 +46,6 @@ class BaseZhaEntity(LogMixin, entity.Entity):
         self._state: Any = None
         self._device_state_attributes: Dict[str, Any] = {}
         self._zha_device: ZhaDeviceType = zha_device
-        self._available: bool = False
         self._unsubs: List[CALLABLE_T] = []
         self.remove_future: Awaitable[None] = None
@@ -96,15 +93,9 @@ class BaseZhaEntity(LogMixin, entity.Entity):
             "via_device": (DOMAIN, self.hass.data[DATA_ZHA][DATA_ZHA_BRIDGE_ID]),
         }

-    @property
-    def available(self) -> bool:
-        """Return entity availability."""
-        return self._available
-
     @callback
-    def async_set_available(self, available: bool) -> None:
-        """Set entity availability."""
-        self._available = available
+    def async_state_changed(self) -> None:
+        """Entity state changed."""
+        self.async_write_ha_state()

     @callback
@@ -163,9 +154,13 @@ class ZhaEntity(BaseZhaEntity, RestoreEntity):
         for channel in channels:
             self.cluster_channels[channel.name] = channel

+    @property
+    def available(self) -> bool:
+        """Return entity availability."""
+        return self._zha_device.available
+
     async def async_added_to_hass(self) -> None:
         """Run when about to be added to hass."""
         await super().async_added_to_hass()
         self.remove_future = asyncio.Future()
         await self.async_accept_signal(
             None,
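The new property above is the whole entity-side story: a ZhaEntity no longer caches its own _available flag, it simply reflects whatever the owning ZHADevice says. A minimal sketch of that delegation pattern (ZhaDeviceStub and DemoEntity are illustrative stand-ins, not ZHA classes):

class ZhaDeviceStub:
    """Stands in for ZHADevice: the single source of truth for availability."""

    def __init__(self):
        self.available = False


class DemoEntity:
    """Stands in for ZhaEntity: no private flag, just delegation."""

    def __init__(self, device):
        self._zha_device = device

    @property
    def available(self):
        return self._zha_device.available


device = ZhaDeviceStub()
entity = DemoEntity(device)
print(entity.available)   # False
device.available = True   # one flip on the device...
print(entity.available)   # ...is immediately visible on every entity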
@@ -173,11 +168,17 @@ class ZhaEntity(BaseZhaEntity, RestoreEntity):
             self.async_remove,
             signal_override=True,
         )
-        await self.async_check_recently_seen()
+
+        if not self.zha_device.is_mains_powered:
+            # mains powered devices will get real time state
+            last_state = await self.async_get_last_state()
+            if last_state:
+                self.async_restore_last_state(last_state)
+
         await self.async_accept_signal(
             None,
             f"{self.zha_device.available_signal}_entity",
-            self.async_set_available,
+            self.async_state_changed,
             signal_override=True,
         )
         self._zha_device.gateway.register_entity_reference(
@@ -199,20 +200,6 @@ class ZhaEntity(BaseZhaEntity, RestoreEntity):
     def async_restore_last_state(self, last_state) -> None:
         """Restore previous state."""

-    async def async_check_recently_seen(self) -> None:
-        """Check if the device was seen within the last 2 hours."""
-        last_state = await self.async_get_last_state()
-        if (
-            last_state
-            and self._zha_device.last_seen
-            and (time.time() - self._zha_device.last_seen < RESTART_GRACE_PERIOD)
-        ):
-            self.async_set_available(True)
-            if not self.zha_device.is_mains_powered:
-                # mains powered devices will get real time state
-                self.async_restore_last_state(last_state)
-            self._zha_device.available = True
-
     async def async_update(self) -> None:
         """Retrieve latest state."""
         for channel in self.cluster_channels.values():
@@ -228,6 +215,7 @@ class ZhaGroupEntity(BaseZhaEntity):
     ) -> None:
         """Initialize a light group."""
         super().__init__(unique_id, zha_device, **kwargs)
+        self._available = False
         self._name = (
             f"{zha_device.gateway.groups.get(group_id).name}_zha_group_0x{group_id:04x}"
         )
@@ -235,6 +223,11 @@ class ZhaGroupEntity(BaseZhaEntity):
         self._entity_ids: List[str] = entity_ids
         self._async_unsub_state_changed: Optional[CALLBACK_TYPE] = None

+    @property
+    def available(self) -> bool:
+        """Return entity availability."""
+        return self._available
+
     async def async_added_to_hass(self) -> None:
         """Register callbacks."""
         await super().async_added_to_hass()
@@ -175,10 +175,10 @@ def async_find_group_entity_id(hass, domain, group):
     return None


-async def async_enable_traffic(hass, zha_devices):
+async def async_enable_traffic(hass, zha_devices, enabled=True):
    """Allow traffic to flow through the gateway and the zha device."""
     for zha_device in zha_devices:
-        zha_device.update_available(True)
+        zha_device.update_available(enabled)
    await hass.async_block_till_done()
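The new enabled flag lets the same test helper drive devices in either direction, which the test changes below use to force entities unavailable before asserting on them. A rough, self-contained sketch of the helper's shape; FakeDevice and async_enable_traffic_sketch are stand-ins, not the actual test fixtures.

import asyncio


class FakeDevice:
    def __init__(self):
        self.available = False

    def update_available(self, available):
        self.available = available


async def async_enable_traffic_sketch(devices, enabled=True):
    """Flip every device's availability, then yield control once."""
    for device in devices:
        device.update_available(enabled)
    await asyncio.sleep(0)  # stands in for hass.async_block_till_done()


devices = [FakeDevice(), FakeDevice()]
asyncio.run(async_enable_traffic_sketch(devices, enabled=False))
print([d.available for d in devices])  # [False, False]
asyncio.run(async_enable_traffic_sketch(devices))
print([d.available for d in devices])  # [True, True]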
@@ -7,6 +7,7 @@ import zigpy.config
 import zigpy.group
 import zigpy.types

+from homeassistant.components.zha import DOMAIN
 import homeassistant.components.zha.core.const as zha_const
 import homeassistant.components.zha.core.device as zha_core_device
 from homeassistant.setup import async_setup_component
@@ -140,11 +141,27 @@ def zha_device_joined(hass, setup_zha):


 @pytest.fixture
-def zha_device_restored(hass, zigpy_app_controller, setup_zha):
+def zha_device_restored(hass, zigpy_app_controller, setup_zha, hass_storage):
     """Return a restored ZHA device."""

-    async def _zha_device(zigpy_dev):
+    async def _zha_device(zigpy_dev, last_seen=None):
         zigpy_app_controller.devices[zigpy_dev.ieee] = zigpy_dev
+
+        if last_seen is not None:
+            hass_storage[f"{DOMAIN}.storage"] = {
+                "key": f"{DOMAIN}.storage",
+                "version": 1,
+                "data": {
+                    "devices": [
+                        {
+                            "ieee": str(zigpy_dev.ieee),
+                            "last_seen": last_seen,
+                            "name": f"{zigpy_dev.manufacturer} {zigpy_dev.model}",
+                        }
+                    ],
+                },
+            }
+
         await setup_zha()
         zha_gateway = hass.data[zha_const.DATA_ZHA][zha_const.DATA_ZHA_GATEWAY]
         return zha_gateway.get_device(zigpy_dev.ieee)
@@ -59,7 +59,7 @@ async def async_test_iaszone_on_off(hass, cluster, entity_id):
     "device, on_off_test, cluster_name, reporting",
     [
         (DEVICE_IAS, async_test_iaszone_on_off, "ias_zone", (0,)),
-        (DEVICE_OCCUPANCY, async_test_binary_sensor_on_off, "occupancy", (1,)),
+        # (DEVICE_OCCUPANCY, async_test_binary_sensor_on_off, "occupancy", (1,)),
     ],
 )
 async def test_binary_sensor(
@@ -75,9 +75,10 @@ async def test_binary_sensor(
     zigpy_device = zigpy_device_mock(device)
     zha_device = await zha_device_joined_restored(zigpy_device)
     entity_id = await find_entity_id(DOMAIN, zha_device, hass)
-
     assert entity_id is not None

+    assert hass.states.get(entity_id).state == STATE_OFF
+    await async_enable_traffic(hass, [zha_device], enabled=False)
     # test that the sensors exist and are in the unavailable state
     assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
@@ -119,6 +119,7 @@ async def test_cover(m1, hass, zha_device_joined_restored, zigpy_cover_device):
     entity_id = await find_entity_id(DOMAIN, zha_device, hass)
     assert entity_id is not None

+    await async_enable_traffic(hass, [zha_device], enabled=False)
     # test that the cover was created and that it is unavailable
     assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
@@ -207,6 +208,7 @@ async def test_shade(hass, zha_device_joined_restored, zigpy_shade_device):
     entity_id = await find_entity_id(DOMAIN, zha_device, hass)
     assert entity_id is not None

+    await async_enable_traffic(hass, [zha_device], enabled=False)
     # test that the cover was created and that it is unavailable
     assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
@@ -355,6 +357,7 @@ async def test_keen_vent(hass, zha_device_joined_restored, zigpy_keen_vent):
     entity_id = await find_entity_id(DOMAIN, zha_device, hass)
     assert entity_id is not None

+    await async_enable_traffic(hass, [zha_device], enabled=False)
     # test that the cover was created and that it is unavailable
     assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
@@ -7,6 +7,7 @@ import pytest
 import zigpy.zcl.clusters.general as general

 import homeassistant.components.zha.core.device as zha_core_device
+from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE
 import homeassistant.helpers.device_registry as ha_dev_reg
 import homeassistant.util.dt as dt_util
@@ -107,13 +108,13 @@ async def test_check_available_success(
     basic_ch.read_attributes.reset_mock()
     device_with_basic_channel.last_seen = None
     assert zha_device.available is True
-    _send_time_changed(hass, zha_core_device._CONSIDER_UNAVAILABLE_MAINS + 2)
+    _send_time_changed(hass, zha_core_device.CONSIDER_UNAVAILABLE_MAINS + 2)
     await hass.async_block_till_done()
     assert zha_device.available is False
     assert basic_ch.read_attributes.await_count == 0

     device_with_basic_channel.last_seen = (
-        time.time() - zha_core_device._CONSIDER_UNAVAILABLE_MAINS - 2
+        time.time() - zha_core_device.CONSIDER_UNAVAILABLE_MAINS - 2
     )
     _seens = [time.time(), device_with_basic_channel.last_seen]
@@ -162,7 +163,7 @@ async def test_check_available_unsuccessful(
     assert basic_ch.read_attributes.await_count == 0

     device_with_basic_channel.last_seen = (
-        time.time() - zha_core_device._CONSIDER_UNAVAILABLE_MAINS - 2
+        time.time() - zha_core_device.CONSIDER_UNAVAILABLE_MAINS - 2
     )

     # unsuccessfuly ping zigpy device, but zha_device is still available
@@ -203,7 +204,7 @@ async def test_check_available_no_basic_channel(
     assert zha_device.available is True

     device_without_basic_channel.last_seen = (
-        time.time() - zha_core_device._CONSIDER_UNAVAILABLE_BATTERY - 2
+        time.time() - zha_core_device.CONSIDER_UNAVAILABLE_BATTERY - 2
     )

     assert "does not have a mandatory basic cluster" not in caplog.text
@@ -228,3 +229,46 @@ async def test_ota_sw_version(hass, ota_zha_device):
     await hass.async_block_till_done()
     entry = dev_registry.async_get(ota_zha_device.device_id)
     assert int(entry.sw_version, base=16) == sw_version
+
+
+@pytest.mark.parametrize(
+    "device, last_seen_delta, is_available",
+    (
+        ("zigpy_device", 0, True),
+        ("zigpy_device", zha_core_device.CONSIDER_UNAVAILABLE_MAINS + 2, True,),
+        ("zigpy_device", zha_core_device.CONSIDER_UNAVAILABLE_BATTERY - 2, True,),
+        ("zigpy_device", zha_core_device.CONSIDER_UNAVAILABLE_BATTERY + 2, False,),
+        ("zigpy_device_mains", 0, True),
+        ("zigpy_device_mains", zha_core_device.CONSIDER_UNAVAILABLE_MAINS - 2, True,),
+        ("zigpy_device_mains", zha_core_device.CONSIDER_UNAVAILABLE_MAINS + 2, False,),
+        (
+            "zigpy_device_mains",
+            zha_core_device.CONSIDER_UNAVAILABLE_BATTERY - 2,
+            False,
+        ),
+        (
+            "zigpy_device_mains",
+            zha_core_device.CONSIDER_UNAVAILABLE_BATTERY + 2,
+            False,
+        ),
+    ),
+)
+async def test_device_restore_availability(
+    hass, request, device, last_seen_delta, is_available, zha_device_restored
+):
+    """Test initial availability for restored devices."""
+
+    zigpy_device = request.getfixturevalue(device)()
+    zha_device = await zha_device_restored(
+        zigpy_device, last_seen=time.time() - last_seen_delta
+    )
+    entity_id = "switch.fakemanufacturer_fakemodel_e769900a_on_off"
+
+    await hass.async_block_till_done()
+    # ensure the switch entity was created
+    assert hass.states.get(entity_id).state is not None
+    assert zha_device.available is is_available
+    if is_available:
+        assert hass.states.get(entity_id).state == STATE_OFF
+    else:
+        assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
@@ -49,6 +49,8 @@ async def test_device_tracker(hass, zha_device_joined_restored, zigpy_device_dt)
     entity_id = await find_entity_id(DOMAIN, zha_device, hass)
     assert entity_id is not None

+    assert hass.states.get(entity_id).state == STATE_HOME
+    await async_enable_traffic(hass, [zha_device], enabled=False)
     # test that the device tracker was created and that it is unavailable
     assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
@@ -117,6 +117,8 @@ async def test_fan(hass, zha_device_joined_restored, zigpy_device):
     entity_id = await find_entity_id(DOMAIN, zha_device, hass)
     assert entity_id is not None

+    assert hass.states.get(entity_id).state == STATE_OFF
+    await async_enable_traffic(hass, [zha_device], enabled=False)
     # test that the fan was created and that it is unavailable
     assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
@@ -113,12 +113,10 @@ async def device_light_2(hass, zigpy_device_mock, zha_device_joined):
 async def test_device_left(hass, zigpy_dev_basic, zha_dev_basic):
     """Device leaving the network should become unavailable."""

-    assert zha_dev_basic.available is False
-
     await async_enable_traffic(hass, [zha_dev_basic])
     assert zha_dev_basic.available is True

     get_zha_gateway(hass).device_left(zigpy_dev_basic)
     await hass.async_block_till_done()
     assert zha_dev_basic.available is False
@@ -245,6 +245,8 @@ async def test_light(
     cluster_color = getattr(zigpy_device.endpoints[1], "light_color", None)
     cluster_identify = getattr(zigpy_device.endpoints[1], "identify", None)

+    assert hass.states.get(entity_id).state == STATE_OFF
+    await async_enable_traffic(hass, [zha_device], enabled=False)
     # test that the lights were created and that they are unavailable
     assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
@@ -516,6 +518,10 @@ async def test_zha_group_light_entity(
     dev1_cluster_level = device_light_1.device.endpoints[1].level

+    await async_enable_traffic(
+        hass, [device_light_1, device_light_2, device_light_3], enabled=False
+    )
+    await hass.async_block_till_done()
     # test that the lights were created and that they are unavailable
     assert hass.states.get(group_entity_id).state == STATE_UNAVAILABLE
@@ -43,6 +43,8 @@ async def test_lock(hass, lock):
     entity_id = await find_entity_id(DOMAIN, zha_device, hass)
     assert entity_id is not None

+    assert hass.states.get(entity_id).state == STATE_UNLOCKED
+    await async_enable_traffic(hass, [zha_device], enabled=False)
     # test that the lock was created and that it is unavailable
     assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
@@ -128,6 +128,8 @@ async def test_sensor(
     zha_device = await zha_device_joined_restored(zigpy_device)
     entity_id = await find_entity_id(DOMAIN, zha_device, hass)

+    await async_enable_traffic(hass, [zha_device], enabled=False)
+    await hass.async_block_till_done()
     # ensure the sensor entity was created
     assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
@@ -247,6 +249,7 @@ async def test_temp_uom(
     entity_id = await find_entity_id(DOMAIN, zha_device, hass)

     if not restore:
+        await async_enable_traffic(hass, [zha_device], enabled=False)
         assert hass.states.get(entity_id).state == STATE_UNAVAILABLE

     # allow traffic to flow through the gateway and devices
@@ -106,6 +106,8 @@ async def test_switch(hass, zha_device_joined_restored, zigpy_device):
     entity_id = await find_entity_id(DOMAIN, zha_device, hass)
     assert entity_id is not None

+    assert hass.states.get(entity_id).state == STATE_OFF
+    await async_enable_traffic(hass, [zha_device], enabled=False)
     # test that the switch was created and that its state is unavailable
     assert hass.states.get(entity_id).state == STATE_UNAVAILABLE