Refactor ZHA Entity availability tracking (#36645)
* Refactor ZHA entity availability: entity availability now tracks the underlying ZHA device's availability. * Update device status without signal. * Update tests. * Fix tests. * Tests for restored devices availability. * Guard against empty last_seen entry. Refactor device loading a bit.
This commit is contained in:
@@ -175,10 +175,10 @@ def async_find_group_entity_id(hass, domain, group):
|
||||
return None
|
||||
|
||||
|
||||
async def async_enable_traffic(hass, zha_devices):
|
||||
async def async_enable_traffic(hass, zha_devices, enabled=True):
|
||||
"""Allow traffic to flow through the gateway and the zha device."""
|
||||
for zha_device in zha_devices:
|
||||
zha_device.update_available(True)
|
||||
zha_device.update_available(enabled)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ import zigpy.config
|
||||
import zigpy.group
|
||||
import zigpy.types
|
||||
|
||||
from homeassistant.components.zha import DOMAIN
|
||||
import homeassistant.components.zha.core.const as zha_const
|
||||
import homeassistant.components.zha.core.device as zha_core_device
|
||||
from homeassistant.setup import async_setup_component
|
||||
@@ -140,11 +141,27 @@ def zha_device_joined(hass, setup_zha):
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def zha_device_restored(hass, zigpy_app_controller, setup_zha):
|
||||
def zha_device_restored(hass, zigpy_app_controller, setup_zha, hass_storage):
|
||||
"""Return a restored ZHA device."""
|
||||
|
||||
async def _zha_device(zigpy_dev):
|
||||
async def _zha_device(zigpy_dev, last_seen=None):
|
||||
zigpy_app_controller.devices[zigpy_dev.ieee] = zigpy_dev
|
||||
|
||||
if last_seen is not None:
|
||||
hass_storage[f"{DOMAIN}.storage"] = {
|
||||
"key": f"{DOMAIN}.storage",
|
||||
"version": 1,
|
||||
"data": {
|
||||
"devices": [
|
||||
{
|
||||
"ieee": str(zigpy_dev.ieee),
|
||||
"last_seen": last_seen,
|
||||
"name": f"{zigpy_dev.manufacturer} {zigpy_dev.model}",
|
||||
}
|
||||
],
|
||||
},
|
||||
}
|
||||
|
||||
await setup_zha()
|
||||
zha_gateway = hass.data[zha_const.DATA_ZHA][zha_const.DATA_ZHA_GATEWAY]
|
||||
return zha_gateway.get_device(zigpy_dev.ieee)
|
||||
|
||||
@@ -59,7 +59,7 @@ async def async_test_iaszone_on_off(hass, cluster, entity_id):
|
||||
"device, on_off_test, cluster_name, reporting",
|
||||
[
|
||||
(DEVICE_IAS, async_test_iaszone_on_off, "ias_zone", (0,)),
|
||||
(DEVICE_OCCUPANCY, async_test_binary_sensor_on_off, "occupancy", (1,)),
|
||||
# (DEVICE_OCCUPANCY, async_test_binary_sensor_on_off, "occupancy", (1,)),
|
||||
],
|
||||
)
|
||||
async def test_binary_sensor(
|
||||
@@ -75,9 +75,10 @@ async def test_binary_sensor(
|
||||
zigpy_device = zigpy_device_mock(device)
|
||||
zha_device = await zha_device_joined_restored(zigpy_device)
|
||||
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
|
||||
|
||||
assert entity_id is not None
|
||||
|
||||
assert hass.states.get(entity_id).state == STATE_OFF
|
||||
await async_enable_traffic(hass, [zha_device], enabled=False)
|
||||
# test that the sensors exist and are in the unavailable state
|
||||
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
|
||||
|
||||
|
||||
@@ -119,6 +119,7 @@ async def test_cover(m1, hass, zha_device_joined_restored, zigpy_cover_device):
|
||||
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
|
||||
assert entity_id is not None
|
||||
|
||||
await async_enable_traffic(hass, [zha_device], enabled=False)
|
||||
# test that the cover was created and that it is unavailable
|
||||
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
|
||||
|
||||
@@ -207,6 +208,7 @@ async def test_shade(hass, zha_device_joined_restored, zigpy_shade_device):
|
||||
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
|
||||
assert entity_id is not None
|
||||
|
||||
await async_enable_traffic(hass, [zha_device], enabled=False)
|
||||
# test that the cover was created and that it is unavailable
|
||||
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
|
||||
|
||||
@@ -355,6 +357,7 @@ async def test_keen_vent(hass, zha_device_joined_restored, zigpy_keen_vent):
|
||||
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
|
||||
assert entity_id is not None
|
||||
|
||||
await async_enable_traffic(hass, [zha_device], enabled=False)
|
||||
# test that the cover was created and that it is unavailable
|
||||
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ import pytest
|
||||
import zigpy.zcl.clusters.general as general
|
||||
|
||||
import homeassistant.components.zha.core.device as zha_core_device
|
||||
from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE
|
||||
import homeassistant.helpers.device_registry as ha_dev_reg
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
@@ -107,13 +108,13 @@ async def test_check_available_success(
|
||||
basic_ch.read_attributes.reset_mock()
|
||||
device_with_basic_channel.last_seen = None
|
||||
assert zha_device.available is True
|
||||
_send_time_changed(hass, zha_core_device._CONSIDER_UNAVAILABLE_MAINS + 2)
|
||||
_send_time_changed(hass, zha_core_device.CONSIDER_UNAVAILABLE_MAINS + 2)
|
||||
await hass.async_block_till_done()
|
||||
assert zha_device.available is False
|
||||
assert basic_ch.read_attributes.await_count == 0
|
||||
|
||||
device_with_basic_channel.last_seen = (
|
||||
time.time() - zha_core_device._CONSIDER_UNAVAILABLE_MAINS - 2
|
||||
time.time() - zha_core_device.CONSIDER_UNAVAILABLE_MAINS - 2
|
||||
)
|
||||
_seens = [time.time(), device_with_basic_channel.last_seen]
|
||||
|
||||
@@ -162,7 +163,7 @@ async def test_check_available_unsuccessful(
|
||||
assert basic_ch.read_attributes.await_count == 0
|
||||
|
||||
device_with_basic_channel.last_seen = (
|
||||
time.time() - zha_core_device._CONSIDER_UNAVAILABLE_MAINS - 2
|
||||
time.time() - zha_core_device.CONSIDER_UNAVAILABLE_MAINS - 2
|
||||
)
|
||||
|
||||
# unsuccessfully ping zigpy device, but zha_device is still available
|
||||
@@ -203,7 +204,7 @@ async def test_check_available_no_basic_channel(
|
||||
assert zha_device.available is True
|
||||
|
||||
device_without_basic_channel.last_seen = (
|
||||
time.time() - zha_core_device._CONSIDER_UNAVAILABLE_BATTERY - 2
|
||||
time.time() - zha_core_device.CONSIDER_UNAVAILABLE_BATTERY - 2
|
||||
)
|
||||
|
||||
assert "does not have a mandatory basic cluster" not in caplog.text
|
||||
@@ -228,3 +229,46 @@ async def test_ota_sw_version(hass, ota_zha_device):
|
||||
await hass.async_block_till_done()
|
||||
entry = dev_registry.async_get(ota_zha_device.device_id)
|
||||
assert int(entry.sw_version, base=16) == sw_version
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"device, last_seen_delta, is_available",
|
||||
(
|
||||
("zigpy_device", 0, True),
|
||||
("zigpy_device", zha_core_device.CONSIDER_UNAVAILABLE_MAINS + 2, True,),
|
||||
("zigpy_device", zha_core_device.CONSIDER_UNAVAILABLE_BATTERY - 2, True,),
|
||||
("zigpy_device", zha_core_device.CONSIDER_UNAVAILABLE_BATTERY + 2, False,),
|
||||
("zigpy_device_mains", 0, True),
|
||||
("zigpy_device_mains", zha_core_device.CONSIDER_UNAVAILABLE_MAINS - 2, True,),
|
||||
("zigpy_device_mains", zha_core_device.CONSIDER_UNAVAILABLE_MAINS + 2, False,),
|
||||
(
|
||||
"zigpy_device_mains",
|
||||
zha_core_device.CONSIDER_UNAVAILABLE_BATTERY - 2,
|
||||
False,
|
||||
),
|
||||
(
|
||||
"zigpy_device_mains",
|
||||
zha_core_device.CONSIDER_UNAVAILABLE_BATTERY + 2,
|
||||
False,
|
||||
),
|
||||
),
|
||||
)
|
||||
async def test_device_restore_availability(
|
||||
hass, request, device, last_seen_delta, is_available, zha_device_restored
|
||||
):
|
||||
"""Test initial availability for restored devices."""
|
||||
|
||||
zigpy_device = request.getfixturevalue(device)()
|
||||
zha_device = await zha_device_restored(
|
||||
zigpy_device, last_seen=time.time() - last_seen_delta
|
||||
)
|
||||
entity_id = "switch.fakemanufacturer_fakemodel_e769900a_on_off"
|
||||
|
||||
await hass.async_block_till_done()
|
||||
# ensure the switch entity was created
|
||||
assert hass.states.get(entity_id).state is not None
|
||||
assert zha_device.available is is_available
|
||||
if is_available:
|
||||
assert hass.states.get(entity_id).state == STATE_OFF
|
||||
else:
|
||||
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
|
||||
|
||||
@@ -49,6 +49,8 @@ async def test_device_tracker(hass, zha_device_joined_restored, zigpy_device_dt)
|
||||
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
|
||||
assert entity_id is not None
|
||||
|
||||
assert hass.states.get(entity_id).state == STATE_HOME
|
||||
await async_enable_traffic(hass, [zha_device], enabled=False)
|
||||
# test that the device tracker was created and that it is unavailable
|
||||
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
|
||||
|
||||
|
||||
@@ -117,6 +117,8 @@ async def test_fan(hass, zha_device_joined_restored, zigpy_device):
|
||||
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
|
||||
assert entity_id is not None
|
||||
|
||||
assert hass.states.get(entity_id).state == STATE_OFF
|
||||
await async_enable_traffic(hass, [zha_device], enabled=False)
|
||||
# test that the fan was created and that it is unavailable
|
||||
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
|
||||
|
||||
|
||||
@@ -113,12 +113,10 @@ async def device_light_2(hass, zigpy_device_mock, zha_device_joined):
|
||||
async def test_device_left(hass, zigpy_dev_basic, zha_dev_basic):
|
||||
"""Device leaving the network should become unavailable."""
|
||||
|
||||
assert zha_dev_basic.available is False
|
||||
|
||||
await async_enable_traffic(hass, [zha_dev_basic])
|
||||
assert zha_dev_basic.available is True
|
||||
|
||||
get_zha_gateway(hass).device_left(zigpy_dev_basic)
|
||||
await hass.async_block_till_done()
|
||||
assert zha_dev_basic.available is False
|
||||
|
||||
|
||||
|
||||
@@ -245,6 +245,8 @@ async def test_light(
|
||||
cluster_color = getattr(zigpy_device.endpoints[1], "light_color", None)
|
||||
cluster_identify = getattr(zigpy_device.endpoints[1], "identify", None)
|
||||
|
||||
assert hass.states.get(entity_id).state == STATE_OFF
|
||||
await async_enable_traffic(hass, [zha_device], enabled=False)
|
||||
# test that the lights were created and that they are unavailable
|
||||
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
|
||||
|
||||
@@ -516,6 +518,10 @@ async def test_zha_group_light_entity(
|
||||
|
||||
dev1_cluster_level = device_light_1.device.endpoints[1].level
|
||||
|
||||
await async_enable_traffic(
|
||||
hass, [device_light_1, device_light_2, device_light_3], enabled=False
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
# test that the lights were created and that they are unavailable
|
||||
assert hass.states.get(group_entity_id).state == STATE_UNAVAILABLE
|
||||
|
||||
|
||||
@@ -43,6 +43,8 @@ async def test_lock(hass, lock):
|
||||
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
|
||||
assert entity_id is not None
|
||||
|
||||
assert hass.states.get(entity_id).state == STATE_UNLOCKED
|
||||
await async_enable_traffic(hass, [zha_device], enabled=False)
|
||||
# test that the lock was created and that it is unavailable
|
||||
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
|
||||
|
||||
|
||||
@@ -128,6 +128,8 @@ async def test_sensor(
|
||||
zha_device = await zha_device_joined_restored(zigpy_device)
|
||||
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
|
||||
|
||||
await async_enable_traffic(hass, [zha_device], enabled=False)
|
||||
await hass.async_block_till_done()
|
||||
# ensure the sensor entity was created
|
||||
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
|
||||
|
||||
@@ -247,6 +249,7 @@ async def test_temp_uom(
|
||||
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
|
||||
|
||||
if not restore:
|
||||
await async_enable_traffic(hass, [zha_device], enabled=False)
|
||||
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
|
||||
|
||||
# allow traffic to flow through the gateway and devices
|
||||
|
||||
@@ -106,6 +106,8 @@ async def test_switch(hass, zha_device_joined_restored, zigpy_device):
|
||||
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
|
||||
assert entity_id is not None
|
||||
|
||||
assert hass.states.get(entity_id).state == STATE_OFF
|
||||
await async_enable_traffic(hass, [zha_device], enabled=False)
|
||||
# test that the switch was created and that its state is unavailable
|
||||
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
|
||||
|
||||
|
||||
Reference in New Issue
Block a user