Add ZHA roller shades as cover instead of switch (#36059)

* Implement cover for "Shade" ZHA device type.
* Update ZHA cover tests.
* Add stop command
* Coverage.
This commit is contained in:
Alexei Chetroi 2020-05-23 22:37:49 -04:00 committed by GitHub
parent eaa16fa818
commit f4c5b9f8f8
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 335 additions and 12 deletions

View file

@ -86,6 +86,7 @@ CHANNEL_OCCUPANCY = "occupancy"
CHANNEL_ON_OFF = "on_off" CHANNEL_ON_OFF = "on_off"
CHANNEL_POWER_CONFIGURATION = "power" CHANNEL_POWER_CONFIGURATION = "power"
CHANNEL_PRESSURE = "pressure" CHANNEL_PRESSURE = "pressure"
CHANNEL_SHADE = "shade"
CHANNEL_SMARTENERGY_METERING = "smartenergy_metering" CHANNEL_SMARTENERGY_METERING = "smartenergy_metering"
CHANNEL_TEMPERATURE = "temperature" CHANNEL_TEMPERATURE = "temperature"
CHANNEL_THERMOSTAT = "thermostat" CHANNEL_THERMOSTAT = "thermostat"

View file

@ -102,6 +102,7 @@ DEVICE_CLASS = {
zigpy.profiles.zha.DeviceType.ON_OFF_BALLAST: SWITCH, zigpy.profiles.zha.DeviceType.ON_OFF_BALLAST: SWITCH,
zigpy.profiles.zha.DeviceType.ON_OFF_LIGHT: LIGHT, zigpy.profiles.zha.DeviceType.ON_OFF_LIGHT: LIGHT,
zigpy.profiles.zha.DeviceType.ON_OFF_PLUG_IN_UNIT: SWITCH, zigpy.profiles.zha.DeviceType.ON_OFF_PLUG_IN_UNIT: SWITCH,
zigpy.profiles.zha.DeviceType.SHADE: COVER,
zigpy.profiles.zha.DeviceType.SMART_PLUG: SWITCH, zigpy.profiles.zha.DeviceType.SMART_PLUG: SWITCH,
}, },
zigpy.profiles.zll.PROFILE_ID: { zigpy.profiles.zll.PROFILE_ID: {

View file

@ -1,11 +1,17 @@
"""Support for ZHA covers.""" """Support for ZHA covers."""
from datetime import timedelta
import functools import functools
import logging import logging
from typing import List, Optional
from zigpy.zcl.foundation import Status from zigpy.zcl.foundation import Status
from homeassistant.components.cover import ATTR_POSITION, DOMAIN, CoverEntity from homeassistant.components.cover import (
ATTR_CURRENT_POSITION,
ATTR_POSITION,
DEVICE_CLASS_SHADE,
DOMAIN,
CoverEntity,
)
from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING
from homeassistant.core import callback from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.dispatcher import async_dispatcher_connect
@ -13,17 +19,21 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .core import discovery from .core import discovery
from .core.const import ( from .core.const import (
CHANNEL_COVER, CHANNEL_COVER,
CHANNEL_LEVEL,
CHANNEL_ON_OFF,
CHANNEL_SHADE,
DATA_ZHA, DATA_ZHA,
DATA_ZHA_DISPATCHERS, DATA_ZHA_DISPATCHERS,
SIGNAL_ADD_ENTITIES, SIGNAL_ADD_ENTITIES,
SIGNAL_ATTR_UPDATED, SIGNAL_ATTR_UPDATED,
SIGNAL_SET_LEVEL,
) )
from .core.registries import ZHA_ENTITIES from .core.registries import ZHA_ENTITIES
from .core.typing import ChannelType, ZhaDeviceType
from .entity import ZhaEntity from .entity import ZhaEntity
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=60)
STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN) STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)
@ -158,3 +168,113 @@ class ZhaCover(ZhaEntity, CoverEntity):
else: else:
self._current_position = None self._current_position = None
self._state = None self._state = None
@STRICT_MATCH(channel_names={CHANNEL_LEVEL, CHANNEL_ON_OFF, CHANNEL_SHADE})
class Shade(ZhaEntity, CoverEntity):
    """ZHA Shade.

    Exposes a zigbee "Shade" device (OnOff + LevelControl + Shade clusters)
    as a Home Assistant cover: on/off maps to open/closed and the zigbee
    level (0-255) maps to the cover position (0-100).
    """

    def __init__(
        self,
        unique_id: str,
        zha_device: ZhaDeviceType,
        channels: List[ChannelType],
        **kwargs,
    ):
        """Initialize the ZHA shade."""
        super().__init__(unique_id, zha_device, channels, **kwargs)
        self._on_off_channel = self.cluster_channels[CHANNEL_ON_OFF]
        self._level_channel = self.cluster_channels[CHANNEL_LEVEL]
        # Both stay None until the device reports state or a previous
        # state is restored from cache.
        self._position: Optional[int] = None
        self._is_open: Optional[bool] = None

    @property
    def current_cover_position(self):
        """Return current position of cover.

        None is unknown, 0 is closed, 100 is fully open.
        """
        return self._position

    @property
    def device_class(self) -> Optional[str]:
        """Return the class of this device, from component DEVICE_CLASSES."""
        return DEVICE_CLASS_SHADE

    @property
    def is_closed(self) -> Optional[bool]:
        """Return True if shade is closed, None if state is unknown."""
        if self._is_open is None:
            return None
        return not self._is_open

    async def async_added_to_hass(self):
        """Run when about to be added to hass."""
        await super().async_added_to_hass()
        # On/off attribute reports drive open/closed; level reports drive
        # the position.
        await self.async_accept_signal(
            self._on_off_channel, SIGNAL_ATTR_UPDATED, self.async_set_open_closed
        )
        await self.async_accept_signal(
            self._level_channel, SIGNAL_SET_LEVEL, self.async_set_level
        )

    @callback
    def async_restore_last_state(self, last_state):
        """Restore previous state from the state cache."""
        self._is_open = last_state.state == STATE_OPEN
        if ATTR_CURRENT_POSITION in last_state.attributes:
            self._position = last_state.attributes[ATTR_CURRENT_POSITION]

    @callback
    def async_set_open_closed(self, attr_id: int, attr_name: str, value: bool) -> None:
        """Set open/closed state from an on/off attribute report."""
        self._is_open = bool(value)
        self.async_write_ha_state()

    @callback
    def async_set_level(self, value: int) -> None:
        """Set the reported position from a zigbee level (0-255)."""
        value = max(0, min(255, value))
        self._position = int(value * 100 / 255)
        self.async_write_ha_state()

    async def async_open_cover(self, **kwargs):
        """Open the window cover."""
        res = await self._on_off_channel.on()
        if not isinstance(res, list) or res[1] != Status.SUCCESS:
            self.debug("couldn't open cover: %s", res)
            return

        self._is_open = True
        self.async_write_ha_state()

    async def async_close_cover(self, **kwargs):
        """Close the window cover."""
        res = await self._on_off_channel.off()
        if not isinstance(res, list) or res[1] != Status.SUCCESS:
            # Was "couldn't open cover" — copy/paste error from the open path.
            self.debug("couldn't close cover: %s", res)
            return

        self._is_open = False
        self.async_write_ha_state()

    async def async_set_cover_position(self, **kwargs):
        """Move the roller shutter to a specific position (0-100)."""
        new_pos = kwargs[ATTR_POSITION]
        # ZCL level is a uint8 — round to an int instead of sending a float.
        res = await self._level_channel.move_to_level_with_on_off(
            round(new_pos * 255 / 100), 1
        )

        if not isinstance(res, list) or res[1] != Status.SUCCESS:
            self.debug("couldn't set cover's position: %s", res)
            return

        self._position = new_pos
        self.async_write_ha_state()

    async def async_stop_cover(self, **kwargs) -> None:
        """Stop the cover motion."""
        res = await self._level_channel.stop()
        if not isinstance(res, list) or res[1] != Status.SUCCESS:
            self.debug("couldn't stop cover: %s", res)
            return

View file

@ -28,7 +28,7 @@ from .core.const import (
SIGNAL_REMOVE_GROUP, SIGNAL_REMOVE_GROUP,
) )
from .core.helpers import LogMixin from .core.helpers import LogMixin
from .core.typing import CALLABLE_T, ChannelsType, ChannelType, ZhaDeviceType from .core.typing import CALLABLE_T, ChannelType, ZhaDeviceType
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -150,7 +150,7 @@ class ZhaEntity(BaseZhaEntity, RestoreEntity):
self, self,
unique_id: str, unique_id: str,
zha_device: ZhaDeviceType, zha_device: ZhaDeviceType,
channels: ChannelsType, channels: List[ChannelType],
**kwargs, **kwargs,
): ):
"""Init ZHA entity.""" """Init ZHA entity."""

View file

@ -1,11 +1,22 @@
"""Test zha cover.""" """Test zha cover."""
import asyncio
import pytest import pytest
import zigpy.types import zigpy.types
import zigpy.zcl.clusters.closures as closures import zigpy.zcl.clusters.closures as closures
import zigpy.zcl.clusters.general as general
import zigpy.zcl.foundation as zcl_f import zigpy.zcl.foundation as zcl_f
from homeassistant.components.cover import DOMAIN from homeassistant.components.cover import (
ATTR_CURRENT_POSITION,
DOMAIN,
SERVICE_CLOSE_COVER,
SERVICE_OPEN_COVER,
SERVICE_SET_COVER_POSITION,
SERVICE_STOP_COVER,
)
from homeassistant.const import STATE_CLOSED, STATE_OPEN, STATE_UNAVAILABLE from homeassistant.const import STATE_CLOSED, STATE_OPEN, STATE_UNAVAILABLE
from homeassistant.core import CoreState, State
from .common import ( from .common import (
async_enable_traffic, async_enable_traffic,
@ -14,8 +25,8 @@ from .common import (
send_attributes_report, send_attributes_report,
) )
from tests.async_mock import MagicMock, call, patch from tests.async_mock import AsyncMock, MagicMock, call, patch
from tests.common import mock_coro from tests.common import mock_coro, mock_restore_cache
@pytest.fixture @pytest.fixture
@ -32,6 +43,24 @@ def zigpy_cover_device(zigpy_device_mock):
return zigpy_device_mock(endpoints) return zigpy_device_mock(endpoints)
@pytest.fixture
def zigpy_shade_device(zigpy_device_mock):
    """Zigpy shade device."""
    shade_clusters = [
        closures.Shade.cluster_id,
        general.LevelControl.cluster_id,
        general.OnOff.cluster_id,
    ]
    return zigpy_device_mock(
        {
            1: {
                "device_type": 512,
                "in_clusters": shade_clusters,
                "out_clusters": [],
            }
        }
    )
@patch( @patch(
"homeassistant.components.zha.core.channels.closures.WindowCovering.async_initialize" "homeassistant.components.zha.core.channels.closures.WindowCovering.async_initialize"
) )
@ -74,7 +103,7 @@ async def test_cover(m1, hass, zha_device_joined_restored, zigpy_cover_device):
"zigpy.zcl.Cluster.request", return_value=mock_coro([0x1, zcl_f.Status.SUCCESS]) "zigpy.zcl.Cluster.request", return_value=mock_coro([0x1, zcl_f.Status.SUCCESS])
): ):
await hass.services.async_call( await hass.services.async_call(
DOMAIN, "close_cover", {"entity_id": entity_id}, blocking=True DOMAIN, SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True
) )
assert cluster.request.call_count == 1 assert cluster.request.call_count == 1
assert cluster.request.call_args == call( assert cluster.request.call_args == call(
@ -86,7 +115,7 @@ async def test_cover(m1, hass, zha_device_joined_restored, zigpy_cover_device):
"zigpy.zcl.Cluster.request", return_value=mock_coro([0x0, zcl_f.Status.SUCCESS]) "zigpy.zcl.Cluster.request", return_value=mock_coro([0x0, zcl_f.Status.SUCCESS])
): ):
await hass.services.async_call( await hass.services.async_call(
DOMAIN, "open_cover", {"entity_id": entity_id}, blocking=True DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True
) )
assert cluster.request.call_count == 1 assert cluster.request.call_count == 1
assert cluster.request.call_args == call( assert cluster.request.call_args == call(
@ -99,7 +128,7 @@ async def test_cover(m1, hass, zha_device_joined_restored, zigpy_cover_device):
): ):
await hass.services.async_call( await hass.services.async_call(
DOMAIN, DOMAIN,
"set_cover_position", SERVICE_SET_COVER_POSITION,
{"entity_id": entity_id, "position": 47}, {"entity_id": entity_id, "position": 47},
blocking=True, blocking=True,
) )
@ -119,7 +148,7 @@ async def test_cover(m1, hass, zha_device_joined_restored, zigpy_cover_device):
"zigpy.zcl.Cluster.request", return_value=mock_coro([0x2, zcl_f.Status.SUCCESS]) "zigpy.zcl.Cluster.request", return_value=mock_coro([0x2, zcl_f.Status.SUCCESS])
): ):
await hass.services.async_call( await hass.services.async_call(
DOMAIN, "stop_cover", {"entity_id": entity_id}, blocking=True DOMAIN, SERVICE_STOP_COVER, {"entity_id": entity_id}, blocking=True
) )
assert cluster.request.call_count == 1 assert cluster.request.call_count == 1
assert cluster.request.call_args == call( assert cluster.request.call_args == call(
@ -129,3 +158,151 @@ async def test_cover(m1, hass, zha_device_joined_restored, zigpy_cover_device):
# test rejoin # test rejoin
await async_test_rejoin(hass, zigpy_cover_device, [cluster], (1,)) await async_test_rejoin(hass, zigpy_cover_device, [cluster], (1,))
assert hass.states.get(entity_id).state == STATE_OPEN assert hass.states.get(entity_id).state == STATE_OPEN
async def test_shade(hass, zha_device_joined_restored, zigpy_shade_device):
    """Test zha cover platform for shade device type."""

    # load up cover domain
    zha_device = await zha_device_joined_restored(zigpy_shade_device)

    cluster_on_off = zigpy_shade_device.endpoints.get(1).on_off
    cluster_level = zigpy_shade_device.endpoints.get(1).level
    entity_id = await find_entity_id(DOMAIN, zha_device, hass)
    assert entity_id is not None

    # test that the cover was created and that it is unavailable
    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE

    # allow traffic to flow through the gateway and device
    await async_enable_traffic(hass, [zha_device])
    await hass.async_block_till_done()

    # an on/off report of False means the shade is closed
    await send_attributes_report(hass, cluster_on_off, {8: 0, 0: False, 1: 1})
    assert hass.states.get(entity_id).state == STATE_CLOSED

    # an on/off report of True means the shade is open
    await send_attributes_report(hass, cluster_on_off, {8: 0, 0: True, 1: 1})
    assert hass.states.get(entity_id).state == STATE_OPEN

    # close from UI command fails — state must stay OPEN
    with patch("zigpy.zcl.Cluster.request", side_effect=asyncio.TimeoutError):
        await hass.services.async_call(
            DOMAIN, SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True
        )
        # close issues an Off command (command id 0x0000) on the on/off cluster
        assert cluster_on_off.request.call_count == 1
        assert cluster_on_off.request.call_args[0][0] is False
        assert cluster_on_off.request.call_args[0][1] == 0x0000
        assert hass.states.get(entity_id).state == STATE_OPEN

    # close from UI succeeds — state transitions to CLOSED
    with patch(
        "zigpy.zcl.Cluster.request", AsyncMock(return_value=[0x1, zcl_f.Status.SUCCESS])
    ):
        await hass.services.async_call(
            DOMAIN, SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True
        )
        assert cluster_on_off.request.call_count == 1
        assert cluster_on_off.request.call_args[0][0] is False
        assert cluster_on_off.request.call_args[0][1] == 0x0000
        assert hass.states.get(entity_id).state == STATE_CLOSED

    # open from UI command fails
    # no level has been seen as an attribute, so position is unset until the
    # device reports level 0 below
    assert ATTR_CURRENT_POSITION not in hass.states.get(entity_id).attributes
    await send_attributes_report(hass, cluster_level, {0: 0})
    with patch("zigpy.zcl.Cluster.request", side_effect=asyncio.TimeoutError):
        await hass.services.async_call(
            DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True
        )
        # open issues an On command (command id 0x0001) on the on/off cluster
        assert cluster_on_off.request.call_count == 1
        assert cluster_on_off.request.call_args[0][0] is False
        assert cluster_on_off.request.call_args[0][1] == 0x0001
        assert hass.states.get(entity_id).state == STATE_CLOSED

    # open from UI succeeds
    with patch(
        "zigpy.zcl.Cluster.request", AsyncMock(return_value=[0x0, zcl_f.Status.SUCCESS])
    ):
        await hass.services.async_call(
            DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True
        )
        assert cluster_on_off.request.call_count == 1
        assert cluster_on_off.request.call_args[0][0] is False
        assert cluster_on_off.request.call_args[0][1] == 0x0001
        assert hass.states.get(entity_id).state == STATE_OPEN

    # set position UI command fails — position must not change
    with patch("zigpy.zcl.Cluster.request", side_effect=asyncio.TimeoutError):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_SET_COVER_POSITION,
            {"entity_id": entity_id, "position": 47},
            blocking=True,
        )
        # move_to_level_with_on_off is command id 0x0004 on the level cluster;
        # the requested level arg scales back to the requested HA position
        assert cluster_level.request.call_count == 1
        assert cluster_level.request.call_args[0][0] is False
        assert cluster_level.request.call_args[0][1] == 0x0004
        assert int(cluster_level.request.call_args[0][3] * 100 / 255) == 47
        assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 0

    # set position UI success
    with patch(
        "zigpy.zcl.Cluster.request", AsyncMock(return_value=[0x5, zcl_f.Status.SUCCESS])
    ):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_SET_COVER_POSITION,
            {"entity_id": entity_id, "position": 47},
            blocking=True,
        )
        assert cluster_level.request.call_count == 1
        assert cluster_level.request.call_args[0][0] is False
        assert cluster_level.request.call_args[0][1] == 0x0004
        assert int(cluster_level.request.call_args[0][3] * 100 / 255) == 47
        assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 47

    # report position change — a device-side level report of 100/255
    await send_attributes_report(hass, cluster_level, {8: 0, 0: 100, 1: 1})
    assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == int(
        100 * 100 / 255
    )

    # test rejoin
    await async_test_rejoin(
        hass, zigpy_shade_device, [cluster_level, cluster_on_off], (1,)
    )
    assert hass.states.get(entity_id).state == STATE_OPEN

    # test cover stop — stop (0x0003) or stop_with_on_off (0x0007) is accepted
    with patch("zigpy.zcl.Cluster.request", side_effect=asyncio.TimeoutError):
        await hass.services.async_call(
            DOMAIN, SERVICE_STOP_COVER, {"entity_id": entity_id}, blocking=True,
        )
        assert cluster_level.request.call_count == 1
        assert cluster_level.request.call_args[0][0] is False
        assert cluster_level.request.call_args[0][1] in (0x0003, 0x0007)
async def test_restore_state(hass, zha_device_restored, zigpy_shade_device):
    """Ensure states are restored on startup."""
    # Pre-populate the restore-state cache with an open shade at position 50.
    mock_restore_cache(
        hass,
        (
            State(
                "cover.fakemanufacturer_fakemodel_e769900a_level_on_off_shade",
                STATE_OPEN,
                {ATTR_CURRENT_POSITION: 50},
            ),
        ),
    )

    hass.state = CoreState.starting

    zha_device = await zha_device_restored(zigpy_shade_device)
    entity_id = await find_entity_id(DOMAIN, zha_device, hass)
    assert entity_id is not None

    # test that the cover state and position were restored from the cache
    assert hass.states.get(entity_id).state == STATE_OPEN
    assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 50

View file

@ -3552,4 +3552,28 @@ DEVICES = [
"model": "Z01-A19NAE26", "model": "Z01-A19NAE26",
"node_descriptor": b"\x02@\x8c`\x11RR\x00\x00\x00R\x00\x00", "node_descriptor": b"\x02@\x8c`\x11RR\x00\x00\x00R\x00\x00",
}, },
{
"device_no": 97,
"endpoints": {
1: {
"device_type": 512,
"endpoint_id": 1,
"in_clusters": [0, 3, 4, 5, 6, 8, 10, 21, 256, 64544, 64545],
"out_clusters": [3, 64544],
"profile_id": 260,
}
},
"entities": ["cover.unk_manufacturer_unk_model_77665544_level_on_off_shade"],
"entity_map": {
("cover", "00:11:22:33:44:55:66:77-1"): {
"channels": ["level", "on_off", "shade"],
"entity_class": "Shade",
"entity_id": "cover.unk_manufacturer_unk_model_77665544_level_on_off_shade",
}
},
"event_channels": [],
"manufacturer": "unk_manufacturer",
"model": "unk_model",
"node_descriptor": b"\x01@\x8e\x10\x11RR\x00\x00\x00R\x00\x00",
},
] ]