commit df2ab62ce9
522 changed files with 23860 additions and 19061 deletions

22 .coveragerc
|
@@ -76,16 +76,13 @@ omit =
|
|||
homeassistant/components/daikin.py
|
||||
homeassistant/components/*/daikin.py
|
||||
|
||||
homeassistant/components/deconz/*
|
||||
homeassistant/components/*/deconz.py
|
||||
|
||||
homeassistant/components/digital_ocean.py
|
||||
homeassistant/components/*/digital_ocean.py
|
||||
|
||||
homeassistant/components/dominos.py
|
||||
|
||||
homeassistant/components/doorbird.py
|
||||
homeassistant/components/*/doorbird.py
|
||||
homeassistant/components/doorbird.py
|
||||
homeassistant/components/*/doorbird.py
|
||||
|
||||
homeassistant/components/dweet.py
|
||||
homeassistant/components/*/dweet.py
|
||||
|
@@ -129,6 +126,9 @@ omit =
|
|||
homeassistant/components/google.py
|
||||
homeassistant/components/*/google.py
|
||||
|
||||
homeassistant/components/greeneye_monitor.py
|
||||
homeassistant/components/sensor/greeneye_monitor.py
|
||||
|
||||
homeassistant/components/habitica/*
|
||||
homeassistant/components/*/habitica.py
|
||||
|
||||
|
@@ -209,7 +209,6 @@ omit =
|
|||
homeassistant/components/lutron_caseta.py
|
||||
homeassistant/components/*/lutron_caseta.py
|
||||
|
||||
homeassistant/components/mailgun.py
|
||||
homeassistant/components/*/mailgun.py
|
||||
|
||||
homeassistant/components/matrix.py
|
||||
|
@@ -248,7 +247,7 @@ omit =
|
|||
homeassistant/components/opencv.py
|
||||
homeassistant/components/*/opencv.py
|
||||
|
||||
homeassistant/components/opentherm_gw.py
|
||||
homeassistant/components/opentherm_gw/*
|
||||
homeassistant/components/*/opentherm_gw.py
|
||||
|
||||
homeassistant/components/openuv/__init__.py
|
||||
|
@@ -290,6 +289,9 @@ omit =
|
|||
homeassistant/components/scsgate.py
|
||||
homeassistant/components/*/scsgate.py
|
||||
|
||||
homeassistant/components/sense.py
|
||||
homeassistant/components/*/sense.py
|
||||
|
||||
homeassistant/components/simplisafe/__init__.py
|
||||
homeassistant/components/*/simplisafe.py
|
||||
|
||||
|
@@ -334,7 +336,6 @@ omit =
|
|||
homeassistant/components/tradfri.py
|
||||
homeassistant/components/*/tradfri.py
|
||||
|
||||
homeassistant/components/twilio.py
|
||||
homeassistant/components/notify/twilio_sms.py
|
||||
homeassistant/components/notify/twilio_call.py
|
||||
|
||||
|
@@ -467,6 +468,7 @@ omit =
|
|||
homeassistant/components/device_tracker/bluetooth_le_tracker.py
|
||||
homeassistant/components/device_tracker/bluetooth_tracker.py
|
||||
homeassistant/components/device_tracker/bt_home_hub_5.py
|
||||
homeassistant/components/device_tracker/bt_smarthub.py
|
||||
homeassistant/components/device_tracker/cisco_ios.py
|
||||
homeassistant/components/device_tracker/ddwrt.py
|
||||
homeassistant/components/device_tracker/freebox.py
|
||||
|
@@ -500,6 +502,7 @@ omit =
|
|||
homeassistant/components/emoncms_history.py
|
||||
homeassistant/components/emulated_hue/upnp.py
|
||||
homeassistant/components/fan/mqtt.py
|
||||
homeassistant/components/fan/wemo.py
|
||||
homeassistant/components/folder_watcher.py
|
||||
homeassistant/components/foursquare.py
|
||||
homeassistant/components/goalfeed.py
|
||||
|
@@ -507,6 +510,7 @@ omit =
|
|||
homeassistant/components/image_processing/dlib_face_detect.py
|
||||
homeassistant/components/image_processing/dlib_face_identify.py
|
||||
homeassistant/components/image_processing/seven_segments.py
|
||||
homeassistant/components/image_processing/tensorflow.py
|
||||
homeassistant/components/keyboard_remote.py
|
||||
homeassistant/components/keyboard.py
|
||||
homeassistant/components/light/avion.py
|
||||
|
@@ -722,6 +726,7 @@ omit =
|
|||
homeassistant/components/sensor/luftdaten.py
|
||||
homeassistant/components/sensor/lyft.py
|
||||
homeassistant/components/sensor/magicseaweed.py
|
||||
homeassistant/components/sensor/meteo_france.py
|
||||
homeassistant/components/sensor/metoffice.py
|
||||
homeassistant/components/sensor/miflora.py
|
||||
homeassistant/components/sensor/mitemp_bt.py
|
||||
|
@@ -759,7 +764,6 @@ omit =
|
|||
homeassistant/components/sensor/ripple.py
|
||||
homeassistant/components/sensor/rtorrent.py
|
||||
homeassistant/components/sensor/scrape.py
|
||||
homeassistant/components/sensor/sense.py
|
||||
homeassistant/components/sensor/sensehat.py
|
||||
homeassistant/components/sensor/serial_pm.py
|
||||
homeassistant/components/sensor/serial.py
|
||||
|
|
10 .readthedocs.yml Normal file
@@ -0,0 +1,10 @@
# .readthedocs.yml

build:
  image: latest

python:
  version: 3.6
  setup_py_install: true

requirements_file: requirements_docs.txt
@@ -56,17 +56,21 @@ homeassistant/components/climate/ephember.py @ttroy50
|
|||
homeassistant/components/climate/eq3btsmart.py @rytilahti
|
||||
homeassistant/components/climate/mill.py @danielhiversen
|
||||
homeassistant/components/climate/sensibo.py @andrey-git
|
||||
homeassistant/components/cover/brunt.py @eavanvalkenburg
|
||||
homeassistant/components/cover/group.py @cdce8p
|
||||
homeassistant/components/cover/template.py @PhracturedBlue
|
||||
homeassistant/components/device_tracker/asuswrt.py @kennedyshead
|
||||
homeassistant/components/device_tracker/automatic.py @armills
|
||||
homeassistant/components/device_tracker/huawei_router.py @abmantis
|
||||
homeassistant/components/device_tracker/quantum_gateway.py @cisasteelersfan
|
||||
homeassistant/components/device_tracker/tile.py @bachya
|
||||
homeassistant/components/device_tracker/bt_smarthub.py @jxwolstenholme
|
||||
homeassistant/components/history_graph.py @andrey-git
|
||||
homeassistant/components/influx.py @fabaff
|
||||
homeassistant/components/light/lifx_legacy.py @amelchio
|
||||
homeassistant/components/light/tplink.py @rytilahti
|
||||
homeassistant/components/light/yeelight.py @rytilahti
|
||||
homeassistant/components/light/yeelightsunflower.py @lindsaymarkward
|
||||
homeassistant/components/lock/nello.py @pschmitt
|
||||
homeassistant/components/lock/nuki.py @pschmitt
|
||||
homeassistant/components/media_player/emby.py @mezz64
|
||||
|
@@ -86,6 +90,7 @@ homeassistant/components/notify/mastodon.py @fabaff
|
|||
homeassistant/components/notify/smtp.py @fabaff
|
||||
homeassistant/components/notify/syslog.py @fabaff
|
||||
homeassistant/components/notify/xmpp.py @fabaff
|
||||
homeassistant/components/notify/yessssms.py @flowolf
|
||||
homeassistant/components/plant.py @ChristianKuehnel
|
||||
homeassistant/components/scene/lifx_cloud.py @amelchio
|
||||
homeassistant/components/sensor/airvisual.py @bachya
|
||||
|
@@ -234,6 +239,10 @@ homeassistant/components/*/upcloud.py @scop
|
|||
homeassistant/components/velux.py @Julius2342
|
||||
homeassistant/components/*/velux.py @Julius2342
|
||||
|
||||
# W
|
||||
homeassistant/components/wemo.py @sqldiablo
|
||||
homeassistant/components/*/wemo.py @sqldiablo
|
||||
|
||||
# X
|
||||
homeassistant/components/*/xiaomi_aqara.py @danielhiversen @syssi
|
||||
homeassistant/components/*/xiaomi_miio.py @rytilahti @syssi
|
||||
|
|
|
@@ -11,6 +11,7 @@ LABEL maintainer="Paulus Schoutsen <Paulus@PaulusSchoutsen.nl>"
|
|||
#ENV INSTALL_FFMPEG no
|
||||
#ENV INSTALL_LIBCEC no
|
||||
#ENV INSTALL_SSOCR no
|
||||
#ENV INSTALL_DLIB no
|
||||
#ENV INSTALL_IPERF3 no
|
||||
|
||||
VOLUME /config
|
||||
|
@@ -27,7 +28,7 @@ COPY requirements_all.txt requirements_all.txt
|
|||
# Uninstall enum34 because some dependencies install it but breaks Python 3.4+.
|
||||
# See PR #8103 for more info.
|
||||
RUN pip3 install --no-cache-dir -r requirements_all.txt && \
|
||||
pip3 install --no-cache-dir mysqlclient psycopg2 uvloop cchardet cython
|
||||
pip3 install --no-cache-dir mysqlclient psycopg2 uvloop cchardet cython tensorflow
|
||||
|
||||
# Copy source
|
||||
COPY . .
|
||||
|
|
|
@@ -342,7 +342,6 @@ class AuthManager:
        """Create a new access token."""
        self._store.async_log_refresh_token_usage(refresh_token, remote_ip)

        # pylint: disable=no-self-use
        now = dt_util.utcnow()
        return jwt.encode({
            'iss': refresh_token.id,
@@ -104,7 +104,7 @@ class SetupFlow(data_entry_flow.FlowHandler):
            -> Dict[str, Any]:
        """Handle the first step of setup flow.

        Return self.async_show_form(step_id='init') if user_input == None.
        Return self.async_show_form(step_id='init') if user_input is None.
        Return self.async_create_entry(data={'result': result}) if finish.
        """
        errors = {}  # type: Dict[str, str]
@@ -176,7 +176,7 @@ class TotpSetupFlow(SetupFlow):
            -> Dict[str, Any]:
        """Handle the first step of setup flow.

        Return self.async_show_form(step_id='init') if user_input == None.
        Return self.async_show_form(step_id='init') if user_input is None.
        Return self.async_create_entry(data={'result': result}) if finish.
        """
        import pyotp
@@ -1,252 +0,0 @@
|
|||
"""Permissions for Home Assistant."""
|
||||
from typing import ( # noqa: F401
|
||||
cast, Any, Callable, Dict, List, Mapping, Set, Tuple, Union)
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import State
|
||||
|
||||
CategoryType = Union[Mapping[str, 'CategoryType'], bool, None]
|
||||
PolicyType = Mapping[str, CategoryType]
|
||||
|
||||
|
||||
# Default policy if group has no policy applied.
|
||||
DEFAULT_POLICY = {
|
||||
"entities": True
|
||||
} # type: PolicyType
|
||||
|
||||
CAT_ENTITIES = 'entities'
|
||||
ENTITY_DOMAINS = 'domains'
|
||||
ENTITY_ENTITY_IDS = 'entity_ids'
|
||||
|
||||
VALUES_SCHEMA = vol.Any(True, vol.Schema({
|
||||
str: True
|
||||
}))
|
||||
|
||||
ENTITY_POLICY_SCHEMA = vol.Any(True, vol.Schema({
|
||||
vol.Optional(ENTITY_DOMAINS): VALUES_SCHEMA,
|
||||
vol.Optional(ENTITY_ENTITY_IDS): VALUES_SCHEMA,
|
||||
}))
|
||||
|
||||
POLICY_SCHEMA = vol.Schema({
|
||||
vol.Optional(CAT_ENTITIES): ENTITY_POLICY_SCHEMA
|
||||
})
|
||||
|
||||
|
||||
class AbstractPermissions:
|
||||
"""Default permissions class."""
|
||||
|
||||
def check_entity(self, entity_id: str, *keys: str) -> bool:
|
||||
"""Test if we can access entity."""
|
||||
raise NotImplementedError
|
||||
|
||||
def filter_states(self, states: List[State]) -> List[State]:
|
||||
"""Filter a list of states for what the user is allowed to see."""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class PolicyPermissions(AbstractPermissions):
|
||||
"""Handle permissions."""
|
||||
|
||||
def __init__(self, policy: PolicyType) -> None:
|
||||
"""Initialize the permission class."""
|
||||
self._policy = policy
|
||||
self._compiled = {} # type: Dict[str, Callable[..., bool]]
|
||||
|
||||
def check_entity(self, entity_id: str, *keys: str) -> bool:
|
||||
"""Test if we can access entity."""
|
||||
func = self._policy_func(CAT_ENTITIES, _compile_entities)
|
||||
return func(entity_id, keys)
|
||||
|
||||
def filter_states(self, states: List[State]) -> List[State]:
|
||||
"""Filter a list of states for what the user is allowed to see."""
|
||||
func = self._policy_func(CAT_ENTITIES, _compile_entities)
|
||||
keys = ('read',)
|
||||
return [entity for entity in states if func(entity.entity_id, keys)]
|
||||
|
||||
def _policy_func(self, category: str,
|
||||
compile_func: Callable[[CategoryType], Callable]) \
|
||||
-> Callable[..., bool]:
|
||||
"""Get a policy function."""
|
||||
func = self._compiled.get(category)
|
||||
|
||||
if func:
|
||||
return func
|
||||
|
||||
func = self._compiled[category] = compile_func(
|
||||
self._policy.get(category))
|
||||
return func
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
"""Equals check."""
|
||||
# pylint: disable=protected-access
|
||||
return (isinstance(other, PolicyPermissions) and
|
||||
other._policy == self._policy)
|
||||
|
||||
|
||||
class _OwnerPermissions(AbstractPermissions):
|
||||
"""Owner permissions."""
|
||||
|
||||
# pylint: disable=no-self-use
|
||||
|
||||
def check_entity(self, entity_id: str, *keys: str) -> bool:
|
||||
"""Test if we can access entity."""
|
||||
return True
|
||||
|
||||
def filter_states(self, states: List[State]) -> List[State]:
|
||||
"""Filter a list of states for what the user is allowed to see."""
|
||||
return states
|
||||
|
||||
|
||||
OwnerPermissions = _OwnerPermissions() # pylint: disable=invalid-name
|
||||
|
||||
|
||||
def _compile_entities(policy: CategoryType) \
|
||||
-> Callable[[str, Tuple[str]], bool]:
|
||||
"""Compile policy into a function that tests policy."""
|
||||
# None, Empty Dict, False
|
||||
if not policy:
|
||||
def apply_policy_deny_all(entity_id: str, keys: Tuple[str]) -> bool:
|
||||
"""Decline all."""
|
||||
return False
|
||||
|
||||
return apply_policy_deny_all
|
||||
|
||||
if policy is True:
|
||||
def apply_policy_allow_all(entity_id: str, keys: Tuple[str]) -> bool:
|
||||
"""Approve all."""
|
||||
return True
|
||||
|
||||
return apply_policy_allow_all
|
||||
|
||||
assert isinstance(policy, dict)
|
||||
|
||||
domains = policy.get(ENTITY_DOMAINS)
|
||||
entity_ids = policy.get(ENTITY_ENTITY_IDS)
|
||||
|
||||
funcs = [] # type: List[Callable[[str, Tuple[str]], Union[None, bool]]]
|
||||
|
||||
# The order of these functions matter. The more precise are at the top.
|
||||
# If a function returns None, they cannot handle it.
|
||||
# If a function returns a boolean, that's the result to return.
|
||||
|
||||
# Setting entity_ids to a boolean is final decision for permissions
|
||||
# So return right away.
|
||||
if isinstance(entity_ids, bool):
|
||||
def apply_entity_id_policy(entity_id: str, keys: Tuple[str]) -> bool:
|
||||
"""Test if allowed entity_id."""
|
||||
return entity_ids # type: ignore
|
||||
|
||||
return apply_entity_id_policy
|
||||
|
||||
if entity_ids is not None:
|
||||
def allowed_entity_id(entity_id: str, keys: Tuple[str]) \
|
||||
-> Union[None, bool]:
|
||||
"""Test if allowed entity_id."""
|
||||
return entity_ids.get(entity_id) # type: ignore
|
||||
|
||||
funcs.append(allowed_entity_id)
|
||||
|
||||
if isinstance(domains, bool):
|
||||
def allowed_domain(entity_id: str, keys: Tuple[str]) \
|
||||
-> Union[None, bool]:
|
||||
"""Test if allowed domain."""
|
||||
return domains
|
||||
|
||||
funcs.append(allowed_domain)
|
||||
|
||||
elif domains is not None:
|
||||
def allowed_domain(entity_id: str, keys: Tuple[str]) \
|
||||
-> Union[None, bool]:
|
||||
"""Test if allowed domain."""
|
||||
domain = entity_id.split(".", 1)[0]
|
||||
return domains.get(domain) # type: ignore
|
||||
|
||||
funcs.append(allowed_domain)
|
||||
|
||||
# Can happen if no valid subcategories specified
|
||||
if not funcs:
|
||||
def apply_policy_deny_all_2(entity_id: str, keys: Tuple[str]) -> bool:
|
||||
"""Decline all."""
|
||||
return False
|
||||
|
||||
return apply_policy_deny_all_2
|
||||
|
||||
if len(funcs) == 1:
|
||||
func = funcs[0]
|
||||
|
||||
def apply_policy_func(entity_id: str, keys: Tuple[str]) -> bool:
|
||||
"""Apply a single policy function."""
|
||||
return func(entity_id, keys) is True
|
||||
|
||||
return apply_policy_func
|
||||
|
||||
def apply_policy_funcs(entity_id: str, keys: Tuple[str]) -> bool:
|
||||
"""Apply several policy functions."""
|
||||
for func in funcs:
|
||||
result = func(entity_id, keys)
|
||||
if result is not None:
|
||||
return result
|
||||
return False
|
||||
|
||||
return apply_policy_funcs
|
||||
|
||||
|
||||
def merge_policies(policies: List[PolicyType]) -> PolicyType:
|
||||
"""Merge policies."""
|
||||
new_policy = {} # type: Dict[str, CategoryType]
|
||||
seen = set() # type: Set[str]
|
||||
for policy in policies:
|
||||
for category in policy:
|
||||
if category in seen:
|
||||
continue
|
||||
seen.add(category)
|
||||
new_policy[category] = _merge_policies([
|
||||
policy.get(category) for policy in policies])
|
||||
cast(PolicyType, new_policy)
|
||||
return new_policy
|
||||
|
||||
|
||||
def _merge_policies(sources: List[CategoryType]) -> CategoryType:
|
||||
"""Merge a policy."""
|
||||
# When merging policies, the most permissive wins.
|
||||
# This means we order it like this:
|
||||
# True > Dict > None
|
||||
#
|
||||
# True: allow everything
|
||||
# Dict: specify more granular permissions
|
||||
# None: no opinion
|
||||
#
|
||||
# If there are multiple sources with a dict as policy, we recursively
|
||||
# merge each key in the source.
|
||||
|
||||
policy = None # type: CategoryType
|
||||
seen = set() # type: Set[str]
|
||||
for source in sources:
|
||||
if source is None:
|
||||
continue
|
||||
|
||||
# A source that's True will always win. Shortcut return.
|
||||
if source is True:
|
||||
return True
|
||||
|
||||
assert isinstance(source, dict)
|
||||
|
||||
if policy is None:
|
||||
policy = {}
|
||||
|
||||
assert isinstance(policy, dict)
|
||||
|
||||
for key in source:
|
||||
if key in seen:
|
||||
continue
|
||||
seen.add(key)
|
||||
|
||||
key_sources = []
|
||||
for src in sources:
|
||||
if isinstance(src, dict):
|
||||
key_sources.append(src.get(key))
|
||||
|
||||
policy[key] = _merge_policies(key_sources)
|
||||
|
||||
return policy
|
97 homeassistant/auth/permissions/__init__.py Normal file
|
@@ -0,0 +1,97 @@
|
|||
"""Permissions for Home Assistant."""
|
||||
import logging
|
||||
from typing import ( # noqa: F401
|
||||
cast, Any, Callable, Dict, List, Mapping, Set, Tuple, Union)
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import State
|
||||
|
||||
from .common import CategoryType, PolicyType
|
||||
from .entities import ENTITY_POLICY_SCHEMA, compile_entities
|
||||
from .merge import merge_policies # noqa
|
||||
|
||||
|
||||
# Default policy if group has no policy applied.
|
||||
DEFAULT_POLICY = {
|
||||
"entities": True
|
||||
} # type: PolicyType
|
||||
|
||||
CAT_ENTITIES = 'entities'
|
||||
|
||||
POLICY_SCHEMA = vol.Schema({
|
||||
vol.Optional(CAT_ENTITIES): ENTITY_POLICY_SCHEMA
|
||||
})
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AbstractPermissions:
|
||||
"""Default permissions class."""
|
||||
|
||||
def check_entity(self, entity_id: str, key: str) -> bool:
|
||||
"""Test if we can access entity."""
|
||||
raise NotImplementedError
|
||||
|
||||
def filter_states(self, states: List[State]) -> List[State]:
|
||||
"""Filter a list of states for what the user is allowed to see."""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class PolicyPermissions(AbstractPermissions):
|
||||
"""Handle permissions."""
|
||||
|
||||
def __init__(self, policy: PolicyType) -> None:
|
||||
"""Initialize the permission class."""
|
||||
self._policy = policy
|
||||
self._compiled = {} # type: Dict[str, Callable[..., bool]]
|
||||
|
||||
def check_entity(self, entity_id: str, key: str) -> bool:
|
||||
"""Test if we can access entity."""
|
||||
func = self._policy_func(CAT_ENTITIES, compile_entities)
|
||||
return func(entity_id, (key,))
|
||||
|
||||
def filter_states(self, states: List[State]) -> List[State]:
|
||||
"""Filter a list of states for what the user is allowed to see."""
|
||||
func = self._policy_func(CAT_ENTITIES, compile_entities)
|
||||
keys = ('read',)
|
||||
return [entity for entity in states if func(entity.entity_id, keys)]
|
||||
|
||||
def _policy_func(self, category: str,
|
||||
compile_func: Callable[[CategoryType], Callable]) \
|
||||
-> Callable[..., bool]:
|
||||
"""Get a policy function."""
|
||||
func = self._compiled.get(category)
|
||||
|
||||
if func:
|
||||
return func
|
||||
|
||||
func = self._compiled[category] = compile_func(
|
||||
self._policy.get(category))
|
||||
|
||||
_LOGGER.debug("Compiled %s func: %s", category, func)
|
||||
|
||||
return func
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
"""Equals check."""
|
||||
# pylint: disable=protected-access
|
||||
return (isinstance(other, PolicyPermissions) and
|
||||
other._policy == self._policy)
|
||||
|
||||
|
||||
class _OwnerPermissions(AbstractPermissions):
|
||||
"""Owner permissions."""
|
||||
|
||||
# pylint: disable=no-self-use
|
||||
|
||||
def check_entity(self, entity_id: str, key: str) -> bool:
|
||||
"""Test if we can access entity."""
|
||||
return True
|
||||
|
||||
def filter_states(self, states: List[State]) -> List[State]:
|
||||
"""Filter a list of states for what the user is allowed to see."""
|
||||
return states
|
||||
|
||||
|
||||
OwnerPermissions = _OwnerPermissions() # pylint: disable=invalid-name
|
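Note: a minimal usage sketch of the new PolicyPermissions class above (not part of the commit); the policy dict, entity ids, and assertions are hypothetical examples that follow the entity policy schema introduced in entities.py below.

from homeassistant.auth.permissions import POLICY_SCHEMA, PolicyPermissions

# Hypothetical policy: read access to the light domain, full access to one switch.
policy = POLICY_SCHEMA({
    'entities': {
        'domains': {'light': {'read': True}},
        'entity_ids': {'switch.decorative_lights': True},
    }
})

perm = PolicyPermissions(policy)
assert perm.check_entity('switch.decorative_lights', 'control')
assert perm.check_entity('light.kitchen', 'read')
assert not perm.check_entity('light.kitchen', 'control')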
33 homeassistant/auth/permissions/common.py Normal file
@@ -0,0 +1,33 @@
"""Common code for permissions."""
from typing import (  # noqa: F401
    Mapping, Union, Any)

# MyPy doesn't support recursion yet. So writing it out as far as we need.

ValueType = Union[
    # Example: entities.all = { read: true, control: true }
    Mapping[str, bool],
    bool,
    None
]

SubCategoryType = Union[
    # Example: entities.domains = { light: … }
    Mapping[str, ValueType],
    bool,
    None
]

CategoryType = Union[
    # Example: entities.domains
    Mapping[str, SubCategoryType],
    # Example: entities.all
    Mapping[str, ValueType],
    bool,
    None
]

# Example: { entities: … }
PolicyType = Mapping[str, CategoryType]

SUBCAT_ALL = 'all'
149 homeassistant/auth/permissions/entities.py Normal file
|
@@ -0,0 +1,149 @@
|
|||
"""Entity permissions."""
|
||||
from functools import wraps
|
||||
from typing import ( # noqa: F401
|
||||
Callable, Dict, List, Tuple, Union)
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from .common import CategoryType, ValueType, SUBCAT_ALL
|
||||
|
||||
|
||||
POLICY_READ = 'read'
|
||||
POLICY_CONTROL = 'control'
|
||||
POLICY_EDIT = 'edit'
|
||||
|
||||
SINGLE_ENTITY_SCHEMA = vol.Any(True, vol.Schema({
|
||||
vol.Optional(POLICY_READ): True,
|
||||
vol.Optional(POLICY_CONTROL): True,
|
||||
vol.Optional(POLICY_EDIT): True,
|
||||
}))
|
||||
|
||||
ENTITY_DOMAINS = 'domains'
|
||||
ENTITY_ENTITY_IDS = 'entity_ids'
|
||||
|
||||
ENTITY_VALUES_SCHEMA = vol.Any(True, vol.Schema({
|
||||
str: SINGLE_ENTITY_SCHEMA
|
||||
}))
|
||||
|
||||
ENTITY_POLICY_SCHEMA = vol.Any(True, vol.Schema({
|
||||
vol.Optional(SUBCAT_ALL): SINGLE_ENTITY_SCHEMA,
|
||||
vol.Optional(ENTITY_DOMAINS): ENTITY_VALUES_SCHEMA,
|
||||
vol.Optional(ENTITY_ENTITY_IDS): ENTITY_VALUES_SCHEMA,
|
||||
}))
|
||||
|
||||
|
||||
def _entity_allowed(schema: ValueType, keys: Tuple[str]) \
|
||||
-> Union[bool, None]:
|
||||
"""Test if an entity is allowed based on the keys."""
|
||||
if schema is None or isinstance(schema, bool):
|
||||
return schema
|
||||
assert isinstance(schema, dict)
|
||||
return schema.get(keys[0])
|
||||
|
||||
|
||||
def compile_entities(policy: CategoryType) \
|
||||
-> Callable[[str, Tuple[str]], bool]:
|
||||
"""Compile policy into a function that tests policy."""
|
||||
# None, Empty Dict, False
|
||||
if not policy:
|
||||
def apply_policy_deny_all(entity_id: str, keys: Tuple[str]) -> bool:
|
||||
"""Decline all."""
|
||||
return False
|
||||
|
||||
return apply_policy_deny_all
|
||||
|
||||
if policy is True:
|
||||
def apply_policy_allow_all(entity_id: str, keys: Tuple[str]) -> bool:
|
||||
"""Approve all."""
|
||||
return True
|
||||
|
||||
return apply_policy_allow_all
|
||||
|
||||
assert isinstance(policy, dict)
|
||||
|
||||
domains = policy.get(ENTITY_DOMAINS)
|
||||
entity_ids = policy.get(ENTITY_ENTITY_IDS)
|
||||
all_entities = policy.get(SUBCAT_ALL)
|
||||
|
||||
funcs = [] # type: List[Callable[[str, Tuple[str]], Union[None, bool]]]
|
||||
|
||||
# The order of these functions matter. The more precise are at the top.
|
||||
# If a function returns None, they cannot handle it.
|
||||
# If a function returns a boolean, that's the result to return.
|
||||
|
||||
# Setting entity_ids to a boolean is final decision for permissions
|
||||
# So return right away.
|
||||
if isinstance(entity_ids, bool):
|
||||
def allowed_entity_id_bool(entity_id: str, keys: Tuple[str]) -> bool:
|
||||
"""Test if allowed entity_id."""
|
||||
return entity_ids # type: ignore
|
||||
|
||||
return allowed_entity_id_bool
|
||||
|
||||
if entity_ids is not None:
|
||||
def allowed_entity_id_dict(entity_id: str, keys: Tuple[str]) \
|
||||
-> Union[None, bool]:
|
||||
"""Test if allowed entity_id."""
|
||||
return _entity_allowed(
|
||||
entity_ids.get(entity_id), keys) # type: ignore
|
||||
|
||||
funcs.append(allowed_entity_id_dict)
|
||||
|
||||
if isinstance(domains, bool):
|
||||
def allowed_domain_bool(entity_id: str, keys: Tuple[str]) \
|
||||
-> Union[None, bool]:
|
||||
"""Test if allowed domain."""
|
||||
return domains
|
||||
|
||||
funcs.append(allowed_domain_bool)
|
||||
|
||||
elif domains is not None:
|
||||
def allowed_domain_dict(entity_id: str, keys: Tuple[str]) \
|
||||
-> Union[None, bool]:
|
||||
"""Test if allowed domain."""
|
||||
domain = entity_id.split(".", 1)[0]
|
||||
return _entity_allowed(domains.get(domain), keys) # type: ignore
|
||||
|
||||
funcs.append(allowed_domain_dict)
|
||||
|
||||
if isinstance(all_entities, bool):
|
||||
def allowed_all_entities_bool(entity_id: str, keys: Tuple[str]) \
|
||||
-> Union[None, bool]:
|
||||
"""Test if allowed domain."""
|
||||
return all_entities
|
||||
funcs.append(allowed_all_entities_bool)
|
||||
|
||||
elif all_entities is not None:
|
||||
def allowed_all_entities_dict(entity_id: str, keys: Tuple[str]) \
|
||||
-> Union[None, bool]:
|
||||
"""Test if allowed domain."""
|
||||
return _entity_allowed(all_entities, keys)
|
||||
funcs.append(allowed_all_entities_dict)
|
||||
|
||||
# Can happen if no valid subcategories specified
|
||||
if not funcs:
|
||||
def apply_policy_deny_all_2(entity_id: str, keys: Tuple[str]) -> bool:
|
||||
"""Decline all."""
|
||||
return False
|
||||
|
||||
return apply_policy_deny_all_2
|
||||
|
||||
if len(funcs) == 1:
|
||||
func = funcs[0]
|
||||
|
||||
@wraps(func)
|
||||
def apply_policy_func(entity_id: str, keys: Tuple[str]) -> bool:
|
||||
"""Apply a single policy function."""
|
||||
return func(entity_id, keys) is True
|
||||
|
||||
return apply_policy_func
|
||||
|
||||
def apply_policy_funcs(entity_id: str, keys: Tuple[str]) -> bool:
|
||||
"""Apply several policy functions."""
|
||||
for func in funcs:
|
||||
result = func(entity_id, keys)
|
||||
if result is not None:
|
||||
return result
|
||||
return False
|
||||
|
||||
return apply_policy_funcs
|
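Note: the order of the compiled checks above means an entity_ids entry is consulted before domains, which is consulted before the new 'all' subcategory; the first check with an opinion (a non-None result) decides. A small sketch with a hypothetical policy (not part of the commit):

from homeassistant.auth.permissions.entities import compile_entities

# Hypothetical policy: read access to every entity via 'all',
# plus full access to anything in the light domain.
check = compile_entities({
    'all': {'read': True},
    'domains': {'light': True},
})

assert check('sensor.outdoor_temp', ('read',))         # granted by 'all'
assert not check('sensor.outdoor_temp', ('control',))
assert check('light.hallway', ('control',))            # granted by 'domains'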
65 homeassistant/auth/permissions/merge.py Normal file
|
@@ -0,0 +1,65 @@
|
|||
"""Merging of policies."""
|
||||
from typing import ( # noqa: F401
|
||||
cast, Dict, List, Set)
|
||||
|
||||
from .common import PolicyType, CategoryType
|
||||
|
||||
|
||||
def merge_policies(policies: List[PolicyType]) -> PolicyType:
|
||||
"""Merge policies."""
|
||||
new_policy = {} # type: Dict[str, CategoryType]
|
||||
seen = set() # type: Set[str]
|
||||
for policy in policies:
|
||||
for category in policy:
|
||||
if category in seen:
|
||||
continue
|
||||
seen.add(category)
|
||||
new_policy[category] = _merge_policies([
|
||||
policy.get(category) for policy in policies])
|
||||
cast(PolicyType, new_policy)
|
||||
return new_policy
|
||||
|
||||
|
||||
def _merge_policies(sources: List[CategoryType]) -> CategoryType:
|
||||
"""Merge a policy."""
|
||||
# When merging policies, the most permissive wins.
|
||||
# This means we order it like this:
|
||||
# True > Dict > None
|
||||
#
|
||||
# True: allow everything
|
||||
# Dict: specify more granular permissions
|
||||
# None: no opinion
|
||||
#
|
||||
# If there are multiple sources with a dict as policy, we recursively
|
||||
# merge each key in the source.
|
||||
|
||||
policy = None # type: CategoryType
|
||||
seen = set() # type: Set[str]
|
||||
for source in sources:
|
||||
if source is None:
|
||||
continue
|
||||
|
||||
# A source that's True will always win. Shortcut return.
|
||||
if source is True:
|
||||
return True
|
||||
|
||||
assert isinstance(source, dict)
|
||||
|
||||
if policy is None:
|
||||
policy = cast(CategoryType, {})
|
||||
|
||||
assert isinstance(policy, dict)
|
||||
|
||||
for key in source:
|
||||
if key in seen:
|
||||
continue
|
||||
seen.add(key)
|
||||
|
||||
key_sources = []
|
||||
for src in sources:
|
||||
if isinstance(src, dict):
|
||||
key_sources.append(src.get(key))
|
||||
|
||||
policy[key] = _merge_policies(key_sources)
|
||||
|
||||
return policy
|
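Note: merge_policies implements the "most permissive wins" rule documented above. A hypothetical two-group example (not part of the commit):

from homeassistant.auth.permissions.merge import merge_policies

# One group grants read on the light domain, the other grants everything;
# True outranks the more granular dict.
merged = merge_policies([
    {'entities': {'domains': {'light': {'read': True}}}},
    {'entities': True},
])
assert merged == {'entities': True}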
|
@@ -179,7 +179,7 @@ class LoginFlow(data_entry_flow.FlowHandler):
            -> Dict[str, Any]:
        """Handle the first step of login flow.

        Return self.async_show_form(step_id='init') if user_input == None.
        Return self.async_show_form(step_id='init') if user_input is None.
        Return await self.async_finish(flow_result) if login init step pass.
        """
        raise NotImplementedError
@@ -21,6 +21,7 @@ from homeassistant.const import (
|
|||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.event import track_point_in_time
|
||||
import homeassistant.util.dt as dt_util
|
||||
from homeassistant.helpers.restore_state import async_get_last_state
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@@ -306,3 +307,10 @@ class ManualAlarm(alarm.AlarmControlPanel):
|
|||
state_attr[ATTR_POST_PENDING_STATE] = self._state
|
||||
|
||||
return state_attr
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Run when entity about to be added to hass."""
|
||||
state = await async_get_last_state(self.hass, self.entity_id)
|
||||
if state:
|
||||
self._state = state.state
|
||||
self._state_ts = state.last_updated
|
||||
|
|
|
@@ -24,23 +24,25 @@ _LOGGER = logging.getLogger(__name__)
|
|||
DOMAIN = 'alert'
|
||||
ENTITY_ID_FORMAT = DOMAIN + '.{}'
|
||||
|
||||
CONF_DONE_MESSAGE = 'done_message'
|
||||
CONF_CAN_ACK = 'can_acknowledge'
|
||||
CONF_NOTIFIERS = 'notifiers'
|
||||
CONF_REPEAT = 'repeat'
|
||||
CONF_SKIP_FIRST = 'skip_first'
|
||||
CONF_ALERT_MESSAGE = 'message'
|
||||
CONF_DONE_MESSAGE = 'done_message'
|
||||
|
||||
DEFAULT_CAN_ACK = True
|
||||
DEFAULT_SKIP_FIRST = False
|
||||
|
||||
ALERT_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_DONE_MESSAGE): cv.string,
|
||||
vol.Required(CONF_ENTITY_ID): cv.entity_id,
|
||||
vol.Required(CONF_STATE, default=STATE_ON): cv.string,
|
||||
vol.Required(CONF_REPEAT): vol.All(cv.ensure_list, [vol.Coerce(float)]),
|
||||
vol.Required(CONF_CAN_ACK, default=DEFAULT_CAN_ACK): cv.boolean,
|
||||
vol.Required(CONF_SKIP_FIRST, default=DEFAULT_SKIP_FIRST): cv.boolean,
|
||||
vol.Optional(CONF_ALERT_MESSAGE): cv.template,
|
||||
vol.Optional(CONF_DONE_MESSAGE): cv.template,
|
||||
vol.Required(CONF_NOTIFIERS): cv.ensure_list})
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
|
@@ -62,31 +64,47 @@ def is_on(hass, entity_id):
|
|||
|
||||
async def async_setup(hass, config):
|
||||
"""Set up the Alert component."""
|
||||
alerts = config.get(DOMAIN)
|
||||
all_alerts = {}
|
||||
entities = []
|
||||
|
||||
for object_id, cfg in config[DOMAIN].items():
|
||||
if not cfg:
|
||||
cfg = {}
|
||||
|
||||
name = cfg.get(CONF_NAME)
|
||||
watched_entity_id = cfg.get(CONF_ENTITY_ID)
|
||||
alert_state = cfg.get(CONF_STATE)
|
||||
repeat = cfg.get(CONF_REPEAT)
|
||||
skip_first = cfg.get(CONF_SKIP_FIRST)
|
||||
message_template = cfg.get(CONF_ALERT_MESSAGE)
|
||||
done_message_template = cfg.get(CONF_DONE_MESSAGE)
|
||||
notifiers = cfg.get(CONF_NOTIFIERS)
|
||||
can_ack = cfg.get(CONF_CAN_ACK)
|
||||
|
||||
entities.append(Alert(hass, object_id, name,
|
||||
watched_entity_id, alert_state, repeat,
|
||||
skip_first, message_template,
|
||||
done_message_template, notifiers,
|
||||
can_ack))
|
||||
|
||||
if not entities:
|
||||
return False
|
||||
|
||||
async def async_handle_alert_service(service_call):
|
||||
"""Handle calls to alert services."""
|
||||
alert_ids = service.extract_entity_ids(hass, service_call)
|
||||
|
||||
for alert_id in alert_ids:
|
||||
alert = all_alerts[alert_id]
|
||||
alert.async_set_context(service_call.context)
|
||||
if service_call.service == SERVICE_TURN_ON:
|
||||
await alert.async_turn_on()
|
||||
elif service_call.service == SERVICE_TOGGLE:
|
||||
await alert.async_toggle()
|
||||
else:
|
||||
await alert.async_turn_off()
|
||||
for alert in entities:
|
||||
if alert.entity_id != alert_id:
|
||||
continue
|
||||
|
||||
# Setup alerts
|
||||
for entity_id, alert in alerts.items():
|
||||
entity = Alert(hass, entity_id,
|
||||
alert[CONF_NAME], alert.get(CONF_DONE_MESSAGE),
|
||||
alert[CONF_ENTITY_ID], alert[CONF_STATE],
|
||||
alert[CONF_REPEAT], alert[CONF_SKIP_FIRST],
|
||||
alert[CONF_NOTIFIERS], alert[CONF_CAN_ACK])
|
||||
all_alerts[entity.entity_id] = entity
|
||||
alert.async_set_context(service_call.context)
|
||||
if service_call.service == SERVICE_TURN_ON:
|
||||
await alert.async_turn_on()
|
||||
elif service_call.service == SERVICE_TOGGLE:
|
||||
await alert.async_toggle()
|
||||
else:
|
||||
await alert.async_turn_off()
|
||||
|
||||
# Setup service calls
|
||||
hass.services.async_register(
|
||||
|
@@ -99,7 +117,7 @@ async def async_setup(hass, config):
|
|||
DOMAIN, SERVICE_TOGGLE, async_handle_alert_service,
|
||||
schema=ALERT_SERVICE_SCHEMA)
|
||||
|
||||
tasks = [alert.async_update_ha_state() for alert in all_alerts.values()]
|
||||
tasks = [alert.async_update_ha_state() for alert in entities]
|
||||
if tasks:
|
||||
await asyncio.wait(tasks, loop=hass.loop)
|
||||
|
||||
|
@@ -109,16 +127,25 @@ async def async_setup(hass, config):
|
|||
class Alert(ToggleEntity):
|
||||
"""Representation of an alert."""
|
||||
|
||||
def __init__(self, hass, entity_id, name, done_message, watched_entity_id,
|
||||
state, repeat, skip_first, notifiers, can_ack):
|
||||
def __init__(self, hass, entity_id, name, watched_entity_id,
|
||||
state, repeat, skip_first, message_template,
|
||||
done_message_template, notifiers, can_ack):
|
||||
"""Initialize the alert."""
|
||||
self.hass = hass
|
||||
self._name = name
|
||||
self._alert_state = state
|
||||
self._skip_first = skip_first
|
||||
|
||||
self._message_template = message_template
|
||||
if self._message_template is not None:
|
||||
self._message_template.hass = hass
|
||||
|
||||
self._done_message_template = done_message_template
|
||||
if self._done_message_template is not None:
|
||||
self._done_message_template.hass = hass
|
||||
|
||||
self._notifiers = notifiers
|
||||
self._can_ack = can_ack
|
||||
self._done_message = done_message
|
||||
|
||||
self._delay = [timedelta(minutes=val) for val in repeat]
|
||||
self._next_delay = 0
|
||||
|
@@ -184,7 +211,7 @@ class Alert(ToggleEntity):
|
|||
self._cancel()
|
||||
self._ack = False
|
||||
self._firing = False
|
||||
if self._done_message and self._send_done_message:
|
||||
if self._send_done_message:
|
||||
await self._notify_done_message()
|
||||
self.async_schedule_update_ha_state()
|
||||
|
||||
|
@@ -204,18 +231,31 @@ class Alert(ToggleEntity):
|
|||
if not self._ack:
|
||||
_LOGGER.info("Alerting: %s", self._name)
|
||||
self._send_done_message = True
|
||||
for target in self._notifiers:
|
||||
await self.hass.services.async_call(
|
||||
DOMAIN_NOTIFY, target, {ATTR_MESSAGE: self._name})
|
||||
|
||||
if self._message_template is not None:
|
||||
message = self._message_template.async_render()
|
||||
else:
|
||||
message = self._name
|
||||
|
||||
await self._send_notification_message(message)
|
||||
await self._schedule_notify()
|
||||
|
||||
async def _notify_done_message(self, *args):
|
||||
"""Send notification of complete alert."""
|
||||
_LOGGER.info("Alerting: %s", self._done_message)
|
||||
_LOGGER.info("Alerting: %s", self._done_message_template)
|
||||
self._send_done_message = False
|
||||
|
||||
if self._done_message_template is None:
|
||||
return
|
||||
|
||||
message = self._done_message_template.async_render()
|
||||
|
||||
await self._send_notification_message(message)
|
||||
|
||||
async def _send_notification_message(self, message):
|
||||
for target in self._notifiers:
|
||||
await self.hass.services.async_call(
|
||||
DOMAIN_NOTIFY, target, {ATTR_MESSAGE: self._done_message})
|
||||
DOMAIN_NOTIFY, target, {ATTR_MESSAGE: message})
|
||||
|
||||
async def async_turn_on(self, **kwargs):
|
||||
"""Async Unacknowledge alert."""
|
||||
|
|
File diff suppressed because it is too large
@@ -225,8 +225,17 @@ class AugustData:
|
|||
for doorbell in self._doorbells:
|
||||
_LOGGER.debug("Updating status for %s",
|
||||
doorbell.device_name)
|
||||
detail_by_id[doorbell.device_id] = self._api.get_doorbell_detail(
|
||||
self._access_token, doorbell.device_id)
|
||||
try:
|
||||
detail_by_id[doorbell.device_id] =\
|
||||
self._api.get_doorbell_detail(
|
||||
self._access_token, doorbell.device_id)
|
||||
except RequestException as ex:
|
||||
_LOGGER.error("Request error trying to retrieve doorbell"
|
||||
" status for %s. %s", doorbell.device_name, ex)
|
||||
detail_by_id[doorbell.device_id] = None
|
||||
except Exception:
|
||||
detail_by_id[doorbell.device_id] = None
|
||||
raise
|
||||
|
||||
_LOGGER.debug("Completed retrieving doorbell details")
|
||||
self._doorbell_detail_by_id = detail_by_id
|
||||
|
@@ -260,8 +269,17 @@ class AugustData:
|
|||
for lock in self._locks:
|
||||
_LOGGER.debug("Updating status for %s",
|
||||
lock.device_name)
|
||||
state_by_id[lock.device_id] = self._api.get_lock_door_status(
|
||||
self._access_token, lock.device_id)
|
||||
|
||||
try:
|
||||
state_by_id[lock.device_id] = self._api.get_lock_door_status(
|
||||
self._access_token, lock.device_id)
|
||||
except RequestException as ex:
|
||||
_LOGGER.error("Request error trying to retrieve door"
|
||||
" status for %s. %s", lock.device_name, ex)
|
||||
state_by_id[lock.device_id] = None
|
||||
except Exception:
|
||||
state_by_id[lock.device_id] = None
|
||||
raise
|
||||
|
||||
_LOGGER.debug("Completed retrieving door status")
|
||||
self._door_state_by_id = state_by_id
|
||||
|
@@ -275,10 +293,27 @@ class AugustData:
|
|||
for lock in self._locks:
|
||||
_LOGGER.debug("Updating status for %s",
|
||||
lock.device_name)
|
||||
status_by_id[lock.device_id] = self._api.get_lock_status(
|
||||
self._access_token, lock.device_id)
|
||||
detail_by_id[lock.device_id] = self._api.get_lock_detail(
|
||||
self._access_token, lock.device_id)
|
||||
try:
|
||||
status_by_id[lock.device_id] = self._api.get_lock_status(
|
||||
self._access_token, lock.device_id)
|
||||
except RequestException as ex:
|
||||
_LOGGER.error("Request error trying to retrieve door"
|
||||
" status for %s. %s", lock.device_name, ex)
|
||||
status_by_id[lock.device_id] = None
|
||||
except Exception:
|
||||
status_by_id[lock.device_id] = None
|
||||
raise
|
||||
|
||||
try:
|
||||
detail_by_id[lock.device_id] = self._api.get_lock_detail(
|
||||
self._access_token, lock.device_id)
|
||||
except RequestException as ex:
|
||||
_LOGGER.error("Request error trying to retrieve door"
|
||||
" details for %s. %s", lock.device_name, ex)
|
||||
detail_by_id[lock.device_id] = None
|
||||
except Exception:
|
||||
detail_by_id[lock.device_id] = None
|
||||
raise
|
||||
|
||||
_LOGGER.debug("Completed retrieving locks status")
|
||||
self._lock_status_by_id = status_by_id
|
||||
|
|
|
@@ -1,5 +1,12 @@
|
|||
{
|
||||
"mfa_setup": {
|
||||
"notify": {
|
||||
"step": {
|
||||
"setup": {
|
||||
"title": "Verificar a configura\u00e7\u00e3o"
|
||||
}
|
||||
}
|
||||
},
|
||||
"totp": {
|
||||
"error": {
|
||||
"invalid_code": "C\u00f3digo inv\u00e1lido, por favor tente novamente. Se voc\u00ea obtiver este erro de forma consistente, certifique-se de que o rel\u00f3gio do sistema Home Assistant esteja correto."
|
||||
|
|
|
@@ -10,22 +10,22 @@
|
|||
"step": {
|
||||
"init": {
|
||||
"description": "Por favor, selecione um dos servi\u00e7os de notifica\u00e7\u00e3o:",
|
||||
"title": "Configurar uma palavra passe entregue pela componente de notifica\u00e7\u00e3o"
|
||||
"title": "Configurar uma palavra-passe entregue pela componente de notifica\u00e7\u00e3o"
|
||||
},
|
||||
"setup": {
|
||||
"description": "Foi enviada uma palavra passe atrav\u00e9s de **notify.{notify_service}**. Por favor, insira-a:",
|
||||
"description": "Foi enviada uma palavra-passe atrav\u00e9s de **notify.{notify_service}**. Por favor, insira-a:",
|
||||
"title": "Verificar a configura\u00e7\u00e3o"
|
||||
}
|
||||
},
|
||||
"title": "Notificar palavra passe de uso \u00fanico"
|
||||
"title": "Notificar palavra-passe de uso \u00fanico"
|
||||
},
|
||||
"totp": {
|
||||
"error": {
|
||||
"invalid_code": "C\u00f3digo inv\u00e1lido, por favor, tente novamente. Se receber este erro constantemente, por favor, certifique-se de que o rel\u00f3gio do sistema que hospeda o Home Assistent \u00e9 preciso."
|
||||
"invalid_code": "C\u00f3digo inv\u00e1lido, por favor, tente novamente. Se receber este erro constantemente, por favor, certifique-se de que o rel\u00f3gio do sistema que hospeda o Home Assistant \u00e9 preciso."
|
||||
},
|
||||
"step": {
|
||||
"init": {
|
||||
"description": "Para ativar a autentica\u00e7\u00e3o com dois fatores utilizando passwords unicas temporais (OTP), ler o c\u00f3digo QR com a sua aplica\u00e7\u00e3o de autentica\u00e7\u00e3o. Se voc\u00ea n\u00e3o tiver uma, recomendamos [Google Authenticator](https://support.google.com/accounts/answer/1066447) ou [Authy](https://authy.com/).\n\n{qr_code}\n\nDepois de ler o c\u00f3digo, introduza o c\u00f3digo de seis d\u00edgitos fornecido pela sua aplica\u00e7\u00e3o para verificar a configura\u00e7\u00e3o. Se tiver problemas a ler o c\u00f3digo QR, fa\u00e7a uma configura\u00e7\u00e3o manual com o c\u00f3digo **`{c\u00f3digo}`**.",
|
||||
"description": "Para ativar a autentica\u00e7\u00e3o com dois fatores utilizando palavras-passe de uso \u00fanico (OTP), ler o c\u00f3digo QR com a sua aplica\u00e7\u00e3o de autentica\u00e7\u00e3o. Se n\u00e3o tiver uma, recomendamos [Google Authenticator](https://support.google.com/accounts/answer/1066447) ou [Authy](https://authy.com/).\n\n{qr_code}\n\nDepois de ler o c\u00f3digo, introduza o c\u00f3digo de seis d\u00edgitos fornecido pela sua aplica\u00e7\u00e3o para verificar a configura\u00e7\u00e3o. Se tiver problemas a ler o c\u00f3digo QR, fa\u00e7a uma configura\u00e7\u00e3o manual com o c\u00f3digo **`{code}`**.",
|
||||
"title": "Configurar autentica\u00e7\u00e3o com dois fatores usando TOTP"
|
||||
}
|
||||
},
|
||||
|
|
|
@@ -129,6 +129,7 @@ from homeassistant.auth.models import User, Credentials, \
|
|||
TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.components.http import KEY_REAL_IP
|
||||
from homeassistant.components.http.auth import async_sign_path
|
||||
from homeassistant.components.http.ban import log_invalid_auth
|
||||
from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
from homeassistant.components.http.view import HomeAssistantView
|
||||
|
@@ -169,6 +170,14 @@ SCHEMA_WS_DELETE_REFRESH_TOKEN = \
|
|||
vol.Required('refresh_token_id'): str,
|
||||
})
|
||||
|
||||
WS_TYPE_SIGN_PATH = 'auth/sign_path'
|
||||
SCHEMA_WS_SIGN_PATH = \
|
||||
websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend({
|
||||
vol.Required('type'): WS_TYPE_SIGN_PATH,
|
||||
vol.Required('path'): str,
|
||||
vol.Optional('expires', default=30): int,
|
||||
})
|
||||
|
||||
RESULT_TYPE_CREDENTIALS = 'credentials'
|
||||
RESULT_TYPE_USER = 'user'
|
||||
|
||||
|
@@ -201,6 +210,11 @@ async def async_setup(hass, config):
|
|||
websocket_delete_refresh_token,
|
||||
SCHEMA_WS_DELETE_REFRESH_TOKEN
|
||||
)
|
||||
hass.components.websocket_api.async_register_command(
|
||||
WS_TYPE_SIGN_PATH,
|
||||
websocket_sign_path,
|
||||
SCHEMA_WS_SIGN_PATH
|
||||
)
|
||||
|
||||
await login_flow.async_setup(hass, store_result)
|
||||
await mfa_setup_flow.async_setup(hass)
|
||||
|
@@ -424,54 +438,46 @@ def _create_auth_code_store():
|
|||
|
||||
|
||||
@websocket_api.ws_require_user()
|
||||
@callback
|
||||
def websocket_current_user(
|
||||
@websocket_api.async_response
|
||||
async def websocket_current_user(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg):
|
||||
"""Return the current user."""
|
||||
async def async_get_current_user(user):
|
||||
"""Get current user."""
|
||||
enabled_modules = await hass.auth.async_get_enabled_mfa(user)
|
||||
user = connection.user
|
||||
enabled_modules = await hass.auth.async_get_enabled_mfa(user)
|
||||
|
||||
connection.send_message(
|
||||
websocket_api.result_message(msg['id'], {
|
||||
'id': user.id,
|
||||
'name': user.name,
|
||||
'is_owner': user.is_owner,
|
||||
'credentials': [{'auth_provider_type': c.auth_provider_type,
|
||||
'auth_provider_id': c.auth_provider_id}
|
||||
for c in user.credentials],
|
||||
'mfa_modules': [{
|
||||
'id': module.id,
|
||||
'name': module.name,
|
||||
'enabled': module.id in enabled_modules,
|
||||
} for module in hass.auth.auth_mfa_modules],
|
||||
}))
|
||||
|
||||
hass.async_create_task(async_get_current_user(connection.user))
|
||||
connection.send_message(
|
||||
websocket_api.result_message(msg['id'], {
|
||||
'id': user.id,
|
||||
'name': user.name,
|
||||
'is_owner': user.is_owner,
|
||||
'credentials': [{'auth_provider_type': c.auth_provider_type,
|
||||
'auth_provider_id': c.auth_provider_id}
|
||||
for c in user.credentials],
|
||||
'mfa_modules': [{
|
||||
'id': module.id,
|
||||
'name': module.name,
|
||||
'enabled': module.id in enabled_modules,
|
||||
} for module in hass.auth.auth_mfa_modules],
|
||||
}))
|
||||
|
||||
|
||||
@websocket_api.ws_require_user()
|
||||
@callback
|
||||
def websocket_create_long_lived_access_token(
|
||||
@websocket_api.async_response
|
||||
async def websocket_create_long_lived_access_token(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg):
|
||||
"""Create or a long-lived access token."""
|
||||
async def async_create_long_lived_access_token(user):
|
||||
"""Create or a long-lived access token."""
|
||||
refresh_token = await hass.auth.async_create_refresh_token(
|
||||
user,
|
||||
client_name=msg['client_name'],
|
||||
client_icon=msg.get('client_icon'),
|
||||
token_type=TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN,
|
||||
access_token_expiration=timedelta(days=msg['lifespan']))
|
||||
refresh_token = await hass.auth.async_create_refresh_token(
|
||||
connection.user,
|
||||
client_name=msg['client_name'],
|
||||
client_icon=msg.get('client_icon'),
|
||||
token_type=TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN,
|
||||
access_token_expiration=timedelta(days=msg['lifespan']))
|
||||
|
||||
access_token = hass.auth.async_create_access_token(
|
||||
refresh_token)
|
||||
access_token = hass.auth.async_create_access_token(
|
||||
refresh_token)
|
||||
|
||||
connection.send_message(
|
||||
websocket_api.result_message(msg['id'], access_token))
|
||||
|
||||
hass.async_create_task(
|
||||
async_create_long_lived_access_token(connection.user))
|
||||
connection.send_message(
|
||||
websocket_api.result_message(msg['id'], access_token))
|
||||
|
||||
|
||||
@websocket_api.ws_require_user()
|
||||
|
@@ -494,22 +500,28 @@ def websocket_refresh_tokens(
|
|||
|
||||
|
||||
@websocket_api.ws_require_user()
|
||||
@callback
|
||||
def websocket_delete_refresh_token(
|
||||
@websocket_api.async_response
|
||||
async def websocket_delete_refresh_token(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg):
|
||||
"""Handle a delete refresh token request."""
|
||||
async def async_delete_refresh_token(user, refresh_token_id):
|
||||
"""Delete a refresh token."""
|
||||
refresh_token = connection.user.refresh_tokens.get(refresh_token_id)
|
||||
refresh_token = connection.user.refresh_tokens.get(msg['refresh_token_id'])
|
||||
|
||||
if refresh_token is None:
|
||||
return websocket_api.error_message(
|
||||
msg['id'], 'invalid_token_id', 'Received invalid token')
|
||||
if refresh_token is None:
|
||||
return websocket_api.error_message(
|
||||
msg['id'], 'invalid_token_id', 'Received invalid token')
|
||||
|
||||
await hass.auth.async_remove_refresh_token(refresh_token)
|
||||
await hass.auth.async_remove_refresh_token(refresh_token)
|
||||
|
||||
connection.send_message(
|
||||
websocket_api.result_message(msg['id'], {}))
|
||||
connection.send_message(
|
||||
websocket_api.result_message(msg['id'], {}))
|
||||
|
||||
hass.async_create_task(
|
||||
async_delete_refresh_token(connection.user, msg['refresh_token_id']))
|
||||
|
||||
@websocket_api.ws_require_user()
|
||||
@callback
|
||||
def websocket_sign_path(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg):
|
||||
"""Handle a sign path request."""
|
||||
connection.send_message(websocket_api.result_message(msg['id'], {
|
||||
'path': async_sign_path(hass, connection.refresh_token_id, msg['path'],
|
||||
timedelta(seconds=msg['expires']))
|
||||
}))
|
||||
|
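Note: a hypothetical request for the new auth/sign_path websocket command registered above (the id and path values are made up); per the handler, the result payload is a dict whose 'path' is produced by async_sign_path for the connection's refresh token.

request = {
    'id': 24,
    'type': 'auth/sign_path',
    'path': '/api/camera_proxy/camera.front_door',
    'expires': 30,  # seconds; optional, defaults to 30 per SCHEMA_WS_SIGN_PATH
}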
|
|
@@ -10,24 +10,21 @@ import voluptuous as vol
|
|||
|
||||
from homeassistant.components.discovery import SERVICE_AXIS
|
||||
from homeassistant.const import (
|
||||
ATTR_LOCATION, ATTR_TRIPPED, CONF_EVENT, CONF_HOST, CONF_INCLUDE,
|
||||
ATTR_LOCATION, CONF_EVENT, CONF_HOST, CONF_INCLUDE,
|
||||
CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_TRIGGER_TIME, CONF_USERNAME,
|
||||
EVENT_HOMEASSISTANT_STOP)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers import discovery
|
||||
from homeassistant.helpers.dispatcher import dispatcher_send
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util.json import load_json, save_json
|
||||
|
||||
REQUIREMENTS = ['axis==14']
|
||||
REQUIREMENTS = ['axis==16']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DOMAIN = 'axis'
|
||||
CONFIG_FILE = 'axis.conf'
|
||||
|
||||
AXIS_DEVICES = {}
|
||||
|
||||
EVENT_TYPES = ['motion', 'vmd3', 'pir', 'sound',
|
||||
'daynight', 'tampering', 'input']
|
||||
|
||||
|
@@ -99,8 +96,6 @@ def request_configuration(hass, config, name, host, serialnumber):
|
|||
return False
|
||||
|
||||
if setup_device(hass, config, device_config):
|
||||
del device_config['events']
|
||||
del device_config['signal']
|
||||
config_file = load_json(hass.config.path(CONFIG_FILE))
|
||||
config_file[serialnumber] = dict(device_config)
|
||||
save_json(hass.config.path(CONFIG_FILE), config_file)
|
||||
|
@@ -146,9 +141,11 @@ def request_configuration(hass, config, name, host, serialnumber):
|
|||
|
||||
def setup(hass, config):
|
||||
"""Set up for Axis devices."""
|
||||
hass.data[DOMAIN] = {}
|
||||
|
||||
def _shutdown(call):
|
||||
"""Stop the event stream on shutdown."""
|
||||
for serialnumber, device in AXIS_DEVICES.items():
|
||||
for serialnumber, device in hass.data[DOMAIN].items():
|
||||
_LOGGER.info("Stopping event stream for %s.", serialnumber)
|
||||
device.stop()
|
||||
|
||||
|
@@ -160,7 +157,7 @@ def setup(hass, config):
|
|||
name = discovery_info['hostname']
|
||||
serialnumber = discovery_info['properties']['macaddress']
|
||||
|
||||
if serialnumber not in AXIS_DEVICES:
|
||||
if serialnumber not in hass.data[DOMAIN]:
|
||||
config_file = load_json(hass.config.path(CONFIG_FILE))
|
||||
if serialnumber in config_file:
|
||||
# Device config previously saved to file
|
||||
|
@@ -178,7 +175,7 @@ def setup(hass, config):
|
|||
request_configuration(hass, config, name, host, serialnumber)
|
||||
else:
|
||||
# Device already registered, but on a different IP
|
||||
device = AXIS_DEVICES[serialnumber]
|
||||
device = hass.data[DOMAIN][serialnumber]
|
||||
device.config.host = host
|
||||
dispatcher_send(hass, DOMAIN + '_' + device.name + '_new_ip', host)
|
||||
|
||||
|
@@ -195,7 +192,7 @@ def setup(hass, config):
|
|||
|
||||
def vapix_service(call):
|
||||
"""Service to send a message."""
|
||||
for _, device in AXIS_DEVICES.items():
|
||||
for device in hass.data[DOMAIN].values():
|
||||
if device.name == call.data[CONF_NAME]:
|
||||
response = device.vapix.do_request(
|
||||
call.data[SERVICE_CGI],
|
||||
|
@@ -214,7 +211,7 @@ def setup(hass, config):
|
|||
|
||||
def setup_device(hass, config, device_config):
|
||||
"""Set up an Axis device."""
|
||||
from axis import AxisDevice
|
||||
import axis
|
||||
|
||||
def signal_callback(action, event):
|
||||
"""Call to configure events when initialized on event stream."""
|
||||
|
@@ -229,18 +226,32 @@ def setup_device(hass, config, device_config):
|
|||
discovery.load_platform(
|
||||
hass, component, DOMAIN, event_config, config)
|
||||
|
||||
event_types = list(filter(lambda x: x in device_config[CONF_INCLUDE],
|
||||
EVENT_TYPES))
|
||||
device_config['events'] = event_types
|
||||
device_config['signal'] = signal_callback
|
||||
device = AxisDevice(hass.loop, **device_config)
|
||||
device.name = device_config[CONF_NAME]
|
||||
event_types = [
|
||||
event
|
||||
for event in device_config[CONF_INCLUDE]
|
||||
if event in EVENT_TYPES
|
||||
]
|
||||
|
||||
if device.serial_number is None:
|
||||
# If there is no serial number a connection could not be made
|
||||
_LOGGER.error("Couldn't connect to %s", device_config[CONF_HOST])
|
||||
device = axis.AxisDevice(
|
||||
loop=hass.loop, host=device_config[CONF_HOST],
|
||||
username=device_config[CONF_USERNAME],
|
||||
password=device_config[CONF_PASSWORD],
|
||||
port=device_config[CONF_PORT], web_proto='http',
|
||||
event_types=event_types, signal=signal_callback)
|
||||
|
||||
try:
|
||||
hass.data[DOMAIN][device.vapix.serial_number] = device
|
||||
|
||||
except axis.Unauthorized:
|
||||
_LOGGER.error("Credentials for %s are faulty",
|
||||
device_config[CONF_HOST])
|
||||
return False
|
||||
|
||||
except axis.RequestError:
|
||||
return False
|
||||
|
||||
device.name = device_config[CONF_NAME]
|
||||
|
||||
for component in device_config[CONF_INCLUDE]:
|
||||
if component == 'camera':
|
||||
camera_config = {
|
||||
|
@@ -253,51 +264,6 @@ def setup_device(hass, config, device_config):
|
|||
discovery.load_platform(
|
||||
hass, component, DOMAIN, camera_config, config)
|
||||
|
||||
AXIS_DEVICES[device.serial_number] = device
|
||||
if event_types:
|
||||
hass.add_job(device.start)
|
||||
return True
|
||||
|
||||
|
||||
class AxisDeviceEvent(Entity):
|
||||
"""Representation of a Axis device event."""
|
||||
|
||||
def __init__(self, event_config):
|
||||
"""Initialize the event."""
|
||||
self.axis_event = event_config[CONF_EVENT]
|
||||
self._name = '{}_{}_{}'.format(
|
||||
event_config[CONF_NAME], self.axis_event.event_type,
|
||||
self.axis_event.id)
|
||||
self.location = event_config[ATTR_LOCATION]
|
||||
self.axis_event.callback = self._update_callback
|
||||
|
||||
def _update_callback(self):
|
||||
"""Update the sensor's state, if needed."""
|
||||
self.schedule_update_ha_state(True)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the event."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def device_class(self):
|
||||
"""Return the class of the event."""
|
||||
return self.axis_event.event_class
|
||||
|
||||
@property
|
||||
def should_poll(self):
|
||||
"""Return the polling state. No polling needed."""
|
||||
return False
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes of the event."""
|
||||
attr = {}
|
||||
|
||||
tripped = self.axis_event.is_tripped
|
||||
attr[ATTR_TRIPPED] = 'True' if tripped else 'False'
|
||||
|
||||
attr[ATTR_LOCATION] = self.location
|
||||
|
||||
return attr
|
15 homeassistant/components/axis/services.yaml Normal file
@@ -0,0 +1,15 @@
vapix_call:
  description: Configure device using Vapix parameter management.
  fields:
    name:
      description: Name of device to Configure. [Required]
      example: M1065-W
    cgi:
      description: Which cgi to call on device. [Optional] Default is 'param.cgi'
      example: 'applications/control.cgi'
    action:
      description: What type of call. [Optional] Default is 'update'
      example: 'start'
    param:
      description: What parameter to operate on. [Required]
      example: 'package=VideoMotionDetection'
@@ -19,14 +19,15 @@ SCAN_INTERVAL = timedelta(seconds=5)
|
|||
|
||||
def _retrieve_door_state(data, lock):
|
||||
"""Get the latest state of the DoorSense sensor."""
|
||||
from august.lock import LockDoorStatus
|
||||
doorstate = data.get_door_state(lock.device_id)
|
||||
return doorstate == LockDoorStatus.OPEN
|
||||
return data.get_door_state(lock.device_id)
|
||||
|
||||
|
||||
def _retrieve_online_state(data, doorbell):
|
||||
"""Get the latest state of the sensor."""
|
||||
detail = data.get_doorbell_detail(doorbell.device_id)
|
||||
if detail is None:
|
||||
return None
|
||||
|
||||
return detail.is_online
|
||||
|
||||
|
||||
|
@ -138,9 +139,10 @@ class AugustDoorBinarySensor(BinarySensorDevice):
|
|||
"""Get the latest state of the sensor."""
|
||||
state_provider = SENSOR_TYPES_DOOR[self._sensor_type][2]
|
||||
self._state = state_provider(self._data, self._door)
|
||||
self._available = self._state is not None
|
||||
|
||||
from august.lock import LockDoorStatus
|
||||
self._available = self._state != LockDoorStatus.UNKNOWN
|
||||
self._state = self._state == LockDoorStatus.OPEN
|
||||
|
||||
|
||||
class AugustDoorbellBinarySensor(BinarySensorDevice):
|
||||
|
@ -152,6 +154,12 @@ class AugustDoorbellBinarySensor(BinarySensorDevice):
|
|||
self._sensor_type = sensor_type
|
||||
self._doorbell = doorbell
|
||||
self._state = None
|
||||
self._available = False
|
||||
|
||||
@property
|
||||
def available(self):
|
||||
"""Return the availability of this sensor."""
|
||||
return self._available
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
|
@ -173,3 +181,4 @@ class AugustDoorbellBinarySensor(BinarySensorDevice):
|
|||
"""Get the latest state of the sensor."""
|
||||
state_provider = SENSOR_TYPES_DOORBELL[self._sensor_type][2]
|
||||
self._state = state_provider(self._data, self._doorbell)
|
||||
self._available = self._state is not None
|
||||
|
|
|
@@ -7,10 +7,11 @@ https://home-assistant.io/components/binary_sensor.axis/
from datetime import timedelta
import logging

from homeassistant.components.axis import AxisDeviceEvent
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.const import CONF_TRIGGER_TIME
from homeassistant.helpers.event import track_point_in_utc_time
from homeassistant.const import (
    ATTR_LOCATION, CONF_EVENT, CONF_NAME, CONF_TRIGGER_TIME)
from homeassistant.core import callback
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util.dt import utcnow

DEPENDENCIES = ['axis']

@ -20,48 +21,71 @@ _LOGGER = logging.getLogger(__name__)
|
|||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
"""Set up the Axis binary devices."""
|
||||
add_entities([AxisBinarySensor(hass, discovery_info)], True)
|
||||
add_entities([AxisBinarySensor(discovery_info)], True)
|
||||
|
||||
|
||||
class AxisBinarySensor(AxisDeviceEvent, BinarySensorDevice):
|
||||
class AxisBinarySensor(BinarySensorDevice):
|
||||
"""Representation of a binary Axis event."""
|
||||
|
||||
def __init__(self, hass, event_config):
|
||||
def __init__(self, event_config):
|
||||
"""Initialize the Axis binary sensor."""
|
||||
self.hass = hass
|
||||
self._state = False
|
||||
self._delay = event_config[CONF_TRIGGER_TIME]
|
||||
self._timer = None
|
||||
AxisDeviceEvent.__init__(self, event_config)
|
||||
self.axis_event = event_config[CONF_EVENT]
|
||||
self.device_name = event_config[CONF_NAME]
|
||||
self.location = event_config[ATTR_LOCATION]
|
||||
self.delay = event_config[CONF_TRIGGER_TIME]
|
||||
self.remove_timer = None
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Subscribe sensors events."""
|
||||
self.axis_event.callback = self._update_callback
|
||||
|
||||
def _update_callback(self):
|
||||
"""Update the sensor's state, if needed."""
|
||||
if self.remove_timer is not None:
|
||||
self.remove_timer()
|
||||
self.remove_timer = None
|
||||
|
||||
if self.delay == 0 or self.is_on:
|
||||
self.schedule_update_ha_state()
|
||||
else: # Run timer to delay updating the state
|
||||
@callback
|
||||
def _delay_update(now):
|
||||
"""Timer callback for sensor update."""
|
||||
_LOGGER.debug("%s called delayed (%s sec) update",
|
||||
self.name, self.delay)
|
||||
self.async_schedule_update_ha_state()
|
||||
self.remove_timer = None
|
||||
|
||||
self.remove_timer = async_track_point_in_utc_time(
|
||||
self.hass, _delay_update,
|
||||
utcnow() + timedelta(seconds=self.delay))
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return true if event is active."""
|
||||
return self._state
|
||||
return self.axis_event.is_tripped
|
||||
|
||||
def update(self):
|
||||
"""Get the latest data and update the state."""
|
||||
self._state = self.axis_event.is_tripped
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the event."""
|
||||
return '{}_{}_{}'.format(
|
||||
self.device_name, self.axis_event.event_type, self.axis_event.id)
|
||||
|
||||
def _update_callback(self):
|
||||
"""Update the sensor's state, if needed."""
|
||||
self.update()
|
||||
@property
|
||||
def device_class(self):
|
||||
"""Return the class of the event."""
|
||||
return self.axis_event.event_class
|
||||
|
||||
if self._timer is not None:
|
||||
self._timer()
|
||||
self._timer = None
|
||||
@property
|
||||
def should_poll(self):
|
||||
"""No polling needed."""
|
||||
return False
|
||||
|
||||
if self._delay > 0 and not self.is_on:
|
||||
# Set timer to wait until updating the state
|
||||
def _delay_update(now):
|
||||
"""Timer callback for sensor update."""
|
||||
_LOGGER.debug("%s called delayed (%s sec) update",
|
||||
self._name, self._delay)
|
||||
self.schedule_update_ha_state()
|
||||
self._timer = None
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes of the event."""
|
||||
attr = {}
|
||||
|
||||
self._timer = track_point_in_utc_time(
|
||||
self.hass, _delay_update,
|
||||
utcnow() + timedelta(seconds=self._delay))
|
||||
else:
|
||||
self.schedule_update_ha_state()
|
||||
attr[ATTR_LOCATION] = self.location
|
||||
|
||||
return attr
|
||||
|
|
|
@ -7,7 +7,7 @@ https://home-assistant.io/components/binary_sensor.deconz/
|
|||
from homeassistant.components.binary_sensor import BinarySensorDevice
|
||||
from homeassistant.components.deconz.const import (
|
||||
ATTR_DARK, ATTR_ON, CONF_ALLOW_CLIP_SENSOR, DOMAIN as DATA_DECONZ,
|
||||
DATA_DECONZ_ID, DATA_DECONZ_UNSUB, DECONZ_DOMAIN)
|
||||
DECONZ_DOMAIN)
|
||||
from homeassistant.const import ATTR_BATTERY_LEVEL
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE
|
||||
|
@ -36,10 +36,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
|||
entities.append(DeconzBinarySensor(sensor))
|
||||
async_add_entities(entities, True)
|
||||
|
||||
hass.data[DATA_DECONZ_UNSUB].append(
|
||||
hass.data[DATA_DECONZ].listeners.append(
|
||||
async_dispatcher_connect(hass, 'deconz_new_sensor', async_add_sensor))
|
||||
|
||||
async_add_sensor(hass.data[DATA_DECONZ].sensors.values())
|
||||
async_add_sensor(hass.data[DATA_DECONZ].api.sensors.values())
|
||||
|
||||
|
||||
class DeconzBinarySensor(BinarySensorDevice):
|
||||
|
@ -52,7 +52,8 @@ class DeconzBinarySensor(BinarySensorDevice):
|
|||
async def async_added_to_hass(self):
|
||||
"""Subscribe sensors events."""
|
||||
self._sensor.register_async_callback(self.async_update_callback)
|
||||
self.hass.data[DATA_DECONZ_ID][self.entity_id] = self._sensor.deconz_id
|
||||
self.hass.data[DATA_DECONZ].deconz_ids[self.entity_id] = \
|
||||
self._sensor.deconz_id
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Disconnect sensor object when removed."""
|
||||
|
@ -127,7 +128,7 @@ class DeconzBinarySensor(BinarySensorDevice):
|
|||
self._sensor.uniqueid.count(':') != 7):
|
||||
return None
|
||||
serial = self._sensor.uniqueid.split('-', 1)[0]
|
||||
bridgeid = self.hass.data[DATA_DECONZ].config.bridgeid
|
||||
bridgeid = self.hass.data[DATA_DECONZ].api.config.bridgeid
|
||||
return {
|
||||
'connections': {(CONNECTION_ZIGBEE, serial)},
|
||||
'identifiers': {(DECONZ_DOMAIN, serial)},
|
||||
|
|
|
@@ -131,7 +131,14 @@ class MqttBinarySensor(MqttAvailability, MqttDiscoveryUpdate,
        await MqttDiscoveryUpdate.async_added_to_hass(self)

        @callback
        def state_message_received(topic, payload, qos):
        def off_delay_listener(now):
            """Switch device off after a delay."""
            self._delay_listener = None
            self._state = False
            self.async_schedule_update_ha_state()

        @callback
        def state_message_received(_topic, payload, _qos):
            """Handle a new received MQTT state message."""
            if self._template is not None:
                payload = self._template.async_render_with_possible_json_value(

@@ -146,17 +153,10 @@ class MqttBinarySensor(MqttAvailability, MqttDiscoveryUpdate,
                              self._name, self._state_topic)
                return

            if self._delay_listener is not None:
                self._delay_listener()

            if (self._state and self._off_delay is not None):
                @callback
                def off_delay_listener(now):
                    """Switch device off after a delay."""
                    self._delay_listener = None
                    self._state = False
                    self.async_schedule_update_ha_state()

                if self._delay_listener is not None:
                    self._delay_listener()

                self._delay_listener = evt.async_call_later(
                    self.hass, self._off_delay, off_delay_listener)

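The off_delay handling above turns the sensor off again a fixed number of seconds after the last ON message. A minimal configuration sketch, with the topic, payloads and delay as placeholders:

# Minimal sketch of an MQTT binary sensor using the off_delay option;
# topic, payloads and delay are placeholders.
binary_sensor:
  - platform: mqtt
    name: Hallway motion
    state_topic: 'home/hallway/motion'
    payload_on: 'ON'
    payload_off: 'OFF'
    device_class: motion
    off_delay: 30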
@ -37,8 +37,10 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
|||
vol.Required(CONF_VARIABLE): cv.string,
|
||||
vol.Required(CONF_PAYLOAD): vol.Schema(dict),
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_PAYLOAD_ON, default='on'): cv.string,
|
||||
vol.Optional(CONF_PAYLOAD_OFF, default='off'): cv.string,
|
||||
vol.Optional(CONF_PAYLOAD_ON, default='on'): vol.Any(
|
||||
cv.positive_int, cv.small_float, cv.string),
|
||||
vol.Optional(CONF_PAYLOAD_OFF, default='off'): vol.Any(
|
||||
cv.positive_int, cv.small_float, cv.string),
|
||||
vol.Optional(CONF_DISARM_AFTER_TRIGGER, default=False): cv.boolean,
|
||||
vol.Optional(CONF_RESET_DELAY_SEC, default=30): cv.positive_int
|
||||
})
|
||||
|
|
116
homeassistant/components/binary_sensor/sense.py
Normal file
116
homeassistant/components/binary_sensor/sense.py
Normal file
|
@ -0,0 +1,116 @@
|
|||
"""
|
||||
Support for monitoring a Sense energy sensor device.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/binary_sensor.sense/
|
||||
"""
|
||||
import logging
|
||||
|
||||
from homeassistant.components.binary_sensor import BinarySensorDevice
|
||||
from homeassistant.components.sense import SENSE_DATA
|
||||
|
||||
DEPENDENCIES = ['sense']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
BIN_SENSOR_CLASS = 'power'
|
||||
MDI_ICONS = {'ac': 'air-conditioner',
|
||||
'aquarium': 'fish',
|
||||
'car': 'car-electric',
|
||||
'computer': 'desktop-classic',
|
||||
'cup': 'coffee',
|
||||
'dehumidifier': 'water-off',
|
||||
'dishes': 'dishwasher',
|
||||
'drill': 'toolbox',
|
||||
'fan': 'fan',
|
||||
'freezer': 'fridge-top',
|
||||
'fridge': 'fridge-bottom',
|
||||
'game': 'gamepad-variant',
|
||||
'garage': 'garage',
|
||||
'grill': 'stove',
|
||||
'heat': 'fire',
|
||||
'heater': 'radiatior',
|
||||
'humidifier': 'water',
|
||||
'kettle': 'kettle',
|
||||
'leafblower': 'leaf',
|
||||
'lightbulb': 'lightbulb',
|
||||
'media_console': 'set-top-box',
|
||||
'modem': 'router-wireless',
|
||||
'outlet': 'power-socket-us',
|
||||
'papershredder': 'shredder',
|
||||
'printer': 'printer',
|
||||
'pump': 'water-pump',
|
||||
'settings': 'settings',
|
||||
'skillet': 'pot',
|
||||
'smartcamera': 'webcam',
|
||||
'socket': 'power-plug',
|
||||
'sound': 'speaker',
|
||||
'stove': 'stove',
|
||||
'trash': 'trash-can',
|
||||
'tv': 'television',
|
||||
'vacuum': 'robot-vacuum',
|
||||
'washer': 'washing-machine'}
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
"""Set up the Sense sensor."""
|
||||
if discovery_info is None:
|
||||
return
|
||||
|
||||
data = hass.data[SENSE_DATA]
|
||||
|
||||
sense_devices = data.get_discovered_device_data()
|
||||
devices = [SenseDevice(data, device) for device in sense_devices]
|
||||
add_entities(devices)
|
||||
|
||||
|
||||
def sense_to_mdi(sense_icon):
|
||||
"""Convert sense icon to mdi icon."""
|
||||
return 'mdi:' + MDI_ICONS.get(sense_icon, 'power-plug')
|
||||
|
||||
|
||||
class SenseDevice(BinarySensorDevice):
|
||||
"""Implementation of a Sense energy device binary sensor."""
|
||||
|
||||
def __init__(self, data, device):
|
||||
"""Initialize the sensor."""
|
||||
self._name = device['name']
|
||||
self._id = device['id']
|
||||
self._icon = sense_to_mdi(device['icon'])
|
||||
self._data = data
|
||||
self._state = False
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return true if the binary sensor is on."""
|
||||
return self._state
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the binary sensor."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def unique_id(self):
|
||||
"""Return the id of the binary sensor."""
|
||||
return self._id
|
||||
|
||||
@property
|
||||
def icon(self):
|
||||
"""Return the icon of the binary sensor."""
|
||||
return self._icon
|
||||
|
||||
@property
|
||||
def device_class(self):
|
||||
"""Return the device class of the binary sensor."""
|
||||
return BIN_SENSOR_CLASS
|
||||
|
||||
def update(self):
|
||||
"""Retrieve latest state."""
|
||||
from sense_energy.sense_api import SenseAPITimeoutException
|
||||
try:
|
||||
self._data.get_realtime()
|
||||
except SenseAPITimeoutException:
|
||||
_LOGGER.error("Timeout retrieving data")
|
||||
return
|
||||
self._state = self._name in self._data.active_devices
|
|
@ -15,7 +15,7 @@ from homeassistant.components.binary_sensor import (
|
|||
from homeassistant.const import (
|
||||
ATTR_FRIENDLY_NAME, ATTR_ENTITY_ID, CONF_VALUE_TEMPLATE,
|
||||
CONF_ICON_TEMPLATE, CONF_ENTITY_PICTURE_TEMPLATE,
|
||||
CONF_SENSORS, CONF_DEVICE_CLASS, EVENT_HOMEASSISTANT_START)
|
||||
CONF_SENSORS, CONF_DEVICE_CLASS, EVENT_HOMEASSISTANT_START, MATCH_ALL)
|
||||
from homeassistant.exceptions import TemplateError
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity import async_generate_entity_id
|
||||
|
@ -55,22 +55,37 @@ async def async_setup_platform(hass, config, async_add_entities,
|
|||
icon_template = device_config.get(CONF_ICON_TEMPLATE)
|
||||
entity_picture_template = device_config.get(
|
||||
CONF_ENTITY_PICTURE_TEMPLATE)
|
||||
entity_ids = (device_config.get(ATTR_ENTITY_ID) or
|
||||
value_template.extract_entities())
|
||||
entity_ids = set()
|
||||
manual_entity_ids = device_config.get(ATTR_ENTITY_ID)
|
||||
|
||||
for template in (
|
||||
value_template,
|
||||
icon_template,
|
||||
entity_picture_template,
|
||||
):
|
||||
if template is None:
|
||||
continue
|
||||
template.hass = hass
|
||||
|
||||
if manual_entity_ids is not None:
|
||||
continue
|
||||
|
||||
template_entity_ids = template.extract_entities()
|
||||
if template_entity_ids == MATCH_ALL:
|
||||
entity_ids = MATCH_ALL
|
||||
elif entity_ids != MATCH_ALL:
|
||||
entity_ids |= set(template_entity_ids)
|
||||
|
||||
if manual_entity_ids is not None:
|
||||
entity_ids = manual_entity_ids
|
||||
elif entity_ids != MATCH_ALL:
|
||||
entity_ids = list(entity_ids)
|
||||
|
||||
friendly_name = device_config.get(ATTR_FRIENDLY_NAME, device)
|
||||
device_class = device_config.get(CONF_DEVICE_CLASS)
|
||||
delay_on = device_config.get(CONF_DELAY_ON)
|
||||
delay_off = device_config.get(CONF_DELAY_OFF)
|
||||
|
||||
if value_template is not None:
|
||||
value_template.hass = hass
|
||||
|
||||
if icon_template is not None:
|
||||
icon_template.hass = hass
|
||||
|
||||
if entity_picture_template is not None:
|
||||
entity_picture_template.hass = hass
|
||||
|
||||
sensors.append(
|
||||
BinarySensorTemplate(
|
||||
hass, device, friendly_name, device_class, value_template,
|
||||
|
|
|
@ -15,14 +15,14 @@ from homeassistant.components.binary_sensor import (
|
|||
BinarySensorDevice)
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, CONF_DEVICE_CLASS, CONF_ENTITY_ID,
|
||||
CONF_FRIENDLY_NAME, STATE_UNKNOWN)
|
||||
CONF_FRIENDLY_NAME, STATE_UNKNOWN, CONF_SENSORS)
|
||||
from homeassistant.core import callback
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity import generate_entity_id
|
||||
from homeassistant.helpers.event import async_track_state_change
|
||||
from homeassistant.util import utcnow
|
||||
|
||||
REQUIREMENTS = ['numpy==1.15.2']
|
||||
REQUIREMENTS = ['numpy==1.15.3']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@ -38,7 +38,6 @@ CONF_INVERT = 'invert'
|
|||
CONF_MAX_SAMPLES = 'max_samples'
|
||||
CONF_MIN_GRADIENT = 'min_gradient'
|
||||
CONF_SAMPLE_DURATION = 'sample_duration'
|
||||
CONF_SENSORS = 'sensors'
|
||||
|
||||
SENSOR_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_ENTITY_ID): cv.entity_id,
|
||||
|
@ -78,9 +77,8 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
|||
)
|
||||
if not sensors:
|
||||
_LOGGER.error("No sensors added")
|
||||
return False
|
||||
return
|
||||
add_entities(sensors)
|
||||
return True
|
||||
|
||||
|
||||
class SensorTrend(BinarySensorDevice):
|
||||
|
|
|
@ -357,6 +357,9 @@ class XiaomiVibration(XiaomiBinarySensor):
|
|||
def parse_data(self, data, raw_data):
|
||||
"""Parse data sent by gateway."""
|
||||
value = data.get(self._data_key)
|
||||
if value is None:
|
||||
return False
|
||||
|
||||
if value not in ('vibrate', 'tilt', 'free_fall'):
|
||||
_LOGGER.warning("Unsupported movement_type detected: %s",
|
||||
value)
|
||||
|
|
|
@ -13,7 +13,7 @@ _LOGGER = logging.getLogger(__name__)
|
|||
|
||||
DEPENDENCIES = ['zha']
|
||||
|
||||
# ZigBee Cluster Library Zone Type to Home Assistant device class
|
||||
# Zigbee Cluster Library Zone Type to Home Assistant device class
|
||||
CLASS_MAPPING = {
|
||||
0x000d: 'motion',
|
||||
0x0015: 'opening',
|
||||
|
@ -145,7 +145,7 @@ class Remote(zha.Entity, BinarySensorDevice):
|
|||
_domain = DOMAIN
|
||||
|
||||
class OnOffListener:
|
||||
"""Listener for the OnOff ZigBee cluster."""
|
||||
"""Listener for the OnOff Zigbee cluster."""
|
||||
|
||||
def __init__(self, entity):
|
||||
"""Initialize OnOffListener."""
|
||||
|
@ -170,7 +170,7 @@ class Remote(zha.Entity, BinarySensorDevice):
|
|||
pass
|
||||
|
||||
class LevelListener:
|
||||
"""Listener for the LevelControl ZigBee cluster."""
|
||||
"""Listener for the LevelControl Zigbee cluster."""
|
||||
|
||||
def __init__(self, entity):
|
||||
"""Initialize LevelListener."""
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
"""
|
||||
Contains functionality to use a ZigBee device as a binary sensor.
|
||||
Contains functionality to use a Zigbee device as a binary sensor.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/binary_sensor.zigbee/
|
||||
|
@ -23,7 +23,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
|||
|
||||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
"""Set up the ZigBee binary sensor platform."""
|
||||
"""Set up the Zigbee binary sensor platform."""
|
||||
add_entities(
|
||||
[ZigBeeBinarySensor(hass, ZigBeeDigitalInConfig(config))], True)
|
||||
|
||||
|
|
|
@@ -54,7 +54,7 @@ class BloomSky:
    """Handle all communication with the BloomSky API."""

    # API documentation at http://weatherlution.com/bloomsky-api/
    API_URL = 'https://api.bloomsky.com/api/skydata'
    API_URL = 'http://api.bloomsky.com/api/skydata'

    def __init__(self, api_key):
        """Initialize the BloomSky."""

@ -299,7 +299,8 @@ class Camera(Entity):
|
|||
a direct stream from the camera.
|
||||
This method must be run in the event loop.
|
||||
"""
|
||||
await self.handle_async_still_stream(request, self.frame_interval)
|
||||
return await self.handle_async_still_stream(
|
||||
request, self.frame_interval)
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
|
|
|
@ -59,8 +59,7 @@ class AmcrestCam(Camera):
|
|||
"""Return an MJPEG stream."""
|
||||
# The snapshot implementation is handled by the parent class
|
||||
if self._stream_source == STREAM_SOURCE_LIST['snapshot']:
|
||||
await super().handle_async_mjpeg_stream(request)
|
||||
return
|
||||
return await super().handle_async_mjpeg_stream(request)
|
||||
|
||||
if self._stream_source == STREAM_SOURCE_LIST['mjpeg']:
|
||||
# stream an MJPEG image stream directly from the camera
|
||||
|
@ -69,20 +68,22 @@ class AmcrestCam(Camera):
|
|||
stream_coro = websession.get(
|
||||
streaming_url, auth=self._token, timeout=TIMEOUT)
|
||||
|
||||
await async_aiohttp_proxy_web(self.hass, request, stream_coro)
|
||||
return await async_aiohttp_proxy_web(
|
||||
self.hass, request, stream_coro)
|
||||
|
||||
else:
|
||||
# streaming via fmpeg
|
||||
from haffmpeg import CameraMjpeg
|
||||
# streaming via ffmpeg
|
||||
from haffmpeg import CameraMjpeg
|
||||
|
||||
streaming_url = self._camera.rtsp_url(typeno=self._resolution)
|
||||
stream = CameraMjpeg(self._ffmpeg.binary, loop=self.hass.loop)
|
||||
await stream.open_camera(
|
||||
streaming_url, extra_cmd=self._ffmpeg_arguments)
|
||||
streaming_url = self._camera.rtsp_url(typeno=self._resolution)
|
||||
stream = CameraMjpeg(self._ffmpeg.binary, loop=self.hass.loop)
|
||||
await stream.open_camera(
|
||||
streaming_url, extra_cmd=self._ffmpeg_arguments)
|
||||
|
||||
await async_aiohttp_proxy_stream(
|
||||
try:
|
||||
return await async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
finally:
|
||||
await stream.close()
|
||||
|
||||
@property
|
||||
|
|
|
@ -101,10 +101,12 @@ class ArloCam(Camera):
|
|||
await stream.open_camera(
|
||||
video.video_url, extra_cmd=self._ffmpeg_arguments)
|
||||
|
||||
await async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
await stream.close()
|
||||
try:
|
||||
return await async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
finally:
|
||||
await stream.close()
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
|
|
@ -98,10 +98,12 @@ class CanaryCamera(Camera):
|
|||
self._live_stream_session.live_stream_url,
|
||||
extra_cmd=self._ffmpeg_arguments)
|
||||
|
||||
await async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
await stream.close()
|
||||
try:
|
||||
return await async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
finally:
|
||||
await stream.close()
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_SESSION_RENEW)
|
||||
def renew_live_stream_session(self):
|
||||
|
|
|
@ -134,8 +134,7 @@ class MjpegCamera(Camera):
|
|||
"""Generate an HTTP MJPEG stream from the camera."""
|
||||
# aiohttp don't support DigestAuth -> Fallback
|
||||
if self._authentication == HTTP_DIGEST_AUTHENTICATION:
|
||||
await super().handle_async_mjpeg_stream(request)
|
||||
return
|
||||
return await super().handle_async_mjpeg_stream(request)
|
||||
|
||||
# connect to stream
|
||||
websession = async_get_clientsession(self.hass)
|
||||
|
|
|
@ -218,10 +218,12 @@ class ONVIFHassCamera(Camera):
|
|||
await stream.open_camera(
|
||||
self._input, extra_cmd=self._ffmpeg_arguments)
|
||||
|
||||
await async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
await stream.close()
|
||||
try:
|
||||
return await async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
finally:
|
||||
await stream.close()
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
|
|
@ -139,10 +139,12 @@ class RingCam(Camera):
|
|||
await stream.open_camera(
|
||||
self._video_url, extra_cmd=self._ffmpeg_arguments)
|
||||
|
||||
await async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
await stream.close()
|
||||
try:
|
||||
return await async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
finally:
|
||||
await stream.close()
|
||||
|
||||
@property
|
||||
def should_poll(self):
|
||||
|
|
|
@ -92,7 +92,7 @@ class SynologyCamera(Camera):
|
|||
websession = async_get_clientsession(self.hass, self._verify_ssl)
|
||||
stream_coro = websession.get(streaming_url)
|
||||
|
||||
await async_aiohttp_proxy_web(self.hass, request, stream_coro)
|
||||
return await async_aiohttp_proxy_web(self.hass, request, stream_coro)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
|
|
@ -158,7 +158,9 @@ class XiaomiCamera(Camera):
|
|||
await stream.open_camera(
|
||||
self._last_url, extra_cmd=self._extra_arguments)
|
||||
|
||||
await async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
await stream.close()
|
||||
try:
|
||||
return await async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
finally:
|
||||
await stream.close()
|
||||
|
|
|
@ -144,7 +144,9 @@ class YiCamera(Camera):
|
|||
await stream.open_camera(
|
||||
self._last_url, extra_cmd=self._extra_arguments)
|
||||
|
||||
await async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
await stream.close()
|
||||
try:
|
||||
return await async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
finally:
|
||||
await stream.close()
|
||||
|
|
|
@ -174,8 +174,10 @@ class DaikinClimate(ClimateDevice):
|
|||
|
||||
daikin_attr = HA_ATTR_TO_DAIKIN.get(attr)
|
||||
if daikin_attr is not None:
|
||||
if value in self._list[attr]:
|
||||
if attr == ATTR_OPERATION_MODE:
|
||||
values[daikin_attr] = HA_STATE_TO_DAIKIN[value]
|
||||
elif value in self._list[attr]:
|
||||
values[daikin_attr] = value.lower()
|
||||
else:
|
||||
_LOGGER.error("Invalid value %s for %s", attr, value)
|
||||
|
||||
|
|
|
@ -232,11 +232,11 @@ class GenericThermostat(ClimateDevice):
|
|||
if operation_mode == STATE_HEAT:
|
||||
self._current_operation = STATE_HEAT
|
||||
self._enabled = True
|
||||
await self._async_control_heating()
|
||||
await self._async_control_heating(force=True)
|
||||
elif operation_mode == STATE_COOL:
|
||||
self._current_operation = STATE_COOL
|
||||
self._enabled = True
|
||||
await self._async_control_heating()
|
||||
await self._async_control_heating(force=True)
|
||||
elif operation_mode == STATE_OFF:
|
||||
self._current_operation = STATE_OFF
|
||||
self._enabled = False
|
||||
|
@ -262,7 +262,7 @@ class GenericThermostat(ClimateDevice):
|
|||
if temperature is None:
|
||||
return
|
||||
self._target_temp = temperature
|
||||
await self._async_control_heating()
|
||||
await self._async_control_heating(force=True)
|
||||
await self.async_update_ha_state()
|
||||
|
||||
@property
|
||||
|
@ -307,7 +307,7 @@ class GenericThermostat(ClimateDevice):
|
|||
except ValueError as ex:
|
||||
_LOGGER.error("Unable to update from sensor: %s", ex)
|
||||
|
||||
async def _async_control_heating(self, time=None):
|
||||
async def _async_control_heating(self, time=None, force=False):
|
||||
"""Check if we need to turn heating on or off."""
|
||||
async with self._temp_lock:
|
||||
if not self._active and None not in (self._cur_temp,
|
||||
|
@@ -320,16 +320,21 @@ class GenericThermostat(ClimateDevice):
            if not self._active or not self._enabled:
                return

            if self.min_cycle_duration:
                if self._is_device_active:
                    current_state = STATE_ON
                else:
                    current_state = STATE_OFF
                long_enough = condition.state(
                    self.hass, self.heater_entity_id, current_state,
                    self.min_cycle_duration)
                if not long_enough:
                    return
            if not force and time is None:
                # If the `force` argument is True, we
                # ignore `min_cycle_duration`.
                # If the `time` argument is not none, we were invoked for
                # keep-alive purposes, and `min_cycle_duration` is irrelevant.
                if self.min_cycle_duration:
                    if self._is_device_active:
                        current_state = STATE_ON
                    else:
                        current_state = STATE_OFF
                    long_enough = condition.state(
                        self.hass, self.heater_entity_id, current_state,
                        self.min_cycle_duration)
                    if not long_enough:
                        return

            too_cold = \
                self._target_temp - self._cur_temp >= self._cold_tolerance

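The new guard above only applies min_cycle_duration to regular sensor-driven updates: calls with force=True (user-initiated changes such as set_temperature) and keep-alive invocations (time is not None) bypass it. A configuration sketch where both options are in play, with entity ids and durations as placeholders:

# Sketch of a generic_thermostat entry combining min_cycle_duration and
# keep_alive; entity ids and durations are placeholders.
climate:
  - platform: generic_thermostat
    name: Study heater
    heater: switch.study_heater
    target_sensor: sensor.study_temperature
    min_cycle_duration:
      minutes: 10
    keep_alive:
      minutes: 3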
@ -380,15 +385,19 @@ class GenericThermostat(ClimateDevice):
|
|||
|
||||
async def async_turn_away_mode_on(self):
|
||||
"""Turn away mode on by setting it on away hold indefinitely."""
|
||||
if self._is_away:
|
||||
return
|
||||
self._is_away = True
|
||||
self._saved_target_temp = self._target_temp
|
||||
self._target_temp = self._away_temp
|
||||
await self._async_control_heating()
|
||||
await self._async_control_heating(force=True)
|
||||
await self.async_update_ha_state()
|
||||
|
||||
async def async_turn_away_mode_off(self):
|
||||
"""Turn away off."""
|
||||
if not self._is_away:
|
||||
return
|
||||
self._is_away = False
|
||||
self._target_temp = self._saved_target_temp
|
||||
await self._async_control_heating()
|
||||
await self._async_control_heating(force=True)
|
||||
await self.async_update_ha_state()
|
||||
|
|
|
@ -34,10 +34,11 @@ FAN_MODES = [
|
|||
]
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
async def async_setup_platform(
|
||||
hass, config, async_add_entities, discovery_info=None):
|
||||
"""Iterate through and add all Melissa devices."""
|
||||
api = hass.data[DATA_MELISSA]
|
||||
devices = api.fetch_devices().values()
|
||||
devices = (await api.async_fetch_devices()).values()
|
||||
|
||||
all_devices = []
|
||||
|
||||
|
@ -46,7 +47,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
|||
all_devices.append(MelissaClimate(
|
||||
api, device['serial_number'], device))
|
||||
|
||||
add_entities(all_devices)
|
||||
async_add_entities(all_devices)
|
||||
|
||||
|
||||
class MelissaClimate(ClimateDevice):
|
||||
|
@ -142,48 +143,48 @@ class MelissaClimate(ClimateDevice):
|
|||
"""Return the list of supported features."""
|
||||
return SUPPORT_FLAGS
|
||||
|
||||
def set_temperature(self, **kwargs):
|
||||
async def async_set_temperature(self, **kwargs):
|
||||
"""Set new target temperature."""
|
||||
temp = kwargs.get(ATTR_TEMPERATURE)
|
||||
self.send({self._api.TEMP: temp})
|
||||
await self.async_send({self._api.TEMP: temp})
|
||||
|
||||
def set_fan_mode(self, fan_mode):
|
||||
async def async_set_fan_mode(self, fan_mode):
|
||||
"""Set fan mode."""
|
||||
melissa_fan_mode = self.hass_fan_to_melissa(fan_mode)
|
||||
self.send({self._api.FAN: melissa_fan_mode})
|
||||
await self.async_send({self._api.FAN: melissa_fan_mode})
|
||||
|
||||
def set_operation_mode(self, operation_mode):
|
||||
async def async_set_operation_mode(self, operation_mode):
|
||||
"""Set operation mode."""
|
||||
mode = self.hass_mode_to_melissa(operation_mode)
|
||||
self.send({self._api.MODE: mode})
|
||||
await self.async_send({self._api.MODE: mode})
|
||||
|
||||
def turn_on(self):
|
||||
async def async_turn_on(self):
|
||||
"""Turn on device."""
|
||||
self.send({self._api.STATE: self._api.STATE_ON})
|
||||
await self.async_send({self._api.STATE: self._api.STATE_ON})
|
||||
|
||||
def turn_off(self):
|
||||
async def async_turn_off(self):
|
||||
"""Turn off device."""
|
||||
self.send({self._api.STATE: self._api.STATE_OFF})
|
||||
await self.async_send({self._api.STATE: self._api.STATE_OFF})
|
||||
|
||||
def send(self, value):
|
||||
async def async_send(self, value):
|
||||
"""Send action to service."""
|
||||
try:
|
||||
old_value = self._cur_settings.copy()
|
||||
self._cur_settings.update(value)
|
||||
except AttributeError:
|
||||
old_value = None
|
||||
if not self._api.send(self._serial_number, self._cur_settings):
|
||||
if not await self._api.async_send(
|
||||
self._serial_number, self._cur_settings):
|
||||
self._cur_settings = old_value
|
||||
return False
|
||||
return True
|
||||
|
||||
def update(self):
|
||||
async def async_update(self):
|
||||
"""Get latest data from Melissa."""
|
||||
try:
|
||||
self._data = self._api.status(cached=True)[self._serial_number]
|
||||
self._cur_settings = self._api.cur_settings(
|
||||
self._data = (await self._api.async_status(cached=True))[
|
||||
self._serial_number]
|
||||
self._cur_settings = (await self._api.async_cur_settings(
|
||||
self._serial_number
|
||||
)['controller']['_relation']['command_log']
|
||||
))['controller']['_relation']['command_log']
|
||||
except KeyError:
|
||||
_LOGGER.warning(
|
||||
'Unable to update entity %s', self.entity_id)
|
||||
|
|
|
@ -8,29 +8,45 @@ https://home-assistant.io/components/climate.mill/
|
|||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
ClimateDevice, PLATFORM_SCHEMA, SUPPORT_TARGET_TEMPERATURE,
|
||||
SUPPORT_FAN_MODE, SUPPORT_ON_OFF)
|
||||
ClimateDevice, DOMAIN, PLATFORM_SCHEMA, STATE_HEAT,
|
||||
SUPPORT_TARGET_TEMPERATURE, SUPPORT_FAN_MODE,
|
||||
SUPPORT_ON_OFF, SUPPORT_OPERATION_MODE)
|
||||
from homeassistant.const import (
|
||||
ATTR_TEMPERATURE, CONF_PASSWORD, CONF_USERNAME,
|
||||
STATE_ON, STATE_OFF, TEMP_CELSIUS)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
REQUIREMENTS = ['millheater==0.1.2']
|
||||
REQUIREMENTS = ['millheater==0.2.2']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_AWAY_TEMP = 'away_temp'
|
||||
ATTR_COMFORT_TEMP = 'comfort_temp'
|
||||
ATTR_ROOM_NAME = 'room_name'
|
||||
ATTR_SLEEP_TEMP = 'sleep_temp'
|
||||
MAX_TEMP = 35
|
||||
MIN_TEMP = 5
|
||||
SERVICE_SET_ROOM_TEMP = 'mill_set_room_temperature'
|
||||
|
||||
SUPPORT_FLAGS = (SUPPORT_TARGET_TEMPERATURE |
|
||||
SUPPORT_FAN_MODE | SUPPORT_ON_OFF)
|
||||
SUPPORT_FAN_MODE | SUPPORT_ON_OFF |
|
||||
SUPPORT_OPERATION_MODE)
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_USERNAME): cv.string,
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
})
|
||||
|
||||
SET_ROOM_TEMP_SCHEMA = vol.Schema({
|
||||
vol.Required(ATTR_ROOM_NAME): cv.string,
|
||||
vol.Optional(ATTR_AWAY_TEMP): cv.positive_int,
|
||||
vol.Optional(ATTR_COMFORT_TEMP): cv.positive_int,
|
||||
vol.Optional(ATTR_SLEEP_TEMP): cv.positive_int,
|
||||
})
|
||||
|
||||
|
||||
async def async_setup_platform(hass, config, async_add_entities,
|
||||
discovery_info=None):
|
||||
|
@ -43,13 +59,27 @@ async def async_setup_platform(hass, config, async_add_entities,
|
|||
_LOGGER.error("Failed to connect to Mill")
|
||||
return
|
||||
|
||||
await mill_data_connection.update_heaters()
|
||||
await mill_data_connection.find_all_heaters()
|
||||
|
||||
dev = []
|
||||
for heater in mill_data_connection.heaters.values():
|
||||
dev.append(MillHeater(heater, mill_data_connection))
|
||||
async_add_entities(dev)
|
||||
|
||||
async def set_room_temp(service):
|
||||
"""Set room temp."""
|
||||
room_name = service.data.get(ATTR_ROOM_NAME)
|
||||
sleep_temp = service.data.get(ATTR_SLEEP_TEMP)
|
||||
comfort_temp = service.data.get(ATTR_COMFORT_TEMP)
|
||||
away_temp = service.data.get(ATTR_AWAY_TEMP)
|
||||
await mill_data_connection.set_room_temperatures_by_name(room_name,
|
||||
sleep_temp,
|
||||
comfort_temp,
|
||||
away_temp)
|
||||
|
||||
hass.services.async_register(DOMAIN, SERVICE_SET_ROOM_TEMP,
|
||||
set_room_temp, schema=SET_ROOM_TEMP_SCHEMA)
|
||||
|
||||
|
||||
class MillHeater(ClimateDevice):
|
||||
"""Representation of a Mill Thermostat device."""
|
||||
|
@ -79,6 +109,20 @@ class MillHeater(ClimateDevice):
|
|||
"""Return the name of the entity."""
|
||||
return self._heater.name
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes."""
|
||||
if self._heater.room:
|
||||
room = self._heater.room.name
|
||||
else:
|
||||
room = "Independent device"
|
||||
return {
|
||||
"room": room,
|
||||
"open_window": self._heater.open_window,
|
||||
"heating": self._heater.is_heating,
|
||||
"controlled_by_tibber": self._heater.tibber_control,
|
||||
}
|
||||
|
||||
@property
|
||||
def temperature_unit(self):
|
||||
"""Return the unit of measurement which this thermostat uses."""
|
||||
|
@ -124,6 +168,16 @@ class MillHeater(ClimateDevice):
|
|||
"""Return the maximum temperature."""
|
||||
return MAX_TEMP
|
||||
|
||||
@property
|
||||
def current_operation(self):
|
||||
"""Return current operation."""
|
||||
return STATE_HEAT if self.is_on else STATE_OFF
|
||||
|
||||
@property
|
||||
def operation_list(self):
|
||||
"""List of available operation modes."""
|
||||
return [STATE_HEAT, STATE_OFF]
|
||||
|
||||
async def async_set_temperature(self, **kwargs):
|
||||
"""Set new target temperature."""
|
||||
temperature = kwargs.get(ATTR_TEMPERATURE)
|
||||
|
@ -151,3 +205,12 @@ class MillHeater(ClimateDevice):
|
|||
async def async_update(self):
|
||||
"""Retrieve latest state."""
|
||||
self._heater = await self._conn.update_device(self._heater.device_id)
|
||||
|
||||
async def async_set_operation_mode(self, operation_mode):
|
||||
"""Set operation mode."""
|
||||
if operation_mode == STATE_HEAT:
|
||||
await self.async_turn_on()
|
||||
elif operation_mode == STATE_OFF:
|
||||
await self.async_turn_off()
|
||||
else:
|
||||
_LOGGER.error("Unrecognized operation mode: %s", operation_mode)
|
||||
|
|
|
@@ -116,6 +116,22 @@ ecobee_resume_program:
      description: Resume all events and return to the scheduled program. This defaults to false, which removes only the top event.
      example: true

mill_set_room_temperature:
  description: Set Mill room temperatures.
  fields:
    room_name:
      description: Name of room to change.
      example: 'kitchen'
    away_temp:
      description: Away temperature.
      example: 12
    comfort_temp:
      description: Comfort temperature.
      example: 22
    sleep_temp:
      description: Sleep temperature.
      example: 17

nuheat_resume_program:
  description: Resume the programmed schedule.
  fields:

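For the mill_set_room_temperature service documented above (registered on the climate domain in the mill platform hunk), a call might look like the sketch below; the room name and temperatures are placeholders.

# Hypothetical call to the new service; values are placeholders.
service: climate.mill_set_room_temperature
data:
  room_name: 'kitchen'
  away_temp: 12
  comfort_temp: 22
  sleep_temp: 17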
@ -8,9 +8,12 @@ import logging
|
|||
|
||||
from homeassistant.util import convert
|
||||
from homeassistant.components.climate import (
|
||||
ClimateDevice, ENTITY_ID_FORMAT, SUPPORT_TARGET_TEMPERATURE,
|
||||
ClimateDevice, STATE_AUTO, STATE_COOL,
|
||||
STATE_HEAT, ENTITY_ID_FORMAT, SUPPORT_TARGET_TEMPERATURE,
|
||||
SUPPORT_OPERATION_MODE, SUPPORT_FAN_MODE)
|
||||
from homeassistant.const import (
|
||||
STATE_ON,
|
||||
STATE_OFF,
|
||||
TEMP_FAHRENHEIT,
|
||||
TEMP_CELSIUS,
|
||||
ATTR_TEMPERATURE)
|
||||
|
@ -22,8 +25,8 @@ DEPENDENCIES = ['vera']
|
|||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
OPERATION_LIST = ['Heat', 'Cool', 'Auto Changeover', 'Off']
|
||||
FAN_OPERATION_LIST = ['On', 'Auto', 'Cycle']
|
||||
OPERATION_LIST = [STATE_HEAT, STATE_COOL, STATE_AUTO, STATE_OFF]
|
||||
FAN_OPERATION_LIST = [STATE_ON, STATE_AUTO]
|
||||
|
||||
SUPPORT_FLAGS = (SUPPORT_TARGET_TEMPERATURE | SUPPORT_OPERATION_MODE |
|
||||
SUPPORT_FAN_MODE)
|
||||
|
@ -54,13 +57,13 @@ class VeraThermostat(VeraDevice, ClimateDevice):
|
|||
"""Return current operation ie. heat, cool, idle."""
|
||||
mode = self.vera_device.get_hvac_mode()
|
||||
if mode == 'HeatOn':
|
||||
return OPERATION_LIST[0] # heat
|
||||
return OPERATION_LIST[0] # Heat
|
||||
if mode == 'CoolOn':
|
||||
return OPERATION_LIST[1] # cool
|
||||
return OPERATION_LIST[1] # Cool
|
||||
if mode == 'AutoChangeOver':
|
||||
return OPERATION_LIST[2] # auto
|
||||
return OPERATION_LIST[2] # Auto
|
||||
if mode == 'Off':
|
||||
return OPERATION_LIST[3] # off
|
||||
return OPERATION_LIST[3] # Off
|
||||
return 'Off'
|
||||
|
||||
@property
|
||||
|
@ -76,8 +79,6 @@ class VeraThermostat(VeraDevice, ClimateDevice):
|
|||
return FAN_OPERATION_LIST[0] # on
|
||||
if mode == "Auto":
|
||||
return FAN_OPERATION_LIST[1] # auto
|
||||
if mode == "PeriodicOn":
|
||||
return FAN_OPERATION_LIST[2] # cycle
|
||||
return "Auto"
|
||||
|
||||
@property
|
||||
|
@ -89,10 +90,8 @@ class VeraThermostat(VeraDevice, ClimateDevice):
|
|||
"""Set new target temperature."""
|
||||
if fan_mode == FAN_OPERATION_LIST[0]:
|
||||
self.vera_device.fan_on()
|
||||
elif fan_mode == FAN_OPERATION_LIST[1]:
|
||||
else:
|
||||
self.vera_device.fan_auto()
|
||||
elif fan_mode == FAN_OPERATION_LIST[2]:
|
||||
return self.vera_device.fan_cycle()
|
||||
|
||||
@property
|
||||
def current_power_w(self):
|
||||
|
|
|
@ -122,7 +122,7 @@ class Cloud:
|
|||
self.hass = hass
|
||||
self.mode = mode
|
||||
self.alexa_config = alexa
|
||||
self._google_actions = google_actions
|
||||
self.google_actions_user_conf = google_actions
|
||||
self._gactions_config = None
|
||||
self._prefs = None
|
||||
self.id_token = None
|
||||
|
@ -180,7 +180,7 @@ class Cloud:
|
|||
def gactions_config(self):
|
||||
"""Return the Google Assistant config."""
|
||||
if self._gactions_config is None:
|
||||
conf = self._google_actions
|
||||
conf = self.google_actions_user_conf
|
||||
|
||||
def should_expose(entity):
|
||||
"""If an entity should be exposed."""
|
||||
|
|
|
@ -144,7 +144,7 @@ def _authenticate(cloud, email, password):
|
|||
cognito.authenticate(password=password)
|
||||
return cognito
|
||||
|
||||
except ForceChangePasswordException as err:
|
||||
except ForceChangePasswordException:
|
||||
raise PasswordChangeRequired
|
||||
|
||||
except ClientError as err:
|
||||
|
|
|
@ -11,6 +11,8 @@ from homeassistant.components.http import HomeAssistantView
|
|||
from homeassistant.components.http.data_validator import (
|
||||
RequestDataValidator)
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.components.alexa import smart_home as alexa_sh
|
||||
from homeassistant.components.google_assistant import smart_home as google_sh
|
||||
|
||||
from . import auth_api
|
||||
from .const import DOMAIN, REQUEST_TIMEOUT
|
||||
|
@ -307,5 +309,9 @@ def _account_data(cloud):
|
|||
'email': claims['email'],
|
||||
'cloud': cloud.iot.state,
|
||||
'google_enabled': cloud.google_enabled,
|
||||
'google_entities': cloud.google_actions_user_conf['filter'].config,
|
||||
'google_domains': list(google_sh.DOMAIN_TO_GOOGLE_TYPES),
|
||||
'alexa_enabled': cloud.alexa_enabled,
|
||||
'alexa_entities': cloud.alexa_config.should_expose.config,
|
||||
'alexa_domains': list(alexa_sh.ENTITY_ADAPTERS),
|
||||
}
|
||||
|
|
|
@ -227,11 +227,9 @@ def async_handle_message(hass, cloud, handler_name, payload):
|
|||
@asyncio.coroutine
|
||||
def async_handle_alexa(hass, cloud, payload):
|
||||
"""Handle an incoming IoT message for Alexa."""
|
||||
if not cloud.alexa_enabled:
|
||||
return alexa.turned_off_response(payload)
|
||||
|
||||
result = yield from alexa.async_handle_message(
|
||||
hass, cloud.alexa_config, payload)
|
||||
hass, cloud.alexa_config, payload,
|
||||
enabled=cloud.alexa_enabled)
|
||||
return result
|
||||
|
||||
|
||||
|
|
|
@ -5,8 +5,7 @@ For more details about this platform, please refer to the documentation at
|
|||
https://home-assistant.io/components/cover.deconz/
|
||||
"""
|
||||
from homeassistant.components.deconz.const import (
|
||||
COVER_TYPES, DAMPERS, DOMAIN as DATA_DECONZ, DATA_DECONZ_ID,
|
||||
DATA_DECONZ_UNSUB, DECONZ_DOMAIN, WINDOW_COVERS)
|
||||
COVER_TYPES, DAMPERS, DOMAIN as DATA_DECONZ, DECONZ_DOMAIN, WINDOW_COVERS)
|
||||
from homeassistant.components.cover import (
|
||||
ATTR_POSITION, CoverDevice, SUPPORT_CLOSE, SUPPORT_OPEN, SUPPORT_STOP,
|
||||
SUPPORT_SET_POSITION)
|
||||
|
@ -42,10 +41,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
|
|||
entities.append(DeconzCover(light))
|
||||
async_add_entities(entities, True)
|
||||
|
||||
hass.data[DATA_DECONZ_UNSUB].append(
|
||||
hass.data[DATA_DECONZ].listeners.append(
|
||||
async_dispatcher_connect(hass, 'deconz_new_light', async_add_cover))
|
||||
|
||||
async_add_cover(hass.data[DATA_DECONZ].lights.values())
|
||||
async_add_cover(hass.data[DATA_DECONZ].api.lights.values())
|
||||
|
||||
|
||||
class DeconzCover(CoverDevice):
|
||||
|
@ -62,7 +61,8 @@ class DeconzCover(CoverDevice):
|
|||
async def async_added_to_hass(self):
|
||||
"""Subscribe to covers events."""
|
||||
self._cover.register_async_callback(self.async_update_callback)
|
||||
self.hass.data[DATA_DECONZ_ID][self.entity_id] = self._cover.deconz_id
|
||||
self.hass.data[DATA_DECONZ].deconz_ids[self.entity_id] = \
|
||||
self._cover.deconz_id
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Disconnect cover object when removed."""
|
||||
|
@ -103,7 +103,6 @@ class DeconzCover(CoverDevice):
|
|||
return 'damper'
|
||||
if self._cover.type in WINDOW_COVERS:
|
||||
return 'window'
|
||||
return None
|
||||
|
||||
@property
|
||||
def supported_features(self):
|
||||
|
@ -151,7 +150,7 @@ class DeconzCover(CoverDevice):
|
|||
self._cover.uniqueid.count(':') != 7):
|
||||
return None
|
||||
serial = self._cover.uniqueid.split('-', 1)[0]
|
||||
bridgeid = self.hass.data[DATA_DECONZ].config.bridgeid
|
||||
bridgeid = self.hass.data[DATA_DECONZ].api.config.bridgeid
|
||||
return {
|
||||
'connections': {(CONNECTION_ZIGBEE, serial)},
|
||||
'identifiers': {(DECONZ_DOMAIN, serial)},
|
||||
|
|
|
@ -106,7 +106,7 @@ class GaradgetCover(CoverDevice):
|
|||
self._state = STATE_OFFLINE
|
||||
self._available = False
|
||||
self._name = DEFAULT_NAME
|
||||
except KeyError as ex:
|
||||
except KeyError:
|
||||
_LOGGER.warning("Garadget device %(device)s seems to be offline",
|
||||
dict(device=self.device_id))
|
||||
self._name = DEFAULT_NAME
|
||||
|
@ -235,7 +235,7 @@ class GaradgetCover(CoverDevice):
|
|||
_LOGGER.error(
|
||||
"Unable to connect to server: %(reason)s", dict(reason=ex))
|
||||
self._state = STATE_OFFLINE
|
||||
except KeyError as ex:
|
||||
except KeyError:
|
||||
_LOGGER.warning("Garadget device %(device)s seems to be offline",
|
||||
dict(device=self.device_id))
|
||||
self._state = STATE_OFFLINE
|
||||
|
|
|
@ -34,9 +34,11 @@ _LOGGER = logging.getLogger(__name__)
|
|||
|
||||
DEPENDENCIES = ['mqtt']
|
||||
|
||||
CONF_GET_POSITION_TOPIC = 'position_topic'
|
||||
|
||||
CONF_TILT_COMMAND_TOPIC = 'tilt_command_topic'
|
||||
CONF_TILT_STATUS_TOPIC = 'tilt_status_topic'
|
||||
CONF_POSITION_TOPIC = 'set_position_topic'
|
||||
CONF_SET_POSITION_TOPIC = 'set_position_topic'
|
||||
CONF_SET_POSITION_TEMPLATE = 'set_position_template'
|
||||
|
||||
CONF_PAYLOAD_OPEN = 'payload_open'
|
||||
|
@ -44,6 +46,8 @@ CONF_PAYLOAD_CLOSE = 'payload_close'
|
|||
CONF_PAYLOAD_STOP = 'payload_stop'
|
||||
CONF_STATE_OPEN = 'state_open'
|
||||
CONF_STATE_CLOSED = 'state_closed'
|
||||
CONF_POSITION_OPEN = 'position_open'
|
||||
CONF_POSITION_CLOSED = 'position_closed'
|
||||
CONF_TILT_CLOSED_POSITION = 'tilt_closed_value'
|
||||
CONF_TILT_OPEN_POSITION = 'tilt_opened_value'
|
||||
CONF_TILT_MIN = 'tilt_min'
|
||||
|
@ -52,10 +56,15 @@ CONF_TILT_STATE_OPTIMISTIC = 'tilt_optimistic'
|
|||
CONF_TILT_INVERT_STATE = 'tilt_invert_state'
|
||||
CONF_UNIQUE_ID = 'unique_id'
|
||||
|
||||
TILT_PAYLOAD = "tilt"
|
||||
COVER_PAYLOAD = "cover"
|
||||
|
||||
DEFAULT_NAME = 'MQTT Cover'
|
||||
DEFAULT_PAYLOAD_OPEN = 'OPEN'
|
||||
DEFAULT_PAYLOAD_CLOSE = 'CLOSE'
|
||||
DEFAULT_PAYLOAD_STOP = 'STOP'
|
||||
DEFAULT_POSITION_OPEN = 100
|
||||
DEFAULT_POSITION_CLOSED = 0
|
||||
DEFAULT_OPTIMISTIC = False
|
||||
DEFAULT_RETAIN = False
|
||||
DEFAULT_TILT_CLOSED_POSITION = 0
|
||||
|
@@ -69,11 +78,25 @@ OPEN_CLOSE_FEATURES = (SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_STOP)
TILT_FEATURES = (SUPPORT_OPEN_TILT | SUPPORT_CLOSE_TILT | SUPPORT_STOP_TILT |
                 SUPPORT_SET_TILT_POSITION)

PLATFORM_SCHEMA = mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend({

def validate_options(value):
    """Validate options.

    If set position topic is set then get position topic is set as well.
    """
    if (CONF_SET_POSITION_TOPIC in value and
            CONF_GET_POSITION_TOPIC not in value):
        raise vol.Invalid(
            "Set position topic must be set together with get position topic.")
    return value


PLATFORM_SCHEMA = vol.All(mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_COMMAND_TOPIC): valid_publish_topic,
    vol.Optional(CONF_POSITION_TOPIC): valid_publish_topic,
    vol.Optional(CONF_SET_POSITION_TOPIC): valid_publish_topic,
    vol.Optional(CONF_SET_POSITION_TEMPLATE): cv.template,
    vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
    vol.Optional(CONF_GET_POSITION_TOPIC): valid_subscribe_topic,
    vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic,
    vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,

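The validator above requires position_topic to be present whenever set_position_topic is configured. A configuration sketch that satisfies it, with all topics and limits as placeholders:

# Sketch of an MQTT cover using the new position topics; topics and limits
# are placeholders.
cover:
  - platform: mqtt
    name: Bedroom blind
    command_topic: 'home/bedroom/blind/set'
    position_topic: 'home/bedroom/blind/position'
    set_position_topic: 'home/bedroom/blind/position/set'
    position_open: 100
    position_closed: 0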
@ -82,6 +105,10 @@ PLATFORM_SCHEMA = mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend({
|
|||
vol.Optional(CONF_PAYLOAD_STOP, default=DEFAULT_PAYLOAD_STOP): cv.string,
|
||||
vol.Optional(CONF_STATE_OPEN, default=STATE_OPEN): cv.string,
|
||||
vol.Optional(CONF_STATE_CLOSED, default=STATE_CLOSED): cv.string,
|
||||
vol.Optional(CONF_POSITION_OPEN,
|
||||
default=DEFAULT_POSITION_OPEN): int,
|
||||
vol.Optional(CONF_POSITION_CLOSED,
|
||||
default=DEFAULT_POSITION_CLOSED): int,
|
||||
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
|
||||
vol.Optional(CONF_TILT_COMMAND_TOPIC): valid_publish_topic,
|
||||
vol.Optional(CONF_TILT_STATUS_TOPIC): valid_subscribe_topic,
|
||||
|
@ -97,7 +124,7 @@ PLATFORM_SCHEMA = mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend({
|
|||
default=DEFAULT_TILT_INVERT_STATE): cv.boolean,
|
||||
vol.Optional(CONF_UNIQUE_ID): cv.string,
|
||||
vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA,
|
||||
}).extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema)
|
||||
}).extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema), validate_options)
|
||||
|
||||
|
||||
async def async_setup_platform(hass: HomeAssistantType, config: ConfigType,
|
||||
|
@ -132,6 +159,7 @@ async def _async_setup_entity(hass, config, async_add_entities,
|
|||
async_add_entities([MqttCover(
|
||||
config.get(CONF_NAME),
|
||||
config.get(CONF_STATE_TOPIC),
|
||||
config.get(CONF_GET_POSITION_TOPIC),
|
||||
config.get(CONF_COMMAND_TOPIC),
|
||||
config.get(CONF_AVAILABILITY_TOPIC),
|
||||
config.get(CONF_TILT_COMMAND_TOPIC),
|
||||
|
@ -140,6 +168,8 @@ async def _async_setup_entity(hass, config, async_add_entities,
|
|||
config.get(CONF_RETAIN),
|
||||
config.get(CONF_STATE_OPEN),
|
||||
config.get(CONF_STATE_CLOSED),
|
||||
config.get(CONF_POSITION_OPEN),
|
||||
config.get(CONF_POSITION_CLOSED),
|
||||
config.get(CONF_PAYLOAD_OPEN),
|
||||
config.get(CONF_PAYLOAD_CLOSE),
|
||||
config.get(CONF_PAYLOAD_STOP),
|
||||
|
@ -153,7 +183,7 @@ async def _async_setup_entity(hass, config, async_add_entities,
|
|||
config.get(CONF_TILT_MAX),
|
||||
config.get(CONF_TILT_STATE_OPTIMISTIC),
|
||||
config.get(CONF_TILT_INVERT_STATE),
|
||||
config.get(CONF_POSITION_TOPIC),
|
||||
config.get(CONF_SET_POSITION_TOPIC),
|
||||
set_position_template,
|
||||
config.get(CONF_UNIQUE_ID),
|
||||
config.get(CONF_DEVICE),
|
||||
|
@ -165,15 +195,16 @@ class MqttCover(MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo,
|
|||
CoverDevice):
|
||||
"""Representation of a cover that can be controlled using MQTT."""
|
||||
|
||||
def __init__(self, name, state_topic, command_topic, availability_topic,
|
||||
def __init__(self, name, state_topic, get_position_topic,
|
||||
command_topic, availability_topic,
|
||||
tilt_command_topic, tilt_status_topic, qos, retain,
|
||||
state_open, state_closed, payload_open, payload_close,
|
||||
payload_stop, payload_available, payload_not_available,
|
||||
optimistic, value_template, tilt_open_position,
|
||||
tilt_closed_position, tilt_min, tilt_max, tilt_optimistic,
|
||||
tilt_invert, position_topic, set_position_template,
|
||||
unique_id: Optional[str], device_config: Optional[ConfigType],
|
||||
discovery_hash):
|
||||
state_open, state_closed, position_open, position_closed,
|
||||
payload_open, payload_close, payload_stop, payload_available,
|
||||
payload_not_available, optimistic, value_template,
|
||||
tilt_open_position, tilt_closed_position, tilt_min, tilt_max,
|
||||
tilt_optimistic, tilt_invert, set_position_topic,
|
||||
set_position_template, unique_id: Optional[str],
|
||||
device_config: Optional[ConfigType], discovery_hash):
|
||||
"""Initialize the cover."""
|
||||
MqttAvailability.__init__(self, availability_topic, qos,
|
||||
payload_available, payload_not_available)
|
||||
|
@ -183,6 +214,7 @@ class MqttCover(MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo,
|
|||
self._state = None
|
||||
self._name = name
|
||||
self._state_topic = state_topic
|
||||
self._get_position_topic = get_position_topic
|
||||
self._command_topic = command_topic
self._tilt_command_topic = tilt_command_topic
self._tilt_status_topic = tilt_status_topic

@ -192,17 +224,20 @@ class MqttCover(MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo,
self._payload_stop = payload_stop
self._state_open = state_open
self._state_closed = state_closed
self._position_open = position_open
self._position_closed = position_closed
self._retain = retain
self._tilt_open_position = tilt_open_position
self._tilt_closed_position = tilt_closed_position
self._optimistic = optimistic or state_topic is None
self._optimistic = (optimistic or (state_topic is None and
get_position_topic is None))
self._template = value_template
self._tilt_value = None
self._tilt_min = tilt_min
self._tilt_max = tilt_max
self._tilt_optimistic = tilt_optimistic
self._tilt_invert = tilt_invert
self._position_topic = position_topic
self._set_position_topic = set_position_topic
self._set_position_template = set_position_template
self._unique_id = unique_id
self._discovery_hash = discovery_hash

@ -233,27 +268,43 @@ class MqttCover(MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo,
self._state = False
elif payload == self._state_closed:
self._state = True
elif payload.isnumeric() and 0 <= int(payload) <= 100:
if int(payload) > 0:
self._state = False
else:
self._state = True
self._position = int(payload)
else:
_LOGGER.warning(
"Payload is not True, False, or integer (0-100): %s",
payload)
_LOGGER.warning("Payload is not True or False: %s", payload)
return

self.async_schedule_update_ha_state()

if self._state_topic is None:
# Force into optimistic mode.
self._optimistic = True
else:
@callback
def position_message_received(topic, payload, qos):
"""Handle new MQTT state messages."""
if self._template is not None:
payload = self._template.async_render_with_possible_json_value(
payload)
if payload.isnumeric():
if 0 <= int(payload) <= 100:
percentage_payload = int(payload)
else:
percentage_payload = self.find_percentage_in_range(
float(payload), COVER_PAYLOAD)
if 0 <= percentage_payload <= 100:
self._position = percentage_payload
self._state = self._position == 0
else:
_LOGGER.warning(
"Payload is not integer within range: %s",
payload)
return
self.async_schedule_update_ha_state()
if self._get_position_topic:
await mqtt.async_subscribe(
self.hass, self._get_position_topic,
position_message_received, self._qos)
elif self._state_topic:
await mqtt.async_subscribe(
self.hass, self._state_topic,
state_message_received, self._qos)
else:
# Force into optimistic mode.
self._optimistic = True

if self._tilt_status_topic is None:
self._tilt_optimistic = True

@ -303,7 +354,7 @@ class MqttCover(MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo,
if self._command_topic is not None:
supported_features = OPEN_CLOSE_FEATURES

if self._position_topic is not None:
if self._set_position_topic is not None:
supported_features |= SUPPORT_SET_POSITION

if self._tilt_command_topic is not None:

@ -322,6 +373,8 @@ class MqttCover(MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo,
if self._optimistic:
# Optimistically assume that cover has changed state.
self._state = False
if self._get_position_topic:
self._position = self._position_open
self.async_schedule_update_ha_state()

async def async_close_cover(self, **kwargs):

@ -335,6 +388,8 @@ class MqttCover(MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo,
if self._optimistic:
# Optimistically assume that cover has changed state.
self._state = True
if self._get_position_topic:
self._position = self._position_closed
self.async_schedule_update_ha_state()

async def async_stop_cover(self, **kwargs):

@ -381,6 +436,7 @@ class MqttCover(MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo,
"""Move the cover to a specific position."""
if ATTR_POSITION in kwargs:
position = kwargs[ATTR_POSITION]
percentage_position = position
if self._set_position_template is not None:
try:
position = self._set_position_template.async_render(

@ -388,23 +444,36 @@ class MqttCover(MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo,
except TemplateError as ex:
_LOGGER.error(ex)
self._state = None
elif self._position_open != 100 and self._position_closed != 0:
position = self.find_in_range_from_percent(
position, COVER_PAYLOAD)

mqtt.async_publish(self.hass, self._position_topic,
mqtt.async_publish(self.hass, self._set_position_topic,
position, self._qos, self._retain)
if self._optimistic:
self._state = percentage_position == 0
self._position = percentage_position
self.async_schedule_update_ha_state()

def find_percentage_in_range(self, position):
def find_percentage_in_range(self, position, range_type=TILT_PAYLOAD):
"""Find the 0-100% value within the specified range."""
# the range of motion as defined by the min max values
tilt_range = self._tilt_max - self._tilt_min
if range_type == COVER_PAYLOAD:
max_range = self._position_open
min_range = self._position_closed
else:
max_range = self._tilt_max
min_range = self._tilt_min
current_range = max_range - min_range
# offset to be zero based
offset_position = position - self._tilt_min
# the percentage value within the range
position_percentage = float(offset_position) / tilt_range * 100.0
if self._tilt_invert:
offset_position = position - min_range
position_percentage = round(
float(offset_position) / current_range * 100.0)
if range_type == TILT_PAYLOAD and self._tilt_invert:
return 100 - position_percentage
return position_percentage

def find_in_range_from_percent(self, percentage):
def find_in_range_from_percent(self, percentage, range_type=TILT_PAYLOAD):
"""
Find the adjusted value for 0-100% within the specified range.

@ -413,14 +482,19 @@ class MqttCover(MqttAvailability, MqttDiscoveryUpdate, MqttEntityDeviceInfo,
by offsetting the max and min, getting the percentage value and
returning the offset
"""
offset = self._tilt_min
tilt_range = self._tilt_max - self._tilt_min

position = round(tilt_range * (percentage / 100.0))
if range_type == COVER_PAYLOAD:
max_range = self._position_open
min_range = self._position_closed
else:
max_range = self._tilt_max
min_range = self._tilt_min
offset = min_range
current_range = max_range - min_range
position = round(current_range * (percentage / 100.0))
position += offset

if self._tilt_invert:
position = self._tilt_max - position + offset
if range_type == TILT_PAYLOAD and self._tilt_invert:
position = max_range - position + offset
return position

@property
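The two helpers above are the heart of this hunk: with range_type=COVER_PAYLOAD they now translate between the 0-100% position Home Assistant exposes and the raw range the device actually reports (position_open/position_closed), while the default TILT_PAYLOAD keeps the old tilt behaviour. A minimal standalone sketch of the same arithmetic; the 0-255 device range is an illustrative assumption, not taken from the diff:

# Standalone sketch of the range conversion used by MqttCover above.
# The 0-255 device range is an assumed example, not part of the diff.

def find_in_range_from_percent(percentage, min_range=0, max_range=255):
    """Map a 0-100% value onto the device's raw range."""
    current_range = max_range - min_range
    return round(current_range * (percentage / 100.0)) + min_range

def find_percentage_in_range(position, min_range=0, max_range=255):
    """Map a raw device position back onto 0-100%."""
    current_range = max_range - min_range
    return round(float(position - min_range) / current_range * 100.0)

assert find_in_range_from_percent(50) == 128   # 50% of 0-255, offset by min
assert find_percentage_in_range(128) == 50     # and back to a percentage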

@ -25,7 +25,7 @@
"allow_clip_sensor": "Permitir a importa\u00e7\u00e3o de sensores virtuais",
"allow_deconz_groups": "Permitir a importa\u00e7\u00e3o de grupos deCONZ"
},
"title": "Op\u00e7\u00f5es extra de configura\u00e7\u00e3o para deCONZ"
"title": "Op\u00e7\u00f5es de configura\u00e7\u00e3o extra para deCONZ"
}
},
"title": "Gateway Zigbee deCONZ"

@ -8,21 +8,15 @@ import voluptuous as vol
|
|||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.const import (
|
||||
CONF_API_KEY, CONF_EVENT, CONF_HOST,
|
||||
CONF_ID, CONF_PORT, EVENT_HOMEASSISTANT_STOP)
|
||||
from homeassistant.core import EventOrigin, callback
|
||||
from homeassistant.helpers import aiohttp_client, config_validation as cv
|
||||
CONF_API_KEY, CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_STOP)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_connect, async_dispatcher_send)
|
||||
from homeassistant.util import slugify
|
||||
from homeassistant.util.json import load_json
|
||||
|
||||
# Loading the config flow file will register the flow
|
||||
from .config_flow import configured_hosts
|
||||
from .const import (
|
||||
CONF_ALLOW_CLIP_SENSOR, CONFIG_FILE, DATA_DECONZ_EVENT,
|
||||
DATA_DECONZ_ID, DATA_DECONZ_UNSUB, DOMAIN, _LOGGER)
|
||||
from .const import CONFIG_FILE, DOMAIN, _LOGGER
|
||||
from .gateway import DeconzGateway
|
||||
|
||||
REQUIREMENTS = ['pydeconz==47']
|
||||
|
||||
|
@ -43,11 +37,11 @@ SERVICE_FIELD = 'field'
|
|||
SERVICE_ENTITY = 'entity'
|
||||
SERVICE_DATA = 'data'
|
||||
|
||||
SERVICE_SCHEMA = vol.Schema({
|
||||
vol.Exclusive(SERVICE_FIELD, 'deconz_id'): cv.string,
|
||||
vol.Exclusive(SERVICE_ENTITY, 'deconz_id'): cv.entity_id,
|
||||
SERVICE_SCHEMA = vol.All(vol.Schema({
|
||||
vol.Optional(SERVICE_ENTITY): cv.entity_id,
|
||||
vol.Optional(SERVICE_FIELD): cv.matches_regex('/.*'),
|
||||
vol.Required(SERVICE_DATA): dict,
|
||||
})
|
||||
}), cv.has_at_least_one_key(SERVICE_ENTITY, SERVICE_FIELD))
|
||||
|
||||
SERVICE_DEVICE_REFRESH = 'device_refresh'
|
||||
|
||||
|
@ -80,68 +74,34 @@ async def async_setup_entry(hass, config_entry):
|
|||
Load config, group, light and sensor data for server information.
|
||||
Start websocket for push notification of state changes from deCONZ.
|
||||
"""
|
||||
from pydeconz import DeconzSession
|
||||
if DOMAIN in hass.data:
|
||||
_LOGGER.error(
|
||||
"Config entry failed since one deCONZ instance already exists")
|
||||
return False
|
||||
|
||||
@callback
|
||||
def async_add_device_callback(device_type, device):
|
||||
"""Handle event of new device creation in deCONZ."""
|
||||
if not isinstance(device, list):
|
||||
device = [device]
|
||||
async_dispatcher_send(
|
||||
hass, 'deconz_new_{}'.format(device_type), device)
|
||||
gateway = DeconzGateway(hass, config_entry)
|
||||
|
||||
session = aiohttp_client.async_get_clientsession(hass)
|
||||
deconz = DeconzSession(hass.loop, session, **config_entry.data,
|
||||
async_add_device=async_add_device_callback)
|
||||
result = await deconz.async_load_parameters()
|
||||
hass.data[DOMAIN] = gateway
|
||||
|
||||
if result is False:
|
||||
if not await gateway.async_setup():
|
||||
return False
|
||||
|
||||
hass.data[DOMAIN] = deconz
|
||||
hass.data[DATA_DECONZ_ID] = {}
|
||||
hass.data[DATA_DECONZ_EVENT] = []
|
||||
hass.data[DATA_DECONZ_UNSUB] = []
|
||||
|
||||
for component in SUPPORTED_PLATFORMS:
|
||||
hass.async_create_task(hass.config_entries.async_forward_entry_setup(
|
||||
config_entry, component))
|
||||
|
||||
@callback
|
||||
def async_add_remote(sensors):
|
||||
"""Set up remote from deCONZ."""
|
||||
from pydeconz.sensor import SWITCH as DECONZ_REMOTE
|
||||
allow_clip_sensor = config_entry.data.get(CONF_ALLOW_CLIP_SENSOR, True)
|
||||
for sensor in sensors:
|
||||
if sensor.type in DECONZ_REMOTE and \
|
||||
not (not allow_clip_sensor and sensor.type.startswith('CLIP')):
|
||||
hass.data[DATA_DECONZ_EVENT].append(DeconzEvent(hass, sensor))
|
||||
hass.data[DATA_DECONZ_UNSUB].append(
|
||||
async_dispatcher_connect(hass, 'deconz_new_sensor', async_add_remote))
|
||||
|
||||
async_add_remote(deconz.sensors.values())
|
||||
|
||||
deconz.start()
|
||||
|
||||
device_registry = await \
|
||||
hass.helpers.device_registry.async_get_registry()
|
||||
device_registry.async_get_or_create(
|
||||
config_entry_id=config_entry.entry_id,
|
||||
connections={(CONNECTION_NETWORK_MAC, deconz.config.mac)},
|
||||
identifiers={(DOMAIN, deconz.config.bridgeid)},
|
||||
manufacturer='Dresden Elektronik', model=deconz.config.modelid,
|
||||
name=deconz.config.name, sw_version=deconz.config.swversion)
|
||||
connections={(CONNECTION_NETWORK_MAC, gateway.api.config.mac)},
|
||||
identifiers={(DOMAIN, gateway.api.config.bridgeid)},
|
||||
manufacturer='Dresden Elektronik', model=gateway.api.config.modelid,
|
||||
name=gateway.api.config.name, sw_version=gateway.api.config.swversion)
|
||||
|
||||
async def async_configure(call):
|
||||
"""Set attribute of device in deCONZ.
|
||||
|
||||
Field is a string representing a specific device in deCONZ
|
||||
e.g. field='/lights/1/state'.
|
||||
Entity_id can be used to retrieve the proper field.
|
||||
Entity is used to resolve to a device path (e.g. '/lights/1').
|
||||
Field is a string representing either a full path
|
||||
(e.g. '/lights/1/state') when entity is not specified, or a
|
||||
subpath (e.g. '/state') when used together with entity.
|
||||
Data is a json object with what data you want to alter
|
||||
e.g. data={'on': true}.
|
||||
{
|
||||
|
@ -151,128 +111,69 @@ async def async_setup_entry(hass, config_entry):
|
|||
See Dresden Elektroniks REST API documentation for details:
|
||||
http://dresden-elektronik.github.io/deconz-rest-doc/rest/
|
||||
"""
|
||||
field = call.data.get(SERVICE_FIELD)
|
||||
field = call.data.get(SERVICE_FIELD, '')
|
||||
entity_id = call.data.get(SERVICE_ENTITY)
|
||||
data = call.data.get(SERVICE_DATA)
|
||||
deconz = hass.data[DOMAIN]
|
||||
gateway = hass.data[DOMAIN]
|
||||
|
||||
if entity_id:
|
||||
|
||||
entities = hass.data.get(DATA_DECONZ_ID)
|
||||
|
||||
if entities:
|
||||
field = entities.get(entity_id)
|
||||
|
||||
if field is None:
|
||||
try:
|
||||
field = gateway.deconz_ids[entity_id] + field
|
||||
except KeyError:
|
||||
_LOGGER.error('Could not find the entity %s', entity_id)
|
||||
return
|
||||
|
||||
await deconz.async_put_state(field, data)
|
||||
await gateway.api.async_put_state(field, data)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_DECONZ, async_configure, schema=SERVICE_SCHEMA)
|
||||
|
||||
async def async_refresh_devices(call):
|
||||
"""Refresh available devices from deCONZ."""
|
||||
deconz = hass.data[DOMAIN]
|
||||
gateway = hass.data[DOMAIN]
|
||||
|
||||
groups = list(deconz.groups.keys())
|
||||
lights = list(deconz.lights.keys())
|
||||
scenes = list(deconz.scenes.keys())
|
||||
sensors = list(deconz.sensors.keys())
|
||||
groups = set(gateway.api.groups.keys())
|
||||
lights = set(gateway.api.lights.keys())
|
||||
scenes = set(gateway.api.scenes.keys())
|
||||
sensors = set(gateway.api.sensors.keys())
|
||||
|
||||
if not await deconz.async_load_parameters():
|
||||
if not await gateway.api.async_load_parameters():
|
||||
return
|
||||
|
||||
async_add_device_callback(
|
||||
gateway.async_add_device_callback(
|
||||
'group', [group
|
||||
for group_id, group in deconz.groups.items()
|
||||
for group_id, group in gateway.api.groups.items()
|
||||
if group_id not in groups]
|
||||
)
|
||||
|
||||
async_add_device_callback(
|
||||
gateway.async_add_device_callback(
|
||||
'light', [light
|
||||
for light_id, light in deconz.lights.items()
|
||||
for light_id, light in gateway.api.lights.items()
|
||||
if light_id not in lights]
|
||||
)
|
||||
|
||||
async_add_device_callback(
|
||||
gateway.async_add_device_callback(
|
||||
'scene', [scene
|
||||
for scene_id, scene in deconz.scenes.items()
|
||||
for scene_id, scene in gateway.api.scenes.items()
|
||||
if scene_id not in scenes]
|
||||
)
|
||||
|
||||
async_add_device_callback(
|
||||
gateway.async_add_device_callback(
|
||||
'sensor', [sensor
|
||||
for sensor_id, sensor in deconz.sensors.items()
|
||||
for sensor_id, sensor in gateway.api.sensors.items()
|
||||
if sensor_id not in sensors]
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_DEVICE_REFRESH, async_refresh_devices)
|
||||
|
||||
@callback
|
||||
def deconz_shutdown(event):
|
||||
"""
|
||||
Wrap the call to deconz.close.
|
||||
|
||||
Used as an argument to EventBus.async_listen_once - EventBus calls
|
||||
this method with the event as the first argument, which should not
|
||||
be passed on to deconz.close.
|
||||
"""
|
||||
deconz.close()
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, deconz_shutdown)
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, gateway.shutdown)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass, config_entry):
|
||||
"""Unload deCONZ config entry."""
|
||||
deconz = hass.data.pop(DOMAIN)
|
||||
gateway = hass.data.pop(DOMAIN)
|
||||
hass.services.async_remove(DOMAIN, SERVICE_DECONZ)
|
||||
deconz.close()
|
||||
|
||||
for component in SUPPORTED_PLATFORMS:
|
||||
await hass.config_entries.async_forward_entry_unload(
|
||||
config_entry, component)
|
||||
|
||||
dispatchers = hass.data[DATA_DECONZ_UNSUB]
|
||||
for unsub_dispatcher in dispatchers:
|
||||
unsub_dispatcher()
|
||||
hass.data[DATA_DECONZ_UNSUB] = []
|
||||
|
||||
for event in hass.data[DATA_DECONZ_EVENT]:
|
||||
event.async_will_remove_from_hass()
|
||||
hass.data[DATA_DECONZ_EVENT].remove(event)
|
||||
|
||||
hass.data[DATA_DECONZ_ID] = []
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class DeconzEvent:
|
||||
"""When you want signals instead of entities.
|
||||
|
||||
Stateless sensors such as remotes are expected to generate an event
|
||||
instead of a sensor entity in hass.
|
||||
"""
|
||||
|
||||
def __init__(self, hass, device):
|
||||
"""Register callback that will be used for signals."""
|
||||
self._hass = hass
|
||||
self._device = device
|
||||
self._device.register_async_callback(self.async_update_callback)
|
||||
self._event = 'deconz_{}'.format(CONF_EVENT)
|
||||
self._id = slugify(self._device.name)
|
||||
|
||||
@callback
|
||||
def async_will_remove_from_hass(self) -> None:
|
||||
"""Disconnect event object when removed."""
|
||||
self._device.remove_callback(self.async_update_callback)
|
||||
self._device = None
|
||||
|
||||
@callback
|
||||
def async_update_callback(self, reason):
|
||||
"""Fire the event if reason is that state is updated."""
|
||||
if reason['state']:
|
||||
data = {CONF_ID: self._id, CONF_EVENT: self._device.state}
|
||||
self._hass.bus.async_fire(self._event, data, EventOrigin.remote)
|
||||
hass.services.async_remove(DOMAIN, SERVICE_DEVICE_REFRESH)
|
||||
return await gateway.async_reset()
|
||||

@ -35,10 +35,6 @@ class DeconzFlowHandler(config_entries.ConfigFlow):
self.deconz_config = {}

async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
return await self.async_step_init(user_input)

async def async_step_init(self, user_input=None):
"""Handle a deCONZ config flow start.

Only allows one instance to be set up.

@ -67,7 +63,7 @@ class DeconzFlowHandler(config_entries.ConfigFlow):
for bridge in self.bridges:
hosts.append(bridge[CONF_HOST])
return self.async_show_form(
step_id='init',
step_id='user',
data_schema=vol.Schema({
vol.Required(CONF_HOST): vol.In(hosts)
})

@ -13,6 +13,9 @@ DECONZ_DOMAIN = 'deconz'
CONF_ALLOW_CLIP_SENSOR = 'allow_clip_sensor'
CONF_ALLOW_DECONZ_GROUPS = 'allow_deconz_groups'

SUPPORTED_PLATFORMS = ['binary_sensor', 'cover',
'light', 'scene', 'sensor', 'switch']

ATTR_DARK = 'dark'
ATTR_ON = 'on'

165
homeassistant/components/deconz/gateway.py
Normal file

@ -0,0 +1,165 @@
"""Representation of a deCONZ gateway."""
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.const import CONF_EVENT, CONF_ID
|
||||
from homeassistant.core import EventOrigin, callback
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_connect, async_dispatcher_send)
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from .const import (
|
||||
_LOGGER, CONF_ALLOW_CLIP_SENSOR, SUPPORTED_PLATFORMS)
|
||||
|
||||
|
||||
class DeconzGateway:
|
||||
"""Manages a single deCONZ gateway."""
|
||||
|
||||
def __init__(self, hass, config_entry):
|
||||
"""Initialize the system."""
|
||||
self.hass = hass
|
||||
self.config_entry = config_entry
|
||||
self.api = None
|
||||
self._cancel_retry_setup = None
|
||||
|
||||
self.deconz_ids = {}
|
||||
self.events = []
|
||||
self.listeners = []
|
||||
|
||||
async def async_setup(self, tries=0):
|
||||
"""Set up a deCONZ gateway."""
|
||||
hass = self.hass
|
||||
|
||||
self.api = await get_gateway(
|
||||
hass, self.config_entry.data, self.async_add_device_callback
|
||||
)
|
||||
|
||||
if self.api is False:
|
||||
retry_delay = 2 ** (tries + 1)
|
||||
_LOGGER.error(
|
||||
"Error connecting to deCONZ gateway. Retrying in %d seconds",
|
||||
retry_delay)
|
||||
|
||||
async def retry_setup(_now):
|
||||
"""Retry setup."""
|
||||
if await self.async_setup(tries + 1):
|
||||
# This feels hacky, we should find a better way to do this
|
||||
self.config_entry.state = config_entries.ENTRY_STATE_LOADED
|
||||
|
||||
self._cancel_retry_setup = hass.helpers.event.async_call_later(
|
||||
retry_delay, retry_setup)
|
||||
|
||||
return False
|
||||
|
||||
for component in SUPPORTED_PLATFORMS:
|
||||
hass.async_create_task(
|
||||
hass.config_entries.async_forward_entry_setup(
|
||||
self.config_entry, component))
|
||||
|
||||
self.listeners.append(
|
||||
async_dispatcher_connect(
|
||||
hass, 'deconz_new_sensor', self.async_add_remote))
|
||||
|
||||
self.async_add_remote(self.api.sensors.values())
|
||||
|
||||
self.api.start()
|
||||
|
||||
return True
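The failure branch above retries with exponential backoff: each unsuccessful attempt schedules the next one 2 ** (tries + 1) seconds later. A quick illustration of the resulting schedule:

# First few retry delays produced by the backoff in async_setup above.
delays = [2 ** (tries + 1) for tries in range(5)]
print(delays)  # [2, 4, 8, 16, 32] seconds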
|
||||
|
||||
@callback
|
||||
def async_add_device_callback(self, device_type, device):
|
||||
"""Handle event of new device creation in deCONZ."""
|
||||
if not isinstance(device, list):
|
||||
device = [device]
|
||||
async_dispatcher_send(
|
||||
self.hass, 'deconz_new_{}'.format(device_type), device)
|
||||
|
||||
@callback
|
||||
def async_add_remote(self, sensors):
|
||||
"""Set up remote from deCONZ."""
|
||||
from pydeconz.sensor import SWITCH as DECONZ_REMOTE
|
||||
allow_clip_sensor = self.config_entry.data.get(
|
||||
CONF_ALLOW_CLIP_SENSOR, True)
|
||||
for sensor in sensors:
|
||||
if sensor.type in DECONZ_REMOTE and \
|
||||
not (not allow_clip_sensor and sensor.type.startswith('CLIP')):
|
||||
self.events.append(DeconzEvent(self.hass, sensor))
|
||||
|
||||
@callback
|
||||
def shutdown(self, event):
|
||||
"""Wrap the call to deconz.close.
|
||||
|
||||
Used as an argument to EventBus.async_listen_once.
|
||||
"""
|
||||
self.api.close()
|
||||
|
||||
async def async_reset(self):
|
||||
"""Reset this gateway to default state.
|
||||
|
||||
Will cancel any scheduled setup retry and will unload
|
||||
the config entry.
|
||||
"""
|
||||
# If we have a retry scheduled, we were never setup.
|
||||
if self._cancel_retry_setup is not None:
|
||||
self._cancel_retry_setup()
|
||||
self._cancel_retry_setup = None
|
||||
return True
|
||||
|
||||
self.api.close()
|
||||
|
||||
for component in SUPPORTED_PLATFORMS:
|
||||
await self.hass.config_entries.async_forward_entry_unload(
|
||||
self.config_entry, component)
|
||||
|
||||
for unsub_dispatcher in self.listeners:
|
||||
unsub_dispatcher()
|
||||
self.listeners = []
|
||||
|
||||
for event in self.events:
|
||||
event.async_will_remove_from_hass()
|
||||
self.events.remove(event)
|
||||
|
||||
self.deconz_ids = {}
|
||||
return True
|
||||
|
||||
|
||||
async def get_gateway(hass, config, async_add_device_callback):
|
||||
"""Create a gateway object and verify configuration."""
|
||||
from pydeconz import DeconzSession
|
||||
|
||||
session = aiohttp_client.async_get_clientsession(hass)
|
||||
deconz = DeconzSession(hass.loop, session, **config,
|
||||
async_add_device=async_add_device_callback)
|
||||
result = await deconz.async_load_parameters()
|
||||
|
||||
if result:
|
||||
return deconz
|
||||
return result
|
||||
|
||||
|
||||
class DeconzEvent:
|
||||
"""When you want signals instead of entities.
|
||||
|
||||
Stateless sensors such as remotes are expected to generate an event
|
||||
instead of a sensor entity in hass.
|
||||
"""
|
||||
|
||||
def __init__(self, hass, device):
|
||||
"""Register callback that will be used for signals."""
|
||||
self._hass = hass
|
||||
self._device = device
|
||||
self._device.register_async_callback(self.async_update_callback)
|
||||
self._event = 'deconz_{}'.format(CONF_EVENT)
|
||||
self._id = slugify(self._device.name)
|
||||
|
||||
@callback
|
||||
def async_will_remove_from_hass(self) -> None:
|
||||
"""Disconnect event object when removed."""
|
||||
self._device.remove_callback(self.async_update_callback)
|
||||
self._device = None
|
||||
|
||||
@callback
|
||||
def async_update_callback(self, reason):
|
||||
"""Fire the event if reason is that state is updated."""
|
||||
if reason['state']:
|
||||
data = {CONF_ID: self._id, CONF_EVENT: self._device.state}
|
||||
self._hass.bus.async_fire(self._event, data, EventOrigin.remote)
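DeconzEvent fires a deconz_event on the Home Assistant event bus, carrying the slugified device name as id and the raw button state as event. A minimal listener sketched against the core bus API, assuming a hass instance is in scope; the remote_control_1 slug is an illustrative placeholder:

# Hypothetical listener for the events fired by DeconzEvent above.
# 'remote_control_1' is an assumed device slug, not taken from the diff.
from homeassistant.core import callback

@callback
def handle_deconz_event(event):
    """React to a deCONZ remote button press."""
    if event.data.get('id') == 'remote_control_1':
        print('Button code:', event.data.get('event'))

remove_listener = hass.bus.async_listen('deconz_event', handle_deconz_event)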

@ -1,12 +1,15 @@
configure:
description: Set attribute of device in deCONZ. See https://home-assistant.io/components/deconz/#device-services for details.
fields:
field:
description: Field is a string representing a specific device in deCONZ.
example: '/lights/1/state'
entity:
description: Entity id representing a specific device in deCONZ.
example: 'light.rgb_light'
field:
description: >-
Field is a string representing a full path to deCONZ endpoint (when
entity is not specified) or a subpath of the device path for the
entity (when entity is specified).
example: '"/lights/1/state" or "/state"'
data:
description: Data is a json object with what data you want to alter.
example: '{"on": true}'
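The reworded description mirrors the new resolution logic in async_configure: with entity set, field is a subpath appended to the entity's deCONZ path; without it, field must be the full path. A hedged sketch of calling the service from within a coroutine, assuming a running Home Assistant instance with hass in scope (the entity id reuses the example above):

# Sketch of invoking the deconz.configure service from a coroutine,
# assuming a running Home Assistant instance with `hass` in scope.
await hass.services.async_call(
    'deconz', 'configure',
    {
        'entity': 'light.rgb_light',   # resolved to e.g. '/lights/1'
        'field': '/state',             # subpath appended to the entity path
        'data': {'on': True},
    },
    blocking=True)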

@ -15,7 +15,7 @@ from homeassistant.components.light import (
ATTR_PROFILE, ATTR_TRANSITION, DOMAIN as DOMAIN_LIGHT)
from homeassistant.const import (
ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_HOME,
STATE_NOT_HOME)
STATE_NOT_HOME, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET)
from homeassistant.helpers.event import (
async_track_point_in_utc_time, async_track_state_change)
from homeassistant.helpers.sun import is_up, get_astral_event_next

@ -79,7 +79,7 @@ async def async_setup(hass, config):

Async friendly.
"""
next_setting = get_astral_event_next(hass, 'sunset')
next_setting = get_astral_event_next(hass, SUN_EVENT_SUNSET)
if not next_setting:
return None
return next_setting - LIGHT_TRANSITION_TIME * len(light_ids)

@ -123,7 +123,8 @@ async def async_setup(hass, config):
start_point + index * LIGHT_TRANSITION_TIME)

async_track_point_in_utc_time(hass, schedule_light_turn_on,
get_astral_event_next(hass, 'sunrise'))
get_astral_event_next(hass,
SUN_EVENT_SUNRISE))

# If the sun is already above horizon schedule the time-based pre-sun set
# event.

@ -153,7 +154,8 @@ async def async_setup(hass, config):
# Check this by seeing if current time is later then the point
# in time when we would start putting the lights on.
elif (start_point and
start_point < now < get_astral_event_next(hass, 'sunset')):
start_point < now < get_astral_event_next(hass,
SUN_EVENT_SUNSET)):

# Check for every light if it would be on if someone was home
# when the fading in started and turn it on if so

@ -699,8 +699,8 @@ def async_setup_scanner_platform(hass: HomeAssistantType, config: ConfigType,
seen.add(mac)

try:
extra_attributes = (await
scanner.async_get_extra_attributes(mac))
extra_attributes = \
await scanner.async_get_extra_attributes(mac)
except NotImplementedError:
extra_attributes = dict()

@ -5,10 +5,6 @@ For more details about this platform, please refer to the documentation at
|
|||
https://home-assistant.io/components/device_tracker.asuswrt/
|
||||
"""
|
||||
import logging
|
||||
import re
|
||||
import socket
|
||||
import telnetlib
|
||||
from collections import namedtuple
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
|
@ -19,7 +15,7 @@ from homeassistant.const import (
|
|||
CONF_HOST, CONF_PASSWORD, CONF_USERNAME, CONF_PORT, CONF_MODE,
|
||||
CONF_PROTOCOL)
|
||||
|
||||
REQUIREMENTS = ['pexpect==4.6.0']
|
||||
REQUIREMENTS = ['aioasuswrt==1.1.2']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@ -44,345 +40,53 @@ PLATFORM_SCHEMA = vol.All(
|
|||
}))
|
||||
|
||||
|
||||
_LEASES_CMD = 'cat /var/lib/misc/dnsmasq.leases'
|
||||
_LEASES_REGEX = re.compile(
|
||||
r'\w+\s' +
|
||||
r'(?P<mac>(([0-9a-f]{2}[:-]){5}([0-9a-f]{2})))\s' +
|
||||
r'(?P<ip>([0-9]{1,3}[\.]){3}[0-9]{1,3})\s' +
|
||||
r'(?P<host>([^\s]+))')
|
||||
|
||||
# Command to get both 5GHz and 2.4GHz clients
|
||||
_WL_CMD = 'for dev in `nvram get wl_ifnames`; do wl -i $dev assoclist; done'
|
||||
_WL_REGEX = re.compile(
|
||||
r'\w+\s' +
|
||||
r'(?P<mac>(([0-9A-F]{2}[:-]){5}([0-9A-F]{2})))')
|
||||
|
||||
_IP_NEIGH_CMD = 'ip neigh'
|
||||
_IP_NEIGH_REGEX = re.compile(
|
||||
r'(?P<ip>([0-9]{1,3}[\.]){3}[0-9]{1,3}|'
|
||||
r'([0-9a-fA-F]{1,4}:){1,7}[0-9a-fA-F]{0,4}(:[0-9a-fA-F]{1,4}){1,7})\s'
|
||||
r'\w+\s'
|
||||
r'\w+\s'
|
||||
r'(\w+\s(?P<mac>(([0-9a-f]{2}[:-]){5}([0-9a-f]{2}))))?\s'
|
||||
r'\s?(router)?'
|
||||
r'\s?(nud)?'
|
||||
r'(?P<status>(\w+))')
|
||||
|
||||
_ARP_CMD = 'arp -n'
|
||||
_ARP_REGEX = re.compile(
|
||||
r'.+\s' +
|
||||
r'\((?P<ip>([0-9]{1,3}[\.]){3}[0-9]{1,3})\)\s' +
|
||||
r'.+\s' +
|
||||
r'(?P<mac>(([0-9a-f]{2}[:-]){5}([0-9a-f]{2})))' +
|
||||
r'\s' +
|
||||
r'.*')
|
||||
|
||||
|
||||
def get_scanner(hass, config):
|
||||
async def async_get_scanner(hass, config):
|
||||
"""Validate the configuration and return an ASUS-WRT scanner."""
|
||||
scanner = AsusWrtDeviceScanner(config[DOMAIN])
|
||||
|
||||
await scanner.async_connect()
|
||||
return scanner if scanner.success_init else None
|
||||
|
||||
|
||||
def _parse_lines(lines, regex):
|
||||
"""Parse the lines using the given regular expression.
|
||||
|
||||
If a line can't be parsed it is logged and skipped in the output.
|
||||
"""
|
||||
results = []
|
||||
for line in lines:
|
||||
match = regex.search(line)
|
||||
if not match:
|
||||
_LOGGER.debug("Could not parse row: %s", line)
|
||||
continue
|
||||
results.append(match.groupdict())
|
||||
return results
|
||||
|
||||
|
||||
Device = namedtuple('Device', ['mac', 'ip', 'name'])
|
||||
|
||||
|
||||
class AsusWrtDeviceScanner(DeviceScanner):
|
||||
"""This class queries a router running ASUSWRT firmware."""
|
||||
|
||||
# Eighth attribute needed for mode (AP mode vs router mode)
|
||||
def __init__(self, config):
|
||||
"""Initialize the scanner."""
|
||||
self.host = config[CONF_HOST]
|
||||
self.username = config[CONF_USERNAME]
|
||||
self.password = config.get(CONF_PASSWORD, '')
|
||||
self.ssh_key = config.get('ssh_key', config.get('pub_key', ''))
|
||||
self.protocol = config[CONF_PROTOCOL]
|
||||
self.mode = config[CONF_MODE]
|
||||
self.port = config[CONF_PORT]
|
||||
self.require_ip = config[CONF_REQUIRE_IP]
|
||||
|
||||
if self.protocol == 'ssh':
|
||||
self.connection = SshConnection(
|
||||
self.host, self.port, self.username, self.password,
|
||||
self.ssh_key)
|
||||
else:
|
||||
self.connection = TelnetConnection(
|
||||
self.host, self.port, self.username, self.password)
|
||||
from aioasuswrt.asuswrt import AsusWrt
|
||||
|
||||
self.last_results = {}
|
||||
self.success_init = False
|
||||
self.connection = AsusWrt(config[CONF_HOST], config[CONF_PORT],
|
||||
config[CONF_PROTOCOL] == 'telnet',
|
||||
config[CONF_USERNAME],
|
||||
config.get(CONF_PASSWORD, ''),
|
||||
config.get('ssh_key',
|
||||
config.get('pub_key', '')),
|
||||
config[CONF_MODE], config[CONF_REQUIRE_IP])
|
||||
|
||||
async def async_connect(self):
|
||||
"""Initialize connection to the router."""
|
||||
# Test the router is accessible.
|
||||
data = self.get_asuswrt_data()
|
||||
data = await self.connection.async_get_connected_devices()
|
||||
self.success_init = data is not None
|
||||
|
||||
def scan_devices(self):
|
||||
async def async_scan_devices(self):
|
||||
"""Scan for new devices and return a list with found device IDs."""
|
||||
self._update_info()
|
||||
await self.async_update_info()
|
||||
return list(self.last_results.keys())
|
||||
|
||||
def get_device_name(self, device):
|
||||
async def async_get_device_name(self, device):
|
||||
"""Return the name of the given device or None if we don't know."""
|
||||
if device not in self.last_results:
|
||||
return None
|
||||
return self.last_results[device].name
|
||||
|
||||
def _update_info(self):
|
||||
async def async_update_info(self):
|
||||
"""Ensure the information from the ASUSWRT router is up to date.
|
||||
|
||||
Return boolean if scanning successful.
|
||||
"""
|
||||
if not self.success_init:
|
||||
return False
|
||||
|
||||
_LOGGER.info('Checking Devices')
|
||||
data = self.get_asuswrt_data()
|
||||
if not data:
|
||||
return False
|
||||
|
||||
self.last_results = data
|
||||
return True
|
||||
|
||||
def get_asuswrt_data(self):
|
||||
"""Retrieve data from ASUSWRT.
|
||||
|
||||
Calls various commands on the router and returns the superset of all
|
||||
responses. Some commands will not work on some routers.
|
||||
"""
|
||||
devices = {}
|
||||
devices.update(self._get_wl())
|
||||
devices.update(self._get_arp())
|
||||
devices.update(self._get_neigh(devices))
|
||||
if not self.mode == 'ap':
|
||||
devices.update(self._get_leases(devices))
|
||||
|
||||
ret_devices = {}
|
||||
for key in devices:
|
||||
if not self.require_ip or devices[key].ip is not None:
|
||||
ret_devices[key] = devices[key]
|
||||
return ret_devices
|
||||
|
||||
def _get_wl(self):
|
||||
lines = self.connection.run_command(_WL_CMD)
|
||||
if not lines:
|
||||
return {}
|
||||
result = _parse_lines(lines, _WL_REGEX)
|
||||
devices = {}
|
||||
for device in result:
|
||||
mac = device['mac'].upper()
|
||||
devices[mac] = Device(mac, None, None)
|
||||
return devices
|
||||
|
||||
def _get_leases(self, cur_devices):
|
||||
lines = self.connection.run_command(_LEASES_CMD)
|
||||
if not lines:
|
||||
return {}
|
||||
lines = [line for line in lines if not line.startswith('duid ')]
|
||||
result = _parse_lines(lines, _LEASES_REGEX)
|
||||
devices = {}
|
||||
for device in result:
|
||||
# For leases where the client doesn't set a hostname, ensure it
|
||||
# is blank and not '*', which breaks entity_id down the line.
|
||||
host = device['host']
|
||||
if host == '*':
|
||||
host = ''
|
||||
mac = device['mac'].upper()
|
||||
if mac in cur_devices:
|
||||
devices[mac] = Device(mac, device['ip'], host)
|
||||
return devices
|
||||
|
||||
def _get_neigh(self, cur_devices):
|
||||
lines = self.connection.run_command(_IP_NEIGH_CMD)
|
||||
if not lines:
|
||||
return {}
|
||||
result = _parse_lines(lines, _IP_NEIGH_REGEX)
|
||||
devices = {}
|
||||
for device in result:
|
||||
status = device['status']
|
||||
if status is None or status.upper() != 'REACHABLE':
|
||||
continue
|
||||
if device['mac'] is not None:
|
||||
mac = device['mac'].upper()
|
||||
old_device = cur_devices.get(mac)
|
||||
old_ip = old_device.ip if old_device else None
|
||||
devices[mac] = Device(mac, device.get('ip', old_ip), None)
|
||||
return devices
|
||||
|
||||
def _get_arp(self):
|
||||
lines = self.connection.run_command(_ARP_CMD)
|
||||
if not lines:
|
||||
return {}
|
||||
result = _parse_lines(lines, _ARP_REGEX)
|
||||
devices = {}
|
||||
for device in result:
|
||||
if device['mac'] is not None:
|
||||
mac = device['mac'].upper()
|
||||
devices[mac] = Device(mac, device['ip'], None)
|
||||
return devices
|
||||
|
||||
|
||||
class _Connection:
|
||||
def __init__(self):
|
||||
self._connected = False
|
||||
|
||||
@property
|
||||
def connected(self):
|
||||
"""Return connection state."""
|
||||
return self._connected
|
||||
|
||||
def connect(self):
|
||||
"""Mark current connection state as connected."""
|
||||
self._connected = True
|
||||
|
||||
def disconnect(self):
|
||||
"""Mark current connection state as disconnected."""
|
||||
self._connected = False
|
||||
|
||||
|
||||
class SshConnection(_Connection):
|
||||
"""Maintains an SSH connection to an ASUS-WRT router."""
|
||||
|
||||
def __init__(self, host, port, username, password, ssh_key):
|
||||
"""Initialize the SSH connection properties."""
|
||||
super().__init__()
|
||||
|
||||
self._ssh = None
|
||||
self._host = host
|
||||
self._port = port
|
||||
self._username = username
|
||||
self._password = password
|
||||
self._ssh_key = ssh_key
|
||||
|
||||
def run_command(self, command):
|
||||
"""Run commands through an SSH connection.
|
||||
|
||||
Connect to the SSH server if not currently connected, otherwise
|
||||
use the existing connection.
|
||||
"""
|
||||
from pexpect import pxssh, exceptions
|
||||
|
||||
try:
|
||||
if not self.connected:
|
||||
self.connect()
|
||||
self._ssh.sendline(command)
|
||||
self._ssh.prompt()
|
||||
lines = self._ssh.before.split(b'\n')[1:-1]
|
||||
return [line.decode('utf-8') for line in lines]
|
||||
except exceptions.EOF as err:
|
||||
_LOGGER.error("Connection refused. %s", self._ssh.before)
|
||||
self.disconnect()
|
||||
return None
|
||||
except pxssh.ExceptionPxssh as err:
|
||||
_LOGGER.error("Unexpected SSH error: %s", err)
|
||||
self.disconnect()
|
||||
return None
|
||||
except AssertionError as err:
|
||||
_LOGGER.error("Connection to router unavailable: %s", err)
|
||||
self.disconnect()
|
||||
return None
|
||||
|
||||
def connect(self):
|
||||
"""Connect to the ASUS-WRT SSH server."""
|
||||
from pexpect import pxssh
|
||||
|
||||
self._ssh = pxssh.pxssh()
|
||||
if self._ssh_key:
|
||||
self._ssh.login(self._host, self._username, quiet=False,
|
||||
ssh_key=self._ssh_key, port=self._port)
|
||||
else:
|
||||
self._ssh.login(self._host, self._username, quiet=False,
|
||||
password=self._password, port=self._port)
|
||||
|
||||
super().connect()
|
||||
|
||||
def disconnect(self):
|
||||
"""Disconnect the current SSH connection."""
|
||||
try:
|
||||
self._ssh.logout()
|
||||
except Exception: # pylint: disable=broad-except
|
||||
pass
|
||||
finally:
|
||||
self._ssh = None
|
||||
|
||||
super().disconnect()
|
||||
|
||||
|
||||
class TelnetConnection(_Connection):
|
||||
"""Maintains a Telnet connection to an ASUS-WRT router."""
|
||||
|
||||
def __init__(self, host, port, username, password):
|
||||
"""Initialize the Telnet connection properties."""
|
||||
super().__init__()
|
||||
|
||||
self._telnet = None
|
||||
self._host = host
|
||||
self._port = port
|
||||
self._username = username
|
||||
self._password = password
|
||||
self._prompt_string = None
|
||||
|
||||
def run_command(self, command):
|
||||
"""Run a command through a Telnet connection.
|
||||
|
||||
Connect to the Telnet server if not currently connected, otherwise
|
||||
use the existing connection.
|
||||
"""
|
||||
try:
|
||||
if not self.connected:
|
||||
self.connect()
|
||||
self._telnet.write('{}\n'.format(command).encode('ascii'))
|
||||
data = (self._telnet.read_until(self._prompt_string).
|
||||
split(b'\n')[1:-1])
|
||||
return [line.decode('utf-8') for line in data]
|
||||
except EOFError:
|
||||
_LOGGER.error("Unexpected response from router")
|
||||
self.disconnect()
|
||||
return None
|
||||
except ConnectionRefusedError:
|
||||
_LOGGER.error("Connection refused by router. Telnet enabled?")
|
||||
self.disconnect()
|
||||
return None
|
||||
except socket.gaierror as exc:
|
||||
_LOGGER.error("Socket exception: %s", exc)
|
||||
self.disconnect()
|
||||
return None
|
||||
except OSError as exc:
|
||||
_LOGGER.error("OSError: %s", exc)
|
||||
self.disconnect()
|
||||
return None
|
||||
|
||||
def connect(self):
|
||||
"""Connect to the ASUS-WRT Telnet server."""
|
||||
self._telnet = telnetlib.Telnet(self._host)
|
||||
self._telnet.read_until(b'login: ')
|
||||
self._telnet.write((self._username + '\n').encode('ascii'))
|
||||
self._telnet.read_until(b'Password: ')
|
||||
self._telnet.write((self._password + '\n').encode('ascii'))
|
||||
self._prompt_string = self._telnet.read_until(b'#').split(b'\n')[-1]
|
||||
|
||||
super().connect()
|
||||
|
||||
def disconnect(self):
|
||||
"""Disconnect the current Telnet connection."""
|
||||
try:
|
||||
self._telnet.write('exit\n'.encode('ascii'))
|
||||
except Exception: # pylint: disable=broad-except
|
||||
pass
|
||||
|
||||
super().disconnect()
|
||||
self.last_results = await self.connection.async_get_connected_devices()
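With the transport handling moved into aioasuswrt, the scanner is reduced to the constructor and the two awaits above. Below is a minimal standalone sketch of driving the library directly, using only the calls visible in this diff; the host, port, credentials and the positional argument order are assumptions taken from the snippet above, not verified against the library:

# Hedged sketch of using aioasuswrt directly, mirroring the calls made by
# AsusWrtDeviceScanner above. Host, port and credentials are placeholders.
import asyncio

from aioasuswrt.asuswrt import AsusWrt


async def dump_connected_devices():
    # host, port, use_telnet, username, password, ssh_key, mode, require_ip
    router = AsusWrt('192.168.1.1', 22, False, 'admin', 'password', '',
                     'router', True)
    devices = await router.async_get_connected_devices()
    for mac, device in devices.items():
        print(mac, device.name)


asyncio.get_event_loop().run_until_complete(dump_connected_devices())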

@ -44,7 +44,10 @@ def setup_scanner(hass, config, see, discovery_info=None):
new_devices[address] = 1
return

see(mac=BLE_PREFIX + address, host_name=name.strip("\x00"),
if name is not None:
name = name.strip("\x00")

see(mac=BLE_PREFIX + address, host_name=name,
source_type=SOURCE_TYPE_BLUETOOTH_LE)

def discover_ble_devices():

97
homeassistant/components/device_tracker/bt_smarthub.py
Normal file

@ -0,0 +1,97 @@
"""
|
||||
Support for BT Smart Hub (Sometimes referred to as BT Home Hub 6).
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/device_tracker.bt_smarthub/
|
||||
"""
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.components.device_tracker import (
|
||||
DOMAIN, PLATFORM_SCHEMA, DeviceScanner)
|
||||
from homeassistant.const import CONF_HOST
|
||||
|
||||
REQUIREMENTS = ['btsmarthub_devicelist==0.1.1']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_DEFAULT_IP = '192.168.1.254'
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Optional(CONF_HOST, default=CONF_DEFAULT_IP): cv.string,
|
||||
})
|
||||
|
||||
|
||||
def get_scanner(hass, config):
|
||||
"""Return a BT Smart Hub scanner if successful."""
|
||||
scanner = BTSmartHubScanner(config[DOMAIN])
|
||||
|
||||
return scanner if scanner.success_init else None
|
||||
|
||||
|
||||
class BTSmartHubScanner(DeviceScanner):
|
||||
"""This class queries a BT Smart Hub."""
|
||||
|
||||
def __init__(self, config):
|
||||
"""Initialise the scanner."""
|
||||
_LOGGER.debug("Initialising BT Smart Hub")
|
||||
self.host = config[CONF_HOST]
|
||||
self.last_results = {}
|
||||
self.success_init = False
|
||||
|
||||
# Test the router is accessible
|
||||
data = self.get_bt_smarthub_data()
|
||||
if data:
|
||||
self.success_init = True
|
||||
else:
|
||||
_LOGGER.info("Failed to connect to %s", self.host)
|
||||
|
||||
def scan_devices(self):
|
||||
"""Scan for new devices and return a list with found device IDs."""
|
||||
self._update_info()
|
||||
return [client['mac'] for client in self.last_results]
|
||||
|
||||
def get_device_name(self, device):
|
||||
"""Return the name of the given device or None if we don't know."""
|
||||
if not self.last_results:
|
||||
return None
|
||||
for client in self.last_results:
|
||||
if client['mac'] == device:
|
||||
return client['host']
|
||||
return None
|
||||
|
||||
def _update_info(self):
|
||||
"""Ensure the information from the BT Smart Hub is up to date."""
|
||||
if not self.success_init:
|
||||
return
|
||||
|
||||
_LOGGER.info("Scanning")
|
||||
data = self.get_bt_smarthub_data()
|
||||
if not data:
|
||||
_LOGGER.warning("Error scanning devices")
|
||||
return
|
||||
|
||||
clients = [client for client in data.values()]
|
||||
self.last_results = clients
|
||||
|
||||
def get_bt_smarthub_data(self):
|
||||
"""Retrieve data from BT Smart Hub and return parsed result."""
|
||||
import btsmarthub_devicelist
|
||||
# Request data from bt smarthub into a list of dicts.
|
||||
data = btsmarthub_devicelist.get_devicelist(
|
||||
router_ip=self.host, only_active_devices=True)
|
||||
# Renaming keys from parsed result.
|
||||
devices = {}
|
||||
for device in data:
|
||||
try:
|
||||
devices[device['UserHostName']] = {
|
||||
'ip': device['IPAddress'],
|
||||
'mac': device['PhysAddress'],
|
||||
'host': device['UserHostName'],
|
||||
'status': device['Active']
|
||||
}
|
||||
except KeyError:
|
||||
pass
|
||||
return devices
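For reference, get_bt_smarthub_data reshapes the library's list of device dicts into a dict keyed by host name, so a single entry ends up roughly like the sketch below (all values are illustrative, not taken from a real hub):

# Illustrative shape of one entry returned by get_bt_smarthub_data above;
# the host name, IP, MAC and status values are made up for the example.
example_devices = {
    'LAPTOP-1': {
        'ip': '192.168.1.23',
        'mac': 'AA:BB:CC:DD:EE:FF',
        'host': 'LAPTOP-1',
        'status': True,
    },
}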
|
|
@ -22,7 +22,7 @@ from homeassistant.components.device_tracker import (
|
|||
from homeassistant.const import (
|
||||
CONF_HOST, CONF_PORT)
|
||||
|
||||
REQUIREMENTS = ['aiofreepybox==0.0.4']
|
||||
REQUIREMENTS = ['aiofreepybox==0.0.5']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -19,7 +19,7 @@ from homeassistant.helpers.event import track_time_interval
|
|||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import slugify, dt as dt_util
|
||||
|
||||
REQUIREMENTS = ['locationsharinglib==3.0.6']
|
||||
REQUIREMENTS = ['locationsharinglib==3.0.7']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -39,7 +39,7 @@ Device = namedtuple('Device', ['name', 'ip', 'mac', 'state'])
|
|||
class HuaweiDeviceScanner(DeviceScanner):
|
||||
"""This class queries a router running HUAWEI firmware."""
|
||||
|
||||
ARRAY_REGEX = re.compile(r'var UserDevinfo = new Array\((.*),null\);')
|
||||
ARRAY_REGEX = re.compile(r'var UserDevinfo = new Array\((.*)null\);')
|
||||
DEVICE_REGEX = re.compile(r'new USERDevice\((.*?)\),')
|
||||
DEVICE_ATTR_REGEX = re.compile(
|
||||
'"(?P<Domain>.*?)","(?P<IpAddr>.*?)",'
|
||||
|
|
|
@ -15,7 +15,7 @@ from homeassistant.const import (
|
|||
CONF_HOST, CONF_PORT, CONF_PASSWORD, CONF_USERNAME
|
||||
)
|
||||
|
||||
REQUIREMENTS = ['ndms2_client==0.0.4']
|
||||
REQUIREMENTS = ['ndms2_client==0.0.5']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -15,7 +15,7 @@ from homeassistant.const import (
|
|||
CONF_HOST, CONF_PASSWORD, CONF_USERNAME, CONF_PORT, CONF_SSL,
|
||||
CONF_DEVICES, CONF_EXCLUDE)
|
||||
|
||||
REQUIREMENTS = ['pynetgear==0.5.0']
|
||||
REQUIREMENTS = ['pynetgear==0.5.1']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -254,7 +254,7 @@ class Tplink3DeviceScanner(Tplink1DeviceScanner):
|
|||
self.sysauth = regex_result.group(1)
|
||||
_LOGGER.info(self.sysauth)
|
||||
return True
|
||||
except (ValueError, KeyError) as _:
|
||||
except (ValueError, KeyError):
|
||||
_LOGGER.error("Couldn't fetch auth tokens! Response was: %s",
|
||||
response.text)
|
||||
return False
|
||||
|
|
|
@ -13,7 +13,7 @@ from homeassistant.components.device_tracker import (
|
|||
from homeassistant.const import CONF_HOST, CONF_TOKEN
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
REQUIREMENTS = ['python-miio==0.4.2', 'construct==2.9.45']
|
||||
REQUIREMENTS = ['python-miio==0.4.3', 'construct==2.9.45']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
|
18
homeassistant/components/dialogflow/.translations/ca.json
Normal file

@ -0,0 +1,18 @@
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"not_internet_accessible": "La vostra inst\u00e0ncia de Home Assistant ha de ser accessible des d'Internet per rebre missatges de Dialogflow.",
|
||||
"one_instance_allowed": "Nom\u00e9s cal una sola inst\u00e0ncia."
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "Per enviar esdeveniments a Home Assistant, haureu de configurar [integraci\u00f3 webhook de Dialogflow]({dialogflow_url}). \n\n Ompliu la informaci\u00f3 seg\u00fcent: \n\n - URL: `{webhook_url}` \n - M\u00e8tode: POST \n - Tipus de contingut: application/json\n\nConsulteu [la documentaci\u00f3]({docs_url}) per a m\u00e9s detalls."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Esteu segur que voleu configurar Dialogflow?",
|
||||
"title": "Configureu el Webhook de Dialogflow"
|
||||
}
|
||||
},
|
||||
"title": "Dialogflow"
|
||||
}
|
||||
}
|
18
homeassistant/components/dialogflow/.translations/en.json
Normal file

@ -0,0 +1,18 @@
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"not_internet_accessible": "Your Home Assistant instance needs to be accessible from the internet to receive Dialogflow messages.",
|
||||
"one_instance_allowed": "Only a single instance is necessary."
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "To send events to Home Assistant, you will need to setup [webhook integration of Dialogflow]({dialogflow_url}).\n\nFill in the following info:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/json\n\nSee [the documentation]({docs_url}) for further details."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Are you sure you want to set up Dialogflow?",
|
||||
"title": "Set up the Dialogflow Webhook"
|
||||
}
|
||||
},
|
||||
"title": "Dialogflow"
|
||||
}
|
||||
}
|
18
homeassistant/components/dialogflow/.translations/ko.json
Normal file

@ -0,0 +1,18 @@
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"not_internet_accessible": "Dialogflow \uba54\uc2dc\uc9c0\ub97c \ubc1b\uc73c\ub824\uba74 \uc778\ud130\ub137\uc5d0\uc11c Home Assistant \uc778\uc2a4\ud134\uc2a4\uc5d0 \uc561\uc138\uc2a4 \ud560 \uc218 \uc788\uc5b4\uc57c\ud569\ub2c8\ub2e4.",
|
||||
"one_instance_allowed": "\ud558\ub098\uc758 \uc778\uc2a4\ud134\uc2a4\ub9cc \ud544\uc694\ud569\ub2c8\ub2e4."
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "Home Assistant \ub85c \uc774\ubca4\ud2b8\ub97c \ubcf4\ub0b4\ub824\uba74 [Dialogflow Webhook]({dialogflow_url}) \uc744 \uc124\uc815\ud574\uc57c\ud569\ub2c8\ub2e4. \n\n\ub2e4\uc74c \uc815\ubcf4\ub97c \uc785\ub825\ud574 \uc8fc\uc138\uc694. \n\n - URL: `{webhook_url}`\n - Method: POST\n - Content Type: application/json\n \n \uc790\uc138\ud55c \uc815\ubcf4\ub294 [\uc548\ub0b4]({docs_url}) \ub97c \ucc38\uc870\ud574 \uc8fc\uc138\uc694."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Dialogflow \uc744 \uc124\uc815 \ud558\uc2dc\uaca0\uc2b5\ub2c8\uae4c?",
|
||||
"title": "Dialogflow Webhook \uc124\uc815"
|
||||
}
|
||||
},
|
||||
"title": "Dialogflow"
|
||||
}
|
||||
}
|
18
homeassistant/components/dialogflow/.translations/lb.json
Normal file

@ -0,0 +1,18 @@
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"not_internet_accessible": "\u00c4r Home Assistant Instanz muss iwwert Internet accessibel si fir Dialogflow Noriichten z'empf\u00e4nken.",
|
||||
"one_instance_allowed": "N\u00ebmmen eng eenzeg Instanz ass n\u00e9ideg."
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "Fir Evenementer un Home Assistant ze sch\u00e9cken, muss [Webhook Integratioun mat Dialogflow]({dialogflow_url}) ageriicht ginn.\n\nF\u00ebllt folgend Informatiounen aus:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/x-www-form-urlencoded\n\nLiest [Dokumentatioun]({docs_url}) fir w\u00e9ider D\u00e9tailer."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "S\u00e9cher fir Dialogflowanzeriichten?",
|
||||
"title": "Dialogflow Webhook ariichten"
|
||||
}
|
||||
},
|
||||
"title": "Dialogflow"
|
||||
}
|
||||
}
|
18
homeassistant/components/dialogflow/.translations/no.json
Normal file

@ -0,0 +1,18 @@
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"not_internet_accessible": "Din Home Assistant forekomst m\u00e5 v\u00e6re tilgjengelig fra internett for \u00e5 kunne motta Dialogflow meldinger.",
|
||||
"one_instance_allowed": "Kun en enkelt forekomst er n\u00f8dvendig."
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "For \u00e5 sende hendelser til Home Assistant, m\u00e5 du sette opp [webhook integrasjon av Dialogflow]({dialogflow_url}). \n\nFyll ut f\u00f8lgende informasjon: \n\n- URL: `{webhook_url}` \n- Metode: POST\n- Innholdstype: application/json\n\nSe [dokumentasjonen]({docs_url}) for ytterligere detaljer."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Er du sikker p\u00e5 at du \u00f8nsker \u00e5 sette opp Dialogflow?",
|
||||
"title": "Sett opp Dialogflow Webhook"
|
||||
}
|
||||
},
|
||||
"title": "Dialogflow"
|
||||
}
|
||||
}
|
18
homeassistant/components/dialogflow/.translations/pl.json
Normal file

@ -0,0 +1,18 @@
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"not_internet_accessible": "Tw\u00f3j Home Assistant musi by\u0107 dost\u0119pny z Internetu, aby odbiera\u0107 komunikaty Dialogflow.",
|
||||
"one_instance_allowed": "Wymagana jest tylko jedna instancja."
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "Aby wysy\u0142a\u0107 zdarzenia do Home Assistant'a, musisz skonfigurowa\u0107 [Dialogflow Webhook]({twilio_url}). \n\n Wprowad\u017a nast\u0119puj\u0105ce dane:\n\n - URL: `{webhook_url}` \n - Metoda: POST \n - Typ zawarto\u015bci: application/json\n\nZapoznaj si\u0119 z [dokumentacj\u0105]({docs_url}) by pozna\u0107 szczeg\u00f3\u0142y."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Czy chcesz skonfigurowa\u0107 Dialogflow?",
|
||||
"title": "Konfiguracja Dialogflow Webhook"
|
||||
}
|
||||
},
|
||||
"title": "Dialogflow"
|
||||
}
|
||||
}
|
18
homeassistant/components/dialogflow/.translations/pt.json
Normal file

@ -0,0 +1,18 @@
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"not_internet_accessible": "A sua inst\u00e2ncia Home Assistant precisa de ser acess\u00edvel a partir da internet para receber mensagens Dialogflow.",
|
||||
"one_instance_allowed": "Apenas uma \u00fanica inst\u00e2ncia \u00e9 necess\u00e1ria."
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "Para enviar eventos para o Home Assistant, \u00e9 necess\u00e1rio configurar o [Dialogflow Webhook] ({dialogflow_url}). \n\n Preencha as seguintes informa\u00e7\u00f5es: \n\n - URL: `{webhook_url}`\n - M\u00e9todo: POST \n - Tipo de Conte\u00fado: application/json\n\n Veja [a documenta\u00e7\u00e3o] ({docs_url}) para obter mais detalhes."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Tem certeza de que deseja configurar o Dialogflow?",
|
||||
"title": "Configurar o Dialogflow Webhook"
|
||||
}
|
||||
},
|
||||
"title": "Dialogflow"
|
||||
}
|
||||
}
|
18
homeassistant/components/dialogflow/.translations/ru.json
Normal file

@ -0,0 +1,18 @@
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"not_internet_accessible": "\u0412\u0430\u0448 Home Assistant \u0434\u043e\u043b\u0436\u0435\u043d \u0431\u044b\u0442\u044c \u0434\u043e\u0441\u0442\u0443\u043f\u0435\u043d \u0438\u0437 \u0438\u043d\u0442\u0435\u0440\u043d\u0435\u0442\u0430 \u0434\u043b\u044f \u043f\u043e\u043b\u0443\u0447\u0435\u043d\u0438\u044f \u0441\u043e\u043e\u0431\u0449\u0435\u043d\u0438\u0439 Dialogflow.",
|
||||
"one_instance_allowed": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u043a\u043e\u043c\u043f\u043e\u043d\u0435\u043d\u0442\u0430 \u0443\u0436\u0435 \u0432\u044b\u043f\u043e\u043b\u043d\u0435\u043d\u0430."
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "\u0414\u043b\u044f \u043e\u0442\u043f\u0440\u0430\u0432\u043a\u0438 \u0441\u043e\u0431\u044b\u0442\u0438\u0439 \u0432 Home Assistant \u0432\u044b \u0434\u043e\u043b\u0436\u043d\u044b \u043d\u0430\u0441\u0442\u0440\u043e\u0438\u0442\u044c [webhooks \u0434\u043b\u044f Dialogflow]({dialogflow_url}).\n\n\u0414\u043b\u044f \u043d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 \u0438\u0441\u043f\u043e\u043b\u044c\u0437\u0443\u0439\u0442\u0435 \u0441\u043b\u0435\u0434\u0443\u044e\u0449\u0443\u044e \u0438\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0438\u044e:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/json\n\n\u041e\u0437\u043d\u0430\u043a\u043e\u043c\u044c\u0442\u0435\u0441\u044c \u0441 [\u0434\u043e\u043a\u0443\u043c\u0435\u043d\u0442\u0430\u0446\u0438\u0435\u0439]({docs_url}) \u0434\u043b\u044f \u043f\u043e\u043b\u0443\u0447\u0435\u043d\u0438\u044f \u0431\u043e\u043b\u0435\u0435 \u043f\u043e\u0434\u0440\u043e\u0431\u043d\u043e\u0439 \u0438\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0438\u0438."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "\u0412\u044b \u0443\u0432\u0435\u0440\u0435\u043d\u044b, \u0447\u0442\u043e \u0445\u043e\u0442\u0438\u0442\u0435 \u043d\u0430\u0441\u0442\u0440\u043e\u0438\u0442\u044c Dialogflow?",
|
||||
"title": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 Dialogflow Webhook"
|
||||
}
|
||||
},
|
||||
"title": "Dialogflow"
|
||||
}
|
||||
}
|
18
homeassistant/components/dialogflow/.translations/sl.json
Normal file

@ -0,0 +1,18 @@
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"not_internet_accessible": " \u010ce \u017eelite prejemati sporo\u010dila dialogflow, mora biti Home Assistent dostopen prek interneta.",
|
||||
"one_instance_allowed": "Potrebna je samo ena instanca."
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "Za po\u0161iljanje dogodkov Home Assistent-u, boste morali nastaviti [webhook z dialogflow]({twilio_url}).\n\nIzpolnite naslednje informacije:\n\n- URL: `{webhook_url}`\n- Metoda: POST\n- Vrsta vsebine: application/x-www-form-urlencoded\n\nGlej [dokumentacijo]({docs_url}) za nadaljna navodila."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Ali ste prepri\u010dani, da \u017eelite nastaviti dialogflow?",
|
||||
"title": "Nastavite Dialogflow Webhook"
|
||||
}
|
||||
},
|
||||
"title": "Dialogflow"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,18 @@
|
|||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"not_internet_accessible": "Home Assistant \u5be6\u4f8b\u5fc5\u9808\u80fd\u5920\u7531\u7db2\u969b\u7db2\u8def\u5b58\u53d6\uff0c\u65b9\u80fd\u63a5\u53d7 Dialogflow \u8a0a\u606f\u3002",
|
||||
"one_instance_allowed": "\u50c5\u9700\u8a2d\u5b9a\u4e00\u7d44\u7269\u4ef6\u5373\u53ef\u3002"
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "\u6b32\u50b3\u9001\u4e8b\u4ef6\u81f3 Home Assistant\uff0c\u5c07\u9700\u8a2d\u5b9a [webhook integration of Dialogflow]({dialogflow_url})\u3002\n\n\u8acb\u586b\u5beb\u4e0b\u5217\u8cc7\u8a0a\uff1a\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/json\n\n\u8acb\u53c3\u95b1 [\u6587\u4ef6]({docs_url})\u4ee5\u4e86\u89e3\u66f4\u8a73\u7d30\u8cc7\u6599\u3002"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "\u662f\u5426\u8981\u8a2d\u5b9a Dialogflow\uff1f",
|
||||
"title": "\u8a2d\u5b9a Dialogflow Webhook"
|
||||
}
|
||||
},
|
||||
"title": "Dialogflow"
|
||||
}
|
||||
}
|
|
@ -7,24 +7,16 @@ https://home-assistant.io/components/dialogflow/
|
|||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
from aiohttp import web
|
||||
|
||||
from homeassistant.const import CONF_WEBHOOK_ID
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import intent, template
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
|
||||
from homeassistant.helpers import intent, template, config_entry_flow
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_INTENTS = 'intents'
|
||||
CONF_SPEECH = 'speech'
|
||||
CONF_ACTION = 'action'
|
||||
CONF_ASYNC_ACTION = 'async_action'
|
||||
|
||||
DEFAULT_CONF_ASYNC_ACTION = False
|
||||
DEPENDENCIES = ['http']
|
||||
DEPENDENCIES = ['webhook']
|
||||
DOMAIN = 'dialogflow'
|
||||
|
||||
INTENTS_API_ENDPOINT = '/api/dialogflow'
|
||||
|
||||
SOURCE = "Home Assistant Dialogflow"
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
|
@ -38,52 +30,72 @@ class DialogFlowError(HomeAssistantError):
|
|||
|
||||
async def async_setup(hass, config):
|
||||
"""Set up Dialogflow component."""
|
||||
hass.http.register_view(DialogflowIntentsView)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class DialogflowIntentsView(HomeAssistantView):
|
||||
"""Handle Dialogflow requests."""
|
||||
async def handle_webhook(hass, webhook_id, request):
|
||||
"""Handle incoming webhook with Dialogflow requests."""
|
||||
message = await request.json()
|
||||
|
||||
url = INTENTS_API_ENDPOINT
|
||||
name = 'api:dialogflow'
|
||||
_LOGGER.debug("Received Dialogflow request: %s", message)
|
||||
|
||||
async def post(self, request):
|
||||
"""Handle Dialogflow."""
|
||||
hass = request.app['hass']
|
||||
message = await request.json()
|
||||
try:
|
||||
response = await async_handle_message(hass, message)
|
||||
return b'' if response is None else web.json_response(response)
|
||||
|
||||
_LOGGER.debug("Received Dialogflow request: %s", message)
|
||||
except DialogFlowError as err:
|
||||
_LOGGER.warning(str(err))
|
||||
return web.json_response(
|
||||
dialogflow_error_response(message, str(err))
|
||||
)
|
||||
|
||||
try:
|
||||
response = await async_handle_message(hass, message)
|
||||
return b'' if response is None else self.json(response)
|
||||
except intent.UnknownIntent as err:
|
||||
_LOGGER.warning(str(err))
|
||||
return web.json_response(
|
||||
dialogflow_error_response(
|
||||
message,
|
||||
"This intent is not yet configured within Home Assistant."
|
||||
)
|
||||
)
|
||||
|
||||
except DialogFlowError as err:
|
||||
_LOGGER.warning(str(err))
|
||||
return self.json(dialogflow_error_response(
|
||||
hass, message, str(err)))
|
||||
except intent.InvalidSlotInfo as err:
|
||||
_LOGGER.warning(str(err))
|
||||
return web.json_response(
|
||||
dialogflow_error_response(
|
||||
message,
|
||||
"Invalid slot information received for this intent."
|
||||
)
|
||||
)
|
||||
|
||||
except intent.UnknownIntent as err:
|
||||
_LOGGER.warning(str(err))
|
||||
return self.json(dialogflow_error_response(
|
||||
hass, message,
|
||||
"This intent is not yet configured within Home Assistant."))
|
||||
|
||||
except intent.InvalidSlotInfo as err:
|
||||
_LOGGER.warning(str(err))
|
||||
return self.json(dialogflow_error_response(
|
||||
hass, message,
|
||||
"Invalid slot information received for this intent."))
|
||||
|
||||
except intent.IntentError as err:
|
||||
_LOGGER.warning(str(err))
|
||||
return self.json(dialogflow_error_response(
|
||||
hass, message, "Error handling intent."))
|
||||
except intent.IntentError as err:
|
||||
_LOGGER.warning(str(err))
|
||||
return web.json_response(
|
||||
dialogflow_error_response(message, "Error handling intent."))
|
||||
|
||||
|
||||
def dialogflow_error_response(hass, message, error):
|
||||
async def async_setup_entry(hass, entry):
|
||||
"""Configure based on config entry."""
|
||||
hass.components.webhook.async_register(
|
||||
entry.data[CONF_WEBHOOK_ID], handle_webhook)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass, entry):
|
||||
"""Unload a config entry."""
|
||||
hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID])
|
||||
return True
|
||||
|
||||
config_entry_flow.register_webhook_flow(
|
||||
DOMAIN,
|
||||
'Dialogflow Webhook',
|
||||
{
|
||||
'dialogflow_url': 'https://dialogflow.com/docs/fulfillment#webhook',
|
||||
'docs_url': 'https://www.home-assistant.io/components/dialogflow/'
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def dialogflow_error_response(message, error):
|
||||
"""Return a response saying the error message."""
|
||||
dialogflow_response = DialogflowResponse(message['result']['parameters'])
|
||||
dialogflow_response.add_speech(error)
|
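A minimal sketch of exercising the webhook registered by async_setup_entry() above; the URL stands in for the {webhook_url} placeholder from the translations, and the payload fields are assumptions rather than the full Dialogflow fulfillment format (handle_webhook() only needs a JSON body, and error responses read message['result']['parameters']).

import requests

# Hypothetical webhook URL and payload; only the JSON structure that
# handle_webhook()/dialogflow_error_response() touch is included.
webhook_url = 'https://example.com/api/webhook/abcdef0123456789'
payload = {
    'result': {
        'action': 'GetTemperature',             # assumed intent action
        'parameters': {'room': 'living room'},  # echoed back in error replies
    },
}
response = requests.post(webhook_url, json=payload, timeout=10)
print(response.status_code, response.text)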
18
homeassistant/components/dialogflow/strings.json
Normal file
|
@ -0,0 +1,18 @@
|
|||
{
|
||||
"config": {
|
||||
"title": "Dialogflow",
|
||||
"step": {
|
||||
"user": {
|
||||
"title": "Set up the Dialogflow Webhook",
|
||||
"description": "Are you sure you want to set up Dialogflow?"
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"one_instance_allowed": "Only a single instance is necessary.",
|
||||
"not_internet_accessible": "Your Home Assistant instance needs to be accessible from the internet to receive Dialogflow messages."
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "To send events to Home Assistant, you will need to setup [webhook integration of Dialogflow]({dialogflow_url}).\n\nFill in the following info:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/json\n\nSee [the documentation]({docs_url}) for further details."
|
||||
}
|
||||
}
|
||||
}
|
|
@ -51,7 +51,6 @@ CONFIG_ENTRY_HANDLERS = {
|
|||
SERVICE_HUE: 'hue',
|
||||
SERVICE_IKEA_TRADFRI: 'tradfri',
|
||||
'sonos': 'sonos',
|
||||
'igd': 'upnp',
|
||||
}
|
||||
|
||||
SERVICE_HANDLERS = {
|
||||
|
|
|
@ -14,7 +14,7 @@ from homeassistant.const import CONF_HOST, CONF_USERNAME, \
|
|||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.util import slugify
|
||||
|
||||
REQUIREMENTS = ['DoorBirdPy==0.1.3']
|
||||
REQUIREMENTS = ['doorbirdpy==2.0.4']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@ -22,22 +22,31 @@ DOMAIN = 'doorbird'
|
|||
|
||||
API_URL = '/api/{}'.format(DOMAIN)
|
||||
|
||||
CONF_DOORBELL_EVENTS = 'doorbell_events'
|
||||
CONF_CUSTOM_URL = 'hass_url_override'
|
||||
CONF_DOORBELL_EVENTS = 'doorbell_events'
|
||||
CONF_DOORBELL_NUMS = 'doorbell_numbers'
|
||||
CONF_MOTION_EVENTS = 'motion_events'
|
||||
CONF_TOKEN = 'token'
|
||||
|
||||
DOORBELL_EVENT = 'doorbell'
|
||||
MOTION_EVENT = 'motionsensor'
|
||||
|
||||
# Sensor types: Name, device_class, event
|
||||
SENSOR_TYPES = {
|
||||
'doorbell': ['Button', 'occupancy', DOORBELL_EVENT],
|
||||
'motion': ['Motion', 'motion', MOTION_EVENT],
|
||||
'doorbell': {
|
||||
'name': 'Button',
|
||||
'device_class': 'occupancy',
|
||||
},
|
||||
'motion': {
|
||||
'name': 'Motion',
|
||||
'device_class': 'motion',
|
||||
},
|
||||
}
|
||||
|
||||
RESET_DEVICE_FAVORITES = 'doorbird_reset_favorites'
|
||||
|
||||
DEVICE_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Required(CONF_USERNAME): cv.string,
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Optional(CONF_DOORBELL_NUMS, default=[1]): vol.All(
|
||||
cv.ensure_list, [cv.positive_int]),
|
||||
vol.Optional(CONF_CUSTOM_URL): cv.string,
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_MONITORED_CONDITIONS, default=[]):
|
||||
|
@ -46,6 +55,7 @@ DEVICE_SCHEMA = vol.Schema({
|
|||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: vol.Schema({
|
||||
vol.Required(CONF_TOKEN): cv.string,
|
||||
vol.Required(CONF_DEVICES): vol.All(cv.ensure_list, [DEVICE_SCHEMA])
|
||||
}),
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
|
@ -55,8 +65,13 @@ def setup(hass, config):
|
|||
"""Set up the DoorBird component."""
|
||||
from doorbirdpy import DoorBird
|
||||
|
||||
token = config[DOMAIN].get(CONF_TOKEN)
|
||||
|
||||
# Provide an endpoint for the doorstations to call to trigger events
|
||||
hass.http.register_view(DoorbirdRequestView())
|
||||
hass.http.register_view(DoorBirdRequestView(token))
|
||||
|
||||
# Provide an endpoint for the user to call to clear device changes
|
||||
hass.http.register_view(DoorBirdCleanupView(token))
|
||||
|
||||
doorstations = []
|
||||
|
||||
|
@ -64,6 +79,7 @@ def setup(hass, config):
|
|||
device_ip = doorstation_config.get(CONF_HOST)
|
||||
username = doorstation_config.get(CONF_USERNAME)
|
||||
password = doorstation_config.get(CONF_PASSWORD)
|
||||
doorbell_nums = doorstation_config.get(CONF_DOORBELL_NUMS)
|
||||
custom_url = doorstation_config.get(CONF_CUSTOM_URL)
|
||||
events = doorstation_config.get(CONF_MONITORED_CONDITIONS)
|
||||
name = (doorstation_config.get(CONF_NAME)
|
||||
|
@ -73,68 +89,73 @@ def setup(hass, config):
|
|||
status = device.ready()
|
||||
|
||||
if status[0]:
|
||||
_LOGGER.info("Connected to DoorBird at %s as %s", device_ip,
|
||||
username)
|
||||
doorstation = ConfiguredDoorbird(device, name, events, custom_url)
|
||||
doorstation = ConfiguredDoorBird(device, name, events, custom_url,
|
||||
doorbell_nums, token)
|
||||
doorstations.append(doorstation)
|
||||
_LOGGER.info('Connected to DoorBird "%s" as %s@%s',
|
||||
doorstation.name, username, device_ip)
|
||||
elif status[1] == 401:
|
||||
_LOGGER.error("Authorization rejected by DoorBird at %s",
|
||||
device_ip)
|
||||
_LOGGER.error("Authorization rejected by DoorBird for %s@%s",
|
||||
username, device_ip)
|
||||
return False
|
||||
else:
|
||||
_LOGGER.error("Could not connect to DoorBird at %s: Error %s",
|
||||
device_ip, str(status[1]))
|
||||
_LOGGER.error("Could not connect to DoorBird as %s@%s: Error %s",
|
||||
username, device_ip, str(status[1]))
|
||||
return False
|
||||
|
||||
# SETUP EVENT SUBSCRIBERS
|
||||
# Subscribe to doorbell or motion events
|
||||
if events is not None:
|
||||
# This will make HA the only service that receives events.
|
||||
doorstation.device.reset_notifications()
|
||||
|
||||
# Subscribe to doorbell or motion events
|
||||
subscribe_events(hass, doorstation)
|
||||
doorstation.update_schedule(hass)
|
||||
|
||||
hass.data[DOMAIN] = doorstations
|
||||
|
||||
def _reset_device_favorites_handler(event):
|
||||
"""Handle clearing favorites on device."""
|
||||
slug = event.data.get('slug')
|
||||
|
||||
if slug is None:
|
||||
return
|
||||
|
||||
doorstation = get_doorstation_by_slug(hass, slug)
|
||||
|
||||
if doorstation is None:
|
||||
_LOGGER.error('Device not found %s', format(slug))
|
||||
|
||||
# Clear webhooks
|
||||
favorites = doorstation.device.favorites()
|
||||
|
||||
for favorite_type in favorites:
|
||||
for favorite_id in favorites[favorite_type]:
|
||||
doorstation.device.delete_favorite(favorite_type, favorite_id)
|
||||
|
||||
hass.bus.listen(RESET_DEVICE_FAVORITES, _reset_device_favorites_handler)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def subscribe_events(hass, doorstation):
|
||||
"""Initialize the subscriber."""
|
||||
for sensor_type in doorstation.monitored_events:
|
||||
name = '{} {}'.format(doorstation.name,
|
||||
SENSOR_TYPES[sensor_type][0])
|
||||
event_type = SENSOR_TYPES[sensor_type][2]
|
||||
|
||||
# Get the URL of this server
|
||||
hass_url = hass.config.api.base_url
|
||||
|
||||
# Override url if another is specified in the configuration
|
||||
if doorstation.custom_url is not None:
|
||||
hass_url = doorstation.custom_url
|
||||
|
||||
slug = slugify(name)
|
||||
|
||||
url = '{}{}/{}'.format(hass_url, API_URL, slug)
|
||||
|
||||
_LOGGER.info("DoorBird will connect to this instance via %s",
|
||||
url)
|
||||
|
||||
_LOGGER.info("You may use the following event name for automations"
|
||||
": %s_%s", DOMAIN, slug)
|
||||
|
||||
doorstation.device.subscribe_notification(event_type, url)
|
||||
def get_doorstation_by_slug(hass, slug):
|
||||
"""Get doorstation by slug."""
|
||||
for doorstation in hass.data[DOMAIN]:
|
||||
if slugify(doorstation.name) in slug:
|
||||
return doorstation
|
||||
|
||||
|
||||
class ConfiguredDoorbird():
|
||||
def handle_event(event):
|
||||
"""Handle dummy events."""
|
||||
return None
|
||||
|
||||
|
||||
class ConfiguredDoorBird():
|
||||
"""Attach additional information to pass along with configured device."""
|
||||
|
||||
def __init__(self, device, name, events=None, custom_url=None):
|
||||
def __init__(self, device, name, events, custom_url, doorbell_nums, token):
|
||||
"""Initialize configured device."""
|
||||
self._name = name
|
||||
self._device = device
|
||||
self._custom_url = custom_url
|
||||
self._monitored_events = events
|
||||
self._doorbell_nums = doorbell_nums
|
||||
self._token = token
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
@ -151,16 +172,139 @@ class ConfiguredDoorbird():
|
|||
"""Get custom url for device."""
|
||||
return self._custom_url
|
||||
|
||||
@property
|
||||
def monitored_events(self):
|
||||
"""Get monitored events."""
|
||||
if self._monitored_events is None:
|
||||
return []
|
||||
def update_schedule(self, hass):
|
||||
"""Register monitored sensors and deregister others."""
|
||||
from doorbirdpy import DoorBirdScheduleEntrySchedule
|
||||
|
||||
return self._monitored_events
|
||||
# Create a new schedule (24/7)
|
||||
schedule = DoorBirdScheduleEntrySchedule()
|
||||
schedule.add_weekday(0, 604800) # seconds in a week
|
||||
|
||||
# Get the URL of this server
|
||||
hass_url = hass.config.api.base_url
|
||||
|
||||
# Override url if another is specified in the configuration
|
||||
if self.custom_url is not None:
|
||||
hass_url = self.custom_url
|
||||
|
||||
# For all sensor types (enabled + disabled)
|
||||
for sensor_type in SENSOR_TYPES:
|
||||
name = '{} {}'.format(self.name, SENSOR_TYPES[sensor_type]['name'])
|
||||
slug = slugify(name)
|
||||
|
||||
url = '{}{}/{}?token={}'.format(hass_url, API_URL, slug,
|
||||
self._token)
|
||||
if sensor_type in self._monitored_events:
|
||||
# Enabled -> register
|
||||
self._register_event(url, sensor_type, schedule)
|
||||
_LOGGER.info('Registered for %s pushes from DoorBird "%s". '
|
||||
'Use the "%s_%s" event for automations.',
|
||||
sensor_type, self.name, DOMAIN, slug)
|
||||
|
||||
# Register a dummy listener so event is listed in GUI
|
||||
hass.bus.listen('{}_{}'.format(DOMAIN, slug), handle_event)
|
||||
else:
|
||||
# Disabled -> deregister
|
||||
self._deregister_event(url, sensor_type)
|
||||
_LOGGER.info('Deregistered %s pushes from DoorBird "%s". '
|
||||
'If any old favorites or schedules remain, '
|
||||
'follow the instructions in the component '
|
||||
'documentation to clear device registrations.',
|
||||
sensor_type, self.name)
|
||||
|
||||
def _register_event(self, hass_url, event, schedule):
|
||||
"""Add a schedule entry in the device for a sensor."""
|
||||
from doorbirdpy import DoorBirdScheduleEntryOutput
|
||||
|
||||
# Register HA URL as webhook if not already, then get the ID
|
||||
if not self.webhook_is_registered(hass_url):
|
||||
self.device.change_favorite('http',
|
||||
'Home Assistant on {} ({} events)'
|
||||
.format(hass_url, event), hass_url)
|
||||
fav_id = self.get_webhook_id(hass_url)
|
||||
|
||||
if not fav_id:
|
||||
_LOGGER.warning('Could not find favorite for URL "%s". '
|
||||
'Skipping sensor "%s".', hass_url, event)
|
||||
return
|
||||
|
||||
# Add event handling to device schedule
|
||||
output = DoorBirdScheduleEntryOutput(event='http',
|
||||
param=fav_id,
|
||||
schedule=schedule)
|
||||
|
||||
if event == 'doorbell':
|
||||
# Repeat edit for each monitored doorbell number
|
||||
for doorbell in self._doorbell_nums:
|
||||
entry = self.device.get_schedule_entry(event, str(doorbell))
|
||||
entry.output.append(output)
|
||||
self.device.change_schedule(entry)
|
||||
else:
|
||||
entry = self.device.get_schedule_entry(event)
|
||||
entry.output.append(output)
|
||||
self.device.change_schedule(entry)
|
||||
|
||||
def _deregister_event(self, hass_url, event):
|
||||
"""Remove the schedule entry in the device for a sensor."""
|
||||
# Find the right favorite and delete it
|
||||
fav_id = self.get_webhook_id(hass_url)
|
||||
if not fav_id:
|
||||
return
|
||||
|
||||
self._device.delete_favorite('http', fav_id)
|
||||
|
||||
if event == 'doorbell':
|
||||
# Delete the matching schedule for each doorbell number
|
||||
for doorbell in self._doorbell_nums:
|
||||
self._delete_schedule_action(event, fav_id, str(doorbell))
|
||||
else:
|
||||
self._delete_schedule_action(event, fav_id)
|
||||
|
||||
def _delete_schedule_action(self, sensor, fav_id, param=""):
|
||||
"""Remove the HA output from a schedule."""
|
||||
entries = self._device.schedule()
|
||||
for entry in entries:
|
||||
if entry.input != sensor or entry.param != param:
|
||||
continue
|
||||
|
||||
for action in entry.output:
|
||||
if action.event == 'http' and action.param == fav_id:
|
||||
entry.output.remove(action)
|
||||
|
||||
self._device.change_schedule(entry)
|
||||
|
||||
def webhook_is_registered(self, ha_url, favs=None) -> bool:
|
||||
"""Return whether the given URL is registered as a device favorite."""
|
||||
favs = favs if favs else self.device.favorites()
|
||||
|
||||
if 'http' not in favs:
|
||||
return False
|
||||
|
||||
for fav in favs['http'].values():
|
||||
if fav['value'] == ha_url:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def get_webhook_id(self, ha_url, favs=None) -> str or None:
|
||||
"""
|
||||
Return the device favorite ID for the given URL.
|
||||
|
||||
The favorite must exist or there will be problems.
|
||||
"""
|
||||
favs = favs if favs else self.device.favorites()
|
||||
|
||||
if 'http' not in favs:
|
||||
return None
|
||||
|
||||
for fav_id in favs['http']:
|
||||
if favs['http'][fav_id]['value'] == ha_url:
|
||||
return fav_id
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class DoorbirdRequestView(HomeAssistantView):
|
||||
class DoorBirdRequestView(HomeAssistantView):
|
||||
"""Provide a page for the device to call."""
|
||||
|
||||
requires_auth = False
|
||||
|
@ -168,11 +312,63 @@ class DoorbirdRequestView(HomeAssistantView):
|
|||
name = API_URL[1:].replace('/', ':')
|
||||
extra_urls = [API_URL + '/{sensor}']
|
||||
|
||||
def __init__(self, token):
|
||||
"""Initialize view."""
|
||||
HomeAssistantView.__init__(self)
|
||||
self._token = token
|
||||
|
||||
# pylint: disable=no-self-use
|
||||
async def get(self, request, sensor):
|
||||
"""Respond to requests from the device."""
|
||||
from aiohttp import web
|
||||
hass = request.app['hass']
|
||||
|
||||
request_token = request.query.get('token')
|
||||
|
||||
authenticated = request_token == self._token
|
||||
|
||||
if request_token == '' or not authenticated:
|
||||
return web.Response(status=401, text='Unauthorized')
|
||||
|
||||
hass.bus.async_fire('{}_{}'.format(DOMAIN, sensor))
|
||||
|
||||
return 'OK'
|
||||
return web.Response(status=200, text='OK')
|
||||
|
||||
|
||||
class DoorBirdCleanupView(HomeAssistantView):
|
||||
"""Provide a URL to call to delete ALL webhooks/schedules."""
|
||||
|
||||
requires_auth = False
|
||||
url = API_URL + '/clear/{slug}'
|
||||
name = 'DoorBird Cleanup'
|
||||
|
||||
def __init__(self, token):
|
||||
"""Initialize view."""
|
||||
HomeAssistantView.__init__(self)
|
||||
self._token = token
|
||||
|
||||
# pylint: disable=no-self-use
|
||||
async def get(self, request, slug):
|
||||
"""Act on requests."""
|
||||
from aiohttp import web
|
||||
hass = request.app['hass']
|
||||
|
||||
request_token = request.query.get('token')
|
||||
|
||||
authenticated = request_token == self._token
|
||||
|
||||
if request_token == '' or not authenticated:
|
||||
return web.Response(status=401, text='Unauthorized')
|
||||
|
||||
device = get_doorstation_by_slug(hass, slug)
|
||||
|
||||
# No matching device
|
||||
if device is None:
|
||||
return web.Response(status=404,
|
||||
text='Device slug {} not found'.format(slug))
|
||||
|
||||
hass.bus.async_fire(RESET_DEVICE_FAVORITES,
|
||||
{'slug': slug})
|
||||
|
||||
message = 'Clearing schedule for {}'.format(slug)
|
||||
return web.Response(status=200, text=message)
|
||||
|
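A short sketch of reacting to the push events fired by DoorBirdRequestView.get() above; the event name assumes a doorstation named "Front Door" and the 'doorbell' sensor (slugified to front_door_button), and a hass instance already in scope.

# Hypothetical listener; the DoorBird calls
# {hass_url}/api/doorbird/front_door_button?token=<token>, and the view
# then fires this event on the Home Assistant bus.
def handle_front_door_button(event):
    """Log DoorBird doorbell pushes."""
    print('DoorBird event received:', event.data)

hass.bus.listen('doorbird_front_door_button', handle_front_door_button)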
|
|
@ -204,3 +204,13 @@ xiaomi_miio_set_dry_off:
|
|||
entity_id:
|
||||
description: Name of the xiaomi miio entity.
|
||||
example: 'fan.xiaomi_miio_device'
|
||||
|
||||
wemo_set_humidity:
|
||||
description: Set the target humidity of WeMo humidifier devices.
|
||||
fields:
|
||||
entity_id:
|
||||
description: Names of the WeMo humidifier entities (0 or more entities, if no entity_id is provided, all WeMo humidifiers will have the target humidity set).
|
||||
example: 'fan.wemo_humidifier'
|
||||
target_humidity:
|
||||
description: Target humidity. This is a float value between 0 and 100, but will be mapped to the humidity levels that WeMo humidifiers support (45, 50, 55, 60, and 100/Max) by rounding the value down to the nearest supported value.
|
||||
example: 56.5
|
||||
|
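As a usage sketch (assuming a hass instance and the example entity id above), the new service could be called from a coroutine like this; per the rounding rule described above, 56.5 is mapped down to the supported 55 level.

# Illustrative call of the wemo_set_humidity service registered by the
# new fan/wemo.py platform below.
await hass.services.async_call(
    'fan', 'wemo_set_humidity',
    {'entity_id': 'fan.wemo_humidifier', 'target_humidity': 56.5},
    blocking=True)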
|
305
homeassistant/components/fan/wemo.py
Normal file
|
@ -0,0 +1,305 @@
|
|||
"""
|
||||
Support for WeMo humidifier.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/fan.wemo/
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
|
||||
import requests
|
||||
import async_timeout
|
||||
import voluptuous as vol
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
from homeassistant.components.fan import (
|
||||
DOMAIN, SUPPORT_SET_SPEED, FanEntity,
|
||||
SPEED_OFF, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH)
|
||||
from homeassistant.exceptions import PlatformNotReady
|
||||
from homeassistant.const import ATTR_ENTITY_ID
|
||||
|
||||
DEPENDENCIES = ['wemo']
|
||||
SCAN_INTERVAL = timedelta(seconds=10)
|
||||
DATA_KEY = 'fan.wemo'
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_CURRENT_HUMIDITY = 'current_humidity'
|
||||
ATTR_TARGET_HUMIDITY = 'target_humidity'
|
||||
ATTR_FAN_MODE = 'fan_mode'
|
||||
ATTR_FILTER_LIFE = 'filter_life'
|
||||
ATTR_FILTER_EXPIRED = 'filter_expired'
|
||||
ATTR_WATER_LEVEL = 'water_level'
|
||||
|
||||
# The WEMO_ constants below come from pywemo itself
|
||||
WEMO_ON = 1
|
||||
WEMO_OFF = 0
|
||||
|
||||
WEMO_HUMIDITY_45 = 0
|
||||
WEMO_HUMIDITY_50 = 1
|
||||
WEMO_HUMIDITY_55 = 2
|
||||
WEMO_HUMIDITY_60 = 3
|
||||
WEMO_HUMIDITY_100 = 4
|
||||
|
||||
WEMO_FAN_OFF = 0
|
||||
WEMO_FAN_MINIMUM = 1
|
||||
WEMO_FAN_LOW = 2 # Not used due to limitations of the base fan implementation
|
||||
WEMO_FAN_MEDIUM = 3
|
||||
WEMO_FAN_HIGH = 4 # Not used due to limitations of the base fan implementation
|
||||
WEMO_FAN_MAXIMUM = 5
|
||||
|
||||
WEMO_WATER_EMPTY = 0
|
||||
WEMO_WATER_LOW = 1
|
||||
WEMO_WATER_GOOD = 2
|
||||
|
||||
SUPPORTED_SPEEDS = [
|
||||
SPEED_OFF, SPEED_LOW,
|
||||
SPEED_MEDIUM, SPEED_HIGH]
|
||||
|
||||
SUPPORTED_FEATURES = SUPPORT_SET_SPEED
|
||||
|
||||
# Since the base fan object supports a set list of fan speeds,
|
||||
# we have to reuse some of them when mapping to the 5 WeMo speeds
|
||||
WEMO_FAN_SPEED_TO_HASS = {
|
||||
WEMO_FAN_OFF: SPEED_OFF,
|
||||
WEMO_FAN_MINIMUM: SPEED_LOW,
|
||||
WEMO_FAN_LOW: SPEED_LOW, # Reusing SPEED_LOW
|
||||
WEMO_FAN_MEDIUM: SPEED_MEDIUM,
|
||||
WEMO_FAN_HIGH: SPEED_HIGH, # Reusing SPEED_HIGH
|
||||
WEMO_FAN_MAXIMUM: SPEED_HIGH
|
||||
}
|
||||
|
||||
# Because we reused mappings in the previous dict, we have to filter them
|
||||
# back out in this dict, or else we would have duplicate keys
|
||||
HASS_FAN_SPEED_TO_WEMO = {v: k for (k, v) in WEMO_FAN_SPEED_TO_HASS.items()
|
||||
if k not in [WEMO_FAN_LOW, WEMO_FAN_HIGH]}
|
||||
|
||||
SERVICE_SET_HUMIDITY = 'wemo_set_humidity'
|
||||
|
||||
SET_HUMIDITY_SCHEMA = vol.Schema({
|
||||
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
|
||||
vol.Required(ATTR_TARGET_HUMIDITY):
|
||||
vol.All(vol.Coerce(float), vol.Range(min=0, max=100))
|
||||
})
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
"""Set up discovered WeMo humidifiers."""
|
||||
from pywemo import discovery
|
||||
|
||||
if DATA_KEY not in hass.data:
|
||||
hass.data[DATA_KEY] = {}
|
||||
|
||||
if discovery_info is None:
|
||||
return
|
||||
|
||||
location = discovery_info['ssdp_description']
|
||||
mac = discovery_info['mac_address']
|
||||
|
||||
try:
|
||||
device = WemoHumidifier(
|
||||
discovery.device_from_description(location, mac))
|
||||
except (requests.exceptions.ConnectionError,
|
||||
requests.exceptions.Timeout) as err:
|
||||
_LOGGER.error('Unable to access %s (%s)', location, err)
|
||||
raise PlatformNotReady
|
||||
|
||||
hass.data[DATA_KEY][device.entity_id] = device
|
||||
add_entities([device])
|
||||
|
||||
def service_handle(service):
|
||||
"""Handle the WeMo humidifier services."""
|
||||
entity_ids = service.data.get(ATTR_ENTITY_ID)
|
||||
target_humidity = service.data.get(ATTR_TARGET_HUMIDITY)
|
||||
|
||||
if entity_ids:
|
||||
humidifiers = [device for device in hass.data[DATA_KEY].values() if
|
||||
device.entity_id in entity_ids]
|
||||
else:
|
||||
humidifiers = hass.data[DATA_KEY].values()
|
||||
|
||||
for humidifier in humidifiers:
|
||||
humidifier.set_humidity(target_humidity)
|
||||
|
||||
# Register service(s)
|
||||
hass.services.register(
|
||||
DOMAIN, SERVICE_SET_HUMIDITY, service_handle,
|
||||
schema=SET_HUMIDITY_SCHEMA)
|
||||
|
||||
|
||||
class WemoHumidifier(FanEntity):
|
||||
"""Representation of a WeMo humidifier."""
|
||||
|
||||
def __init__(self, device):
|
||||
"""Initialize the WeMo switch."""
|
||||
self.wemo = device
|
||||
self._state = None
|
||||
self._available = True
|
||||
self._update_lock = None
|
||||
|
||||
self._fan_mode = None
|
||||
self._target_humidity = None
|
||||
self._current_humidity = None
|
||||
self._water_level = None
|
||||
self._filter_life = None
|
||||
self._filter_expired = None
|
||||
self._last_fan_on_mode = WEMO_FAN_MEDIUM
|
||||
|
||||
# look up model name, name, and serial number
|
||||
# once as it incurs network traffic
|
||||
self._model_name = self.wemo.model_name
|
||||
self._name = self.wemo.name
|
||||
self._serialnumber = self.wemo.serialnumber
|
||||
|
||||
def _subscription_callback(self, _device, _type, _params):
|
||||
"""Update the state by the Wemo device."""
|
||||
_LOGGER.info("Subscription update for %s", self.name)
|
||||
updated = self.wemo.subscription_update(_type, _params)
|
||||
self.hass.add_job(
|
||||
self._async_locked_subscription_callback(not updated))
|
||||
|
||||
async def _async_locked_subscription_callback(self, force_update):
|
||||
"""Handle an update from a subscription."""
|
||||
# If an update is in progress, we don't do anything
|
||||
if self._update_lock.locked():
|
||||
return
|
||||
|
||||
await self._async_locked_update(force_update)
|
||||
self.async_schedule_update_ha_state()
|
||||
|
||||
@property
|
||||
def unique_id(self):
|
||||
"""Return the ID of this WeMo humidifier."""
|
||||
return self._serialnumber
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the humidifier if any."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return true if switch is on. Standby is on."""
|
||||
return self._state
|
||||
|
||||
@property
|
||||
def available(self):
|
||||
"""Return true if switch is available."""
|
||||
return self._available
|
||||
|
||||
@property
|
||||
def icon(self):
|
||||
"""Return the icon of device based on its type."""
|
||||
return 'mdi:water-percent'
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return device specific state attributes."""
|
||||
return {
|
||||
ATTR_CURRENT_HUMIDITY: self._current_humidity,
|
||||
ATTR_TARGET_HUMIDITY: self._target_humidity,
|
||||
ATTR_FAN_MODE: self._fan_mode,
|
||||
ATTR_WATER_LEVEL: self._water_level,
|
||||
ATTR_FILTER_LIFE: self._filter_life,
|
||||
ATTR_FILTER_EXPIRED: self._filter_expired
|
||||
}
|
||||
|
||||
@property
|
||||
def speed(self) -> str:
|
||||
"""Return the current speed."""
|
||||
return WEMO_FAN_SPEED_TO_HASS.get(self._fan_mode)
|
||||
|
||||
@property
|
||||
def speed_list(self: FanEntity) -> list:
|
||||
"""Get the list of available speeds."""
|
||||
return SUPPORTED_SPEEDS
|
||||
|
||||
@property
|
||||
def supported_features(self: FanEntity) -> int:
|
||||
"""Flag supported features."""
|
||||
return SUPPORTED_FEATURES
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Wemo humidifier added to HASS."""
|
||||
# Define inside async context so we know our event loop
|
||||
self._update_lock = asyncio.Lock()
|
||||
|
||||
registry = self.hass.components.wemo.SUBSCRIPTION_REGISTRY
|
||||
await self.hass.async_add_executor_job(registry.register, self.wemo)
|
||||
registry.on(self.wemo, None, self._subscription_callback)
|
||||
|
||||
async def async_update(self):
|
||||
"""Update WeMo state.
|
||||
|
||||
Wemo has an aggressive retry logic that sometimes can take over a
|
||||
minute to return. If we don't get a state after 5 seconds, assume the
|
||||
Wemo humidifier is unreachable. If update goes through, it will be made
|
||||
available again.
|
||||
"""
|
||||
# If an update is in progress, we don't do anything
|
||||
if self._update_lock.locked():
|
||||
return
|
||||
|
||||
try:
|
||||
with async_timeout.timeout(5):
|
||||
await asyncio.shield(self._async_locked_update(True))
|
||||
except asyncio.TimeoutError:
|
||||
_LOGGER.warning('Lost connection to %s', self.name)
|
||||
self._available = False
|
||||
|
||||
async def _async_locked_update(self, force_update):
|
||||
"""Try updating within an async lock."""
|
||||
async with self._update_lock:
|
||||
await self.hass.async_add_executor_job(self._update, force_update)
|
||||
|
||||
def _update(self, force_update=True):
|
||||
"""Update the device state."""
|
||||
try:
|
||||
self._state = self.wemo.get_state(force_update)
|
||||
|
||||
self._fan_mode = self.wemo.fan_mode_string
|
||||
self._target_humidity = self.wemo.desired_humidity_percent
|
||||
self._current_humidity = self.wemo.current_humidity_percent
|
||||
self._water_level = self.wemo.water_level_string
|
||||
self._filter_life = self.wemo.filter_life_percent
|
||||
self._filter_expired = self.wemo.filter_expired
|
||||
|
||||
if self.wemo.fan_mode != WEMO_FAN_OFF:
|
||||
self._last_fan_on_mode = self.wemo.fan_mode
|
||||
|
||||
if not self._available:
|
||||
_LOGGER.info('Reconnected to %s', self.name)
|
||||
self._available = True
|
||||
except AttributeError as err:
|
||||
_LOGGER.warning("Could not update status for %s (%s)",
|
||||
self.name, err)
|
||||
self._available = False
|
||||
|
||||
def turn_on(self: FanEntity, speed: str = None, **kwargs) -> None:
|
||||
"""Turn the switch on."""
|
||||
if speed is None:
|
||||
self.wemo.set_state(self._last_fan_on_mode)
|
||||
else:
|
||||
self.set_speed(speed)
|
||||
|
||||
def turn_off(self: FanEntity, **kwargs) -> None:
|
||||
"""Turn the switch off."""
|
||||
self.wemo.set_state(WEMO_FAN_OFF)
|
||||
|
||||
def set_speed(self: FanEntity, speed: str) -> None:
|
||||
"""Set the fan_mode of the Humidifier."""
|
||||
self.wemo.set_state(HASS_FAN_SPEED_TO_WEMO.get(speed))
|
||||
|
||||
def set_humidity(self: FanEntity, humidity: float) -> None:
|
||||
"""Set the target humidity level for the Humidifier."""
|
||||
if humidity < 50:
|
||||
self.wemo.set_humidity(WEMO_HUMIDITY_45)
|
||||
elif 50 <= humidity < 55:
|
||||
self.wemo.set_humidity(WEMO_HUMIDITY_50)
|
||||
elif 55 <= humidity < 60:
|
||||
self.wemo.set_humidity(WEMO_HUMIDITY_55)
|
||||
elif 60 <= humidity < 100:
|
||||
self.wemo.set_humidity(WEMO_HUMIDITY_60)
|
||||
elif humidity >= 100:
|
||||
self.wemo.set_humidity(WEMO_HUMIDITY_100)
|
|
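A quick illustration of the two speed maps defined near the top of fan/wemo.py above: Home Assistant's named speeds cover WeMo's five fan modes, so the reverse map intentionally drops the two reused entries.

# Using the constants from the platform above; purely illustrative.
assert HASS_FAN_SPEED_TO_WEMO[SPEED_MEDIUM] == WEMO_FAN_MEDIUM   # 'medium' -> 3
assert WEMO_FAN_SPEED_TO_HASS[WEMO_FAN_MAXIMUM] == SPEED_HIGH    # 5 -> 'high'
assert WEMO_FAN_SPEED_TO_HASS[WEMO_FAN_HIGH] == SPEED_HIGH       # 4 also -> 'high'
assert HASS_FAN_SPEED_TO_WEMO[SPEED_HIGH] == WEMO_FAN_MAXIMUM    # 'high' -> 5, not 4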
@ -18,7 +18,7 @@ from homeassistant.const import (
|
|||
from homeassistant.exceptions import PlatformNotReady
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
REQUIREMENTS = ['python-miio==0.4.2', 'construct==2.9.45']
|
||||
REQUIREMENTS = ['python-miio==0.4.3', 'construct==2.9.45']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@ -348,7 +348,7 @@ async def async_setup_platform(hass, config, async_add_entities,
|
|||
device = XiaomiAirPurifier(name, air_purifier, model, unique_id)
|
||||
elif model.startswith('zhimi.humidifier.'):
|
||||
from miio import AirHumidifier
|
||||
air_humidifier = AirHumidifier(host, token)
|
||||
air_humidifier = AirHumidifier(host, token, model=model)
|
||||
device = XiaomiAirHumidifier(name, air_humidifier, model, unique_id)
|
||||
else:
|
||||
_LOGGER.error(
|
||||
|
|
|
@ -37,14 +37,12 @@ CONF_INPUT = 'input'
|
|||
CONF_FFMPEG_BIN = 'ffmpeg_bin'
|
||||
CONF_EXTRA_ARGUMENTS = 'extra_arguments'
|
||||
CONF_OUTPUT = 'output'
|
||||
CONF_RUN_TEST = 'run_test'
|
||||
|
||||
DEFAULT_BINARY = 'ffmpeg'
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: vol.Schema({
|
||||
vol.Optional(CONF_FFMPEG_BIN, default=DEFAULT_BINARY): cv.string,
|
||||
vol.Optional(CONF_RUN_TEST): cv.boolean,
|
||||
}),
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
|
||||
|
|
|
@ -24,7 +24,7 @@ from homeassistant.core import callback
|
|||
from homeassistant.helpers.translation import async_get_translations
|
||||
from homeassistant.loader import bind_hass
|
||||
|
||||
REQUIREMENTS = ['home-assistant-frontend==20181026.4']
|
||||
REQUIREMENTS = ['home-assistant-frontend==20181103.3']
|
||||
|
||||
DOMAIN = 'frontend'
|
||||
DEPENDENCIES = ['api', 'websocket_api', 'http', 'system_log',
|
||||
|
|
|
@ -20,7 +20,7 @@ from homeassistant.helpers.dispatcher import (
|
|||
async_dispatcher_connect, dispatcher_send)
|
||||
from homeassistant.helpers.event import track_time_interval
|
||||
|
||||
REQUIREMENTS = ['geojson_client==0.1']
|
||||
REQUIREMENTS = ['geojson_client==0.3']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -21,7 +21,7 @@ from homeassistant.helpers.dispatcher import (
|
|||
async_dispatcher_connect, dispatcher_send)
|
||||
from homeassistant.helpers.event import track_time_interval
|
||||
|
||||
REQUIREMENTS = ['geojson_client==0.1']
|
||||
REQUIREMENTS = ['geojson_client==0.3']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -14,7 +14,8 @@ CONF_ROOM_HINT = 'room'
|
|||
|
||||
DEFAULT_EXPOSE_BY_DEFAULT = True
|
||||
DEFAULT_EXPOSED_DOMAINS = [
|
||||
'switch', 'light', 'group', 'media_player', 'fan', 'cover', 'climate'
|
||||
'climate', 'cover', 'fan', 'group', 'input_boolean', 'light',
|
||||
'media_player', 'scene', 'script', 'switch', 'vacuum',
|
||||
]
|
||||
CLIMATE_MODE_HEATCOOL = 'heatcool'
|
||||
CLIMATE_SUPPORTED_MODES = {'heat', 'cool', 'off', 'on', CLIMATE_MODE_HEATCOOL}
|
||||
|
@ -22,7 +23,9 @@ CLIMATE_SUPPORTED_MODES = {'heat', 'cool', 'off', 'on', CLIMATE_MODE_HEATCOOL}
|
|||
PREFIX_TYPES = 'action.devices.types.'
|
||||
TYPE_LIGHT = PREFIX_TYPES + 'LIGHT'
|
||||
TYPE_SWITCH = PREFIX_TYPES + 'SWITCH'
|
||||
TYPE_VACUUM = PREFIX_TYPES + 'VACUUM'
|
||||
TYPE_SCENE = PREFIX_TYPES + 'SCENE'
|
||||
TYPE_FAN = PREFIX_TYPES + 'FAN'
|
||||
TYPE_THERMOSTAT = PREFIX_TYPES + 'THERMOSTAT'
|
||||
|
||||
SERVICE_REQUEST_SYNC = 'request_sync'
|
||||
|
|
|
@ -19,11 +19,13 @@ from homeassistant.components import (
|
|||
scene,
|
||||
script,
|
||||
switch,
|
||||
vacuum,
|
||||
)
|
||||
|
||||
from . import trait
|
||||
from .const import (
|
||||
TYPE_LIGHT, TYPE_SCENE, TYPE_SWITCH, TYPE_THERMOSTAT,
|
||||
TYPE_LIGHT, TYPE_SCENE, TYPE_SWITCH, TYPE_VACUUM,
|
||||
TYPE_THERMOSTAT, TYPE_FAN,
|
||||
CONF_ALIASES, CONF_ROOM_HINT,
|
||||
ERR_NOT_SUPPORTED, ERR_PROTOCOL_ERROR, ERR_DEVICE_OFFLINE,
|
||||
ERR_UNKNOWN_ERROR
|
||||
|
@ -36,7 +38,7 @@ _LOGGER = logging.getLogger(__name__)
|
|||
DOMAIN_TO_GOOGLE_TYPES = {
|
||||
climate.DOMAIN: TYPE_THERMOSTAT,
|
||||
cover.DOMAIN: TYPE_SWITCH,
|
||||
fan.DOMAIN: TYPE_SWITCH,
|
||||
fan.DOMAIN: TYPE_FAN,
|
||||
group.DOMAIN: TYPE_SWITCH,
|
||||
input_boolean.DOMAIN: TYPE_SWITCH,
|
||||
light.DOMAIN: TYPE_LIGHT,
|
||||
|
@ -44,6 +46,7 @@ DOMAIN_TO_GOOGLE_TYPES = {
|
|||
scene.DOMAIN: TYPE_SCENE,
|
||||
script.DOMAIN: TYPE_SCENE,
|
||||
switch.DOMAIN: TYPE_SWITCH,
|
||||
vacuum.DOMAIN: TYPE_VACUUM,
|
||||
}
|
||||
|
||||
|
||||
|
@ -213,7 +216,7 @@ async def _process(hass, config, message):
|
|||
'requestId': request_id,
|
||||
'payload': {'errorCode': err.code}
|
||||
}
|
||||
except Exception as err: # pylint: disable=broad-except
|
||||
except Exception: # pylint: disable=broad-except
|
||||
_LOGGER.exception('Unexpected error')
|
||||
return {
|
||||
'requestId': request_id,
|
||||
|
|
|
@ -13,6 +13,7 @@ from homeassistant.components import (
|
|||
scene,
|
||||
script,
|
||||
switch,
|
||||
vacuum,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
|
@ -21,6 +22,7 @@ from homeassistant.const import (
|
|||
STATE_OFF,
|
||||
TEMP_CELSIUS,
|
||||
TEMP_FAHRENHEIT,
|
||||
ATTR_SUPPORTED_FEATURES,
|
||||
)
|
||||
from homeassistant.util import color as color_util, temperature as temp_util
|
||||
|
||||
|
@ -31,6 +33,8 @@ _LOGGER = logging.getLogger(__name__)
|
|||
|
||||
PREFIX_TRAITS = 'action.devices.traits.'
|
||||
TRAIT_ONOFF = PREFIX_TRAITS + 'OnOff'
|
||||
TRAIT_DOCK = PREFIX_TRAITS + 'Dock'
|
||||
TRAIT_STARTSTOP = PREFIX_TRAITS + 'StartStop'
|
||||
TRAIT_BRIGHTNESS = PREFIX_TRAITS + 'Brightness'
|
||||
TRAIT_COLOR_SPECTRUM = PREFIX_TRAITS + 'ColorSpectrum'
|
||||
TRAIT_COLOR_TEMP = PREFIX_TRAITS + 'ColorTemperature'
|
||||
|
@ -39,6 +43,9 @@ TRAIT_TEMPERATURE_SETTING = PREFIX_TRAITS + 'TemperatureSetting'
|
|||
|
||||
PREFIX_COMMANDS = 'action.devices.commands.'
|
||||
COMMAND_ONOFF = PREFIX_COMMANDS + 'OnOff'
|
||||
COMMAND_DOCK = PREFIX_COMMANDS + 'Dock'
|
||||
COMMAND_STARTSTOP = PREFIX_COMMANDS + 'StartStop'
|
||||
COMMAND_PAUSEUNPAUSE = PREFIX_COMMANDS + 'PauseUnpause'
|
||||
COMMAND_BRIGHTNESS_ABSOLUTE = PREFIX_COMMANDS + 'BrightnessAbsolute'
|
||||
COMMAND_COLOR_ABSOLUTE = PREFIX_COMMANDS + 'ColorAbsolute'
|
||||
COMMAND_ACTIVATE_SCENE = PREFIX_COMMANDS + 'ActivateScene'
|
||||
|
@ -392,6 +399,96 @@ class SceneTrait(_Trait):
|
|||
}, blocking=self.state.domain != script.DOMAIN)
|
||||
|
||||
|
||||
@register_trait
|
||||
class DockTrait(_Trait):
|
||||
"""Trait to offer dock functionality.
|
||||
|
||||
https://developers.google.com/actions/smarthome/traits/dock
|
||||
"""
|
||||
|
||||
name = TRAIT_DOCK
|
||||
commands = [
|
||||
COMMAND_DOCK
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def supported(domain, features):
|
||||
"""Test if state is supported."""
|
||||
return domain == vacuum.DOMAIN
|
||||
|
||||
def sync_attributes(self):
|
||||
"""Return dock attributes for a sync request."""
|
||||
return {}
|
||||
|
||||
def query_attributes(self):
|
||||
"""Return dock query attributes."""
|
||||
return {'isDocked': self.state.state == vacuum.STATE_DOCKED}
|
||||
|
||||
async def execute(self, command, params):
|
||||
"""Execute a dock command."""
|
||||
await self.hass.services.async_call(
|
||||
self.state.domain, vacuum.SERVICE_RETURN_TO_BASE, {
|
||||
ATTR_ENTITY_ID: self.state.entity_id
|
||||
}, blocking=True)
|
||||
|
||||
|
||||
@register_trait
|
||||
class StartStopTrait(_Trait):
|
||||
"""Trait to offer StartStop functionality.
|
||||
|
||||
https://developers.google.com/actions/smarthome/traits/startstop
|
||||
"""
|
||||
|
||||
name = TRAIT_STARTSTOP
|
||||
commands = [
|
||||
COMMAND_STARTSTOP,
|
||||
COMMAND_PAUSEUNPAUSE
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def supported(domain, features):
|
||||
"""Test if state is supported."""
|
||||
return domain == vacuum.DOMAIN
|
||||
|
||||
def sync_attributes(self):
|
||||
"""Return StartStop attributes for a sync request."""
|
||||
return {'pausable':
|
||||
self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
|
||||
& vacuum.SUPPORT_PAUSE != 0}
|
||||
|
||||
def query_attributes(self):
|
||||
"""Return StartStop query attributes."""
|
||||
return {
|
||||
'isRunning': self.state.state == vacuum.STATE_CLEANING,
|
||||
'isPaused': self.state.state == vacuum.STATE_PAUSED,
|
||||
}
|
||||
|
||||
async def execute(self, command, params):
|
||||
"""Execute a StartStop command."""
|
||||
if command == COMMAND_STARTSTOP:
|
||||
if params['start']:
|
||||
await self.hass.services.async_call(
|
||||
self.state.domain, vacuum.SERVICE_START, {
|
||||
ATTR_ENTITY_ID: self.state.entity_id
|
||||
}, blocking=True)
|
||||
else:
|
||||
await self.hass.services.async_call(
|
||||
self.state.domain, vacuum.SERVICE_STOP, {
|
||||
ATTR_ENTITY_ID: self.state.entity_id
|
||||
}, blocking=True)
|
||||
elif command == COMMAND_PAUSEUNPAUSE:
|
||||
if params['pause']:
|
||||
await self.hass.services.async_call(
|
||||
self.state.domain, vacuum.SERVICE_PAUSE, {
|
||||
ATTR_ENTITY_ID: self.state.entity_id
|
||||
}, blocking=True)
|
||||
else:
|
||||
await self.hass.services.async_call(
|
||||
self.state.domain, vacuum.SERVICE_START, {
|
||||
ATTR_ENTITY_ID: self.state.entity_id
|
||||
}, blocking=True)
|
||||
|
||||
|
||||
@register_trait
|
||||
class TemperatureSettingTrait(_Trait):
|
||||
"""Trait to offer handling both temperature point and modes functionality.
|
||||
|
|
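For reference, a hedged illustration (field names assumed from the Google EXECUTE handling in this file) of how the new vacuum traits above dispatch commands to vacuum services:

# Purely illustrative command/param pairs and the service each one
# reaches in execute() above.
dock = (COMMAND_DOCK, {})                           # -> vacuum.return_to_base
start = (COMMAND_STARTSTOP, {'start': True})        # -> vacuum.start
stop = (COMMAND_STARTSTOP, {'start': False})        # -> vacuum.stop
pause = (COMMAND_PAUSEUNPAUSE, {'pause': True})     # -> vacuum.pause
unpause = (COMMAND_PAUSEUNPAUSE, {'pause': False})  # -> vacuum.start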
171
homeassistant/components/greeneye_monitor.py
Normal file
|
@ -0,0 +1,171 @@
|
|||
"""
|
||||
Support for monitoring a GreenEye Monitor energy monitor.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/greeneye_monitor/
|
||||
"""
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_NAME,
|
||||
CONF_PORT,
|
||||
CONF_TEMPERATURE_UNIT,
|
||||
EVENT_HOMEASSISTANT_STOP)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.discovery import async_load_platform
|
||||
|
||||
REQUIREMENTS = ['greeneye_monitor==0.1']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_CHANNELS = 'channels'
|
||||
CONF_COUNTED_QUANTITY = 'counted_quantity'
|
||||
CONF_COUNTED_QUANTITY_PER_PULSE = 'counted_quantity_per_pulse'
|
||||
CONF_MONITOR_SERIAL_NUMBER = 'monitor'
|
||||
CONF_MONITORS = 'monitors'
|
||||
CONF_NET_METERING = 'net_metering'
|
||||
CONF_NUMBER = 'number'
|
||||
CONF_PULSE_COUNTERS = 'pulse_counters'
|
||||
CONF_SERIAL_NUMBER = 'serial_number'
|
||||
CONF_SENSORS = 'sensors'
|
||||
CONF_SENSOR_TYPE = 'sensor_type'
|
||||
CONF_TEMPERATURE_SENSORS = 'temperature_sensors'
|
||||
CONF_TIME_UNIT = 'time_unit'
|
||||
|
||||
DATA_GREENEYE_MONITOR = 'greeneye_monitor'
|
||||
DOMAIN = 'greeneye_monitor'
|
||||
|
||||
SENSOR_TYPE_CURRENT = 'current_sensor'
|
||||
SENSOR_TYPE_PULSE_COUNTER = 'pulse_counter'
|
||||
SENSOR_TYPE_TEMPERATURE = 'temperature_sensor'
|
||||
|
||||
TEMPERATURE_UNIT_CELSIUS = 'C'
|
||||
|
||||
TIME_UNIT_SECOND = 's'
|
||||
TIME_UNIT_MINUTE = 'min'
|
||||
TIME_UNIT_HOUR = 'h'
|
||||
|
||||
TEMPERATURE_SENSOR_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_NUMBER): vol.Range(1, 8),
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
})
|
||||
|
||||
TEMPERATURE_SENSORS_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_TEMPERATURE_UNIT): cv.temperature_unit,
|
||||
vol.Required(CONF_SENSORS): vol.All(cv.ensure_list,
|
||||
[TEMPERATURE_SENSOR_SCHEMA]),
|
||||
})
|
||||
|
||||
PULSE_COUNTER_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_NUMBER): vol.Range(1, 4),
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
vol.Required(CONF_COUNTED_QUANTITY): cv.string,
|
||||
vol.Optional(
|
||||
CONF_COUNTED_QUANTITY_PER_PULSE, default=1.0): vol.Coerce(float),
|
||||
vol.Optional(CONF_TIME_UNIT, default=TIME_UNIT_SECOND): vol.Any(
|
||||
TIME_UNIT_SECOND,
|
||||
TIME_UNIT_MINUTE,
|
||||
TIME_UNIT_HOUR),
|
||||
})
|
||||
|
||||
PULSE_COUNTERS_SCHEMA = vol.All(cv.ensure_list, [PULSE_COUNTER_SCHEMA])
|
||||
|
||||
CHANNEL_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_NUMBER): vol.Range(1, 48),
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_NET_METERING, default=False): cv.boolean,
|
||||
})
|
||||
|
||||
CHANNELS_SCHEMA = vol.All(cv.ensure_list, [CHANNEL_SCHEMA])
|
||||
|
||||
MONITOR_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_SERIAL_NUMBER): cv.positive_int,
|
||||
vol.Optional(CONF_CHANNELS, default=[]): CHANNELS_SCHEMA,
|
||||
vol.Optional(
|
||||
CONF_TEMPERATURE_SENSORS,
|
||||
default={
|
||||
CONF_TEMPERATURE_UNIT: TEMPERATURE_UNIT_CELSIUS,
|
||||
CONF_SENSORS: [],
|
||||
}): TEMPERATURE_SENSORS_SCHEMA,
|
||||
vol.Optional(CONF_PULSE_COUNTERS, default=[]): PULSE_COUNTERS_SCHEMA,
|
||||
})
|
||||
|
||||
MONITORS_SCHEMA = vol.All(cv.ensure_list, [MONITOR_SCHEMA])
|
||||
|
||||
COMPONENT_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_PORT): cv.port,
|
||||
vol.Required(CONF_MONITORS): MONITORS_SCHEMA,
|
||||
})
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: COMPONENT_SCHEMA,
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
|
||||
|
||||
async def async_setup(hass, config):
|
||||
"""Set up the GreenEye Monitor component."""
|
||||
from greeneye import Monitors
|
||||
|
||||
monitors = Monitors()
|
||||
hass.data[DATA_GREENEYE_MONITOR] = monitors
|
||||
|
||||
server_config = config[DOMAIN]
|
||||
server = await monitors.start_server(server_config[CONF_PORT])
|
||||
|
||||
async def close_server(*args):
|
||||
"""Close the monitoring server."""
|
||||
await server.close()
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, close_server)
|
||||
|
||||
all_sensors = []
|
||||
for monitor_config in server_config[CONF_MONITORS]:
|
||||
monitor_serial_number = {
|
||||
CONF_MONITOR_SERIAL_NUMBER: monitor_config[CONF_SERIAL_NUMBER],
|
||||
}
|
||||
|
||||
channel_configs = monitor_config[CONF_CHANNELS]
|
||||
for channel_config in channel_configs:
|
||||
all_sensors.append({
|
||||
CONF_SENSOR_TYPE: SENSOR_TYPE_CURRENT,
|
||||
**monitor_serial_number,
|
||||
**channel_config,
|
||||
})
|
||||
|
||||
sensor_configs = \
|
||||
monitor_config[CONF_TEMPERATURE_SENSORS]
|
||||
if sensor_configs:
|
||||
temperature_unit = {
|
||||
CONF_TEMPERATURE_UNIT: sensor_configs[CONF_TEMPERATURE_UNIT],
|
||||
}
|
||||
for sensor_config in sensor_configs[CONF_SENSORS]:
|
||||
all_sensors.append({
|
||||
CONF_SENSOR_TYPE: SENSOR_TYPE_TEMPERATURE,
|
||||
**monitor_serial_number,
|
||||
**temperature_unit,
|
||||
**sensor_config,
|
||||
})
|
||||
|
||||
counter_configs = monitor_config[CONF_PULSE_COUNTERS]
|
||||
for counter_config in counter_configs:
|
||||
all_sensors.append({
|
||||
CONF_SENSOR_TYPE: SENSOR_TYPE_PULSE_COUNTER,
|
||||
**monitor_serial_number,
|
||||
**counter_config,
|
||||
})
|
||||
|
||||
if not all_sensors:
|
||||
_LOGGER.error("Configuration must specify at least one "
|
||||
"channel, pulse counter or temperature sensor")
|
||||
return False
|
||||
|
||||
hass.async_create_task(async_load_platform(
|
||||
hass,
|
||||
'sensor',
|
||||
DOMAIN,
|
||||
all_sensors,
|
||||
config))
|
||||
|
||||
return True
|