commit
70ea16bdc0
200 changed files with 7412 additions and 2099 deletions
14
.coveragerc
14
.coveragerc
|
@ -20,6 +20,9 @@ omit =
|
|||
homeassistant/components/android_ip_webcam.py
|
||||
homeassistant/components/*/android_ip_webcam.py
|
||||
|
||||
homeassistant/components/axis.py
|
||||
homeassistant/components/*/axis.py
|
||||
|
||||
homeassistant/components/bbb_gpio.py
|
||||
homeassistant/components/*/bbb_gpio.py
|
||||
|
||||
|
@ -59,6 +62,9 @@ omit =
|
|||
homeassistant/components/isy994.py
|
||||
homeassistant/components/*/isy994.py
|
||||
|
||||
homeassistant/components/kira.py
|
||||
homeassistant/components/*/kira.py
|
||||
|
||||
homeassistant/components/lutron.py
|
||||
homeassistant/components/*/lutron.py
|
||||
|
||||
|
@ -83,12 +89,18 @@ omit =
|
|||
homeassistant/components/qwikswitch.py
|
||||
homeassistant/components/*/qwikswitch.py
|
||||
|
||||
homeassistant/components/raspihats.py
|
||||
homeassistant/components/*/raspihats.py
|
||||
|
||||
homeassistant/components/rfxtrx.py
|
||||
homeassistant/components/*/rfxtrx.py
|
||||
|
||||
homeassistant/components/rpi_gpio.py
|
||||
homeassistant/components/*/rpi_gpio.py
|
||||
|
||||
homeassistant/components/rpi_pfio.py
|
||||
homeassistant/components/*/rpi_pfio.py
|
||||
|
||||
homeassistant/components/scsgate.py
|
||||
homeassistant/components/*/scsgate.py
|
||||
|
||||
|
@ -175,6 +187,7 @@ omit =
|
|||
homeassistant/components/binary_sensor/flic.py
|
||||
homeassistant/components/binary_sensor/hikvision.py
|
||||
homeassistant/components/binary_sensor/iss.py
|
||||
homeassistant/components/binary_sensor/mystrom.py
|
||||
homeassistant/components/binary_sensor/pilight.py
|
||||
homeassistant/components/binary_sensor/ping.py
|
||||
homeassistant/components/binary_sensor/rest.py
|
||||
|
@ -239,6 +252,7 @@ omit =
|
|||
homeassistant/components/ifttt.py
|
||||
homeassistant/components/image_processing/dlib_face_detect.py
|
||||
homeassistant/components/image_processing/dlib_face_identify.py
|
||||
homeassistant/components/image_processing/seven_segments.py
|
||||
homeassistant/components/joaoapps_join.py
|
||||
homeassistant/components/keyboard.py
|
||||
homeassistant/components/keyboard_remote.py
|
||||
|
|
14
.travis.yml
14
.travis.yml
|
@ -1,13 +1,15 @@
|
|||
sudo: false
|
||||
addons:
|
||||
apt:
|
||||
packages:
|
||||
- libudev-dev
|
||||
matrix:
|
||||
fast_finish: true
|
||||
include:
|
||||
- python: "3.4.2"
|
||||
env: TOXENV=py34
|
||||
- python: "3.4.2"
|
||||
env: TOXENV=requirements
|
||||
- python: "3.4.2"
|
||||
env: TOXENV=lint
|
||||
- python: "3.4.2"
|
||||
env: TOXENV=py34
|
||||
# - python: "3.5"
|
||||
# env: TOXENV=typing
|
||||
- python: "3.5"
|
||||
|
@ -16,6 +18,8 @@ matrix:
|
|||
env: TOXENV=py36
|
||||
- python: "3.6-dev"
|
||||
env: TOXENV=py36
|
||||
- python: "3.4.2"
|
||||
env: TOXENV=requirements
|
||||
# allow_failures:
|
||||
# - python: "3.5"
|
||||
# env: TOXENV=typing
|
||||
|
@ -25,5 +29,5 @@ cache:
|
|||
- $HOME/.cache/pip
|
||||
install: pip install -U tox coveralls
|
||||
language: python
|
||||
script: tox
|
||||
script: travis_wait tox
|
||||
after_success: coveralls
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
FROM python:3.5
|
||||
FROM python:3.6
|
||||
MAINTAINER Paulus Schoutsen <Paulus@PaulusSchoutsen.nl>
|
||||
|
||||
# Uncomment any of the following lines to disable the installation.
|
||||
|
|
|
@ -4,6 +4,7 @@ Interfaces with Wink Cameras.
|
|||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/alarm_control_panel.wink/
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import homeassistant.components.alarm_control_panel as alarm
|
||||
|
@ -42,6 +43,11 @@ class WinkCameraDevice(WinkDevice, alarm.AlarmControlPanel):
|
|||
"""Initialize the Wink alarm."""
|
||||
super().__init__(wink, hass)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_added_to_hass(self):
|
||||
"""Callback when entity is added to hass."""
|
||||
self.hass.data[DOMAIN]['entities']['alarm_control_panel'].append(self)
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
"""Return the state of the device."""
|
||||
|
|
|
@ -17,7 +17,6 @@ from homeassistant.core import callback
|
|||
from homeassistant.const import HTTP_BAD_REQUEST
|
||||
from homeassistant.helpers import template, script, config_validation as cv
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@ -36,7 +35,6 @@ CONF_TEXT = 'text'
|
|||
|
||||
CONF_FLASH_BRIEFINGS = 'flash_briefings'
|
||||
CONF_UID = 'uid'
|
||||
CONF_DATE = 'date'
|
||||
CONF_TITLE = 'title'
|
||||
CONF_AUDIO = 'audio'
|
||||
CONF_TEXT = 'text'
|
||||
|
@ -88,7 +86,6 @@ CONFIG_SCHEMA = vol.Schema({
|
|||
CONF_FLASH_BRIEFINGS: {
|
||||
cv.string: vol.All(cv.ensure_list, [{
|
||||
vol.Required(CONF_UID, default=str(uuid.uuid4())): cv.string,
|
||||
vol.Optional(CONF_DATE, default=datetime.utcnow()): cv.string,
|
||||
vol.Required(CONF_TITLE): cv.template,
|
||||
vol.Optional(CONF_AUDIO): cv.template,
|
||||
vol.Required(CONF_TEXT, default=""): cv.template,
|
||||
|
@ -331,10 +328,7 @@ class AlexaFlashBriefingView(HomeAssistantView):
|
|||
else:
|
||||
output[ATTR_REDIRECTION_URL] = item.get(CONF_DISPLAY_URL)
|
||||
|
||||
if isinstance(item[CONF_DATE], str):
|
||||
item[CONF_DATE] = dt_util.parse_datetime(item[CONF_DATE])
|
||||
|
||||
output[ATTR_UPDATE_DATE] = item[CONF_DATE].strftime(DATE_FORMAT)
|
||||
output[ATTR_UPDATE_DATE] = datetime.now().strftime(DATE_FORMAT)
|
||||
|
||||
briefing.append(output)
|
||||
|
||||
|
|
|
@ -16,7 +16,7 @@ from homeassistant.core import CoreState
|
|||
from homeassistant import config as conf_util
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID, CONF_PLATFORM, STATE_ON, SERVICE_TURN_ON, SERVICE_TURN_OFF,
|
||||
SERVICE_TOGGLE, SERVICE_RELOAD, EVENT_HOMEASSISTANT_START)
|
||||
SERVICE_TOGGLE, SERVICE_RELOAD, EVENT_HOMEASSISTANT_START, CONF_ID)
|
||||
from homeassistant.components import logbook
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import extract_domain_configs, script, condition
|
||||
|
@ -26,6 +26,7 @@ from homeassistant.helpers.restore_state import async_get_last_state
|
|||
from homeassistant.loader import get_platform
|
||||
from homeassistant.util.dt import utcnow
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.components.frontend import register_built_in_panel
|
||||
|
||||
DOMAIN = 'automation'
|
||||
ENTITY_ID_FORMAT = DOMAIN + '.{}'
|
||||
|
@ -81,6 +82,8 @@ _TRIGGER_SCHEMA = vol.All(
|
|||
_CONDITION_SCHEMA = vol.All(cv.ensure_list, [cv.CONDITION_SCHEMA])
|
||||
|
||||
PLATFORM_SCHEMA = vol.Schema({
|
||||
# str on purpose
|
||||
CONF_ID: str,
|
||||
CONF_ALIAS: cv.string,
|
||||
vol.Optional(CONF_INITIAL_STATE): cv.boolean,
|
||||
vol.Optional(CONF_HIDE_ENTITY, default=DEFAULT_HIDE_ENTITY): cv.boolean,
|
||||
|
@ -139,16 +142,21 @@ def reload(hass):
|
|||
hass.services.call(DOMAIN, SERVICE_RELOAD)
|
||||
|
||||
|
||||
def async_reload(hass):
|
||||
"""Reload the automation from config.
|
||||
|
||||
Returns a coroutine object.
|
||||
"""
|
||||
return hass.services.async_call(DOMAIN, SERVICE_RELOAD)
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_setup(hass, config):
|
||||
"""Set up the automation."""
|
||||
component = EntityComponent(_LOGGER, DOMAIN, hass,
|
||||
group_name=GROUP_NAME_ALL_AUTOMATIONS)
|
||||
|
||||
success = yield from _async_process_config(hass, config, component)
|
||||
|
||||
if not success:
|
||||
return False
|
||||
yield from _async_process_config(hass, config, component)
|
||||
|
||||
descriptions = yield from hass.loop.run_in_executor(
|
||||
None, conf_util.load_yaml_config_file, os.path.join(
|
||||
|
@ -215,15 +223,20 @@ def async_setup(hass, config):
|
|||
DOMAIN, service, turn_onoff_service_handler,
|
||||
descriptions.get(service), schema=SERVICE_SCHEMA)
|
||||
|
||||
if 'frontend' in hass.config.components:
|
||||
register_built_in_panel(hass, 'automation', 'Automations',
|
||||
'mdi:playlist-play')
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class AutomationEntity(ToggleEntity):
|
||||
"""Entity to show status of entity."""
|
||||
|
||||
def __init__(self, name, async_attach_triggers, cond_func, async_action,
|
||||
hidden, initial_state):
|
||||
def __init__(self, automation_id, name, async_attach_triggers, cond_func,
|
||||
async_action, hidden, initial_state):
|
||||
"""Initialize an automation entity."""
|
||||
self._id = automation_id
|
||||
self._name = name
|
||||
self._async_attach_triggers = async_attach_triggers
|
||||
self._async_detach_triggers = None
|
||||
|
@ -346,6 +359,16 @@ class AutomationEntity(ToggleEntity):
|
|||
self.async_trigger)
|
||||
yield from self.async_update_ha_state()
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return automation attributes."""
|
||||
if self._id is None:
|
||||
return None
|
||||
|
||||
return {
|
||||
CONF_ID: self._id
|
||||
}
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def _async_process_config(hass, config, component):
|
||||
|
@ -359,6 +382,7 @@ def _async_process_config(hass, config, component):
|
|||
conf = config[config_key]
|
||||
|
||||
for list_no, config_block in enumerate(conf):
|
||||
automation_id = config_block.get(CONF_ID)
|
||||
name = config_block.get(CONF_ALIAS) or "{} {}".format(config_key,
|
||||
list_no)
|
||||
|
||||
|
@ -383,16 +407,14 @@ def _async_process_config(hass, config, component):
|
|||
config_block.get(CONF_TRIGGER, []), name
|
||||
)
|
||||
entity = AutomationEntity(
|
||||
name, async_attach_triggers, cond_func, action, hidden,
|
||||
initial_state)
|
||||
automation_id, name, async_attach_triggers, cond_func, action,
|
||||
hidden, initial_state)
|
||||
|
||||
entities.append(entity)
|
||||
|
||||
if entities:
|
||||
yield from component.async_add_entities(entities)
|
||||
|
||||
return len(entities) > 0
|
||||
|
||||
|
||||
def _async_get_action(hass, config, name):
|
||||
"""Return an action based on a configuration."""
|
||||
|
|
|
@ -9,8 +9,8 @@ import logging
|
|||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import callback, CoreState
|
||||
from homeassistant.const import CONF_PLATFORM, EVENT_HOMEASSISTANT_START
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.const import CONF_PLATFORM
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
CONF_EVENT_TYPE = 'event_type'
|
||||
|
@ -31,19 +31,6 @@ def async_trigger(hass, config, action):
|
|||
event_type = config.get(CONF_EVENT_TYPE)
|
||||
event_data = config.get(CONF_EVENT_DATA)
|
||||
|
||||
if (event_type == EVENT_HOMEASSISTANT_START and
|
||||
hass.state == CoreState.starting):
|
||||
_LOGGER.warning('Deprecation: Automations should not listen to event '
|
||||
"'homeassistant_start'. Use platform 'homeassistant' "
|
||||
'instead. Feature will be removed in 0.45')
|
||||
hass.async_run_job(action, {
|
||||
'trigger': {
|
||||
'platform': 'event',
|
||||
'event': None,
|
||||
},
|
||||
})
|
||||
return lambda: None
|
||||
|
||||
@callback
|
||||
def handle_event(event):
|
||||
"""Listen for events and calls the action when data matches."""
|
||||
|
|
|
@ -79,6 +79,10 @@ def async_trigger(hass, config, action):
|
|||
call_action()
|
||||
return
|
||||
|
||||
# If only state attributes changed, ignore this event
|
||||
if from_s.last_changed == to_s.last_changed:
|
||||
return
|
||||
|
||||
@callback
|
||||
def state_for_listener(now):
|
||||
"""Fire on state changes after a delay and calls action."""
|
||||
|
|
|
@ -16,8 +16,6 @@ from homeassistant.const import (
|
|||
from homeassistant.helpers.event import async_track_sunrise, async_track_sunset
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
DEPENDENCIES = ['sun']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
TRIGGER_SCHEMA = vol.Schema({
|
||||
|
|
314
homeassistant/components/axis.py
Normal file
314
homeassistant/components/axis.py
Normal file
|
@ -0,0 +1,314 @@
|
|||
"""
|
||||
Support for Axis devices.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
https://home-assistant.io/components/axis/
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import (ATTR_LOCATION, ATTR_TRIPPED,
|
||||
CONF_HOST, CONF_INCLUDE, CONF_NAME,
|
||||
CONF_PASSWORD, CONF_TRIGGER_TIME,
|
||||
CONF_USERNAME, EVENT_HOMEASSISTANT_STOP)
|
||||
from homeassistant.components.discovery import SERVICE_AXIS
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers import discovery
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.loader import get_component
|
||||
|
||||
|
||||
REQUIREMENTS = ['axis==7']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DOMAIN = 'axis'
|
||||
CONFIG_FILE = 'axis.conf'
|
||||
|
||||
AXIS_DEVICES = {}
|
||||
|
||||
EVENT_TYPES = ['motion', 'vmd3', 'pir', 'sound',
|
||||
'daynight', 'tampering', 'input']
|
||||
|
||||
PLATFORMS = ['camera']
|
||||
|
||||
AXIS_INCLUDE = EVENT_TYPES + PLATFORMS
|
||||
|
||||
AXIS_DEFAULT_HOST = '192.168.0.90'
|
||||
AXIS_DEFAULT_USERNAME = 'root'
|
||||
AXIS_DEFAULT_PASSWORD = 'pass'
|
||||
|
||||
DEVICE_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_INCLUDE):
|
||||
vol.All(cv.ensure_list, [vol.In(AXIS_INCLUDE)]),
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_HOST, default=AXIS_DEFAULT_HOST): cv.string,
|
||||
vol.Optional(CONF_USERNAME, default=AXIS_DEFAULT_USERNAME): cv.string,
|
||||
vol.Optional(CONF_PASSWORD, default=AXIS_DEFAULT_PASSWORD): cv.string,
|
||||
vol.Optional(CONF_TRIGGER_TIME, default=0): cv.positive_int,
|
||||
vol.Optional(ATTR_LOCATION, default=''): cv.string,
|
||||
})
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: vol.Schema({
|
||||
cv.slug: DEVICE_SCHEMA,
|
||||
}),
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
|
||||
|
||||
def request_configuration(hass, name, host, serialnumber):
|
||||
"""Request configuration steps from the user."""
|
||||
configurator = get_component('configurator')
|
||||
|
||||
def configuration_callback(callback_data):
|
||||
"""Called when config is submitted."""
|
||||
if CONF_INCLUDE not in callback_data:
|
||||
configurator.notify_errors(request_id,
|
||||
"Functionality mandatory.")
|
||||
return False
|
||||
callback_data[CONF_INCLUDE] = callback_data[CONF_INCLUDE].split()
|
||||
callback_data[CONF_HOST] = host
|
||||
if CONF_NAME not in callback_data:
|
||||
callback_data[CONF_NAME] = name
|
||||
try:
|
||||
config = DEVICE_SCHEMA(callback_data)
|
||||
except vol.Invalid:
|
||||
configurator.notify_errors(request_id,
|
||||
"Bad input, please check spelling.")
|
||||
return False
|
||||
|
||||
if setup_device(hass, config):
|
||||
config_file = _read_config(hass)
|
||||
config_file[serialnumber] = dict(config)
|
||||
del config_file[serialnumber]['hass']
|
||||
_write_config(hass, config_file)
|
||||
configurator.request_done(request_id)
|
||||
else:
|
||||
configurator.notify_errors(request_id,
|
||||
"Failed to register, please try again.")
|
||||
return False
|
||||
|
||||
title = '{} ({})'.format(name, host)
|
||||
request_id = configurator.request_config(
|
||||
hass, title, configuration_callback,
|
||||
description='Functionality: ' + str(AXIS_INCLUDE),
|
||||
entity_picture="/static/images/logo_axis.png",
|
||||
link_name='Axis platform documentation',
|
||||
link_url='https://home-assistant.io/components/axis/',
|
||||
submit_caption="Confirm",
|
||||
fields=[
|
||||
{'id': CONF_NAME,
|
||||
'name': "Device name",
|
||||
'type': 'text'},
|
||||
{'id': CONF_USERNAME,
|
||||
'name': "User name",
|
||||
'type': 'text'},
|
||||
{'id': CONF_PASSWORD,
|
||||
'name': 'Password',
|
||||
'type': 'password'},
|
||||
{'id': CONF_INCLUDE,
|
||||
'name': "Device functionality (space separated list)",
|
||||
'type': 'text'},
|
||||
{'id': ATTR_LOCATION,
|
||||
'name': "Physical location of device (optional)",
|
||||
'type': 'text'},
|
||||
{'id': CONF_TRIGGER_TIME,
|
||||
'name': "Sensor update interval (optional)",
|
||||
'type': 'number'},
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def setup(hass, base_config):
|
||||
"""Common setup for Axis devices."""
|
||||
def _shutdown(call): # pylint: disable=unused-argument
|
||||
"""Stop the metadatastream on shutdown."""
|
||||
for serialnumber, device in AXIS_DEVICES.items():
|
||||
_LOGGER.info("Stopping metadatastream for %s.", serialnumber)
|
||||
device.stop_metadatastream()
|
||||
|
||||
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)
|
||||
|
||||
def axis_device_discovered(service, discovery_info):
|
||||
"""Called when axis devices has been found."""
|
||||
host = discovery_info['host']
|
||||
name = discovery_info['hostname']
|
||||
serialnumber = discovery_info['properties']['macaddress']
|
||||
|
||||
if serialnumber not in AXIS_DEVICES:
|
||||
config_file = _read_config(hass)
|
||||
if serialnumber in config_file:
|
||||
try:
|
||||
config = DEVICE_SCHEMA(config_file[serialnumber])
|
||||
except vol.Invalid as err:
|
||||
_LOGGER.error("Bad data from %s. %s", CONFIG_FILE, err)
|
||||
return False
|
||||
if not setup_device(hass, config):
|
||||
_LOGGER.error("Couldn\'t set up %s", config['name'])
|
||||
else:
|
||||
request_configuration(hass, name, host, serialnumber)
|
||||
|
||||
discovery.listen(hass, SERVICE_AXIS, axis_device_discovered)
|
||||
|
||||
if DOMAIN in base_config:
|
||||
for device in base_config[DOMAIN]:
|
||||
config = base_config[DOMAIN][device]
|
||||
if CONF_NAME not in config:
|
||||
config[CONF_NAME] = device
|
||||
if not setup_device(hass, config):
|
||||
_LOGGER.error("Couldn\'t set up %s", config['name'])
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def setup_device(hass, config):
|
||||
"""Set up device."""
|
||||
from axis import AxisDevice
|
||||
|
||||
config['hass'] = hass
|
||||
device = AxisDevice(config) # Initialize device
|
||||
enable_metadatastream = False
|
||||
|
||||
if device.serial_number is None:
|
||||
# If there is no serial number a connection could not be made
|
||||
_LOGGER.error("Couldn\'t connect to %s", config[CONF_HOST])
|
||||
return False
|
||||
|
||||
for component in config[CONF_INCLUDE]:
|
||||
if component in EVENT_TYPES:
|
||||
# Sensors are created by device calling event_initialized
|
||||
# when receiving initialize messages on metadatastream
|
||||
device.add_event_topic(convert(component, 'type', 'subscribe'))
|
||||
if not enable_metadatastream:
|
||||
enable_metadatastream = True
|
||||
else:
|
||||
discovery.load_platform(hass, component, DOMAIN, config)
|
||||
|
||||
if enable_metadatastream:
|
||||
device.initialize_new_event = event_initialized
|
||||
device.initiate_metadatastream()
|
||||
AXIS_DEVICES[device.serial_number] = device
|
||||
return True
|
||||
|
||||
|
||||
def _read_config(hass):
|
||||
"""Read Axis config."""
|
||||
path = hass.config.path(CONFIG_FILE)
|
||||
|
||||
if not os.path.isfile(path):
|
||||
return {}
|
||||
|
||||
with open(path) as f_handle:
|
||||
# Guard against empty file
|
||||
return json.loads(f_handle.read() or '{}')
|
||||
|
||||
|
||||
def _write_config(hass, config):
|
||||
"""Write Axis config."""
|
||||
data = json.dumps(config)
|
||||
with open(hass.config.path(CONFIG_FILE), 'w', encoding='utf-8') as outfile:
|
||||
outfile.write(data)
|
||||
|
||||
|
||||
def event_initialized(event):
|
||||
"""Register event initialized on metadatastream here."""
|
||||
hass = event.device_config('hass')
|
||||
discovery.load_platform(hass,
|
||||
convert(event.topic, 'topic', 'platform'),
|
||||
DOMAIN, {'axis_event': event})
|
||||
|
||||
|
||||
class AxisDeviceEvent(Entity):
|
||||
"""Representation of a Axis device event."""
|
||||
|
||||
def __init__(self, axis_event):
|
||||
"""Initialize the event."""
|
||||
self.axis_event = axis_event
|
||||
self._event_class = convert(self.axis_event.topic, 'topic', 'class')
|
||||
self._name = '{}_{}_{}'.format(self.axis_event.device_name,
|
||||
convert(self.axis_event.topic,
|
||||
'topic', 'type'),
|
||||
self.axis_event.id)
|
||||
self.axis_event.callback = self._update_callback
|
||||
|
||||
def _update_callback(self):
|
||||
"""Update the sensor's state, if needed."""
|
||||
self.update()
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the event."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def device_class(self):
|
||||
"""Return the class of the event."""
|
||||
return self._event_class
|
||||
|
||||
@property
|
||||
def should_poll(self):
|
||||
"""No polling needed."""
|
||||
return False
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes of the event."""
|
||||
attr = {}
|
||||
|
||||
tripped = self.axis_event.is_tripped
|
||||
attr[ATTR_TRIPPED] = 'True' if tripped else 'False'
|
||||
|
||||
location = self.axis_event.device_config(ATTR_LOCATION)
|
||||
if location:
|
||||
attr[ATTR_LOCATION] = location
|
||||
|
||||
return attr
|
||||
|
||||
|
||||
def convert(item, from_key, to_key):
|
||||
"""Translate between Axis and HASS syntax."""
|
||||
for entry in REMAP:
|
||||
if entry[from_key] == item:
|
||||
return entry[to_key]
|
||||
|
||||
|
||||
REMAP = [{'type': 'motion',
|
||||
'class': 'motion',
|
||||
'topic': 'tns1:VideoAnalytics/tnsaxis:MotionDetection',
|
||||
'subscribe': 'onvif:VideoAnalytics/axis:MotionDetection',
|
||||
'platform': 'binary_sensor'},
|
||||
{'type': 'vmd3',
|
||||
'class': 'motion',
|
||||
'topic': 'tns1:RuleEngine/tnsaxis:VMD3/vmd3_video_1',
|
||||
'subscribe': 'onvif:RuleEngine/axis:VMD3/vmd3_video_1',
|
||||
'platform': 'binary_sensor'},
|
||||
{'type': 'pir',
|
||||
'class': 'motion',
|
||||
'topic': 'tns1:Device/tnsaxis:Sensor/PIR',
|
||||
'subscribe': 'onvif:Device/axis:Sensor/axis:PIR',
|
||||
'platform': 'binary_sensor'},
|
||||
{'type': 'sound',
|
||||
'class': 'sound',
|
||||
'topic': 'tns1:AudioSource/tnsaxis:TriggerLevel',
|
||||
'subscribe': 'onvif:AudioSource/axis:TriggerLevel',
|
||||
'platform': 'binary_sensor'},
|
||||
{'type': 'daynight',
|
||||
'class': 'light',
|
||||
'topic': 'tns1:VideoSource/tnsaxis:DayNightVision',
|
||||
'subscribe': 'onvif:VideoSource/axis:DayNightVision',
|
||||
'platform': 'binary_sensor'},
|
||||
{'type': 'tampering',
|
||||
'class': 'safety',
|
||||
'topic': 'tns1:VideoSource/tnsaxis:Tampering',
|
||||
'subscribe': 'onvif:VideoSource/axis:Tampering',
|
||||
'platform': 'binary_sensor'},
|
||||
{'type': 'input',
|
||||
'class': 'input',
|
||||
'topic': 'tns1:Device/tnsaxis:IO/Port',
|
||||
'subscribe': 'onvif:Device/axis:IO/Port',
|
||||
'platform': 'sensor'}, ]
|
68
homeassistant/components/binary_sensor/axis.py
Normal file
68
homeassistant/components/binary_sensor/axis.py
Normal file
|
@ -0,0 +1,68 @@
|
|||
"""
|
||||
Support for Axis binary sensors.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/binary_sensor.axis/
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
|
||||
from homeassistant.components.binary_sensor import (BinarySensorDevice)
|
||||
from homeassistant.components.axis import (AxisDeviceEvent)
|
||||
from homeassistant.const import (CONF_TRIGGER_TIME)
|
||||
from homeassistant.helpers.event import track_point_in_utc_time
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
DEPENDENCIES = ['axis']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Setup Axis device event."""
|
||||
add_devices([AxisBinarySensor(discovery_info['axis_event'], hass)], True)
|
||||
|
||||
|
||||
class AxisBinarySensor(AxisDeviceEvent, BinarySensorDevice):
|
||||
"""Representation of a binary Axis event."""
|
||||
|
||||
def __init__(self, axis_event, hass):
|
||||
"""Initialize the binary sensor."""
|
||||
self.hass = hass
|
||||
self._state = False
|
||||
self._delay = axis_event.device_config(CONF_TRIGGER_TIME)
|
||||
self._timer = None
|
||||
AxisDeviceEvent.__init__(self, axis_event)
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return true if event is active."""
|
||||
return self._state
|
||||
|
||||
def update(self):
|
||||
"""Get the latest data and update the state."""
|
||||
self._state = self.axis_event.is_tripped
|
||||
|
||||
def _update_callback(self):
|
||||
"""Update the sensor's state, if needed."""
|
||||
self.update()
|
||||
|
||||
if self._timer is not None:
|
||||
self._timer()
|
||||
self._timer = None
|
||||
|
||||
if self._delay > 0 and not self.is_on:
|
||||
# Set timer to wait until updating the state
|
||||
def _delay_update(now):
|
||||
"""Timer callback for sensor update."""
|
||||
_LOGGER.debug("%s Called delayed (%s sec) update.",
|
||||
self._name, self._delay)
|
||||
self.schedule_update_ha_state()
|
||||
self._timer = None
|
||||
|
||||
self._timer = track_point_in_utc_time(
|
||||
self.hass, _delay_update,
|
||||
utcnow() + timedelta(seconds=self._delay))
|
||||
else:
|
||||
self.schedule_update_ha_state()
|
95
homeassistant/components/binary_sensor/mystrom.py
Normal file
95
homeassistant/components/binary_sensor/mystrom.py
Normal file
|
@ -0,0 +1,95 @@
|
|||
"""
|
||||
Support for the myStrom buttons.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/binary_sensor.mystrom/
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from homeassistant.components.binary_sensor import (BinarySensorDevice, DOMAIN)
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
from homeassistant.const import HTTP_UNPROCESSABLE_ENTITY
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEPENDENCIES = ['http']
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
"""Set up myStrom Binary Sensor."""
|
||||
hass.http.register_view(MyStromView(async_add_devices))
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class MyStromView(HomeAssistantView):
|
||||
"""View to handle requests from myStrom buttons."""
|
||||
|
||||
url = '/api/mystrom'
|
||||
name = 'api:mystrom'
|
||||
|
||||
def __init__(self, add_devices):
|
||||
"""Initialize the myStrom URL endpoint."""
|
||||
self.buttons = {}
|
||||
self.add_devices = add_devices
|
||||
|
||||
@asyncio.coroutine
|
||||
def get(self, request):
|
||||
"""The GET request received from a myStrom button."""
|
||||
res = yield from self._handle(request.app['hass'], request.GET)
|
||||
return res
|
||||
|
||||
@asyncio.coroutine
|
||||
def _handle(self, hass, data):
|
||||
"""Handle requests to the myStrom endpoint."""
|
||||
button_action = list(data.keys())[0]
|
||||
button_id = data[button_action]
|
||||
entity_id = '{}.{}_{}'.format(DOMAIN, button_id, button_action)
|
||||
|
||||
if button_action not in ['single', 'double', 'long', 'touch']:
|
||||
_LOGGER.error(
|
||||
"Received unidentified message from myStrom button: %s", data)
|
||||
return ("Received unidentified message: {}".format(data),
|
||||
HTTP_UNPROCESSABLE_ENTITY)
|
||||
|
||||
if entity_id not in self.buttons:
|
||||
_LOGGER.info("New myStrom button/action detected: %s/%s",
|
||||
button_id, button_action)
|
||||
self.buttons[entity_id] = MyStromBinarySensor(
|
||||
'{}_{}'.format(button_id, button_action))
|
||||
hass.async_add_job(self.add_devices, [self.buttons[entity_id]])
|
||||
else:
|
||||
new_state = True if self.buttons[entity_id].state == 'off' \
|
||||
else False
|
||||
self.buttons[entity_id].async_on_update(new_state)
|
||||
|
||||
|
||||
class MyStromBinarySensor(BinarySensorDevice):
|
||||
"""Representation of a myStrom button."""
|
||||
|
||||
def __init__(self, button_id):
|
||||
"""Initialize the myStrom Binary sensor."""
|
||||
self._button_id = button_id
|
||||
self._state = None
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the sensor."""
|
||||
return self._button_id
|
||||
|
||||
@property
|
||||
def should_poll(self):
|
||||
"""No polling needed."""
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return true if the binary sensor is on."""
|
||||
return self._state
|
||||
|
||||
def async_on_update(self, value):
|
||||
"""Receive an update."""
|
||||
self._state = value
|
||||
self.hass.async_add_job(self.async_update_ha_state())
|
131
homeassistant/components/binary_sensor/raspihats.py
Normal file
131
homeassistant/components/binary_sensor/raspihats.py
Normal file
|
@ -0,0 +1,131 @@
|
|||
"""
|
||||
Configure a binary_sensor using a digital input from a raspihats board.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/binary_sensor.raspihats/
|
||||
"""
|
||||
import logging
|
||||
import voluptuous as vol
|
||||
from homeassistant.const import (
|
||||
CONF_NAME, CONF_DEVICE_CLASS, DEVICE_DEFAULT_NAME
|
||||
)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.components.binary_sensor import (
|
||||
PLATFORM_SCHEMA, BinarySensorDevice
|
||||
)
|
||||
from homeassistant.components.raspihats import (
|
||||
CONF_I2C_HATS, CONF_BOARD, CONF_ADDRESS, CONF_CHANNELS, CONF_INDEX,
|
||||
CONF_INVERT_LOGIC, I2C_HAT_NAMES, I2C_HATS_MANAGER, I2CHatsException
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEPENDENCIES = ['raspihats']
|
||||
|
||||
DEFAULT_INVERT_LOGIC = False
|
||||
DEFAULT_DEVICE_CLASS = None
|
||||
|
||||
_CHANNELS_SCHEMA = vol.Schema([{
|
||||
vol.Required(CONF_INDEX): cv.positive_int,
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean,
|
||||
vol.Optional(CONF_DEVICE_CLASS, default=DEFAULT_DEVICE_CLASS): cv.string,
|
||||
}])
|
||||
|
||||
_I2C_HATS_SCHEMA = vol.Schema([{
|
||||
vol.Required(CONF_BOARD): vol.In(I2C_HAT_NAMES),
|
||||
vol.Required(CONF_ADDRESS): vol.Coerce(int),
|
||||
vol.Required(CONF_CHANNELS): _CHANNELS_SCHEMA
|
||||
}])
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Optional(CONF_I2C_HATS): _I2C_HATS_SCHEMA,
|
||||
})
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Setup the raspihats binary_sensor devices."""
|
||||
I2CHatBinarySensor.I2C_HATS_MANAGER = hass.data[I2C_HATS_MANAGER]
|
||||
binary_sensors = []
|
||||
i2c_hat_configs = config.get(CONF_I2C_HATS)
|
||||
for i2c_hat_config in i2c_hat_configs:
|
||||
address = i2c_hat_config[CONF_ADDRESS]
|
||||
board = i2c_hat_config[CONF_BOARD]
|
||||
try:
|
||||
I2CHatBinarySensor.I2C_HATS_MANAGER.register_board(board, address)
|
||||
for channel_config in i2c_hat_config[CONF_CHANNELS]:
|
||||
binary_sensors.append(
|
||||
I2CHatBinarySensor(
|
||||
address,
|
||||
channel_config[CONF_INDEX],
|
||||
channel_config[CONF_NAME],
|
||||
channel_config[CONF_INVERT_LOGIC],
|
||||
channel_config[CONF_DEVICE_CLASS]
|
||||
)
|
||||
)
|
||||
except I2CHatsException as ex:
|
||||
_LOGGER.error(
|
||||
"Failed to register " + board + "I2CHat@" + hex(address) + " "
|
||||
+ str(ex)
|
||||
)
|
||||
add_devices(binary_sensors)
|
||||
|
||||
|
||||
class I2CHatBinarySensor(BinarySensorDevice):
    """Represents a binary sensor that uses a I2C-HAT digital input."""

    # Shared I2C-HAT manager, injected by setup_platform.
    I2C_HATS_MANAGER = None

    def __init__(self, address, channel, name, invert_logic, device_class):
        """Initialize sensor."""
        self._address = address
        self._channel = channel
        self._name = name or DEVICE_DEFAULT_NAME
        self._invert_logic = invert_logic
        self._device_class = device_class

        manager = self.I2C_HATS_MANAGER
        self._state = manager.read_di(self._address, self._channel)

        def _board_online():
            """Push the current state when the board comes back online."""
            self.schedule_update_ha_state()

        def _input_edge(new_state):
            """Record a digital-input edge and push the new state."""
            self._state = new_state
            self.schedule_update_ha_state()

        manager.register_online_callback(
            self._address, self._channel, _board_online)
        manager.register_di_callback(
            self._address, self._channel, _input_edge)

    @property
    def device_class(self):
        """Return the class of this sensor."""
        return self._device_class

    @property
    def name(self):
        """Return the name of this sensor."""
        return self._name

    @property
    def should_poll(self):
        """Polling not needed for this sensor."""
        return False

    @property
    def is_on(self):
        """Return the state of this sensor."""
        return self._state != self._invert_logic
|
93
homeassistant/components/binary_sensor/rpi_pfio.py
Normal file
93
homeassistant/components/binary_sensor/rpi_pfio.py
Normal file
|
@ -0,0 +1,93 @@
|
|||
"""
|
||||
Support for binary sensor using the PiFace Digital I/O module on a RPi.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/binary_sensor.rpi_pfio/
|
||||
"""
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.components.rpi_pfio as rpi_pfio
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDevice, PLATFORM_SCHEMA)
|
||||
from homeassistant.const import DEVICE_DEFAULT_NAME
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_NAME = 'name'
ATTR_INVERT_LOGIC = 'invert_logic'
ATTR_SETTLE_TIME = 'settle_time'
CONF_PORTS = 'ports'

DEFAULT_INVERT_LOGIC = False
# Debounce interval in milliseconds (setup_platform divides by 1000).
DEFAULT_SETTLE_TIME = 20

DEPENDENCIES = ['rpi_pfio']

# Per-port options: optional friendly name, debounce time and inversion.
PORT_SCHEMA = vol.Schema({
    vol.Optional(ATTR_NAME, default=None): cv.string,
    vol.Optional(ATTR_SETTLE_TIME, default=DEFAULT_SETTLE_TIME):
        cv.positive_int,
    vol.Optional(ATTR_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean
})

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_PORTS, default={}): vol.Schema({
        cv.positive_int: PORT_SCHEMA
    })
})
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the PiFace Digital Input devices.

    Creates one RPiPFIOBinarySensor per configured port, then activates
    the shared PFIO interrupt listener.
    """
    binary_sensors = []
    # Use the declared CONF_PORTS constant instead of a bare 'ports'
    # literal so the lookup stays in sync with PLATFORM_SCHEMA.
    ports = config.get(CONF_PORTS)
    for port, port_entity in ports.items():
        name = port_entity[ATTR_NAME]
        # Schema value is milliseconds; edge_detect expects seconds.
        settle_time = port_entity[ATTR_SETTLE_TIME] / 1000
        invert_logic = port_entity[ATTR_INVERT_LOGIC]

        binary_sensors.append(RPiPFIOBinarySensor(
            hass, port, name, settle_time, invert_logic))
    add_devices(binary_sensors, True)

    rpi_pfio.activate_listener(hass)
|
||||
|
||||
|
||||
class RPiPFIOBinarySensor(BinarySensorDevice):
    """Represent a binary sensor backed by a PiFace Digital Input."""

    def __init__(self, hass, port, name, settle_time, invert_logic):
        """Initialize the RPi binary sensor."""
        self._port = port
        self._name = name or DEVICE_DEFAULT_NAME
        self._invert_logic = invert_logic
        self._state = None

        def _edge_handler(port):
            """Refresh the cached input state on an edge event."""
            self._state = rpi_pfio.read_input(self._port)
            self.schedule_update_ha_state()

        rpi_pfio.edge_detect(hass, self._port, _edge_handler, settle_time)

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def is_on(self):
        """Return the state of the entity."""
        return self._state != self._invert_logic

    def update(self):
        """Update the PFIO state."""
        self._state = rpi_pfio.read_input(self._port)
|
|
@ -4,6 +4,7 @@ Support for Wink binary sensors.
|
|||
For more details about this platform, please refer to the documentation at
|
||||
at https://home-assistant.io/components/binary_sensor.wink/
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from homeassistant.components.binary_sensor import BinarySensorDevice
|
||||
|
@ -101,6 +102,11 @@ class WinkBinarySensorDevice(WinkDevice, BinarySensorDevice, Entity):
|
|||
else:
|
||||
self.capability = None
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_added_to_hass(self):
|
||||
"""Callback when entity is added to hass."""
|
||||
self.hass.data[DOMAIN]['entities']['binary_sensor'].append(self)
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return true if the binary sensor is on."""
|
||||
|
|
|
@ -13,7 +13,7 @@ from homeassistant.const import (
|
|||
CONF_USERNAME, CONF_PASSWORD, ATTR_FRIENDLY_NAME, ATTR_ARMED)
|
||||
from homeassistant.helpers import discovery
|
||||
|
||||
REQUIREMENTS = ['blinkpy==0.5.2']
|
||||
REQUIREMENTS = ['blinkpy==0.6.0']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -269,7 +269,7 @@ class CameraImageView(CameraView):
|
|||
image = yield from camera.async_camera_image()
|
||||
|
||||
if image:
|
||||
return web.Response(body=image)
|
||||
return web.Response(body=image, content_type='image/jpeg')
|
||||
|
||||
return web.Response(status=500)
|
||||
|
||||
|
|
|
@ -12,18 +12,22 @@ import voluptuous as vol
|
|||
|
||||
import homeassistant.loader as loader
|
||||
from homeassistant.components.camera import (Camera, PLATFORM_SCHEMA)
|
||||
from homeassistant.components.ffmpeg import DATA_FFMPEG
|
||||
from homeassistant.const import (
|
||||
CONF_HOST, CONF_NAME, CONF_USERNAME, CONF_PASSWORD, CONF_PORT)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import (
|
||||
async_get_clientsession, async_aiohttp_proxy_web)
|
||||
async_get_clientsession, async_aiohttp_proxy_web,
|
||||
async_aiohttp_proxy_stream)
|
||||
|
||||
REQUIREMENTS = ['amcrest==1.1.9']
|
||||
REQUIREMENTS = ['amcrest==1.2.0']
|
||||
DEPENDENCIES = ['ffmpeg']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_RESOLUTION = 'resolution'
|
||||
CONF_STREAM_SOURCE = 'stream_source'
|
||||
CONF_FFMPEG_ARGUMENTS = 'ffmpeg_arguments'
|
||||
|
||||
DEFAULT_NAME = 'Amcrest Camera'
|
||||
DEFAULT_PORT = 80
|
||||
|
@ -40,7 +44,8 @@ RESOLUTION_LIST = {
|
|||
|
||||
STREAM_SOURCE_LIST = {
|
||||
'mjpeg': 0,
|
||||
'snapshot': 1
|
||||
'snapshot': 1,
|
||||
'rtsp': 2,
|
||||
}
|
||||
|
||||
CONTENT_TYPE_HEADER = 'Content-Type'
|
||||
|
@ -56,6 +61,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
|||
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
vol.Optional(CONF_STREAM_SOURCE, default=DEFAULT_STREAM_SOURCE):
|
||||
vol.All(vol.In(STREAM_SOURCE_LIST)),
|
||||
vol.Optional(CONF_FFMPEG_ARGUMENTS): cv.string,
|
||||
})
|
||||
|
||||
|
||||
|
@ -92,8 +98,9 @@ class AmcrestCam(Camera):
|
|||
super(AmcrestCam, self).__init__()
|
||||
self._camera = camera
|
||||
self._base_url = self._camera.get_base_url()
|
||||
self._hass = hass
|
||||
self._name = device_info.get(CONF_NAME)
|
||||
self._ffmpeg = hass.data[DATA_FFMPEG]
|
||||
self._ffmpeg_arguments = device_info.get(CONF_FFMPEG_ARGUMENTS)
|
||||
self._resolution = RESOLUTION_LIST[device_info.get(CONF_RESOLUTION)]
|
||||
self._stream_source = STREAM_SOURCE_LIST[
|
||||
device_info.get(CONF_STREAM_SOURCE)
|
||||
|
@ -117,15 +124,28 @@ class AmcrestCam(Camera):
|
|||
yield from super().handle_async_mjpeg_stream(request)
|
||||
return
|
||||
|
||||
# Otherwise, stream an MJPEG image stream directly from the camera
|
||||
websession = async_get_clientsession(self.hass)
|
||||
streaming_url = '{0}mjpg/video.cgi?channel=0&subtype={1}'.format(
|
||||
self._base_url, self._resolution)
|
||||
elif self._stream_source == STREAM_SOURCE_LIST['mjpeg']:
|
||||
# stream an MJPEG image stream directly from the camera
|
||||
websession = async_get_clientsession(self.hass)
|
||||
streaming_url = self._camera.mjpeg_url(typeno=self._resolution)
|
||||
stream_coro = websession.get(
|
||||
streaming_url, auth=self._token, timeout=TIMEOUT)
|
||||
|
||||
stream_coro = websession.get(
|
||||
streaming_url, auth=self._token, timeout=TIMEOUT)
|
||||
yield from async_aiohttp_proxy_web(self.hass, request, stream_coro)
|
||||
|
||||
yield from async_aiohttp_proxy_web(self.hass, request, stream_coro)
|
||||
else:
|
||||
# streaming via ffmpeg
|
||||
from haffmpeg import CameraMjpeg
|
||||
|
||||
streaming_url = self._camera.rtsp_url(typeno=self._resolution)
|
||||
stream = CameraMjpeg(self._ffmpeg.binary, loop=self.hass.loop)
|
||||
yield from stream.open_camera(
|
||||
streaming_url, extra_cmd=self._ffmpeg_arguments)
|
||||
|
||||
yield from async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
yield from stream.close()
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
|
38
homeassistant/components/camera/axis.py
Normal file
38
homeassistant/components/camera/axis.py
Normal file
|
@ -0,0 +1,38 @@
|
|||
"""
|
||||
Support for Axis camera streaming.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/camera.axis/
|
||||
"""
|
||||
import logging
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_NAME, CONF_USERNAME, CONF_PASSWORD,
|
||||
CONF_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION)
|
||||
from homeassistant.components.camera.mjpeg import (
|
||||
CONF_MJPEG_URL, CONF_STILL_IMAGE_URL, MjpegCamera)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEPENDENCIES = ['axis']
|
||||
DOMAIN = 'axis'
|
||||
|
||||
|
||||
def _get_image_url(host, mode):
|
||||
if mode == 'mjpeg':
|
||||
return 'http://{}/axis-cgi/mjpg/video.cgi'.format(host)
|
||||
elif mode == 'single':
|
||||
return 'http://{}/axis-cgi/jpg/image.cgi'.format(host)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup Axis camera.

    Builds an MJPEG camera from discovery data, forcing HTTP digest
    authentication as required by Axis devices.
    """
    host = discovery_info['host']
    camera_config = {
        CONF_NAME: discovery_info['name'],
        CONF_USERNAME: discovery_info['username'],
        CONF_PASSWORD: discovery_info['password'],
        CONF_MJPEG_URL: _get_image_url(host, 'mjpeg'),
        CONF_STILL_IMAGE_URL: _get_image_url(host, 'single'),
        CONF_AUTHENTICATION: HTTP_DIGEST_AUTHENTICATION,
    }
    add_devices([MjpegCamera(hass, camera_config)])
|
|
@ -20,12 +20,15 @@ _LOGGER = logging.getLogger(__name__)
|
|||
|
||||
CONF_NVR = 'nvr'
|
||||
CONF_KEY = 'key'
|
||||
CONF_PASSWORD = 'password'
|
||||
|
||||
DEFAULT_PASSWORD = 'ubnt'
|
||||
DEFAULT_PORT = 7080
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_NVR): cv.string,
|
||||
vol.Required(CONF_KEY): cv.string,
|
||||
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
|
||||
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
})
|
||||
|
||||
|
@ -34,6 +37,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
|||
"""Discover cameras on a Unifi NVR."""
|
||||
addr = config[CONF_NVR]
|
||||
key = config[CONF_KEY]
|
||||
password = config[CONF_PASSWORD]
|
||||
port = config[CONF_PORT]
|
||||
|
||||
from uvcclient import nvr
|
||||
|
@ -59,7 +63,8 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
|||
|
||||
add_devices([UnifiVideoCamera(nvrconn,
|
||||
camera[identifier],
|
||||
camera['name'])
|
||||
camera['name'],
|
||||
password)
|
||||
for camera in cameras])
|
||||
return True
|
||||
|
||||
|
@ -67,12 +72,13 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
|||
class UnifiVideoCamera(Camera):
|
||||
"""A Ubiquiti Unifi Video Camera."""
|
||||
|
||||
def __init__(self, nvr, uuid, name):
|
||||
def __init__(self, nvr, uuid, name, password):
|
||||
"""Initialize an Unifi camera."""
|
||||
super(UnifiVideoCamera, self).__init__()
|
||||
self._nvr = nvr
|
||||
self._uuid = uuid
|
||||
self._name = name
|
||||
self._password = password
|
||||
self.is_streaming = False
|
||||
self._connect_addr = None
|
||||
self._camera = None
|
||||
|
@ -102,7 +108,6 @@ class UnifiVideoCamera(Camera):
|
|||
def _login(self):
|
||||
"""Login to the camera."""
|
||||
from uvcclient import camera as uvc_camera
|
||||
from uvcclient import store as uvc_store
|
||||
|
||||
caminfo = self._nvr.get_camera(self._uuid)
|
||||
if self._connect_addr:
|
||||
|
@ -110,13 +115,6 @@ class UnifiVideoCamera(Camera):
|
|||
else:
|
||||
addrs = [caminfo['host'], caminfo['internalHost']]
|
||||
|
||||
store = uvc_store.get_info_store()
|
||||
password = store.get_camera_password(self._uuid)
|
||||
if password is None:
|
||||
_LOGGER.debug("Logging into camera %(name)s with default password",
|
||||
dict(name=self._name))
|
||||
password = 'ubnt'
|
||||
|
||||
if self._nvr.server_version >= (3, 2, 0):
|
||||
client_cls = uvc_camera.UVCCameraClientV320
|
||||
else:
|
||||
|
@ -126,7 +124,7 @@ class UnifiVideoCamera(Camera):
|
|||
for addr in addrs:
|
||||
try:
|
||||
camera = client_cls(
|
||||
addr, caminfo['username'], password)
|
||||
addr, caminfo['username'], self._password)
|
||||
camera.login()
|
||||
_LOGGER.debug("Logged into UVC camera %(name)s via %(addr)s",
|
||||
dict(name=self._name, addr=addr))
|
||||
|
|
|
@ -107,12 +107,7 @@ class ZoneMinderCamera(MjpegCamera):
|
|||
self._monitor_id)
|
||||
return
|
||||
|
||||
if not status_response.get("success", False):
|
||||
_LOGGER.warning("Alarm status API call failed for monitor %i",
|
||||
self._monitor_id)
|
||||
return
|
||||
|
||||
self._is_recording = status_response['status'] == ZM_STATE_ALARM
|
||||
self._is_recording = status_response.get('status') == ZM_STATE_ALARM
|
||||
|
||||
@property
|
||||
def is_recording(self):
|
||||
|
|
|
@ -149,22 +149,22 @@ class SensiboClimate(ClimateDevice):
|
|||
@property
|
||||
def current_fan_mode(self):
|
||||
"""Return the fan setting."""
|
||||
return self._ac_states['fanLevel']
|
||||
return self._ac_states.get('fanLevel')
|
||||
|
||||
@property
|
||||
def fan_list(self):
|
||||
"""List of available fan modes."""
|
||||
return self._current_capabilities['fanLevels']
|
||||
return self._current_capabilities.get('fanLevels')
|
||||
|
||||
@property
|
||||
def current_swing_mode(self):
|
||||
"""Return the fan setting."""
|
||||
return self._ac_states['swing']
|
||||
return self._ac_states.get('swing')
|
||||
|
||||
@property
|
||||
def swing_list(self):
|
||||
"""List of available swing modes."""
|
||||
return self._current_capabilities['swing']
|
||||
return self._current_capabilities.get('swing')
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
|
|
@ -4,6 +4,8 @@ Support for Wink thermostats.
|
|||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/climate.wink/
|
||||
"""
|
||||
import asyncio
|
||||
|
||||
from homeassistant.components.wink import WinkDevice, DOMAIN
|
||||
from homeassistant.components.climate import (
|
||||
STATE_AUTO, STATE_COOL, STATE_HEAT, ClimateDevice,
|
||||
|
@ -52,6 +54,11 @@ class WinkThermostat(WinkDevice, ClimateDevice):
|
|||
super().__init__(wink, hass)
|
||||
self._config_temp_unit = temp_unit
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_added_to_hass(self):
|
||||
"""Callback when entity is added to hass."""
|
||||
self.hass.data[DOMAIN]['entities']['climate'].append(self)
|
||||
|
||||
@property
|
||||
def temperature_unit(self):
|
||||
"""Return the unit of measurement."""
|
||||
|
|
|
@ -5,7 +5,7 @@ import os
|
|||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.const import EVENT_COMPONENT_LOADED
|
||||
from homeassistant.const import EVENT_COMPONENT_LOADED, CONF_ID
|
||||
from homeassistant.setup import (
|
||||
async_prepare_setup_platform, ATTR_COMPONENT)
|
||||
from homeassistant.components.frontend import register_built_in_panel
|
||||
|
@ -14,8 +14,8 @@ from homeassistant.util.yaml import load_yaml, dump
|
|||
|
||||
DOMAIN = 'config'
|
||||
DEPENDENCIES = ['http']
|
||||
SECTIONS = ('core', 'group', 'hassbian')
|
||||
ON_DEMAND = ('zwave', )
|
||||
SECTIONS = ('core', 'group', 'hassbian', 'automation')
|
||||
ON_DEMAND = ('zwave')
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
|
@ -60,7 +60,7 @@ def async_setup(hass, config):
|
|||
return True
|
||||
|
||||
|
||||
class EditKeyBasedConfigView(HomeAssistantView):
|
||||
class BaseEditConfigView(HomeAssistantView):
|
||||
"""Configure a Group endpoint."""
|
||||
|
||||
def __init__(self, component, config_type, path, key_schema, data_schema,
|
||||
|
@ -73,13 +73,29 @@ class EditKeyBasedConfigView(HomeAssistantView):
|
|||
self.data_schema = data_schema
|
||||
self.post_write_hook = post_write_hook
|
||||
|
||||
def _empty_config(self):
|
||||
"""Empty config if file not found."""
|
||||
raise NotImplementedError
|
||||
|
||||
def _get_value(self, data, config_key):
|
||||
"""Get value."""
|
||||
raise NotImplementedError
|
||||
|
||||
def _write_value(self, data, config_key, new_value):
|
||||
"""Set value."""
|
||||
raise NotImplementedError
|
||||
|
||||
@asyncio.coroutine
|
||||
def get(self, request, config_key):
|
||||
"""Fetch device specific config."""
|
||||
hass = request.app['hass']
|
||||
current = yield from hass.loop.run_in_executor(
|
||||
None, _read, hass.config.path(self.path))
|
||||
return self.json(current.get(config_key, {}))
|
||||
current = yield from self.read_config(hass)
|
||||
value = self._get_value(current, config_key)
|
||||
|
||||
if value is None:
|
||||
return self.json_message('Resource not found', 404)
|
||||
|
||||
return self.json(value)
|
||||
|
||||
@asyncio.coroutine
|
||||
def post(self, request, config_key):
|
||||
|
@ -104,10 +120,10 @@ class EditKeyBasedConfigView(HomeAssistantView):
|
|||
hass = request.app['hass']
|
||||
path = hass.config.path(self.path)
|
||||
|
||||
current = yield from hass.loop.run_in_executor(None, _read, path)
|
||||
current.setdefault(config_key, {}).update(data)
|
||||
current = yield from self.read_config(hass)
|
||||
self._write_value(current, config_key, data)
|
||||
|
||||
yield from hass.loop.run_in_executor(None, _write, path, current)
|
||||
yield from hass.async_add_job(_write, path, current)
|
||||
|
||||
if self.post_write_hook is not None:
|
||||
hass.async_add_job(self.post_write_hook(hass))
|
||||
|
@ -116,13 +132,59 @@ class EditKeyBasedConfigView(HomeAssistantView):
|
|||
'result': 'ok',
|
||||
})
|
||||
|
||||
@asyncio.coroutine
|
||||
def read_config(self, hass):
|
||||
"""Read the config."""
|
||||
current = yield from hass.async_add_job(
|
||||
_read, hass.config.path(self.path))
|
||||
if not current:
|
||||
current = self._empty_config()
|
||||
return current
|
||||
|
||||
|
||||
class EditKeyBasedConfigView(BaseEditConfigView):
    """Configure a list of entries."""

    def _empty_config(self):
        """Return an empty config."""
        return {}

    def _get_value(self, data, config_key):
        """Get value."""
        return data.get(config_key, {})

    def _write_value(self, data, config_key, new_value):
        """Set value."""
        entry = data.setdefault(config_key, {})
        entry.update(new_value)
|
||||
|
||||
|
||||
class EditIdBasedConfigView(BaseEditConfigView):
    """Configure key based config entries."""

    def _empty_config(self):
        """Return an empty config."""
        return []

    def _get_value(self, data, config_key):
        """Return the entry whose CONF_ID matches config_key, else None."""
        for entry in data:
            if entry.get(CONF_ID) == config_key:
                return entry
        return None

    def _write_value(self, data, config_key, new_value):
        """Update the matching entry, creating it when absent."""
        entry = self._get_value(data, config_key)

        if entry is None:
            entry = {CONF_ID: config_key}
            data.append(entry)

        entry.update(new_value)
|
||||
|
||||
|
||||
def _read(path):
|
||||
"""Read YAML helper."""
|
||||
if not os.path.isfile(path):
|
||||
with open(path, 'w'):
|
||||
pass
|
||||
return {}
|
||||
return None
|
||||
|
||||
return load_yaml(path)
|
||||
|
||||
|
|
20
homeassistant/components/config/automation.py
Normal file
20
homeassistant/components/config/automation.py
Normal file
|
@ -0,0 +1,20 @@
|
|||
"""Provide configuration end points for Z-Wave."""
|
||||
import asyncio
|
||||
|
||||
from homeassistant.components.config import EditIdBasedConfigView
|
||||
from homeassistant.components.automation import (
|
||||
PLATFORM_SCHEMA, DOMAIN, async_reload)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
|
||||
CONFIG_PATH = 'automations.yaml'
|
||||
|
||||
|
||||
@asyncio.coroutine
def async_setup(hass):
    """Set up the Automation config API.

    Registers an HTTP view that reads and edits automations.yaml entries
    by id, reloading automations after every write via async_reload.
    """
    hass.http.register_view(EditIdBasedConfigView(
        DOMAIN, 'config', CONFIG_PATH, cv.string,
        PLATFORM_SCHEMA, post_write_hook=async_reload
    ))
    return True
|
|
@ -9,9 +9,11 @@ the user has submitted configuration information.
|
|||
import asyncio
|
||||
import logging
|
||||
|
||||
from homeassistant.core import callback as async_callback
|
||||
from homeassistant.const import EVENT_TIME_CHANGED, ATTR_FRIENDLY_NAME, \
|
||||
ATTR_ENTITY_PICTURE
|
||||
from homeassistant.helpers.entity import generate_entity_id
|
||||
from homeassistant.util.async import run_callback_threadsafe
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
_REQUESTS = {}
|
||||
|
@ -43,7 +45,9 @@ def request_config(
|
|||
|
||||
Will return an ID to be used for sequent calls.
|
||||
"""
|
||||
instance = _get_instance(hass)
|
||||
instance = run_callback_threadsafe(hass.loop,
|
||||
_async_get_instance,
|
||||
hass).result()
|
||||
|
||||
request_id = instance.request_config(
|
||||
name, callback,
|
||||
|
@ -79,7 +83,8 @@ def async_setup(hass, config):
|
|||
return True
|
||||
|
||||
|
||||
def _get_instance(hass):
|
||||
@async_callback
|
||||
def _async_get_instance(hass):
|
||||
"""Get an instance per hass object."""
|
||||
instance = hass.data.get(_KEY_INSTANCE)
|
||||
|
||||
|
@ -97,7 +102,7 @@ class Configurator(object):
|
|||
self.hass = hass
|
||||
self._cur_id = 0
|
||||
self._requests = {}
|
||||
hass.services.register(
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_CONFIGURE, self.handle_service_call)
|
||||
|
||||
def request_config(
|
||||
|
|
62
homeassistant/components/cover/lutron_caseta.py
Normal file
62
homeassistant/components/cover/lutron_caseta.py
Normal file
|
@ -0,0 +1,62 @@
|
|||
"""
|
||||
Support for Lutron Caseta SerenaRollerShade.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/cover.lutron_caseta/
|
||||
"""
|
||||
import logging
|
||||
|
||||
|
||||
from homeassistant.components.cover import (
    CoverDevice, SUPPORT_OPEN, SUPPORT_CLOSE, SUPPORT_SET_POSITION)
from homeassistant.components.lutron_caseta import (
    LUTRON_CASETA_SMARTBRIDGE, LutronCasetaDevice)
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEPENDENCIES = ['lutron_caseta']
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Lutron Caseta Serena shades as a cover device."""
    bridge = hass.data[LUTRON_CASETA_SMARTBRIDGE]
    shades = bridge.get_devices_by_types(["SerenaRollerShade"])
    covers = [LutronCasetaCover(shade, bridge) for shade in shades]
    add_devices(covers, True)
|
||||
|
||||
|
||||
class LutronCasetaCover(LutronCasetaDevice, CoverDevice):
    """Representation of a Lutron Serena shade."""

    @property
    def supported_features(self):
        """Flag supported features.

        SUPPORT_SET_POSITION is included because set_cover_position is
        implemented below; without the flag the position service would
        never be offered for this cover.
        """
        return SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION

    @property
    def is_closed(self):
        """Return if the cover is closed."""
        return self._state["current_state"] < 1

    def close_cover(self):
        """Close the cover."""
        self._smartbridge.set_value(self._device_id, 0)

    def open_cover(self):
        """Open the cover."""
        self._smartbridge.set_value(self._device_id, 100)

    def set_cover_position(self, position, **kwargs):
        """Move the roller shutter to a specific position (0-100)."""
        self._smartbridge.set_value(self._device_id, position)

    def update(self):
        """Call when forcing a refresh of the device."""
        self._state = self._smartbridge.get_device_by_id(self._device_id)
        _LOGGER.debug(self._state)
|
|
@ -4,6 +4,8 @@ Support for Wink Covers.
|
|||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/cover.wink/
|
||||
"""
|
||||
import asyncio
|
||||
|
||||
from homeassistant.components.cover import CoverDevice
|
||||
from homeassistant.components.wink import WinkDevice, DOMAIN
|
||||
|
||||
|
@ -31,6 +33,11 @@ class WinkCoverDevice(WinkDevice, CoverDevice):
|
|||
"""Initialize the cover."""
|
||||
super().__init__(wink, hass)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_added_to_hass(self):
|
||||
"""Callback when entity is added to hass."""
|
||||
self.hass.data[DOMAIN]['entities']['cover'].append(self)
|
||||
|
||||
def close_cover(self):
|
||||
"""Close the shade."""
|
||||
self.wink.set_state(0)
|
||||
|
|
120
homeassistant/components/datadog.py
Normal file
120
homeassistant/components/datadog.py
Normal file
|
@ -0,0 +1,120 @@
|
|||
"""
|
||||
A component which allows you to send data to Datadog.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
https://home-assistant.io/components/datadog/
|
||||
"""
|
||||
import logging
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import (CONF_HOST, CONF_PORT, CONF_PREFIX,
|
||||
EVENT_LOGBOOK_ENTRY, EVENT_STATE_CHANGED,
|
||||
STATE_UNKNOWN)
|
||||
from homeassistant.helpers import state as state_helper
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
REQUIREMENTS = ['datadog==0.15.0']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_RATE = 'rate'
DEFAULT_HOST = 'localhost'
DEFAULT_PORT = 8125
DEFAULT_PREFIX = 'hass'
DEFAULT_RATE = 1
DOMAIN = 'datadog'

# 'rate' is the statsd sample rate passed to gauge() calls
# (1 = send every datapoint); it must be a positive integer.
CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
        vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
        vol.Optional(CONF_PREFIX, default=DEFAULT_PREFIX): cv.string,
        vol.Optional(CONF_RATE, default=DEFAULT_RATE):
            vol.All(vol.Coerce(int), vol.Range(min=1)),
    }),
}, extra=vol.ALLOW_EXTRA)
|
||||
|
||||
|
||||
def setup(hass, config):
    """Setup the Datadog component.

    Initializes the module-level statsd client, then subscribes two bus
    listeners: one forwarding logbook entries as Datadog events, one
    forwarding state changes as gauges.
    """
    from datadog import initialize, statsd

    conf = config[DOMAIN]
    host = conf.get(CONF_HOST)
    port = conf.get(CONF_PORT)
    sample_rate = conf.get(CONF_RATE)
    prefix = conf.get(CONF_PREFIX)

    # Configure the shared statsd client used by both listeners below.
    initialize(statsd_host=host, statsd_port=port)

    def logbook_entry_listener(event):
        """Listen for logbook entries and send them as events."""
        name = event.data.get('name')
        message = event.data.get('message')

        statsd.event(
            title="Home Assistant",
            text="%%% \n **{}** {} \n %%%".format(name, message),
            tags=[
                "entity:{}".format(event.data.get('entity_id')),
                "domain:{}".format(event.data.get('domain'))
            ]
        )

        _LOGGER.debug('Sent event %s', event.data.get('entity_id'))

    def state_changed_listener(event):
        """Listen for new messages on the bus and sends them to Datadog."""
        state = event.data.get('new_state')

        # Skip unknown states entirely.
        if state is None or state.state == STATE_UNKNOWN:
            return

        # Entities explicitly hidden from the UI are not reported.
        if state.attributes.get('hidden') is True:
            return

        states = dict(state.attributes)
        metric = "{}.{}".format(prefix, state.domain)
        tags = ["entity:{}".format(state.entity_id)]

        # Each numeric attribute becomes its own gauge, suffixed with the
        # attribute name (spaces replaced with underscores).
        for key, value in states.items():
            if isinstance(value, (float, int)):
                attribute = "{}.{}".format(metric, key.replace(' ', '_'))
                statsd.gauge(
                    attribute,
                    value,
                    sample_rate=sample_rate,
                    tags=tags
                )

                _LOGGER.debug(
                    'Sent metric %s: %s (tags: %s)',
                    attribute,
                    value,
                    tags
                )

        # The state itself is only gauged when it can be coerced to a
        # number; non-numeric states are logged and skipped.
        try:
            value = state_helper.state_as_number(state)
        except ValueError:
            _LOGGER.debug(
                'Error sending %s: %s (tags: %s)',
                metric,
                state.state,
                tags
            )
            return

        statsd.gauge(
            metric,
            value,
            sample_rate=sample_rate,
            tags=tags
        )

        _LOGGER.debug('Sent metric %s: %s (tags: %s)', metric, value, tags)

    hass.bus.listen(EVENT_LOGBOOK_ENTRY, logbook_entry_listener)
    hass.bus.listen(EVENT_STATE_CHANGED, state_changed_listener)

    return True
|
|
@ -157,28 +157,28 @@ def async_setup(hass, config):
|
|||
}},
|
||||
]}))
|
||||
|
||||
tasks2.append(group.Group.async_create_group(hass, 'living room', [
|
||||
tasks2.append(group.Group.async_create_group(hass, 'Living Room', [
|
||||
lights[1], switches[0], 'input_select.living_room_preset',
|
||||
'cover.living_room_window', media_players[1],
|
||||
'scene.romantic_lights']))
|
||||
tasks2.append(group.Group.async_create_group(hass, 'bedroom', [
|
||||
tasks2.append(group.Group.async_create_group(hass, 'Bedroom', [
|
||||
lights[0], switches[1], media_players[0],
|
||||
'input_slider.noise_allowance']))
|
||||
tasks2.append(group.Group.async_create_group(hass, 'kitchen', [
|
||||
tasks2.append(group.Group.async_create_group(hass, 'Kitchen', [
|
||||
lights[2], 'cover.kitchen_window', 'lock.kitchen_door']))
|
||||
tasks2.append(group.Group.async_create_group(hass, 'doors', [
|
||||
tasks2.append(group.Group.async_create_group(hass, 'Doors', [
|
||||
'lock.front_door', 'lock.kitchen_door',
|
||||
'garage_door.right_garage_door', 'garage_door.left_garage_door']))
|
||||
tasks2.append(group.Group.async_create_group(hass, 'automations', [
|
||||
tasks2.append(group.Group.async_create_group(hass, 'Automations', [
|
||||
'input_select.who_cooks', 'input_boolean.notify', ]))
|
||||
tasks2.append(group.Group.async_create_group(hass, 'people', [
|
||||
tasks2.append(group.Group.async_create_group(hass, 'People', [
|
||||
'device_tracker.demo_anne_therese', 'device_tracker.demo_home_boy',
|
||||
'device_tracker.demo_paulus']))
|
||||
tasks2.append(group.Group.async_create_group(hass, 'downstairs', [
|
||||
tasks2.append(group.Group.async_create_group(hass, 'Downstairs', [
|
||||
'group.living_room', 'group.kitchen',
|
||||
'scene.romantic_lights', 'cover.kitchen_window',
|
||||
'cover.living_room_window', 'group.doors',
|
||||
'thermostat.ecobee',
|
||||
'climate.ecobee',
|
||||
], view=True))
|
||||
|
||||
results = yield from asyncio.gather(*tasks2, loop=hass.loop)
|
||||
|
|
|
@ -14,12 +14,13 @@ from homeassistant.core import callback
|
|||
import homeassistant.util.dt as dt_util
|
||||
from homeassistant.const import STATE_HOME, STATE_NOT_HOME
|
||||
from homeassistant.helpers.event import (
|
||||
async_track_point_in_time, async_track_state_change)
|
||||
async_track_point_in_utc_time, async_track_state_change)
|
||||
from homeassistant.helpers.sun import is_up, get_astral_event_next
|
||||
from homeassistant.loader import get_component
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
DOMAIN = 'device_sun_light_trigger'
|
||||
DEPENDENCIES = ['light', 'device_tracker', 'group', 'sun']
|
||||
DEPENDENCIES = ['light', 'device_tracker', 'group']
|
||||
|
||||
CONF_DEVICE_GROUP = 'device_group'
|
||||
CONF_DISABLE_TURN_OFF = 'disable_turn_off'
|
||||
|
@ -50,7 +51,6 @@ def async_setup(hass, config):
|
|||
device_tracker = get_component('device_tracker')
|
||||
group = get_component('group')
|
||||
light = get_component('light')
|
||||
sun = get_component('sun')
|
||||
conf = config[DOMAIN]
|
||||
disable_turn_off = conf.get(CONF_DISABLE_TURN_OFF)
|
||||
light_group = conf.get(CONF_LIGHT_GROUP, light.ENTITY_ID_ALL_LIGHTS)
|
||||
|
@ -78,7 +78,7 @@ def async_setup(hass, config):
|
|||
|
||||
Async friendly.
|
||||
"""
|
||||
next_setting = sun.next_setting(hass)
|
||||
next_setting = get_astral_event_next(hass, 'sunset')
|
||||
if not next_setting:
|
||||
return None
|
||||
return next_setting - LIGHT_TRANSITION_TIME * len(light_ids)
|
||||
|
@ -103,7 +103,7 @@ def async_setup(hass, config):
|
|||
# Track every time sun rises so we can schedule a time-based
|
||||
# pre-sun set event
|
||||
@callback
|
||||
def schedule_light_turn_on(entity, old_state, new_state):
|
||||
def schedule_light_turn_on(now):
|
||||
"""Turn on all the lights at the moment sun sets.
|
||||
|
||||
We will schedule to have each light start after one another
|
||||
|
@ -114,26 +114,26 @@ def async_setup(hass, config):
|
|||
return
|
||||
|
||||
for index, light_id in enumerate(light_ids):
|
||||
async_track_point_in_time(
|
||||
async_track_point_in_utc_time(
|
||||
hass, async_turn_on_factory(light_id),
|
||||
start_point + index * LIGHT_TRANSITION_TIME)
|
||||
|
||||
async_track_state_change(hass, sun.ENTITY_ID, schedule_light_turn_on,
|
||||
sun.STATE_BELOW_HORIZON, sun.STATE_ABOVE_HORIZON)
|
||||
async_track_point_in_utc_time(hass, schedule_light_turn_on,
|
||||
get_astral_event_next(hass, 'sunrise'))
|
||||
|
||||
# If the sun is already above horizon schedule the time-based pre-sun set
|
||||
# event.
|
||||
if sun.is_on(hass):
|
||||
schedule_light_turn_on(None, None, None)
|
||||
if is_up(hass):
|
||||
schedule_light_turn_on(None)
|
||||
|
||||
@callback
|
||||
def check_light_on_dev_state_change(entity, old_state, new_state):
|
||||
"""Handle tracked device state changes."""
|
||||
lights_are_on = group.is_on(hass, light_group)
|
||||
light_needed = not (lights_are_on or sun.is_on(hass))
|
||||
light_needed = not (lights_are_on or is_up(hass))
|
||||
|
||||
# These variables are needed for the elif check
|
||||
now = dt_util.now()
|
||||
now = dt_util.utcnow()
|
||||
start_point = calc_time_for_light_when_sunset()
|
||||
|
||||
# Do we need lights?
|
||||
|
@ -146,7 +146,7 @@ def async_setup(hass, config):
|
|||
# Check this by seeing if current time is later then the point
|
||||
# in time when we would start putting the lights on.
|
||||
elif (start_point and
|
||||
start_point < now < sun.next_setting(hass)):
|
||||
start_point < now < get_astral_event_next(hass, 'sunset')):
|
||||
|
||||
# Check for every light if it would be on if someone was home
|
||||
# when the fading in started and turn it on if so
|
||||
|
|
|
@ -21,7 +21,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
|||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
|
||||
REQUIREMENTS = ['aioautomatic==0.3.1']
|
||||
REQUIREMENTS = ['aioautomatic==0.4.0']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@ -31,7 +31,8 @@ CONF_DEVICES = 'devices'
|
|||
|
||||
DEFAULT_TIMEOUT = 5
|
||||
|
||||
SCOPE = ['location', 'vehicle:profile', 'trip']
|
||||
DEFAULT_SCOPE = ['location', 'vehicle:profile', 'trip']
|
||||
FULL_SCOPE = DEFAULT_SCOPE + ['current_location']
|
||||
|
||||
ATTR_FUEL_LEVEL = 'fuel_level'
|
||||
|
||||
|
@ -58,8 +59,17 @@ def async_setup_scanner(hass, config, async_see, discovery_info=None):
|
|||
client_session=async_get_clientsession(hass),
|
||||
request_kwargs={'timeout': DEFAULT_TIMEOUT})
|
||||
try:
|
||||
session = yield from client.create_session_from_password(
|
||||
SCOPE, config[CONF_USERNAME], config[CONF_PASSWORD])
|
||||
try:
|
||||
session = yield from client.create_session_from_password(
|
||||
FULL_SCOPE, config[CONF_USERNAME], config[CONF_PASSWORD])
|
||||
except aioautomatic.exceptions.ForbiddenError as exc:
|
||||
if not str(exc).startswith("invalid_scope"):
|
||||
raise exc
|
||||
_LOGGER.info("Client not authorized for current_location scope. "
|
||||
"location:updated events will not be received.")
|
||||
session = yield from client.create_session_from_password(
|
||||
DEFAULT_SCOPE, config[CONF_USERNAME], config[CONF_PASSWORD])
|
||||
|
||||
data = AutomaticData(
|
||||
hass, client, session, config[CONF_DEVICES], async_see)
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@ MIN_TIME_BETWEEN_SCANS = timedelta(seconds=5)
|
|||
INTERFACES = 2
|
||||
DEFAULT_TIMEOUT = 10
|
||||
|
||||
REQUIREMENTS = ['beautifulsoup4==4.5.3']
|
||||
REQUIREMENTS = ['beautifulsoup4==4.6.0']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -163,6 +163,7 @@ class Tplink3DeviceScanner(TplinkDeviceScanner):
|
|||
def scan_devices(self):
|
||||
"""Scan for new devices and return a list with found device IDs."""
|
||||
self._update_info()
|
||||
self._log_out()
|
||||
return self.last_results.keys()
|
||||
|
||||
# pylint: disable=no-self-use
|
||||
|
@ -195,8 +196,9 @@ class Tplink3DeviceScanner(TplinkDeviceScanner):
|
|||
self.sysauth = regex_result.group(1)
|
||||
_LOGGER.info(self.sysauth)
|
||||
return True
|
||||
except ValueError:
|
||||
_LOGGER.error("Couldn't fetch auth tokens!")
|
||||
except (ValueError, KeyError) as _:
|
||||
_LOGGER.error("Couldn't fetch auth tokens! Response was: %s",
|
||||
response.text)
|
||||
return False
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_SCANS)
|
||||
|
@ -250,6 +252,21 @@ class Tplink3DeviceScanner(TplinkDeviceScanner):
|
|||
|
||||
return False
|
||||
|
||||
def _log_out(self):
|
||||
with self.lock:
|
||||
_LOGGER.info("Logging out of router admin interface...")
|
||||
|
||||
url = ('http://{}/cgi-bin/luci/;stok={}/admin/system?'
|
||||
'form=logout').format(self.host, self.stok)
|
||||
referer = 'http://{}/webpages/index.html'.format(self.host)
|
||||
|
||||
requests.post(url,
|
||||
params={'operation': 'write'},
|
||||
headers={'referer': referer},
|
||||
cookies={'sysauth': self.sysauth})
|
||||
self.stok = ''
|
||||
self.sysauth = ''
|
||||
|
||||
|
||||
class Tplink4DeviceScanner(TplinkDeviceScanner):
|
||||
"""This class queries an Archer C7 router with TP-Link firmware 150427."""
|
||||
|
|
|
@ -21,7 +21,7 @@ from homeassistant.helpers.event import async_track_point_in_utc_time
|
|||
from homeassistant.helpers.discovery import async_load_platform, async_discover
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
REQUIREMENTS = ['netdisco==1.0.0rc3']
|
||||
REQUIREMENTS = ['netdisco==1.0.0']
|
||||
|
||||
DOMAIN = 'discovery'
|
||||
|
||||
|
@ -31,6 +31,7 @@ SERVICE_WEMO = 'belkin_wemo'
|
|||
SERVICE_HASS_IOS_APP = 'hass_ios'
|
||||
SERVICE_IKEA_TRADFRI = 'ikea_tradfri'
|
||||
SERVICE_HASSIO = 'hassio'
|
||||
SERVICE_AXIS = 'axis'
|
||||
|
||||
SERVICE_HANDLERS = {
|
||||
SERVICE_HASS_IOS_APP: ('ios', None),
|
||||
|
@ -38,6 +39,7 @@ SERVICE_HANDLERS = {
|
|||
SERVICE_WEMO: ('wemo', None),
|
||||
SERVICE_IKEA_TRADFRI: ('tradfri', None),
|
||||
SERVICE_HASSIO: ('hassio', None),
|
||||
SERVICE_AXIS: ('axis', None),
|
||||
'philips_hue': ('light', 'hue'),
|
||||
'google_cast': ('media_player', 'cast'),
|
||||
'panasonic_viera': ('media_player', 'panasonic_viera'),
|
||||
|
|
|
@ -15,7 +15,7 @@ import homeassistant.helpers.config_validation as cv
|
|||
from homeassistant.helpers import state as state_helper
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
REQUIREMENTS = ['dweepy==0.2.0']
|
||||
REQUIREMENTS = ['dweepy==0.3.0']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@ -67,4 +67,4 @@ def send_data(name, msg):
|
|||
try:
|
||||
dweepy.dweet_for(name, msg)
|
||||
except dweepy.DweepyError:
|
||||
_LOGGER.error("Error saving data '%s' to Dweet.io", msg)
|
||||
_LOGGER.error("Error saving data to Dweet.io: %s", msg)
|
||||
|
|
|
@ -24,7 +24,7 @@ from homeassistant.helpers.entity import Entity
|
|||
from homeassistant.helpers.event import async_track_point_in_utc_time
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
REQUIREMENTS = ['pyeight==0.0.4']
|
||||
REQUIREMENTS = ['pyeight==0.0.5']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@ -145,6 +145,9 @@ def async_setup(hass, config):
|
|||
sensors.append('{}_{}'.format(obj.side, sensor))
|
||||
binary_sensors.append('{}_presence'.format(obj.side))
|
||||
sensors.append('room_temp')
|
||||
else:
|
||||
# No users, cannot continue
|
||||
return False
|
||||
|
||||
hass.async_add_job(discovery.async_load_platform(
|
||||
hass, 'sensor', DOMAIN, {
|
||||
|
|
|
@ -4,6 +4,7 @@ Support for Wink fans.
|
|||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/fan.wink/
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from homeassistant.components.fan import (FanEntity, SPEED_HIGH,
|
||||
|
@ -12,6 +13,8 @@ from homeassistant.components.fan import (FanEntity, SPEED_HIGH,
|
|||
from homeassistant.helpers.entity import ToggleEntity
|
||||
from homeassistant.components.wink import WinkDevice, DOMAIN
|
||||
|
||||
DEPENDENCIES = ['wink']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SPEED_LOWEST = 'lowest'
|
||||
|
@ -34,6 +37,11 @@ class WinkFanDevice(WinkDevice, FanEntity):
|
|||
"""Initialize the fan."""
|
||||
super().__init__(wink, hass)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_added_to_hass(self):
|
||||
"""Callback when entity is added to hass."""
|
||||
self.hass.data[DOMAIN]['entities']['fan'].append(self)
|
||||
|
||||
def set_direction(self: ToggleEntity, direction: str) -> None:
|
||||
"""Set the direction of the fan."""
|
||||
self.wink.set_fan_direction(direction)
|
||||
|
|
|
@ -1,22 +1,23 @@
|
|||
"""DO NOT MODIFY. Auto-generated by script/fingerprint_frontend."""
|
||||
|
||||
FINGERPRINTS = {
|
||||
"compatibility.js": "83d9c77748dafa9db49ae77d7f3d8fb0",
|
||||
"core.js": "5d08475f03adb5969bd31855d5ca0cfd",
|
||||
"frontend.html": "5999c8fac69c503b846672cae75a12b0",
|
||||
"compatibility.js": "8e4c44b5f4288cc48ec1ba94a9bec812",
|
||||
"core.js": "d4a7cb8c80c62b536764e0e81385f6aa",
|
||||
"frontend.html": "fbb9d6bdd3d661db26cad9475a5e22f1",
|
||||
"mdi.html": "f407a5a57addbe93817ee1b244d33fbe",
|
||||
"micromarkdown-js.html": "93b5ec4016f0bba585521cf4d18dec1a",
|
||||
"panels/ha-panel-automation.html": "21cba0a4fee9d2b45dda47f7a1dd82d8",
|
||||
"panels/ha-panel-config.html": "59d9eb28758b497a4d9b2428f978b9b1",
|
||||
"panels/ha-panel-dev-event.html": "2db9c218065ef0f61d8d08db8093cad2",
|
||||
"panels/ha-panel-dev-info.html": "61610e015a411cfc84edd2c4d489e71d",
|
||||
"panels/ha-panel-dev-service.html": "415552027cb083badeff5f16080410ed",
|
||||
"panels/ha-panel-dev-state.html": "d70314913b8923d750932367b1099750",
|
||||
"panels/ha-panel-dev-template.html": "567fbf86735e1b891e40c2f4060fec9b",
|
||||
"panels/ha-panel-hassio.html": "23d175b6744c20e2fdf475b6efdaa1d3",
|
||||
"panels/ha-panel-hassio.html": "9474ba65077371622f21ed9a30cf5229",
|
||||
"panels/ha-panel-history.html": "89062c48c76206cad1cec14ddbb1cbb1",
|
||||
"panels/ha-panel-iframe.html": "d920f0aa3c903680f2f8795e2255daab",
|
||||
"panels/ha-panel-logbook.html": "6dd6a16f52117318b202e60f98400163",
|
||||
"panels/ha-panel-map.html": "31c592c239636f91e07c7ac232a5ebc4",
|
||||
"panels/ha-panel-zwave.html": "84fb45638d2a69bac343246a687f647c",
|
||||
"panels/ha-panel-zwave.html": "19336d2c50c91dd6a122acc0606ff10d",
|
||||
"websocket_test.html": "575de64b431fe11c3785bf96d7813450"
|
||||
}
|
||||
|
|
|
@ -1 +1 @@
|
|||
!(function(){"use strict";function e(e,r){var t=arguments;if(void 0===e||null===e)throw new TypeError("Cannot convert first argument to object");for(var n=Object(e),o=1;o<arguments.length;o++){var i=t[o];if(void 0!==i&&null!==i)for(var l=Object.keys(Object(i)),a=0,c=l.length;a<c;a++){var b=l[a],f=Object.getOwnPropertyDescriptor(i,b);void 0!==f&&f.enumerable&&(n[b]=i[b])}}return n}function r(){Object.assign||Object.defineProperty(Object,"assign",{enumerable:!1,configurable:!0,writable:!0,value:e})}var t={assign:e,polyfill:r};t.polyfill()})();
|
||||
!function(){"use strict";function e(e,t){if(void 0===e||null===e)throw new TypeError("Cannot convert first argument to object");for(var r=Object(e),n=1;n<arguments.length;n++){var o=arguments[n];if(void 0!==o&&null!==o)for(var i=Object.keys(Object(o)),l=0,c=i.length;l<c;l++){var a=i[l],b=Object.getOwnPropertyDescriptor(o,a);void 0!==b&&b.enumerable&&(r[a]=o[a])}}return r}function t(){Object.assign||Object.defineProperty(Object,"assign",{enumerable:!1,configurable:!0,writable:!0,value:e})}({assign:e,polyfill:t}).polyfill()}();
|
||||
|
|
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
|
@ -1 +1 @@
|
|||
Subproject commit 9e7dc4a921f86e60cc1f14afe254e5310b63e854
|
||||
Subproject commit 6858555c86f18eb0ab176008e9aa2c3842fec7ce
|
Binary file not shown.
After Width: | Height: | Size: 2.8 KiB |
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
|
@ -21,7 +21,7 @@ from homeassistant.helpers.entity import Entity
|
|||
from homeassistant.helpers.event import track_time_interval
|
||||
from homeassistant.config import load_yaml_config_file
|
||||
|
||||
REQUIREMENTS = ['pyhomematic==0.1.25']
|
||||
REQUIREMENTS = ['pyhomematic==0.1.26']
|
||||
|
||||
DOMAIN = 'homematic'
|
||||
|
||||
|
|
|
@ -11,25 +11,26 @@ import os
|
|||
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.config import load_yaml_config_file
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID, CONF_NAME, CONF_ENTITY_ID)
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.loader import get_component
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DOMAIN = 'image_processing'
|
||||
DEPENDENCIES = ['camera']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=10)
|
||||
|
||||
DEVICE_CLASSES = [
|
||||
'alpr', # automatic license plate recognition
|
||||
'face', # face
|
||||
'alpr', # Automatic license plate recognition
|
||||
'face', # Face
|
||||
'ocr', # OCR
|
||||
]
|
||||
|
||||
SERVICE_SCAN = 'scan'
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
"""
|
||||
Support for the demo image processing.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/demo/
|
||||
"""
|
||||
from homeassistant.components.image_processing import ATTR_CONFIDENCE
|
||||
|
@ -12,7 +12,7 @@ from homeassistant.components.image_processing.microsoft_face_identify import (
|
|||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the demo image_processing platform."""
|
||||
"""Set up the demo image processing platform."""
|
||||
add_devices([
|
||||
DemoImageProcessingAlpr('camera.demo_camera', "Demo Alpr"),
|
||||
DemoImageProcessingFace(
|
||||
|
@ -21,10 +21,10 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
|||
|
||||
|
||||
class DemoImageProcessingAlpr(ImageProcessingAlprEntity):
|
||||
"""Demo alpr image processing entity."""
|
||||
"""Demo ALPR image processing entity."""
|
||||
|
||||
def __init__(self, camera_entity, name):
|
||||
"""Initialize demo alpr."""
|
||||
"""Initialize demo ALPR image processing entity."""
|
||||
super().__init__()
|
||||
|
||||
self._name = name
|
||||
|
@ -61,7 +61,7 @@ class DemoImageProcessingFace(ImageProcessingFaceEntity):
|
|||
"""Demo face identify image processing entity."""
|
||||
|
||||
def __init__(self, camera_entity, name):
|
||||
"""Initialize demo alpr."""
|
||||
"""Initialize demo face image processing entity."""
|
||||
super().__init__()
|
||||
|
||||
self._name = name
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
"""
|
||||
Component that will help set the dlib face detect processing.
|
||||
Component that will help set the Dlib face detect processing.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/image_processing.dlib_face_detect/
|
||||
"""
|
||||
import logging
|
||||
|
@ -21,7 +21,7 @@ _LOGGER = logging.getLogger(__name__)
|
|||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the Microsoft Face detection platform."""
|
||||
"""Set up the Dlib Face detection platform."""
|
||||
entities = []
|
||||
for camera in config[CONF_SOURCE]:
|
||||
entities.append(DlibFaceDetectEntity(
|
||||
|
@ -35,7 +35,7 @@ class DlibFaceDetectEntity(ImageProcessingFaceEntity):
|
|||
"""Dlib Face API entity for identify."""
|
||||
|
||||
def __init__(self, camera_entity, name=None):
|
||||
"""Initialize Dlib."""
|
||||
"""Initialize Dlib face entity."""
|
||||
super().__init__()
|
||||
|
||||
self._camera = camera_entity
|
||||
|
@ -62,7 +62,7 @@ class DlibFaceDetectEntity(ImageProcessingFaceEntity):
|
|||
import face_recognition
|
||||
|
||||
fak_file = io.BytesIO(image)
|
||||
fak_file.name = "snapshot.jpg"
|
||||
fak_file.name = 'snapshot.jpg'
|
||||
fak_file.seek(0)
|
||||
|
||||
image = face_recognition.load_image_file(fak_file)
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
"""
|
||||
Component that will help set the dlib face detect processing.
|
||||
Component that will help set the Dlib face detect processing.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/image_processing.dlib_face_identify/
|
||||
"""
|
||||
import logging
|
||||
|
@ -29,7 +29,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
|||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the Microsoft Face detection platform."""
|
||||
"""Set up the Dlib Face detection platform."""
|
||||
entities = []
|
||||
for camera in config[CONF_SOURCE]:
|
||||
entities.append(DlibFaceIdentifyEntity(
|
||||
|
@ -43,7 +43,7 @@ class DlibFaceIdentifyEntity(ImageProcessingFaceEntity):
|
|||
"""Dlib Face API entity for identify."""
|
||||
|
||||
def __init__(self, camera_entity, faces, name=None):
|
||||
"""Initialize Dlib."""
|
||||
"""Initialize Dlib face identify entry."""
|
||||
# pylint: disable=import-error
|
||||
import face_recognition
|
||||
super().__init__()
|
||||
|
@ -77,7 +77,7 @@ class DlibFaceIdentifyEntity(ImageProcessingFaceEntity):
|
|||
import face_recognition
|
||||
|
||||
fak_file = io.BytesIO(image)
|
||||
fak_file.name = "snapshot.jpg"
|
||||
fak_file.name = 'snapshot.jpg'
|
||||
fak_file.seek(0)
|
||||
|
||||
image = face_recognition.load_image_file(fak_file)
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
"""
|
||||
Component that will help set the microsoft face detect processing.
|
||||
Component that will help set the Microsoft face detect processing.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/image_processing.microsoft_face_detect/
|
||||
"""
|
||||
import asyncio
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
"""
|
||||
Component that will help set the microsoft face for verify processing.
|
||||
Component that will help set the Microsoft face for verify processing.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
https://home-assistant.io/components/image_processing.microsoft_face_identify/
|
||||
|
@ -62,8 +62,8 @@ class ImageProcessingFaceEntity(ImageProcessingEntity):
|
|||
|
||||
def __init__(self):
|
||||
"""Initialize base face identify/verify entity."""
|
||||
self.faces = [] # last scan data
|
||||
self.total_faces = 0 # face count
|
||||
self.faces = []
|
||||
self.total_faces = 0
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
|
@ -71,11 +71,11 @@ class ImageProcessingFaceEntity(ImageProcessingEntity):
|
|||
confidence = 0
|
||||
state = STATE_UNKNOWN
|
||||
|
||||
# no confidence support
|
||||
# No confidence support
|
||||
if not self.confidence:
|
||||
return self.total_faces
|
||||
|
||||
# search high confidence
|
||||
# Search high confidence
|
||||
for face in self.faces:
|
||||
if ATTR_CONFIDENCE not in face:
|
||||
continue
|
||||
|
@ -128,7 +128,7 @@ class ImageProcessingFaceEntity(ImageProcessingEntity):
|
|||
|
||||
This method must be run in the event loop.
|
||||
"""
|
||||
# send events
|
||||
# Send events
|
||||
for face in faces:
|
||||
if ATTR_CONFIDENCE in face and self.confidence:
|
||||
if face[ATTR_CONFIDENCE] < self.confidence:
|
||||
|
@ -139,7 +139,7 @@ class ImageProcessingFaceEntity(ImageProcessingEntity):
|
|||
self.hass.bus.async_fire, EVENT_DETECT_FACE, face
|
||||
)
|
||||
|
||||
# update entity store
|
||||
# Update entity store
|
||||
self.faces = faces
|
||||
self.total_faces = total
|
||||
|
||||
|
@ -200,7 +200,7 @@ class MicrosoftFaceIdentifyEntity(ImageProcessingFaceEntity):
|
|||
_LOGGER.error("Can't process image on Microsoft face: %s", err)
|
||||
return
|
||||
|
||||
# parse data
|
||||
# Parse data
|
||||
knwon_faces = []
|
||||
total = 0
|
||||
for face in detect:
|
||||
|
@ -220,5 +220,4 @@ class MicrosoftFaceIdentifyEntity(ImageProcessingFaceEntity):
|
|||
ATTR_CONFIDENCE: data['confidence'] * 100,
|
||||
})
|
||||
|
||||
# process data
|
||||
self.async_process_faces(knwon_faces, total)
|
||||
|
|
|
@ -1,17 +1,18 @@
|
|||
"""
|
||||
Component that will help set the OpenALPR cloud for ALPR processing.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/image_processing.openalpr_cloud/
|
||||
"""
|
||||
import asyncio
|
||||
from base64 import b64encode
|
||||
import logging
|
||||
from base64 import b64encode
|
||||
|
||||
import aiohttp
|
||||
import async_timeout
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.core import split_entity_id
|
||||
from homeassistant.const import CONF_API_KEY
|
||||
from homeassistant.components.image_processing import (
|
||||
|
@ -19,7 +20,6 @@ from homeassistant.components.image_processing import (
|
|||
from homeassistant.components.image_processing.openalpr_local import (
|
||||
ImageProcessingAlprEntity)
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@ -44,8 +44,7 @@ CONF_REGION = 'region'
|
|||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_API_KEY): cv.string,
|
||||
vol.Required(CONF_REGION):
|
||||
vol.All(vol.Lower, vol.In(OPENALPR_REGIONS)),
|
||||
vol.Required(CONF_REGION): vol.All(vol.Lower, vol.In(OPENALPR_REGIONS)),
|
||||
})
|
||||
|
||||
|
||||
|
@ -70,7 +69,7 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
|||
|
||||
|
||||
class OpenAlprCloudEntity(ImageProcessingAlprEntity):
|
||||
"""OpenALPR cloud entity."""
|
||||
"""Representation of an OpenALPR cloud entity."""
|
||||
|
||||
def __init__(self, camera_entity, params, confidence, name=None):
|
||||
"""Initialize OpenALPR cloud API."""
|
||||
|
@ -129,7 +128,7 @@ class OpenAlprCloudEntity(ImageProcessingAlprEntity):
|
|||
_LOGGER.error("Timeout for OpenALPR API")
|
||||
return
|
||||
|
||||
# processing api data
|
||||
# Processing API data
|
||||
vehicles = 0
|
||||
result = {}
|
||||
|
||||
|
|
|
@ -1,19 +1,19 @@
|
|||
"""
|
||||
Component that will help set the OpenALPR local for ALPR processing.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/image_processing.openalpr_local/
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
import io
|
||||
import logging
|
||||
import re
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.core import split_entity_id, callback
|
||||
from homeassistant.const import STATE_UNKNOWN
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.components.image_processing import (
|
||||
PLATFORM_SCHEMA, ImageProcessingEntity, CONF_CONFIDENCE, CONF_SOURCE,
|
||||
CONF_ENTITY_ID, CONF_NAME, ATTR_ENTITY_ID, ATTR_CONFIDENCE)
|
||||
|
@ -45,15 +45,13 @@ OPENALPR_REGIONS = [
|
|||
'vn2'
|
||||
]
|
||||
|
||||
|
||||
CONF_REGION = 'region'
|
||||
CONF_ALPR_BIN = 'alp_bin'
|
||||
CONF_REGION = 'region'
|
||||
|
||||
DEFAULT_BINARY = 'alpr'
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_REGION):
|
||||
vol.All(vol.Lower, vol.In(OPENALPR_REGIONS)),
|
||||
vol.Required(CONF_REGION): vol.All(vol.Lower, vol.In(OPENALPR_REGIONS)),
|
||||
vol.Optional(CONF_ALPR_BIN, default=DEFAULT_BINARY): cv.string,
|
||||
})
|
||||
|
||||
|
@ -77,9 +75,9 @@ class ImageProcessingAlprEntity(ImageProcessingEntity):
|
|||
"""Base entity class for ALPR image processing."""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize base alpr entity."""
|
||||
self.plates = {} # last scan data
|
||||
self.vehicles = 0 # vehicles count
|
||||
"""Initialize base ALPR entity."""
|
||||
self.plates = {}
|
||||
self.vehicles = 0
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
|
@ -128,7 +126,7 @@ class ImageProcessingAlprEntity(ImageProcessingEntity):
|
|||
if confidence >= self.confidence}
|
||||
new_plates = set(plates) - set(self.plates)
|
||||
|
||||
# send events
|
||||
# Send events
|
||||
for i_plate in new_plates:
|
||||
self.hass.async_add_job(
|
||||
self.hass.bus.async_fire, EVENT_FOUND_PLATE, {
|
||||
|
@ -138,7 +136,7 @@ class ImageProcessingAlprEntity(ImageProcessingEntity):
|
|||
}
|
||||
)
|
||||
|
||||
# update entity store
|
||||
# Update entity store
|
||||
self.plates = plates
|
||||
self.vehicles = vehicles
|
||||
|
||||
|
@ -192,7 +190,7 @@ class OpenAlprLocalEntity(ImageProcessingAlprEntity):
|
|||
stderr=asyncio.subprocess.DEVNULL
|
||||
)
|
||||
|
||||
# send image
|
||||
# Send image
|
||||
stdout, _ = yield from alpr.communicate(input=image)
|
||||
stdout = io.StringIO(str(stdout, 'utf-8'))
|
||||
|
||||
|
@ -204,12 +202,12 @@ class OpenAlprLocalEntity(ImageProcessingAlprEntity):
|
|||
new_plates = RE_ALPR_PLATE.search(line)
|
||||
new_result = RE_ALPR_RESULT.search(line)
|
||||
|
||||
# found new vehicle
|
||||
# Found new vehicle
|
||||
if new_plates:
|
||||
vehicles += 1
|
||||
continue
|
||||
|
||||
# found plate result
|
||||
# Found plate result
|
||||
if new_result:
|
||||
try:
|
||||
result.update(
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
"""
|
||||
Component that performs OpenCV classification on images.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/image_processing.opencv/
|
||||
"""
|
||||
from datetime import timedelta
|
||||
|
@ -9,22 +9,15 @@ import logging
|
|||
|
||||
from homeassistant.core import split_entity_id
|
||||
from homeassistant.components.image_processing import (
|
||||
ImageProcessingEntity,
|
||||
PLATFORM_SCHEMA,
|
||||
)
|
||||
ImageProcessingEntity, PLATFORM_SCHEMA)
|
||||
from homeassistant.components.opencv import (
|
||||
ATTR_MATCHES,
|
||||
CLASSIFIER_GROUP_CONFIG,
|
||||
CONF_CLASSIFIER,
|
||||
CONF_ENTITY_ID,
|
||||
CONF_NAME,
|
||||
process_image,
|
||||
)
|
||||
|
||||
DEPENDENCIES = ['opencv']
|
||||
ATTR_MATCHES, CLASSIFIER_GROUP_CONFIG, CONF_CLASSIFIER, CONF_ENTITY_ID,
|
||||
CONF_NAME, process_image)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEPENDENCIES = ['opencv']
|
||||
|
||||
DEFAULT_TIMEOUT = 10
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=2)
|
||||
|
@ -33,18 +26,13 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(CLASSIFIER_GROUP_CONFIG)
|
|||
|
||||
|
||||
def _create_processor_from_config(hass, camera_entity, config):
|
||||
"""Create an OpenCV processor from configurtaion."""
|
||||
"""Create an OpenCV processor from configuration."""
|
||||
classifier_config = config[CONF_CLASSIFIER]
|
||||
name = '{} {}'.format(
|
||||
config[CONF_NAME],
|
||||
split_entity_id(camera_entity)[1].replace('_', ' '))
|
||||
config[CONF_NAME], split_entity_id(camera_entity)[1].replace('_', ' '))
|
||||
|
||||
processor = OpenCVImageProcessor(
|
||||
hass,
|
||||
camera_entity,
|
||||
name,
|
||||
classifier_config,
|
||||
)
|
||||
hass, camera_entity, name, classifier_config)
|
||||
|
||||
return processor
|
||||
|
||||
|
@ -57,10 +45,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
|||
devices = []
|
||||
for camera_entity in discovery_info[CONF_ENTITY_ID]:
|
||||
devices.append(
|
||||
_create_processor_from_config(
|
||||
hass,
|
||||
camera_entity,
|
||||
discovery_info))
|
||||
_create_processor_from_config(hass, camera_entity, discovery_info))
|
||||
|
||||
add_devices(devices)
|
||||
|
||||
|
@ -115,6 +100,5 @@ class OpenCVImageProcessor(ImageProcessingEntity):
|
|||
def process_image(self, image):
|
||||
"""Process the image."""
|
||||
self._last_image = image
|
||||
self._matches = process_image(image,
|
||||
self._classifier_configs,
|
||||
False)
|
||||
self._matches = process_image(
|
||||
image, self._classifier_configs, False)
|
||||
|
|
114
homeassistant/components/image_processing/seven_segments.py
Normal file
114
homeassistant/components/image_processing/seven_segments.py
Normal file
|
@ -0,0 +1,114 @@
|
|||
"""
|
||||
Local optical character recognition processing of seven segements displays.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/image_processing.seven_segments/
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
import io
|
||||
import os
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.core import split_entity_id
|
||||
from homeassistant.components.image_processing import (
|
||||
PLATFORM_SCHEMA, ImageProcessingEntity, CONF_SOURCE, CONF_ENTITY_ID,
|
||||
CONF_NAME)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_DIGITS = 'digits'
|
||||
CONF_HEIGHT = 'height'
|
||||
CONF_SSOCR_BIN = 'ssocr_bin'
|
||||
CONF_THRESHOLD = 'threshold'
|
||||
CONF_WIDTH = 'width'
|
||||
CONF_X_POS = 'x_position'
|
||||
CONF_Y_POS = 'y_position'
|
||||
|
||||
DEFAULT_BINARY = 'ssocr'
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Optional(CONF_DIGITS, default=-1): cv.positive_int,
|
||||
vol.Optional(CONF_HEIGHT, default=0): cv.positive_int,
|
||||
vol.Optional(CONF_SSOCR_BIN, default=DEFAULT_BINARY): cv.string,
|
||||
vol.Optional(CONF_THRESHOLD, default=0): cv.positive_int,
|
||||
vol.Optional(CONF_WIDTH, default=0): cv.positive_int,
|
||||
vol.Optional(CONF_X_POS, default=0): cv.string,
|
||||
vol.Optional(CONF_Y_POS, default=0): cv.positive_int,
|
||||
})
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
"""Set up the Seven segments OCR platform."""
|
||||
entities = []
|
||||
for camera in config[CONF_SOURCE]:
|
||||
entities.append(ImageProcessingSsocr(
|
||||
hass, camera[CONF_ENTITY_ID], config, camera.get(CONF_NAME)
|
||||
))
|
||||
|
||||
async_add_devices(entities)
|
||||
|
||||
|
||||
class ImageProcessingSsocr(ImageProcessingEntity):
|
||||
"""Representation of the seven segments OCR image processing entity."""
|
||||
|
||||
def __init__(self, hass, camera_entity, config, name):
|
||||
"""Initialize seven segments processing."""
|
||||
self.hass = hass
|
||||
self._camera_entity = camera_entity
|
||||
if name:
|
||||
self._name = name
|
||||
else:
|
||||
self._name = "SevenSegement OCR {0}".format(
|
||||
split_entity_id(camera_entity)[1])
|
||||
self._state = None
|
||||
self.filepath = os.path.join(self.hass.config.config_dir, 'ocr.png')
|
||||
self._command = [
|
||||
config[CONF_SSOCR_BIN], 'erosion', 'make_mono', 'crop',
|
||||
str(config[CONF_X_POS]), str(config[CONF_Y_POS]),
|
||||
str(config[CONF_WIDTH]), str(config[CONF_HEIGHT]), '-t',
|
||||
str(config[CONF_THRESHOLD]), '-d', str(config[CONF_DIGITS]),
|
||||
self.filepath
|
||||
]
|
||||
|
||||
@property
|
||||
def device_class(self):
|
||||
"""Return the class of this device, from component DEVICE_CLASSES."""
|
||||
return 'ocr'
|
||||
|
||||
@property
|
||||
def camera_entity(self):
|
||||
"""Return camera entity id from process pictures."""
|
||||
return self._camera_entity
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the image processor."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
"""Return the state of the entity."""
|
||||
return self._state
|
||||
|
||||
def process_image(self, image):
|
||||
"""Process the image."""
|
||||
from PIL import Image
|
||||
import subprocess
|
||||
|
||||
stream = io.BytesIO(image)
|
||||
img = Image.open(stream)
|
||||
img.save(self.filepath, 'png')
|
||||
|
||||
ocr = subprocess.Popen(
|
||||
self._command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
out = ocr.communicate()
|
||||
if out[0] != b'':
|
||||
self._state = out[0].strip().decode('utf-8')
|
||||
else:
|
||||
self._state = None
|
||||
_LOGGER.warning(
|
||||
"Unable to detect value: %s", out[1].strip().decode('utf-8'))
|
142
homeassistant/components/kira.py
Normal file
142
homeassistant/components/kira.py
Normal file
|
@ -0,0 +1,142 @@
|
|||
"""KIRA interface to receive UDP packets from an IR-IP bridge."""
|
||||
# pylint: disable=import-error
|
||||
import logging
|
||||
import os
|
||||
import yaml
|
||||
|
||||
import voluptuous as vol
|
||||
from voluptuous.error import Error as VoluptuousError
|
||||
|
||||
from homeassistant.helpers import discovery
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_DEVICE,
|
||||
CONF_HOST,
|
||||
CONF_NAME,
|
||||
CONF_PORT,
|
||||
CONF_SENSORS,
|
||||
CONF_TYPE,
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
STATE_UNKNOWN)
|
||||
|
||||
REQUIREMENTS = ["pykira==0.1.1"]
|
||||
|
||||
DOMAIN = 'kira'
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_HOST = "0.0.0.0"
|
||||
DEFAULT_PORT = 65432
|
||||
|
||||
CONF_CODE = "code"
|
||||
CONF_REPEAT = "repeat"
|
||||
CONF_REMOTES = "remotes"
|
||||
CONF_SENSOR = "sensor"
|
||||
CONF_REMOTE = "remote"
|
||||
|
||||
CODES_YAML = '{}_codes.yaml'.format(DOMAIN)
|
||||
|
||||
CODE_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
vol.Required(CONF_CODE): cv.string,
|
||||
vol.Optional(CONF_TYPE): cv.string,
|
||||
vol.Optional(CONF_DEVICE): cv.string,
|
||||
vol.Optional(CONF_REPEAT): cv.positive_int,
|
||||
})
|
||||
|
||||
SENSOR_SCHEMA = vol.Schema({
|
||||
vol.Optional(CONF_NAME, default=DOMAIN):
|
||||
vol.Exclusive(cv.string, "sensors"),
|
||||
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
|
||||
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
})
|
||||
|
||||
REMOTE_SCHEMA = vol.Schema({
|
||||
vol.Optional(CONF_NAME, default=DOMAIN):
|
||||
vol.Exclusive(cv.string, "remotes"),
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
})
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: vol.Schema({
|
||||
vol.Optional(CONF_SENSORS): [SENSOR_SCHEMA],
|
||||
vol.Optional(CONF_REMOTES): [REMOTE_SCHEMA]})
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
|
||||
|
||||
def load_codes(path):
|
||||
"""Load Kira codes from specified file."""
|
||||
codes = []
|
||||
if os.path.exists(path):
|
||||
with open(path) as code_file:
|
||||
data = yaml.load(code_file) or []
|
||||
for code in data:
|
||||
try:
|
||||
codes.append(CODE_SCHEMA(code))
|
||||
except VoluptuousError as exception:
|
||||
# keep going
|
||||
_LOGGER.warning('Kira Code Invalid Data: %s', exception)
|
||||
else:
|
||||
with open(path, 'w') as code_file:
|
||||
code_file.write('')
|
||||
return codes
|
||||
|
||||
|
||||
def setup(hass, config):
|
||||
"""Setup KIRA capability."""
|
||||
import pykira
|
||||
|
||||
sensors = config.get(DOMAIN, {}).get(CONF_SENSORS, [])
|
||||
remotes = config.get(DOMAIN, {}).get(CONF_REMOTES, [])
|
||||
# If no sensors or remotes were specified, add a sensor
|
||||
if not(sensors or remotes):
|
||||
sensors.append({})
|
||||
|
||||
codes = load_codes(hass.config.path(CODES_YAML))
|
||||
|
||||
hass.data[DOMAIN] = {
|
||||
CONF_SENSOR: {},
|
||||
CONF_REMOTE: {},
|
||||
}
|
||||
|
||||
def load_module(platform, idx, module_conf):
|
||||
"""Set up Kira module and load platform."""
|
||||
# note: module_name is not the HA device name. it's just a unique name
|
||||
# to ensure the component and platform can share information
|
||||
module_name = ("%s_%d" % (DOMAIN, idx)) if idx else DOMAIN
|
||||
device_name = module_conf.get(CONF_NAME, DOMAIN)
|
||||
port = module_conf.get(CONF_PORT, DEFAULT_PORT)
|
||||
host = module_conf.get(CONF_HOST, DEFAULT_HOST)
|
||||
|
||||
if platform == CONF_SENSOR:
|
||||
module = pykira.KiraReceiver(host, port)
|
||||
module.start()
|
||||
else:
|
||||
module = pykira.KiraModule(host, port)
|
||||
|
||||
hass.data[DOMAIN][platform][module_name] = module
|
||||
for code in codes:
|
||||
code_tuple = (code.get(CONF_NAME),
|
||||
code.get(CONF_DEVICE, STATE_UNKNOWN))
|
||||
module.registerCode(code_tuple, code.get(CONF_CODE))
|
||||
|
||||
discovery.load_platform(hass, platform, DOMAIN,
|
||||
{'name': module_name, 'device': device_name},
|
||||
config)
|
||||
|
||||
for idx, module_conf in enumerate(sensors):
|
||||
load_module(CONF_SENSOR, idx, module_conf)
|
||||
|
||||
for idx, module_conf in enumerate(remotes):
|
||||
load_module(CONF_REMOTE, idx, module_conf)
|
||||
|
||||
def _stop_kira(_event):
|
||||
for receiver in hass.data[DOMAIN][CONF_SENSOR].values():
|
||||
receiver.stop()
|
||||
_LOGGER.info("Terminated receivers")
|
||||
|
||||
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _stop_kira)
|
||||
|
||||
return True
|
|
@ -50,13 +50,15 @@ ATTR_TRANSITION = "transition"
|
|||
ATTR_RGB_COLOR = "rgb_color"
|
||||
ATTR_XY_COLOR = "xy_color"
|
||||
ATTR_COLOR_TEMP = "color_temp"
|
||||
ATTR_KELVIN = "kelvin"
|
||||
ATTR_MIN_MIREDS = "min_mireds"
|
||||
ATTR_MAX_MIREDS = "max_mireds"
|
||||
ATTR_COLOR_NAME = "color_name"
|
||||
ATTR_WHITE_VALUE = "white_value"
|
||||
|
||||
# int with value 0 .. 255 representing brightness of the light.
|
||||
# Brightness of the light, 0..255 or percentage
|
||||
ATTR_BRIGHTNESS = "brightness"
|
||||
ATTR_BRIGHTNESS_PCT = "brightness_pct"
|
||||
|
||||
# String representing a profile (built-in ones or external defined).
|
||||
ATTR_PROFILE = "profile"
|
||||
|
@ -92,18 +94,21 @@ PROP_TO_ATTR = {
|
|||
# Service call validation schemas
|
||||
VALID_TRANSITION = vol.All(vol.Coerce(float), vol.Clamp(min=0, max=6553))
|
||||
VALID_BRIGHTNESS = vol.All(vol.Coerce(int), vol.Clamp(min=0, max=255))
|
||||
VALID_BRIGHTNESS_PCT = vol.All(vol.Coerce(float), vol.Range(min=0, max=100))
|
||||
|
||||
LIGHT_TURN_ON_SCHEMA = vol.Schema({
|
||||
ATTR_ENTITY_ID: cv.entity_ids,
|
||||
ATTR_PROFILE: cv.string,
|
||||
ATTR_TRANSITION: VALID_TRANSITION,
|
||||
ATTR_BRIGHTNESS: VALID_BRIGHTNESS,
|
||||
ATTR_BRIGHTNESS_PCT: VALID_BRIGHTNESS_PCT,
|
||||
ATTR_COLOR_NAME: cv.string,
|
||||
ATTR_RGB_COLOR: vol.All(vol.ExactSequence((cv.byte, cv.byte, cv.byte)),
|
||||
vol.Coerce(tuple)),
|
||||
ATTR_XY_COLOR: vol.All(vol.ExactSequence((cv.small_float, cv.small_float)),
|
||||
vol.Coerce(tuple)),
|
||||
ATTR_COLOR_TEMP: vol.All(vol.Coerce(int), vol.Range(min=1)),
|
||||
ATTR_KELVIN: vol.All(vol.Coerce(int), vol.Range(min=0)),
|
||||
ATTR_WHITE_VALUE: vol.All(vol.Coerce(int), vol.Range(min=0, max=255)),
|
||||
ATTR_FLASH: vol.In([FLASH_SHORT, FLASH_LONG]),
|
||||
ATTR_EFFECT: cv.string,
|
||||
|
@ -142,20 +147,21 @@ def is_on(hass, entity_id=None):
|
|||
|
||||
|
||||
def turn_on(hass, entity_id=None, transition=None, brightness=None,
|
||||
rgb_color=None, xy_color=None, color_temp=None, white_value=None,
|
||||
brightness_pct=None, rgb_color=None, xy_color=None,
|
||||
color_temp=None, kelvin=None, white_value=None,
|
||||
profile=None, flash=None, effect=None, color_name=None):
|
||||
"""Turn all or specified light on."""
|
||||
hass.add_job(
|
||||
async_turn_on, hass, entity_id, transition, brightness,
|
||||
rgb_color, xy_color, color_temp, white_value,
|
||||
async_turn_on, hass, entity_id, transition, brightness, brightness_pct,
|
||||
rgb_color, xy_color, color_temp, kelvin, white_value,
|
||||
profile, flash, effect, color_name)
|
||||
|
||||
|
||||
@callback
|
||||
def async_turn_on(hass, entity_id=None, transition=None, brightness=None,
|
||||
rgb_color=None, xy_color=None, color_temp=None,
|
||||
white_value=None, profile=None, flash=None, effect=None,
|
||||
color_name=None):
|
||||
brightness_pct=None, rgb_color=None, xy_color=None,
|
||||
color_temp=None, kelvin=None, white_value=None,
|
||||
profile=None, flash=None, effect=None, color_name=None):
|
||||
"""Turn all or specified light on."""
|
||||
data = {
|
||||
key: value for key, value in [
|
||||
|
@ -163,9 +169,11 @@ def async_turn_on(hass, entity_id=None, transition=None, brightness=None,
|
|||
(ATTR_PROFILE, profile),
|
||||
(ATTR_TRANSITION, transition),
|
||||
(ATTR_BRIGHTNESS, brightness),
|
||||
(ATTR_BRIGHTNESS_PCT, brightness_pct),
|
||||
(ATTR_RGB_COLOR, rgb_color),
|
||||
(ATTR_XY_COLOR, xy_color),
|
||||
(ATTR_COLOR_TEMP, color_temp),
|
||||
(ATTR_KELVIN, kelvin),
|
||||
(ATTR_WHITE_VALUE, white_value),
|
||||
(ATTR_FLASH, flash),
|
||||
(ATTR_EFFECT, effect),
|
||||
|
@ -207,6 +215,27 @@ def toggle(hass, entity_id=None, transition=None):
|
|||
hass.services.call(DOMAIN, SERVICE_TOGGLE, data)
|
||||
|
||||
|
||||
def preprocess_turn_on_alternatives(params):
|
||||
"""Processing extra data for turn light on request."""
|
||||
profile = Profiles.get(params.pop(ATTR_PROFILE, None))
|
||||
if profile is not None:
|
||||
params.setdefault(ATTR_XY_COLOR, profile[:2])
|
||||
params.setdefault(ATTR_BRIGHTNESS, profile[2])
|
||||
|
||||
color_name = params.pop(ATTR_COLOR_NAME, None)
|
||||
if color_name is not None:
|
||||
params[ATTR_RGB_COLOR] = color_util.color_name_to_rgb(color_name)
|
||||
|
||||
kelvin = params.pop(ATTR_KELVIN, None)
|
||||
if kelvin is not None:
|
||||
mired = color_util.color_temperature_kelvin_to_mired(kelvin)
|
||||
params[ATTR_COLOR_TEMP] = int(mired)
|
||||
|
||||
brightness_pct = params.pop(ATTR_BRIGHTNESS_PCT, None)
|
||||
if brightness_pct is not None:
|
||||
params[ATTR_BRIGHTNESS] = int(255 * brightness_pct/100)
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_setup(hass, config):
|
||||
"""Expose light control via statemachine and services."""
|
||||
|
@ -215,10 +244,8 @@ def async_setup(hass, config):
|
|||
yield from component.async_setup(config)
|
||||
|
||||
# load profiles from files
|
||||
profiles = yield from hass.loop.run_in_executor(
|
||||
None, _load_profile_data, hass)
|
||||
|
||||
if profiles is None:
|
||||
profiles_valid = yield from Profiles.load_profiles(hass)
|
||||
if not profiles_valid:
|
||||
return False
|
||||
|
||||
@asyncio.coroutine
|
||||
|
@ -231,17 +258,7 @@ def async_setup(hass, config):
|
|||
target_lights = component.async_extract_from_service(service)
|
||||
params.pop(ATTR_ENTITY_ID, None)
|
||||
|
||||
# Processing extra data for turn light on request.
|
||||
profile = profiles.get(params.pop(ATTR_PROFILE, None))
|
||||
|
||||
if profile:
|
||||
params.setdefault(ATTR_XY_COLOR, profile[:2])
|
||||
params.setdefault(ATTR_BRIGHTNESS, profile[2])
|
||||
|
||||
color_name = params.pop(ATTR_COLOR_NAME, None)
|
||||
|
||||
if color_name is not None:
|
||||
params[ATTR_RGB_COLOR] = color_util.color_name_to_rgb(color_name)
|
||||
preprocess_turn_on_alternatives(params)
|
||||
|
||||
for light in target_lights:
|
||||
if service.service == SERVICE_TURN_ON:
|
||||
|
@ -287,31 +304,51 @@ def async_setup(hass, config):
|
|||
return True
|
||||
|
||||
|
||||
def _load_profile_data(hass):
|
||||
"""Load built-in profiles and custom profiles."""
|
||||
profile_paths = [os.path.join(os.path.dirname(__file__),
|
||||
LIGHT_PROFILES_FILE),
|
||||
hass.config.path(LIGHT_PROFILES_FILE)]
|
||||
profiles = {}
|
||||
class Profiles:
|
||||
"""Representation of available color profiles."""
|
||||
|
||||
for profile_path in profile_paths:
|
||||
if not os.path.isfile(profile_path):
|
||||
continue
|
||||
with open(profile_path) as inp:
|
||||
reader = csv.reader(inp)
|
||||
_all = None
|
||||
|
||||
# Skip the header
|
||||
next(reader, None)
|
||||
@classmethod
|
||||
@asyncio.coroutine
|
||||
def load_profiles(cls, hass):
|
||||
"""Load and cache profiles."""
|
||||
def load_profile_data(hass):
|
||||
"""Load built-in profiles and custom profiles."""
|
||||
profile_paths = [os.path.join(os.path.dirname(__file__),
|
||||
LIGHT_PROFILES_FILE),
|
||||
hass.config.path(LIGHT_PROFILES_FILE)]
|
||||
profiles = {}
|
||||
|
||||
try:
|
||||
for rec in reader:
|
||||
profile, color_x, color_y, brightness = PROFILE_SCHEMA(rec)
|
||||
profiles[profile] = (color_x, color_y, brightness)
|
||||
except vol.MultipleInvalid as ex:
|
||||
_LOGGER.error("Error parsing light profile from %s: %s",
|
||||
profile_path, ex)
|
||||
return None
|
||||
return profiles
|
||||
for profile_path in profile_paths:
|
||||
if not os.path.isfile(profile_path):
|
||||
continue
|
||||
with open(profile_path) as inp:
|
||||
reader = csv.reader(inp)
|
||||
|
||||
# Skip the header
|
||||
next(reader, None)
|
||||
|
||||
try:
|
||||
for rec in reader:
|
||||
profile, color_x, color_y, brightness = \
|
||||
PROFILE_SCHEMA(rec)
|
||||
profiles[profile] = (color_x, color_y, brightness)
|
||||
except vol.MultipleInvalid as ex:
|
||||
_LOGGER.error(
|
||||
"Error parsing light profile from %s: %s",
|
||||
profile_path, ex)
|
||||
return None
|
||||
return profiles
|
||||
|
||||
cls._all = yield from hass.loop.run_in_executor(
|
||||
None, load_profile_data, hass)
|
||||
return cls._all is not None
|
||||
|
||||
@classmethod
|
||||
def get(cls, name):
|
||||
"""Return a named profile."""
|
||||
return cls._all.get(name)
|
||||
|
||||
|
||||
class Light(ToggleEntity):
|
||||
|
|
|
@ -31,7 +31,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
|||
|
||||
# pylint: disable=unused-argument
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Add device specified by serial number."""
|
||||
"""Set up Blinkstick device specified by serial number."""
|
||||
from blinkstick import blinkstick
|
||||
|
||||
name = config.get(CONF_NAME)
|
||||
|
|
|
@ -29,6 +29,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
|||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the Blinkt Light platform."""
|
||||
# pylint: disable=import-error
|
||||
import blinkt
|
||||
|
||||
# ensure that the lights are off when exiting
|
||||
|
|
|
@ -20,14 +20,13 @@ _LOGGER = logging.getLogger(__name__)
|
|||
CONF_SENDER_ID = 'sender_id'
|
||||
|
||||
DEFAULT_NAME = 'EnOcean Light'
|
||||
|
||||
DEPENDENCIES = ['enocean']
|
||||
|
||||
SUPPORT_ENOCEAN = SUPPORT_BRIGHTNESS
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Optional(CONF_ID, default=[]): vol.All(cv.ensure_list,
|
||||
[vol.Coerce(int)]),
|
||||
vol.Optional(CONF_ID, default=[]):
|
||||
vol.All(cv.ensure_list, [vol.Coerce(int)]),
|
||||
vol.Required(CONF_SENDER_ID): vol.All(cv.ensure_list, [vol.Coerce(int)]),
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
})
|
||||
|
|
|
@ -35,26 +35,26 @@ SUPPORT_FLUX_LED_RGBW = (SUPPORT_WHITE_VALUE | SUPPORT_EFFECT |
|
|||
MODE_RGB = 'rgb'
|
||||
MODE_RGBW = 'rgbw'
|
||||
|
||||
# List of Supported Effects which aren't already declared in LIGHT
|
||||
EFFECT_RED_FADE = "red_fade"
|
||||
EFFECT_GREEN_FADE = "green_fade"
|
||||
EFFECT_BLUE_FADE = "blue_fade"
|
||||
EFFECT_YELLOW_FADE = "yellow_fade"
|
||||
EFFECT_CYAN_FADE = "cyan_fade"
|
||||
EFFECT_PURPLE_FADE = "purple_fade"
|
||||
EFFECT_WHITE_FADE = "white_fade"
|
||||
EFFECT_RED_GREEN_CROSS_FADE = "rg_cross_fade"
|
||||
EFFECT_RED_BLUE_CROSS_FADE = "rb_cross_fade"
|
||||
EFFECT_GREEN_BLUE_CROSS_FADE = "gb_cross_fade"
|
||||
EFFECT_COLORSTROBE = "colorstrobe"
|
||||
EFFECT_RED_STROBE = "red_strobe"
|
||||
EFFECT_GREEN_STROBE = "green_strobe"
|
||||
EFFECT_BLUE_STOBE = "blue_strobe"
|
||||
EFFECT_YELLOW_STROBE = "yellow_strobe"
|
||||
EFFECT_CYAN_STROBE = "cyan_strobe"
|
||||
EFFECT_PURPLE_STROBE = "purple_strobe"
|
||||
EFFECT_WHITE_STROBE = "white_strobe"
|
||||
EFFECT_COLORJUMP = "colorjump"
|
||||
# List of supported effects which aren't already declared in LIGHT
|
||||
EFFECT_RED_FADE = 'red_fade'
|
||||
EFFECT_GREEN_FADE = 'green_fade'
|
||||
EFFECT_BLUE_FADE = 'blue_fade'
|
||||
EFFECT_YELLOW_FADE = 'yellow_fade'
|
||||
EFFECT_CYAN_FADE = 'cyan_fade'
|
||||
EFFECT_PURPLE_FADE = 'purple_fade'
|
||||
EFFECT_WHITE_FADE = 'white_fade'
|
||||
EFFECT_RED_GREEN_CROSS_FADE = 'rg_cross_fade'
|
||||
EFFECT_RED_BLUE_CROSS_FADE = 'rb_cross_fade'
|
||||
EFFECT_GREEN_BLUE_CROSS_FADE = 'gb_cross_fade'
|
||||
EFFECT_COLORSTROBE = 'colorstrobe'
|
||||
EFFECT_RED_STROBE = 'red_strobe'
|
||||
EFFECT_GREEN_STROBE = 'green_strobe'
|
||||
EFFECT_BLUE_STOBE = 'blue_strobe'
|
||||
EFFECT_YELLOW_STROBE = 'yellow_strobe'
|
||||
EFFECT_CYAN_STROBE = 'cyan_strobe'
|
||||
EFFECT_PURPLE_STROBE = 'purple_strobe'
|
||||
EFFECT_WHITE_STROBE = 'white_strobe'
|
||||
EFFECT_COLORJUMP = 'colorjump'
|
||||
|
||||
FLUX_EFFECT_LIST = [
|
||||
EFFECT_COLORLOOP,
|
||||
|
@ -121,7 +121,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
|||
ipaddr = device['ipaddr']
|
||||
if ipaddr in light_ips:
|
||||
continue
|
||||
device['name'] = device['id'] + " " + ipaddr
|
||||
device['name'] = '{} {}'.format(device['id'], ipaddr)
|
||||
device[ATTR_MODE] = 'rgbw'
|
||||
device[CONF_PROTOCOL] = None
|
||||
light = FluxLight(device)
|
||||
|
@ -167,7 +167,7 @@ class FluxLight(Light):
|
|||
@property
|
||||
def unique_id(self):
|
||||
"""Return the ID of this light."""
|
||||
return "{}.{}".format(self.__class__, self._ipaddr)
|
||||
return '{}.{}'.format(self.__class__, self._ipaddr)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
|
|
@ -84,7 +84,7 @@ def setup_light(device_id, name, insteonhub, hass, add_devices_callback):
|
|||
request_id = _CONFIGURING.pop(device_id)
|
||||
configurator = get_component('configurator')
|
||||
configurator.request_done(request_id)
|
||||
_LOGGER.info("Device configuration done!")
|
||||
_LOGGER.debug("Device configuration done")
|
||||
|
||||
conf_lights = config_from_file(hass.config.path(INSTEON_LOCAL_LIGHTS_CONF))
|
||||
if device_id not in conf_lights:
|
||||
|
@ -107,7 +107,7 @@ def config_from_file(filename, config=None):
|
|||
with open(filename, 'w') as fdesc:
|
||||
fdesc.write(json.dumps(config))
|
||||
except IOError as error:
|
||||
_LOGGER.error('Saving config file failed: %s', error)
|
||||
_LOGGER.error("Saving config file failed: %s", error)
|
||||
return False
|
||||
return True
|
||||
else:
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
"""
|
||||
Support for INSTEON lights via PowerLinc Modem.
|
||||
Support for Insteon lights via PowerLinc Modem.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
https://home-assistant.io/components/insteon_plm/
|
||||
|
@ -12,16 +12,16 @@ from homeassistant.components.light import (
|
|||
ATTR_BRIGHTNESS, SUPPORT_BRIGHTNESS, Light)
|
||||
from homeassistant.loader import get_component
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEPENDENCIES = ['insteon_plm']
|
||||
|
||||
MAX_BRIGHTNESS = 255
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
"""Set up the INSTEON PLM device class for the hass platform."""
|
||||
"""Set up the Insteon PLM device."""
|
||||
plm = hass.data['insteon_plm']
|
||||
|
||||
device_list = []
|
||||
|
@ -30,7 +30,7 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
|||
address = device.get('address_hex')
|
||||
dimmable = bool('dimmable' in device.get('capabilities'))
|
||||
|
||||
_LOGGER.info('Registered %s with light platform.', name)
|
||||
_LOGGER.info("Registered %s with light platform", name)
|
||||
|
||||
device_list.append(
|
||||
InsteonPLMDimmerDevice(hass, plm, address, name, dimmable)
|
||||
|
@ -72,14 +72,14 @@ class InsteonPLMDimmerDevice(Light):
|
|||
def brightness(self):
|
||||
"""Return the brightness of this light between 0..255."""
|
||||
onlevel = self._plm.get_device_attr(self._address, 'onlevel')
|
||||
_LOGGER.debug('on level for %s is %s', self._address, onlevel)
|
||||
_LOGGER.debug("on level for %s is %s", self._address, onlevel)
|
||||
return int(onlevel)
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return the boolean response if the node is on."""
|
||||
onlevel = self._plm.get_device_attr(self._address, 'onlevel')
|
||||
_LOGGER.debug('on level for %s is %s', self._address, onlevel)
|
||||
_LOGGER.debug("on level for %s is %s", self._address, onlevel)
|
||||
return bool(onlevel)
|
||||
|
||||
@property
|
||||
|
@ -101,7 +101,7 @@ class InsteonPLMDimmerDevice(Light):
|
|||
@callback
|
||||
def async_light_update(self, message):
|
||||
"""Receive notification from transport that new data exists."""
|
||||
_LOGGER.info('Received update calback from PLM for %s', self._address)
|
||||
_LOGGER.info("Received update calback from PLM for %s", self._address)
|
||||
self._hass.async_add_job(self.async_update_ha_state())
|
||||
|
||||
@asyncio.coroutine
|
||||
|
|
|
@ -24,13 +24,12 @@ def setup_platform(hass, config: ConfigType,
|
|||
add_devices: Callable[[list], None], discovery_info=None):
|
||||
"""Set up the ISY994 light platform."""
|
||||
if isy.ISY is None or not isy.ISY.connected:
|
||||
_LOGGER.error('A connection has not been made to the ISY controller.')
|
||||
_LOGGER.error("A connection has not been made to the ISY controller")
|
||||
return False
|
||||
|
||||
devices = []
|
||||
|
||||
for node in isy.filter_nodes(isy.NODES, units=UOM,
|
||||
states=STATES):
|
||||
for node in isy.filter_nodes(isy.NODES, units=UOM, states=STATES):
|
||||
if node.dimmable or '51' in node.uom:
|
||||
devices.append(ISYLightDevice(node))
|
||||
|
||||
|
@ -57,12 +56,12 @@ class ISYLightDevice(isy.ISYDevice, Light):
|
|||
def turn_off(self, **kwargs) -> None:
|
||||
"""Send the turn off command to the ISY994 light device."""
|
||||
if not self._node.off():
|
||||
_LOGGER.debug('Unable to turn on light.')
|
||||
_LOGGER.debug("Unable to turn on light")
|
||||
|
||||
def turn_on(self, brightness=None, **kwargs) -> None:
|
||||
"""Send the turn on command to the ISY994 light device."""
|
||||
if not self._node.on(val=brightness):
|
||||
_LOGGER.debug('Unable to turn on light.')
|
||||
_LOGGER.debug("Unable to turn on light")
|
||||
|
||||
@property
|
||||
def supported_features(self):
|
||||
|
|
|
@ -9,6 +9,7 @@ import logging
|
|||
import asyncio
|
||||
import sys
|
||||
import math
|
||||
from os import path
|
||||
from functools import partial
|
||||
from datetime import timedelta
|
||||
import async_timeout
|
||||
|
@ -16,15 +17,19 @@ import async_timeout
|
|||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.light import (
|
||||
Light, PLATFORM_SCHEMA, ATTR_BRIGHTNESS, ATTR_COLOR_NAME, ATTR_RGB_COLOR,
|
||||
Light, DOMAIN, PLATFORM_SCHEMA, LIGHT_TURN_ON_SCHEMA,
|
||||
ATTR_BRIGHTNESS, ATTR_RGB_COLOR,
|
||||
ATTR_XY_COLOR, ATTR_COLOR_TEMP, ATTR_TRANSITION, ATTR_EFFECT,
|
||||
SUPPORT_BRIGHTNESS, SUPPORT_COLOR_TEMP, SUPPORT_RGB_COLOR,
|
||||
SUPPORT_XY_COLOR, SUPPORT_TRANSITION, SUPPORT_EFFECT)
|
||||
SUPPORT_XY_COLOR, SUPPORT_TRANSITION, SUPPORT_EFFECT,
|
||||
preprocess_turn_on_alternatives)
|
||||
from homeassistant.config import load_yaml_config_file
|
||||
from homeassistant.util.color import (
|
||||
color_temperature_mired_to_kelvin, color_temperature_kelvin_to_mired)
|
||||
from homeassistant import util
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.event import async_track_point_in_utc_time
|
||||
from homeassistant.helpers.service import extract_entity_ids
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
import homeassistant.util.color as color_util
|
||||
|
||||
|
@ -41,7 +46,10 @@ BULB_LATENCY = 500
|
|||
|
||||
CONF_SERVER = 'server'
|
||||
|
||||
SERVICE_LIFX_SET_STATE = 'lifx_set_state'
|
||||
|
||||
ATTR_HSBK = 'hsbk'
|
||||
ATTR_POWER = 'power'
|
||||
|
||||
BYTE_MAX = 255
|
||||
SHORT_MAX = 65535
|
||||
|
@ -53,6 +61,10 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
|||
vol.Optional(CONF_SERVER, default='0.0.0.0'): cv.string,
|
||||
})
|
||||
|
||||
LIFX_SET_STATE_SCHEMA = LIGHT_TURN_ON_SCHEMA.extend({
|
||||
ATTR_POWER: cv.boolean,
|
||||
})
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
|
@ -87,6 +99,41 @@ class LIFXManager(object):
|
|||
self.hass = hass
|
||||
self.async_add_devices = async_add_devices
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_service_handle(service):
|
||||
"""Apply a service."""
|
||||
tasks = []
|
||||
for light in self.service_to_entities(service):
|
||||
if service.service == SERVICE_LIFX_SET_STATE:
|
||||
task = light.async_set_state(**service.data)
|
||||
tasks.append(hass.async_add_job(task))
|
||||
if tasks:
|
||||
yield from asyncio.wait(tasks, loop=hass.loop)
|
||||
|
||||
descriptions = self.get_descriptions()
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_LIFX_SET_STATE, async_service_handle,
|
||||
descriptions.get(SERVICE_LIFX_SET_STATE),
|
||||
schema=LIFX_SET_STATE_SCHEMA)
|
||||
|
||||
@staticmethod
|
||||
def get_descriptions():
|
||||
"""Load and return descriptions for our own service calls."""
|
||||
return load_yaml_config_file(
|
||||
path.join(path.dirname(__file__), 'services.yaml'))
|
||||
|
||||
def service_to_entities(self, service):
|
||||
"""Return the known devices that a service call mentions."""
|
||||
entity_ids = extract_entity_ids(self.hass, service)
|
||||
if entity_ids:
|
||||
entities = [entity for entity in self.entities.values()
|
||||
if entity.entity_id in entity_ids]
|
||||
else:
|
||||
entities = list(self.entities.values())
|
||||
|
||||
return entities
|
||||
|
||||
@callback
|
||||
def register(self, device):
|
||||
"""Handle for newly detected bulb."""
|
||||
|
@ -298,6 +345,18 @@ class LIFXLight(Light):
|
|||
@asyncio.coroutine
|
||||
def async_turn_on(self, **kwargs):
|
||||
"""Turn the device on."""
|
||||
kwargs[ATTR_POWER] = True
|
||||
yield from self.async_set_state(**kwargs)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_turn_off(self, **kwargs):
|
||||
"""Turn the device off."""
|
||||
kwargs[ATTR_POWER] = False
|
||||
yield from self.async_set_state(**kwargs)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_set_state(self, **kwargs):
|
||||
"""Set a color on the light and turn it on/off."""
|
||||
yield from self.stop_effect()
|
||||
|
||||
if ATTR_EFFECT in kwargs:
|
||||
|
@ -309,39 +368,41 @@ class LIFXLight(Light):
|
|||
else:
|
||||
fade = 0
|
||||
|
||||
# These are both False if ATTR_POWER is not set
|
||||
power_on = kwargs.get(ATTR_POWER, False)
|
||||
power_off = not kwargs.get(ATTR_POWER, True)
|
||||
|
||||
hsbk, changed_color = self.find_hsbk(**kwargs)
|
||||
_LOGGER.debug("turn_on: %s (%d) %d %d %d %d %d",
|
||||
self.who, self._power, fade, *hsbk)
|
||||
|
||||
if self._power == 0:
|
||||
if power_off:
|
||||
self.device.set_power(False, None, 0)
|
||||
if changed_color:
|
||||
self.device.set_color(hsbk, None, 0)
|
||||
self.device.set_power(True, None, fade)
|
||||
if power_on:
|
||||
self.device.set_power(True, None, fade)
|
||||
else:
|
||||
self.device.set_power(True, None, 0) # racing for power status
|
||||
if power_on:
|
||||
self.device.set_power(True, None, 0)
|
||||
if changed_color:
|
||||
self.device.set_color(hsbk, None, fade)
|
||||
if power_off:
|
||||
self.device.set_power(False, None, fade)
|
||||
|
||||
self.update_later(0)
|
||||
if fade < BULB_LATENCY:
|
||||
self.set_power(1)
|
||||
self.set_color(*hsbk)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_turn_off(self, **kwargs):
|
||||
"""Turn the device off."""
|
||||
yield from self.stop_effect()
|
||||
|
||||
if ATTR_TRANSITION in kwargs:
|
||||
fade = int(kwargs[ATTR_TRANSITION] * 1000)
|
||||
if power_on:
|
||||
self.update_later(0)
|
||||
else:
|
||||
fade = 0
|
||||
self.update_later(fade)
|
||||
|
||||
self.device.set_power(False, None, fade)
|
||||
|
||||
self.update_later(fade)
|
||||
if fade < BULB_LATENCY:
|
||||
self.set_power(0)
|
||||
if fade <= BULB_LATENCY:
|
||||
if power_on:
|
||||
self.set_power(1)
|
||||
if power_off:
|
||||
self.set_power(0)
|
||||
if changed_color:
|
||||
self.set_color(*hsbk)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_update(self):
|
||||
|
@ -374,9 +435,7 @@ class LIFXLight(Light):
|
|||
if hsbk is not None:
|
||||
return [hsbk, True]
|
||||
|
||||
color_name = kwargs.pop(ATTR_COLOR_NAME, None)
|
||||
if color_name is not None:
|
||||
kwargs[ATTR_RGB_COLOR] = color_util.color_name_to_rgb(color_name)
|
||||
preprocess_turn_on_alternatives(kwargs)
|
||||
|
||||
if ATTR_RGB_COLOR in kwargs:
|
||||
hue, saturation, brightness = \
|
||||
|
|
|
@ -2,16 +2,14 @@
|
|||
import logging
|
||||
import asyncio
|
||||
import random
|
||||
from os import path
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.light import (
|
||||
DOMAIN, ATTR_BRIGHTNESS, ATTR_COLOR_NAME, ATTR_RGB_COLOR, ATTR_EFFECT,
|
||||
ATTR_TRANSITION)
|
||||
from homeassistant.config import load_yaml_config_file
|
||||
DOMAIN, ATTR_BRIGHTNESS, ATTR_BRIGHTNESS_PCT, ATTR_COLOR_NAME,
|
||||
ATTR_RGB_COLOR, ATTR_EFFECT, ATTR_TRANSITION,
|
||||
VALID_BRIGHTNESS, VALID_BRIGHTNESS_PCT)
|
||||
from homeassistant.const import (ATTR_ENTITY_ID)
|
||||
from homeassistant.helpers.service import extract_entity_ids
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
@ -39,7 +37,8 @@ LIFX_EFFECT_SCHEMA = vol.Schema({
|
|||
})
|
||||
|
||||
LIFX_EFFECT_BREATHE_SCHEMA = LIFX_EFFECT_SCHEMA.extend({
|
||||
ATTR_BRIGHTNESS: vol.All(vol.Coerce(int), vol.Clamp(min=0, max=255)),
|
||||
ATTR_BRIGHTNESS: VALID_BRIGHTNESS,
|
||||
ATTR_BRIGHTNESS_PCT: VALID_BRIGHTNESS_PCT,
|
||||
ATTR_COLOR_NAME: cv.string,
|
||||
ATTR_RGB_COLOR: vol.All(vol.ExactSequence((cv.byte, cv.byte, cv.byte)),
|
||||
vol.Coerce(tuple)),
|
||||
|
@ -52,7 +51,8 @@ LIFX_EFFECT_BREATHE_SCHEMA = LIFX_EFFECT_SCHEMA.extend({
|
|||
LIFX_EFFECT_PULSE_SCHEMA = LIFX_EFFECT_BREATHE_SCHEMA
|
||||
|
||||
LIFX_EFFECT_COLORLOOP_SCHEMA = LIFX_EFFECT_SCHEMA.extend({
|
||||
ATTR_BRIGHTNESS: vol.All(vol.Coerce(int), vol.Clamp(min=0, max=255)),
|
||||
ATTR_BRIGHTNESS: VALID_BRIGHTNESS,
|
||||
ATTR_BRIGHTNESS_PCT: VALID_BRIGHTNESS_PCT,
|
||||
vol.Optional(ATTR_PERIOD, default=60):
|
||||
vol.All(vol.Coerce(float), vol.Clamp(min=0.05)),
|
||||
vol.Optional(ATTR_CHANGE, default=20):
|
||||
|
@ -73,19 +73,12 @@ def setup(hass, lifx_manager):
|
|||
@asyncio.coroutine
|
||||
def async_service_handle(service):
|
||||
"""Apply a service."""
|
||||
entity_ids = extract_entity_ids(hass, service)
|
||||
if entity_ids:
|
||||
devices = [entity for entity in lifx_manager.entities.values()
|
||||
if entity.entity_id in entity_ids]
|
||||
else:
|
||||
devices = list(lifx_manager.entities.values())
|
||||
|
||||
if devices:
|
||||
yield from start_effect(hass, devices,
|
||||
entities = lifx_manager.service_to_entities(service)
|
||||
if entities:
|
||||
yield from start_effect(hass, entities,
|
||||
service.service, **service.data)
|
||||
|
||||
descriptions = load_yaml_config_file(
|
||||
path.join(path.dirname(__file__), 'services.yaml'))
|
||||
descriptions = lifx_manager.get_descriptions()
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_EFFECT_BREATHE, async_service_handle,
|
||||
|
@ -292,7 +285,7 @@ class LIFXEffectColorloop(LIFXEffect):
|
|||
direction = 1 if random.randint(0, 1) else -1
|
||||
|
||||
# Random start
|
||||
hue = random.randint(0, 359)
|
||||
hue = random.uniform(0, 360) % 360
|
||||
|
||||
while self.lights:
|
||||
hue = (hue + direction*change) % 360
|
||||
|
@ -312,7 +305,7 @@ class LIFXEffectColorloop(LIFXEffect):
|
|||
brightness = light.effect_data.color[2]
|
||||
|
||||
hsbk = [
|
||||
int(65535/359*lhue),
|
||||
int(65535/360*lhue),
|
||||
int(random.uniform(0.8, 1.0)*65535),
|
||||
brightness,
|
||||
NEUTRAL_WHITE,
|
||||
|
|
|
@ -1,3 +1,23 @@
|
|||
lifx_set_state:
|
||||
description: Set a color/brightness and possibliy turn the light on/off
|
||||
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name(s) of entities to set a state on
|
||||
example: 'light.garage'
|
||||
|
||||
'...':
|
||||
description: All turn_on parameters can be used to specify a color
|
||||
|
||||
transition:
|
||||
description: Duration in seconds it takes to get to the final state
|
||||
example: 10
|
||||
|
||||
power:
|
||||
description: Turn the light on (True) or off (False). Leave out to keep the power as it is.
|
||||
example: True
|
||||
|
||||
|
||||
lifx_effect_breathe:
|
||||
description: Run a breathe effect by fading to a color and back.
|
||||
|
||||
|
|
|
@ -4,7 +4,6 @@ Support for LimitlessLED bulbs.
|
|||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/light.limitlessled/
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
@ -17,7 +16,7 @@ from homeassistant.components.light import (
|
|||
SUPPORT_RGB_COLOR, SUPPORT_TRANSITION, Light, PLATFORM_SCHEMA)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
REQUIREMENTS = ['limitlessled==1.0.5']
|
||||
REQUIREMENTS = ['limitlessled==1.0.8']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -195,7 +195,7 @@ class Luminary(Light):
|
|||
self._brightness = kwargs[ATTR_BRIGHTNESS]
|
||||
_LOGGER.debug("turn_on requested brightness for light: %s is: %s ",
|
||||
self._name, self._brightness)
|
||||
self._brightness = self._luminary.set_luminance(
|
||||
self._luminary.set_luminance(
|
||||
int(self._brightness / 2.55),
|
||||
transition)
|
||||
|
||||
|
|
|
@ -26,7 +26,11 @@ turn_on:
|
|||
|
||||
color_temp:
|
||||
description: Color temperature for the light in mireds
|
||||
example: '250'
|
||||
example: 250
|
||||
|
||||
kelvin:
|
||||
description: Color temperature for the light in Kelvin
|
||||
example: 4000
|
||||
|
||||
white_value:
|
||||
description: Number between 0..255 indicating level of white
|
||||
|
@ -36,6 +40,10 @@ turn_on:
|
|||
description: Number between 0..255 indicating brightness
|
||||
example: 120
|
||||
|
||||
brightness_pct:
|
||||
description: Number between 0..100 indicating percentage of full brightness
|
||||
example: 47
|
||||
|
||||
profile:
|
||||
description: Name of a light profile to use
|
||||
example: relax
|
||||
|
|
|
@ -1,4 +1,9 @@
|
|||
"""Support for the IKEA Tradfri platform."""
|
||||
"""
|
||||
Support for the IKEA Tradfri platform.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/light.tradfri/
|
||||
"""
|
||||
import logging
|
||||
|
||||
from homeassistant.components.light import (
|
||||
|
@ -6,7 +11,7 @@ from homeassistant.components.light import (
|
|||
SUPPORT_COLOR_TEMP, SUPPORT_RGB_COLOR, Light)
|
||||
from homeassistant.components.light import \
|
||||
PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA
|
||||
from homeassistant.components.tradfri import KEY_GATEWAY
|
||||
from homeassistant.components.tradfri import KEY_GATEWAY, KEY_TRADFRI_GROUPS
|
||||
from homeassistant.util import color as color_util
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
@ -30,8 +35,10 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
|||
lights = [dev for dev in devices if dev.has_light_control]
|
||||
add_devices(Tradfri(light) for light in lights)
|
||||
|
||||
groups = gateway.get_groups()
|
||||
add_devices(TradfriGroup(group) for group in groups)
|
||||
allow_tradfri_groups = hass.data[KEY_TRADFRI_GROUPS][gateway_id]
|
||||
if allow_tradfri_groups:
|
||||
groups = gateway.get_groups()
|
||||
add_devices(TradfriGroup(group) for group in groups)
|
||||
|
||||
|
||||
class TradfriGroup(Light):
|
||||
|
|
|
@ -4,6 +4,7 @@ Support for Wink lights.
|
|||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/light.wink/
|
||||
"""
|
||||
import asyncio
|
||||
import colorsys
|
||||
|
||||
from homeassistant.components.light import (
|
||||
|
@ -38,6 +39,11 @@ class WinkLight(WinkDevice, Light):
|
|||
"""Initialize the Wink device."""
|
||||
super().__init__(wink, hass)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_added_to_hass(self):
|
||||
"""Callback when entity is added to hass."""
|
||||
self.hass.data[DOMAIN]['entities']['light'].append(self)
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return true if light is on."""
|
||||
|
|
|
@ -55,3 +55,59 @@ unlock:
|
|||
code:
|
||||
description: An optional code to unlock the lock with
|
||||
example: 1234
|
||||
|
||||
wink_set_lock_vacation_mode:
|
||||
description: Set vacation mode for all or specified locks. Disables all user codes.
|
||||
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name of lock to unlock
|
||||
example: 'lock.front_door'
|
||||
enabled:
|
||||
description: enable or disable. true or false.
|
||||
example: true
|
||||
|
||||
wink_set_lock_alarm_mode:
|
||||
description: Set alarm mode for all or specified locks.
|
||||
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name of lock to unlock
|
||||
example: 'lock.front_door'
|
||||
mode:
|
||||
description: One of tamper, activity, or forced_entry
|
||||
example: tamper
|
||||
|
||||
wink_set_lock_alarm_sensitivity:
|
||||
description: Set alarm sensitivity for all or specified locks.
|
||||
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name of lock to unlock
|
||||
example: 'lock.front_door'
|
||||
sensitivity:
|
||||
description: One of low, medium_low, medium, medium_high, high
|
||||
example: medium
|
||||
|
||||
wink_set_lock_alarm_state:
|
||||
description: Set alarm state.
|
||||
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name of lock to unlock
|
||||
example: 'lock.front_door'
|
||||
enabled:
|
||||
description: enable or disable. true or false.
|
||||
example: true
|
||||
|
||||
wink_set_lock_beeper_state:
|
||||
description: Set beeper state.
|
||||
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name of lock to unlock
|
||||
example: 'lock.front_door'
|
||||
enabled:
|
||||
description: enable or disable. true or false.
|
||||
example: true
|
||||
|
||||
|
|
|
@ -4,11 +4,55 @@ Support for Wink locks.
|
|||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/lock.wink/
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
from os import path
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.lock import LockDevice
|
||||
from homeassistant.components.wink import WinkDevice, DOMAIN
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN
|
||||
from homeassistant.config import load_yaml_config_file
|
||||
|
||||
DEPENDENCIES = ['wink']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SERVICE_SET_VACATION_MODE = 'wink_set_lock_vacation_mode'
|
||||
SERVICE_SET_ALARM_MODE = 'wink_set_lock_alarm_mode'
|
||||
SERVICE_SET_ALARM_SENSITIVITY = 'wink_set_lock_alarm_sensitivity'
|
||||
SERVICE_SET_ALARM_STATE = 'wink_set_lock_alarm_state'
|
||||
SERVICE_SET_BEEPER_STATE = 'wink_set_lock_beeper_state'
|
||||
|
||||
ATTR_ENABLED = 'enabled'
|
||||
ATTR_SENSITIVITY = 'sensitivity'
|
||||
ATTR_MODE = 'mode'
|
||||
|
||||
ALARM_SENSITIVITY_MAP = {"low": 0.2, "medium_low": 0.4,
|
||||
"medium": 0.6, "medium_high": 0.8,
|
||||
"high": 1.0}
|
||||
|
||||
ALARM_MODES_MAP = {"tamper": "tamper",
|
||||
"activity": "alert",
|
||||
"forced_entry": "forced_entry"}
|
||||
|
||||
SET_ENABLED_SCHEMA = vol.Schema({
|
||||
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
|
||||
vol.Required(ATTR_ENABLED): cv.string,
|
||||
})
|
||||
|
||||
SET_SENSITIVITY_SCHEMA = vol.Schema({
|
||||
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
|
||||
vol.Required(ATTR_SENSITIVITY): vol.In(ALARM_SENSITIVITY_MAP)
|
||||
})
|
||||
|
||||
SET_ALARM_MODES_SCHEMA = vol.Schema({
|
||||
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
|
||||
vol.Required(ATTR_MODE): vol.In(ALARM_MODES_MAP)
|
||||
})
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the Wink platform."""
|
||||
|
@ -19,6 +63,58 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
|||
if _id not in hass.data[DOMAIN]['unique_ids']:
|
||||
add_devices([WinkLockDevice(lock, hass)])
|
||||
|
||||
def service_handle(service):
|
||||
"""Handler for services."""
|
||||
entity_ids = service.data.get('entity_id')
|
||||
all_locks = hass.data[DOMAIN]['entities']['lock']
|
||||
locks_to_set = []
|
||||
if entity_ids is None:
|
||||
locks_to_set = all_locks
|
||||
else:
|
||||
for lock in all_locks:
|
||||
if lock.entity_id in entity_ids:
|
||||
locks_to_set.append(lock)
|
||||
|
||||
for lock in locks_to_set:
|
||||
if service.service == SERVICE_SET_VACATION_MODE:
|
||||
lock.set_vacation_mode(service.data.get(ATTR_ENABLED))
|
||||
elif service.service == SERVICE_SET_ALARM_STATE:
|
||||
lock.set_alarm_state(service.data.get(ATTR_ENABLED))
|
||||
elif service.service == SERVICE_SET_BEEPER_STATE:
|
||||
lock.set_beeper_state(service.data.get(ATTR_ENABLED))
|
||||
elif service.service == SERVICE_SET_ALARM_MODE:
|
||||
lock.set_alarm_mode(service.data.get(ATTR_MODE))
|
||||
elif service.service == SERVICE_SET_ALARM_SENSITIVITY:
|
||||
lock.set_alarm_sensitivity(service.data.get(ATTR_SENSITIVITY))
|
||||
|
||||
descriptions = load_yaml_config_file(
|
||||
path.join(path.dirname(__file__), 'services.yaml'))
|
||||
|
||||
hass.services.register(DOMAIN, SERVICE_SET_VACATION_MODE,
|
||||
service_handle,
|
||||
descriptions.get(SERVICE_SET_VACATION_MODE),
|
||||
schema=SET_ENABLED_SCHEMA)
|
||||
|
||||
hass.services.register(DOMAIN, SERVICE_SET_ALARM_STATE,
|
||||
service_handle,
|
||||
descriptions.get(SERVICE_SET_ALARM_STATE),
|
||||
schema=SET_ENABLED_SCHEMA)
|
||||
|
||||
hass.services.register(DOMAIN, SERVICE_SET_BEEPER_STATE,
|
||||
service_handle,
|
||||
descriptions.get(SERVICE_SET_BEEPER_STATE),
|
||||
schema=SET_ENABLED_SCHEMA)
|
||||
|
||||
hass.services.register(DOMAIN, SERVICE_SET_ALARM_MODE,
|
||||
service_handle,
|
||||
descriptions.get(SERVICE_SET_ALARM_MODE),
|
||||
schema=SET_ALARM_MODES_SCHEMA)
|
||||
|
||||
hass.services.register(DOMAIN, SERVICE_SET_ALARM_SENSITIVITY,
|
||||
service_handle,
|
||||
descriptions.get(SERVICE_SET_ALARM_SENSITIVITY),
|
||||
schema=SET_SENSITIVITY_SCHEMA)
|
||||
|
||||
|
||||
class WinkLockDevice(WinkDevice, LockDevice):
|
||||
"""Representation of a Wink lock."""
|
||||
|
@ -27,6 +123,11 @@ class WinkLockDevice(WinkDevice, LockDevice):
|
|||
"""Initialize the lock."""
|
||||
super().__init__(wink, hass)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_added_to_hass(self):
|
||||
"""Callback when entity is added to hass."""
|
||||
self.hass.data[DOMAIN]['entities']['lock'].append(self)
|
||||
|
||||
@property
|
||||
def is_locked(self):
|
||||
"""Return true if device is locked."""
|
||||
|
@ -39,3 +140,60 @@ class WinkLockDevice(WinkDevice, LockDevice):
|
|||
def unlock(self, **kwargs):
|
||||
"""Unlock the device."""
|
||||
self.wink.set_state(False)
|
||||
|
||||
def set_alarm_state(self, enabled):
|
||||
"""Set lock's alarm state."""
|
||||
self.wink.set_alarm_state(enabled)
|
||||
|
||||
def set_vacation_mode(self, enabled):
|
||||
"""Set lock's vacation mode."""
|
||||
self.wink.set_vacation_mode(enabled)
|
||||
|
||||
def set_beeper_state(self, enabled):
|
||||
"""Set lock's beeper mode."""
|
||||
self.wink.set_beeper_mode(enabled)
|
||||
|
||||
def set_alarm_sensitivity(self, sensitivity):
|
||||
"""
|
||||
Set lock's alarm sensitivity.
|
||||
|
||||
Valid sensitivities:
|
||||
0.2, 0.4, 0.6, 0.8, 1.0
|
||||
"""
|
||||
self.wink.set_alarm_sensitivity(sensitivity)
|
||||
|
||||
def set_alarm_mode(self, mode):
|
||||
"""
|
||||
Set lock's alarm mode.
|
||||
|
||||
Valid modes:
|
||||
alert - Beep when lock is locked or unlocked
|
||||
tamper - 15 sec alarm when lock is disturbed when locked
|
||||
forced_entry - 3 min alarm when significant force applied
|
||||
to door when locked.
|
||||
"""
|
||||
self.wink.set_alarm_mode(mode)
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes."""
|
||||
super_attrs = super().device_state_attributes
|
||||
sensitivity = dict_value_to_key(ALARM_SENSITIVITY_MAP,
|
||||
self.wink.alarm_sensitivity())
|
||||
super_attrs['alarm sensitivity'] = sensitivity
|
||||
super_attrs['vacation mode'] = self.wink.vacation_mode_enabled()
|
||||
super_attrs['beeper mode'] = self.wink.beeper_enabled()
|
||||
super_attrs['auto lock'] = self.wink.auto_lock_enabled()
|
||||
alarm_mode = dict_value_to_key(ALARM_MODES_MAP,
|
||||
self.wink.alarm_mode())
|
||||
super_attrs['alarm mode'] = alarm_mode
|
||||
super_attrs['alarm enabled'] = self.wink.alarm_enabled()
|
||||
return super_attrs
|
||||
|
||||
|
||||
def dict_value_to_key(dict_map, comp_value):
|
||||
"""Return the key that has the provided value."""
|
||||
for key, value in dict_map.items():
|
||||
if value == comp_value:
|
||||
return key
|
||||
return STATE_UNKNOWN
|
||||
|
|
|
@ -141,9 +141,10 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
|||
class_id=zwave.const.COMMAND_CLASS_USER_CODE).values():
|
||||
if value.index != code_slot:
|
||||
continue
|
||||
if len(str(usercode)) > 4:
|
||||
if len(str(usercode)) < 4:
|
||||
_LOGGER.error("Invalid code provided: (%s) "
|
||||
"usercode must %s or less digits",
|
||||
"usercode must be atleast 4 and at most"
|
||||
" %s digits",
|
||||
usercode, len(value.data))
|
||||
break
|
||||
value.data = str(usercode)
|
||||
|
|
|
@ -19,7 +19,8 @@ from homeassistant.components.frontend import register_built_in_panel
|
|||
from homeassistant.components.http import HomeAssistantView
|
||||
from homeassistant.const import (
|
||||
EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, EVENT_STATE_CHANGED,
|
||||
STATE_NOT_HOME, STATE_OFF, STATE_ON, ATTR_HIDDEN, HTTP_BAD_REQUEST)
|
||||
STATE_NOT_HOME, STATE_OFF, STATE_ON, ATTR_HIDDEN, HTTP_BAD_REQUEST,
|
||||
EVENT_LOGBOOK_ENTRY)
|
||||
from homeassistant.core import State, split_entity_id, DOMAIN as HA_DOMAIN
|
||||
|
||||
DOMAIN = 'logbook'
|
||||
|
@ -47,10 +48,10 @@ CONFIG_SCHEMA = vol.Schema({
|
|||
}),
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
|
||||
EVENT_LOGBOOK_ENTRY = 'logbook_entry'
|
||||
|
||||
GROUP_BY_MINUTES = 15
|
||||
|
||||
CONTINUOUS_DOMAINS = ['proximity', 'sensor']
|
||||
|
||||
ATTR_NAME = 'name'
|
||||
ATTR_MESSAGE = 'message'
|
||||
ATTR_DOMAIN = 'domain'
|
||||
|
@ -191,7 +192,8 @@ def humanify(events):
|
|||
if entity_id is None:
|
||||
continue
|
||||
|
||||
if entity_id.startswith('sensor.'):
|
||||
if entity_id.startswith(tuple('{}.'.format(
|
||||
domain) for domain in CONTINUOUS_DOMAINS)):
|
||||
last_sensor_event[entity_id] = event
|
||||
|
||||
elif event.event_type == EVENT_HOMEASSISTANT_STOP:
|
||||
|
@ -223,12 +225,12 @@ def humanify(events):
|
|||
domain = to_state.domain
|
||||
|
||||
# Skip all but the last sensor state
|
||||
if domain == 'sensor' and \
|
||||
if domain in CONTINUOUS_DOMAINS and \
|
||||
event != last_sensor_event[to_state.entity_id]:
|
||||
continue
|
||||
|
||||
# Don't show continuous sensor value changes in the logbook
|
||||
if domain == 'sensor' and \
|
||||
if domain in CONTINUOUS_DOMAINS and \
|
||||
to_state.attributes.get('unit_of_measurement'):
|
||||
continue
|
||||
|
||||
|
|
|
@ -14,9 +14,7 @@ from homeassistant.const import CONF_HOST
|
|||
from homeassistant.helpers import discovery
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
REQUIREMENTS = ['https://github.com/gurumitts/'
|
||||
'pylutron-caseta/archive/v0.2.6.zip#'
|
||||
'pylutron-caseta==v0.2.6']
|
||||
REQUIREMENTS = ['pylutron-caseta==0.2.6']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@ -46,7 +44,7 @@ def setup(hass, base_config):
|
|||
|
||||
_LOGGER.info("Connected to Lutron smartbridge at %s", config[CONF_HOST])
|
||||
|
||||
for component in ('light', 'switch'):
|
||||
for component in ('light', 'switch', 'cover'):
|
||||
discovery.load_platform(hass, component, DOMAIN, {}, config)
|
||||
|
||||
return True
|
||||
|
|
|
@ -19,7 +19,7 @@ from homeassistant.const import (
|
|||
CONF_NAME, STATE_ON)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
REQUIREMENTS = ['denonavr==0.4.0']
|
||||
REQUIREMENTS = ['denonavr==0.4.1']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -9,20 +9,22 @@ from functools import wraps
|
|||
import logging
|
||||
import urllib
|
||||
import re
|
||||
import os
|
||||
|
||||
import aiohttp
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config import load_yaml_config_file
|
||||
from homeassistant.components.media_player import (
|
||||
SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PREVIOUS_TRACK, SUPPORT_SEEK,
|
||||
SUPPORT_PLAY_MEDIA, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, SUPPORT_STOP,
|
||||
SUPPORT_TURN_OFF, SUPPORT_PLAY, SUPPORT_VOLUME_STEP, MediaPlayerDevice,
|
||||
PLATFORM_SCHEMA, MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO,
|
||||
MEDIA_TYPE_PLAYLIST, MEDIA_PLAYER_SCHEMA, DOMAIN)
|
||||
SUPPORT_TURN_OFF, SUPPORT_PLAY, SUPPORT_VOLUME_STEP, SUPPORT_SHUFFLE_SET,
|
||||
MediaPlayerDevice, PLATFORM_SCHEMA, MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW,
|
||||
MEDIA_TYPE_VIDEO, MEDIA_TYPE_PLAYLIST, MEDIA_PLAYER_SCHEMA, DOMAIN)
|
||||
from homeassistant.const import (
|
||||
STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING, CONF_HOST, CONF_NAME,
|
||||
CONF_PORT, CONF_SSL, CONF_PROXY_SSL, CONF_USERNAME, CONF_PASSWORD,
|
||||
EVENT_HOMEASSISTANT_STOP)
|
||||
CONF_TIMEOUT, EVENT_HOMEASSISTANT_STOP)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
@ -32,6 +34,8 @@ REQUIREMENTS = ['jsonrpc-async==0.6', 'jsonrpc-websocket==0.5']
|
|||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
EVENT_KODI_CALL_METHOD_RESULT = 'kodi_call_method_result'
|
||||
|
||||
CONF_TCP_PORT = 'tcp_port'
|
||||
CONF_TURN_OFF_ACTION = 'turn_off_action'
|
||||
CONF_ENABLE_WEBSOCKET = 'enable_websocket'
|
||||
|
@ -61,8 +65,9 @@ MEDIA_TYPES = {
|
|||
}
|
||||
|
||||
SUPPORT_KODI = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \
|
||||
SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_SEEK | \
|
||||
SUPPORT_PLAY_MEDIA | SUPPORT_STOP | SUPPORT_PLAY | SUPPORT_VOLUME_STEP
|
||||
SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_SEEK | \
|
||||
SUPPORT_PLAY_MEDIA | SUPPORT_STOP | SUPPORT_SHUFFLE_SET | \
|
||||
SUPPORT_PLAY | SUPPORT_VOLUME_STEP
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
|
@ -71,6 +76,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
|||
vol.Optional(CONF_TCP_PORT, default=DEFAULT_TCP_PORT): cv.port,
|
||||
vol.Optional(CONF_PROXY_SSL, default=DEFAULT_PROXY_SSL): cv.boolean,
|
||||
vol.Optional(CONF_TURN_OFF_ACTION, default=None): vol.In(TURN_OFF_ACTION),
|
||||
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
|
||||
vol.Inclusive(CONF_USERNAME, 'auth'): cv.string,
|
||||
vol.Inclusive(CONF_PASSWORD, 'auth'): cv.string,
|
||||
vol.Optional(CONF_ENABLE_WEBSOCKET, default=DEFAULT_ENABLE_WEBSOCKET):
|
||||
|
@ -78,16 +84,15 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
|||
})
|
||||
|
||||
SERVICE_ADD_MEDIA = 'kodi_add_to_playlist'
|
||||
SERVICE_SET_SHUFFLE = 'kodi_set_shuffle'
|
||||
SERVICE_CALL_METHOD = 'kodi_call_method'
|
||||
|
||||
DATA_KODI = 'kodi'
|
||||
|
||||
ATTR_MEDIA_TYPE = 'media_type'
|
||||
ATTR_MEDIA_NAME = 'media_name'
|
||||
ATTR_MEDIA_ARTIST_NAME = 'artist_name'
|
||||
ATTR_MEDIA_ID = 'media_id'
|
||||
|
||||
MEDIA_PLAYER_SET_SHUFFLE_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
|
||||
vol.Required('shuffle_on'): cv.boolean,
|
||||
})
|
||||
ATTR_METHOD = 'method'
|
||||
|
||||
MEDIA_PLAYER_ADD_MEDIA_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
|
||||
vol.Required(ATTR_MEDIA_TYPE): cv.string,
|
||||
|
@ -95,20 +100,25 @@ MEDIA_PLAYER_ADD_MEDIA_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
|
|||
vol.Optional(ATTR_MEDIA_NAME): cv.string,
|
||||
vol.Optional(ATTR_MEDIA_ARTIST_NAME): cv.string,
|
||||
})
|
||||
MEDIA_PLAYER_CALL_METHOD_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
|
||||
vol.Required(ATTR_METHOD): cv.string,
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
|
||||
SERVICE_TO_METHOD = {
|
||||
SERVICE_ADD_MEDIA: {
|
||||
'method': 'async_add_media_to_playlist',
|
||||
'schema': MEDIA_PLAYER_ADD_MEDIA_SCHEMA},
|
||||
SERVICE_SET_SHUFFLE: {
|
||||
'method': 'async_set_shuffle',
|
||||
'schema': MEDIA_PLAYER_SET_SHUFFLE_SCHEMA},
|
||||
SERVICE_CALL_METHOD: {
|
||||
'method': 'async_call_method',
|
||||
'schema': MEDIA_PLAYER_CALL_METHOD_SCHEMA},
|
||||
}
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
"""Set up the Kodi platform."""
|
||||
if DATA_KODI not in hass.data:
|
||||
hass.data[DATA_KODI] = []
|
||||
host = config.get(CONF_HOST)
|
||||
port = config.get(CONF_PORT)
|
||||
tcp_port = config.get(CONF_TCP_PORT)
|
||||
|
@ -128,8 +138,10 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
|||
host=host, port=port, tcp_port=tcp_port, encryption=encryption,
|
||||
username=config.get(CONF_USERNAME),
|
||||
password=config.get(CONF_PASSWORD),
|
||||
turn_off_action=config.get(CONF_TURN_OFF_ACTION), websocket=websocket)
|
||||
turn_off_action=config.get(CONF_TURN_OFF_ACTION),
|
||||
timeout=config.get(CONF_TIMEOUT), websocket=websocket)
|
||||
|
||||
hass.data[DATA_KODI].append(entity)
|
||||
async_add_devices([entity], update_before_add=True)
|
||||
|
||||
@asyncio.coroutine
|
||||
|
@ -141,23 +153,37 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
|||
|
||||
params = {key: value for key, value in service.data.items()
|
||||
if key != 'entity_id'}
|
||||
|
||||
yield from getattr(entity, method['method'])(**params)
|
||||
entity_ids = service.data.get('entity_id')
|
||||
if entity_ids:
|
||||
target_players = [player for player in hass.data[DATA_KODI]
|
||||
if player.entity_id in entity_ids]
|
||||
else:
|
||||
target_players = hass.data[DATA_KODI]
|
||||
|
||||
update_tasks = []
|
||||
if entity.should_poll:
|
||||
update_coro = entity.async_update_ha_state(True)
|
||||
update_tasks.append(update_coro)
|
||||
for player in target_players:
|
||||
yield from getattr(player, method['method'])(**params)
|
||||
|
||||
for player in target_players:
|
||||
if player.should_poll:
|
||||
update_coro = player.async_update_ha_state(True)
|
||||
update_tasks.append(update_coro)
|
||||
|
||||
if update_tasks:
|
||||
yield from asyncio.wait(update_tasks, loop=hass.loop)
|
||||
|
||||
if hass.services.has_service(DOMAIN, SERVICE_ADD_MEDIA):
|
||||
return
|
||||
|
||||
descriptions = yield from hass.loop.run_in_executor(
|
||||
None, load_yaml_config_file, os.path.join(
|
||||
os.path.dirname(__file__), 'services.yaml'))
|
||||
|
||||
for service in SERVICE_TO_METHOD:
|
||||
schema = SERVICE_TO_METHOD[service].get(
|
||||
'schema', MEDIA_PLAYER_SCHEMA)
|
||||
schema = SERVICE_TO_METHOD[service]['schema']
|
||||
hass.services.async_register(
|
||||
DOMAIN, service, async_service_handler,
|
||||
description=None, schema=schema)
|
||||
description=descriptions.get(service), schema=schema)
|
||||
|
||||
|
||||
def cmd(func):
|
||||
|
@ -185,7 +211,7 @@ class KodiDevice(MediaPlayerDevice):
|
|||
|
||||
def __init__(self, hass, name, host, port, tcp_port, encryption=False,
|
||||
username=None, password=None, turn_off_action=None,
|
||||
websocket=True):
|
||||
timeout=DEFAULT_TIMEOUT, websocket=True):
|
||||
"""Initialize the Kodi device."""
|
||||
import jsonrpc_async
|
||||
import jsonrpc_websocket
|
||||
|
@ -193,7 +219,7 @@ class KodiDevice(MediaPlayerDevice):
|
|||
self._name = name
|
||||
|
||||
kwargs = {
|
||||
'timeout': DEFAULT_TIMEOUT,
|
||||
'timeout': timeout,
|
||||
'session': async_get_clientsession(hass),
|
||||
}
|
||||
|
||||
|
@ -657,16 +683,40 @@ class KodiDevice(MediaPlayerDevice):
|
|||
{"item": {"file": str(media_id)}})
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_set_shuffle(self, shuffle_on):
|
||||
def async_set_shuffle(self, shuffle):
|
||||
"""Set shuffle mode, for the first player."""
|
||||
if len(self._players) < 1:
|
||||
raise RuntimeError("Error: No active player.")
|
||||
yield from self.server.Player.SetShuffle(
|
||||
{"playerid": self._players[0]['playerid'], "shuffle": shuffle_on})
|
||||
{"playerid": self._players[0]['playerid'], "shuffle": shuffle})
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_call_method(self, method, **kwargs):
|
||||
"""Run Kodi JSONRPC API method with params."""
|
||||
import jsonrpc_base
|
||||
_LOGGER.debug('Run API method "%s", kwargs=%s', method, kwargs)
|
||||
result_ok = False
|
||||
try:
|
||||
result = yield from getattr(self.server, method)(**kwargs)
|
||||
result_ok = True
|
||||
except jsonrpc_base.jsonrpc.ProtocolError as exc:
|
||||
result = exc.args[2]['error']
|
||||
_LOGGER.error('Run API method %s.%s(%s) error: %s',
|
||||
self.entity_id, method, kwargs, result)
|
||||
|
||||
if isinstance(result, dict):
|
||||
event_data = {'entity_id': self.entity_id,
|
||||
'result': result,
|
||||
'result_ok': result_ok,
|
||||
'input': {'method': method, 'params': kwargs}}
|
||||
_LOGGER.debug('EVENT kodi_call_method_result: %s', event_data)
|
||||
self.hass.bus.async_fire(EVENT_KODI_CALL_METHOD_RESULT,
|
||||
event_data=event_data)
|
||||
return result
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_add_media_to_playlist(
|
||||
self, media_type, media_id=None, media_name='', artist_name=''):
|
||||
self, media_type, media_id=None, media_name='ALL', artist_name=''):
|
||||
"""Add a media to default playlist (i.e. playlistid=0).
|
||||
|
||||
First the media type must be selected, then
|
||||
|
@ -675,13 +725,14 @@ class KodiDevice(MediaPlayerDevice):
|
|||
All the albums of an artist can be added with
|
||||
media_name="ALL"
|
||||
"""
|
||||
import jsonrpc_base
|
||||
params = {"playlistid": 0}
|
||||
if media_type == "SONG":
|
||||
if media_id is None:
|
||||
media_id = yield from self.async_find_song(
|
||||
media_name, artist_name)
|
||||
|
||||
yield from self.server.Playlist.Add(
|
||||
{"playlistid": 0, "item": {"songid": int(media_id)}})
|
||||
if media_id:
|
||||
params["item"] = {"songid": int(media_id)}
|
||||
|
||||
elif media_type == "ALBUM":
|
||||
if media_id is None:
|
||||
|
@ -691,12 +742,22 @@ class KodiDevice(MediaPlayerDevice):
|
|||
|
||||
media_id = yield from self.async_find_album(
|
||||
media_name, artist_name)
|
||||
if media_id:
|
||||
params["item"] = {"albumid": int(media_id)}
|
||||
|
||||
yield from self.server.Playlist.Add(
|
||||
{"playlistid": 0, "item": {"albumid": int(media_id)}})
|
||||
else:
|
||||
raise RuntimeError("Unrecognized media type.")
|
||||
|
||||
if media_id is not None:
|
||||
try:
|
||||
yield from self.server.Playlist.Add(params)
|
||||
except jsonrpc_base.jsonrpc.ProtocolError as exc:
|
||||
result = exc.args[2]['error']
|
||||
_LOGGER.error('Run API method %s.Playlist.Add(%s) error: %s',
|
||||
self.entity_id, media_type, result)
|
||||
else:
|
||||
_LOGGER.warning('No media detected for Playlist.Add')
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_add_all_albums(self, artist_name):
|
||||
"""Add all albums of an artist to default playlist (i.e. playlistid=0).
|
||||
|
@ -734,9 +795,13 @@ class KodiDevice(MediaPlayerDevice):
|
|||
def async_find_artist(self, artist_name):
|
||||
"""Find artist by name."""
|
||||
artists = yield from self.async_get_artists()
|
||||
out = self._find(
|
||||
artist_name, [a['artist'] for a in artists['artists']])
|
||||
return artists['artists'][out[0][0]]['artistid']
|
||||
try:
|
||||
out = self._find(
|
||||
artist_name, [a['artist'] for a in artists['artists']])
|
||||
return artists['artists'][out[0][0]]['artistid']
|
||||
except KeyError:
|
||||
_LOGGER.warning('No artists were found: %s', artist_name)
|
||||
return None
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_get_songs(self, artist_id=None):
|
||||
|
@ -769,8 +834,14 @@ class KodiDevice(MediaPlayerDevice):
|
|||
artist_id = yield from self.async_find_artist(artist_name)
|
||||
|
||||
albums = yield from self.async_get_albums(artist_id)
|
||||
out = self._find(album_name, [a['label'] for a in albums['albums']])
|
||||
return albums['albums'][out[0][0]]['albumid']
|
||||
try:
|
||||
out = self._find(
|
||||
album_name, [a['label'] for a in albums['albums']])
|
||||
return albums['albums'][out[0][0]]['albumid']
|
||||
except KeyError:
|
||||
_LOGGER.warning('No albums were found with artist: %s, album: %s',
|
||||
artist_name, album_name)
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _find(key_word, words):
|
||||
|
|
|
@ -14,9 +14,7 @@ from homeassistant.components.media_player import (
|
|||
from homeassistant.const import (STATE_OFF, STATE_ON, CONF_HOST, CONF_NAME)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
REQUIREMENTS = ['https://github.com/miracle2k/onkyo-eiscp/archive/'
|
||||
'066023aec04770518d494c32fb72eea0ec5c1b7c.zip#'
|
||||
'onkyo-eiscp==1.0']
|
||||
REQUIREMENTS = ['onkyo-eiscp==1.1']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -17,9 +17,7 @@ from homeassistant.const import (
|
|||
import homeassistant.helpers.config_validation as cv
|
||||
import homeassistant.loader as loader
|
||||
|
||||
REQUIREMENTS = [
|
||||
'https://github.com/bah2830/python-roku/archive/3.1.3.zip'
|
||||
'#roku==3.1.3']
|
||||
REQUIREMENTS = ['python-roku==3.1.3']
|
||||
|
||||
KNOWN_HOSTS = []
|
||||
DEFAULT_PORT = 8060
|
||||
|
|
|
@ -15,9 +15,7 @@ from homeassistant.const import (
|
|||
CONF_HOST, CONF_PORT, STATE_OFF, STATE_ON, CONF_NAME)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
REQUIREMENTS = [
|
||||
'https://github.com/laf/russound/archive/0.1.7.zip'
|
||||
'#russound==0.1.7']
|
||||
REQUIREMENTS = ['russound==0.1.7']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -269,3 +269,34 @@ soundtouch_remove_zone_slave:
|
|||
slaves:
|
||||
description: Name of slaves entities to remove from the existing zone
|
||||
example: 'media_player.soundtouch_bedroom'
|
||||
|
||||
kodi_add_to_playlist:
|
||||
description: Add music to the default playlist (i.e. playlistid=0).
|
||||
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name(s) of the Kodi entities where to add the media.
|
||||
example: 'media_player.living_room_kodi'
|
||||
media_type:
|
||||
description: Media type identifier. It must be one of SONG or ALBUM.
|
||||
example: ALBUM
|
||||
media_id:
|
||||
description: Unique Id of the media entry to add (`songid` or albumid`). If not defined, `media_name` and `artist_name` are needed to search the Kodi music library.
|
||||
example: 123456
|
||||
media_name:
|
||||
description: Optional media name for filtering media. Can be 'ALL' when `media_type` is 'ALBUM' and `artist_name` is specified, to add all songs from one artist.
|
||||
example: 'Highway to Hell'
|
||||
artist_name:
|
||||
description: Optional artist name for filtering media.
|
||||
example: 'AC/DC'
|
||||
|
||||
kodi_call_method:
|
||||
description: 'Call a Kodi JSONRPC API method with optional parameters. Results of the Kodi API call will be redirected in a Home Assistant event: `kodi_call_method_result`.'
|
||||
|
||||
fields:
|
||||
entity_id:
|
||||
description: Name(s) of the Kodi entities where to run the API method.
|
||||
example: 'media_player.living_room_kodi'
|
||||
method:
|
||||
description: Name of the Kodi JSONRPC API method to be called.
|
||||
example: 'VideoLibrary.GetRecentlyAddedEpisodes'
|
||||
|
|
|
@ -51,6 +51,7 @@ SERVICE_SNAPSHOT = 'sonos_snapshot'
|
|||
SERVICE_RESTORE = 'sonos_restore'
|
||||
SERVICE_SET_TIMER = 'sonos_set_sleep_timer'
|
||||
SERVICE_CLEAR_TIMER = 'sonos_clear_sleep_timer'
|
||||
SERVICE_UPDATE_ALARM = 'sonos_update_alarm'
|
||||
|
||||
DATA_SONOS = 'sonos'
|
||||
|
||||
|
@ -62,6 +63,11 @@ CONF_INTERFACE_ADDR = 'interface_addr'
|
|||
|
||||
# Service call validation schemas
|
||||
ATTR_SLEEP_TIME = 'sleep_time'
|
||||
ATTR_ALARM_ID = 'alarm_id'
|
||||
ATTR_VOLUME = 'volume'
|
||||
ATTR_ENABLED = 'enabled'
|
||||
ATTR_INCLUDE_LINKED_ZONES = 'include_linked_zones'
|
||||
ATTR_TIME = 'time'
|
||||
ATTR_MASTER = 'master'
|
||||
ATTR_WITH_GROUP = 'with_group'
|
||||
|
||||
|
@ -90,6 +96,14 @@ SONOS_SET_TIMER_SCHEMA = SONOS_SCHEMA.extend({
|
|||
vol.All(vol.Coerce(int), vol.Range(min=0, max=86399))
|
||||
})
|
||||
|
||||
SONOS_UPDATE_ALARM_SCHEMA = SONOS_SCHEMA.extend({
|
||||
vol.Required(ATTR_ALARM_ID): cv.positive_int,
|
||||
vol.Optional(ATTR_TIME): cv.time,
|
||||
vol.Optional(ATTR_VOLUME): cv.small_float,
|
||||
vol.Optional(ATTR_ENABLED): cv.boolean,
|
||||
vol.Optional(ATTR_INCLUDE_LINKED_ZONES): cv.boolean,
|
||||
})
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the Sonos platform."""
|
||||
|
@ -163,9 +177,11 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
|||
elif service.service == SERVICE_RESTORE:
|
||||
device.restore(service.data[ATTR_WITH_GROUP])
|
||||
elif service.service == SERVICE_SET_TIMER:
|
||||
device.set_timer(service.data[ATTR_SLEEP_TIME])
|
||||
device.set_sleep_timer(service.data[ATTR_SLEEP_TIME])
|
||||
elif service.service == SERVICE_CLEAR_TIMER:
|
||||
device.clear_timer()
|
||||
device.clear_sleep_timer()
|
||||
elif service.service == SERVICE_UPDATE_ALARM:
|
||||
device.update_alarm(**service.data)
|
||||
|
||||
device.schedule_update_ha_state(True)
|
||||
|
||||
|
@ -193,6 +209,11 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
|||
DOMAIN, SERVICE_CLEAR_TIMER, service_handle,
|
||||
descriptions.get(SERVICE_CLEAR_TIMER), schema=SONOS_SCHEMA)
|
||||
|
||||
hass.services.register(
|
||||
DOMAIN, SERVICE_UPDATE_ALARM, service_handle,
|
||||
descriptions.get(SERVICE_UPDATE_ALARM),
|
||||
schema=SONOS_UPDATE_ALARM_SCHEMA)
|
||||
|
||||
|
||||
def _parse_timespan(timespan):
|
||||
"""Parse a time-span into number of seconds."""
|
||||
|
@ -1034,6 +1055,30 @@ class SonosDevice(MediaPlayerDevice):
|
|||
"""Clear the timer on the player."""
|
||||
self._player.set_sleep_timer(None)
|
||||
|
||||
@soco_error
|
||||
@soco_coordinator
|
||||
def update_alarm(self, **data):
|
||||
"""Set the alarm clock on the player."""
|
||||
from soco import alarms
|
||||
a = None
|
||||
for alarm in alarms.get_alarms(self.soco):
|
||||
# pylint: disable=protected-access
|
||||
if alarm._alarm_id == str(data[ATTR_ALARM_ID]):
|
||||
a = alarm
|
||||
if a is None:
|
||||
_LOGGER.warning("did not find alarm with id %s",
|
||||
data[ATTR_ALARM_ID])
|
||||
return
|
||||
if ATTR_TIME in data:
|
||||
a.start_time = data[ATTR_TIME]
|
||||
if ATTR_VOLUME in data:
|
||||
a.volume = int(data[ATTR_VOLUME] * 100)
|
||||
if ATTR_ENABLED in data:
|
||||
a.enabled = data[ATTR_ENABLED]
|
||||
if ATTR_INCLUDE_LINKED_ZONES in data:
|
||||
a.include_linked_zones = data[ATTR_INCLUDE_LINKED_ZONES]
|
||||
a.save()
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return device specific state attributes."""
|
||||
|
|
|
@ -176,6 +176,7 @@ class SpotifyMediaPlayer(MediaPlayerDevice):
|
|||
self._state = STATE_PAUSED
|
||||
if current.get('is_playing'):
|
||||
self._state = STATE_PLAYING
|
||||
self._shuffle = current.get('shuffle_state')
|
||||
device = current.get('device')
|
||||
if device is None:
|
||||
self._state = STATE_IDLE
|
||||
|
@ -184,8 +185,6 @@ class SpotifyMediaPlayer(MediaPlayerDevice):
|
|||
self._volume = device.get('volume_percent') / 100
|
||||
if device.get('name'):
|
||||
self._current_device = device.get('name')
|
||||
if device.get('shuffle_state'):
|
||||
self._shuffle = device.get('shuffle_state')
|
||||
|
||||
def set_volume_level(self, volume):
|
||||
"""Set the volume level."""
|
||||
|
@ -213,7 +212,8 @@ class SpotifyMediaPlayer(MediaPlayerDevice):
|
|||
|
||||
def select_source(self, source):
|
||||
"""Select playback device."""
|
||||
self._player.transfer_playback(self._devices[source])
|
||||
self._player.transfer_playback(self._devices[source],
|
||||
self._state == STATE_PLAYING)
|
||||
|
||||
def play_media(self, media_type, media_id, **kwargs):
|
||||
"""Play media."""
|
||||
|
|
|
@ -28,10 +28,12 @@ _LOGGER = logging.getLogger(__name__)
|
|||
DOMAIN = 'microsoft_face'
|
||||
DEPENDENCIES = ['camera']
|
||||
|
||||
FACE_API_URL = "https://westus.api.cognitive.microsoft.com/face/v1.0/{0}"
|
||||
FACE_API_URL = "api.cognitive.microsoft.com/face/v1.0/{0}"
|
||||
|
||||
DATA_MICROSOFT_FACE = 'microsoft_face'
|
||||
|
||||
CONF_AZURE_REGION = 'azure_region'
|
||||
|
||||
SERVICE_CREATE_GROUP = 'create_group'
|
||||
SERVICE_DELETE_GROUP = 'delete_group'
|
||||
SERVICE_TRAIN_GROUP = 'train_group'
|
||||
|
@ -49,6 +51,7 @@ DEFAULT_TIMEOUT = 10
|
|||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: vol.Schema({
|
||||
vol.Required(CONF_API_KEY): cv.string,
|
||||
vol.Optional(CONF_AZURE_REGION, default="westus"): cv.string,
|
||||
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
|
||||
}),
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
|
@ -115,6 +118,7 @@ def async_setup(hass, config):
|
|||
entities = {}
|
||||
face = MicrosoftFace(
|
||||
hass,
|
||||
config[DOMAIN].get(CONF_AZURE_REGION),
|
||||
config[DOMAIN].get(CONF_API_KEY),
|
||||
config[DOMAIN].get(CONF_TIMEOUT),
|
||||
entities
|
||||
|
@ -304,12 +308,13 @@ class MicrosoftFaceGroupEntity(Entity):
|
|||
class MicrosoftFace(object):
|
||||
"""Microsoft Face api for HomeAssistant."""
|
||||
|
||||
def __init__(self, hass, api_key, timeout, entities):
|
||||
def __init__(self, hass, server_loc, api_key, timeout, entities):
|
||||
"""Initialize Microsoft Face api."""
|
||||
self.hass = hass
|
||||
self.websession = async_get_clientsession(hass)
|
||||
self.timeout = timeout
|
||||
self._api_key = api_key
|
||||
self._server_url = "https://{0}.{1}".format(server_loc, FACE_API_URL)
|
||||
self._store = {}
|
||||
self._entities = entities
|
||||
|
||||
|
@ -346,7 +351,7 @@ class MicrosoftFace(object):
|
|||
params=None):
|
||||
"""Make a api call."""
|
||||
headers = {"Ocp-Apim-Subscription-Key": self._api_key}
|
||||
url = FACE_API_URL.format(function)
|
||||
url = self._server_url.format(function)
|
||||
|
||||
payload = None
|
||||
if binary:
|
||||
|
|
|
@ -25,8 +25,7 @@ from homeassistant.components.http import HomeAssistantView
|
|||
from homeassistant.components.frontend import add_manifest_json_key
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
# pyelliptic is dependency of pywebpush and 1.5.8 contains a breaking change
|
||||
REQUIREMENTS = ['pywebpush==0.6.1', 'PyJWT==1.4.2', 'pyelliptic==1.5.7']
|
||||
REQUIREMENTS = ['pywebpush==1.0.0', 'PyJWT==1.4.2']
|
||||
|
||||
DEPENDENCIES = ['frontend']
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@ from homeassistant.components.notify import (
|
|||
from homeassistant.const import (CONF_API_KEY, CONF_SENDER, CONF_RECIPIENT)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
REQUIREMENTS = ['sendgrid==4.0.0']
|
||||
REQUIREMENTS = ['sendgrid==4.1.0']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -9,9 +9,9 @@ import smtplib
|
|||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
from email.mime.image import MIMEImage
|
||||
import email.utils
|
||||
from email.mime.application import MIMEApplication
|
||||
|
||||
import email.utils
|
||||
import os
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.notify import (
|
||||
|
@ -26,10 +26,12 @@ import homeassistant.util.dt as dt_util
|
|||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_IMAGES = 'images' # optional embedded image file attachments
|
||||
ATTR_HTML = 'html'
|
||||
|
||||
CONF_STARTTLS = 'starttls'
|
||||
CONF_DEBUG = 'debug'
|
||||
CONF_SERVER = 'server'
|
||||
CONF_SENDER_NAME = 'sender_name'
|
||||
|
||||
DEFAULT_HOST = 'localhost'
|
||||
DEFAULT_PORT = 25
|
||||
|
@ -47,6 +49,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
|||
vol.Optional(CONF_STARTTLS, default=DEFAULT_STARTTLS): cv.boolean,
|
||||
vol.Optional(CONF_USERNAME): cv.string,
|
||||
vol.Optional(CONF_PASSWORD): cv.string,
|
||||
vol.Optional(CONF_SENDER_NAME): cv.string,
|
||||
vol.Optional(CONF_DEBUG, default=DEFAULT_DEBUG): cv.boolean,
|
||||
})
|
||||
|
||||
|
@ -62,6 +65,7 @@ def get_service(hass, config, discovery_info=None):
|
|||
config.get(CONF_USERNAME),
|
||||
config.get(CONF_PASSWORD),
|
||||
config.get(CONF_RECIPIENT),
|
||||
config.get(CONF_SENDER_NAME),
|
||||
config.get(CONF_DEBUG))
|
||||
|
||||
if mail_service.connection_is_valid():
|
||||
|
@ -74,7 +78,7 @@ class MailNotificationService(BaseNotificationService):
|
|||
"""Implement the notification service for E-Mail messages."""
|
||||
|
||||
def __init__(self, server, port, timeout, sender, starttls, username,
|
||||
password, recipients, debug):
|
||||
password, recipients, sender_name, debug):
|
||||
"""Initialize the service."""
|
||||
self._server = server
|
||||
self._port = port
|
||||
|
@ -84,6 +88,8 @@ class MailNotificationService(BaseNotificationService):
|
|||
self.username = username
|
||||
self.password = password
|
||||
self.recipients = recipients
|
||||
self._sender_name = sender_name
|
||||
self._timeout = timeout
|
||||
self.debug = debug
|
||||
self.tries = 2
|
||||
|
||||
|
@ -128,19 +134,28 @@ class MailNotificationService(BaseNotificationService):
|
|||
Build and send a message to a user.
|
||||
|
||||
Will send plain text normally, or will build a multipart HTML message
|
||||
with inline image attachments if images config is defined.
|
||||
with inline image attachments if images config is defined, or will
|
||||
build a multipart HTML if html config is defined.
|
||||
"""
|
||||
subject = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
|
||||
data = kwargs.get(ATTR_DATA)
|
||||
|
||||
if data:
|
||||
msg = _build_multipart_msg(message, images=data.get(ATTR_IMAGES))
|
||||
if ATTR_HTML in data:
|
||||
msg = _build_html_msg(message, data[ATTR_HTML],
|
||||
images=data.get(ATTR_IMAGES))
|
||||
else:
|
||||
msg = _build_multipart_msg(message,
|
||||
images=data.get(ATTR_IMAGES))
|
||||
else:
|
||||
msg = _build_text_msg(message)
|
||||
|
||||
msg['Subject'] = subject
|
||||
msg['To'] = ','.join(self.recipients)
|
||||
msg['From'] = self._sender
|
||||
if self._sender_name:
|
||||
msg['From'] = '{} <{}>'.format(self._sender_name, self._sender)
|
||||
else:
|
||||
msg['From'] = self._sender
|
||||
msg['X-Mailer'] = 'HomeAssistant'
|
||||
msg['Date'] = email.utils.format_datetime(dt_util.now())
|
||||
msg['Message-Id'] = email.utils.make_msgid()
|
||||
|
@ -155,12 +170,16 @@ class MailNotificationService(BaseNotificationService):
|
|||
mail.sendmail(self._sender, self.recipients,
|
||||
msg.as_string())
|
||||
break
|
||||
except smtplib.SMTPServerDisconnected:
|
||||
_LOGGER.warning(
|
||||
"SMTPServerDisconnected sending mail: retrying connection")
|
||||
mail.quit()
|
||||
mail = self.connect()
|
||||
except smtplib.SMTPException:
|
||||
_LOGGER.warning(
|
||||
"SMTPException sending mail: retrying connection")
|
||||
mail.quit()
|
||||
mail = self.connect()
|
||||
|
||||
mail.quit()
|
||||
|
||||
|
||||
|
@ -204,3 +223,25 @@ def _build_multipart_msg(message, images):
|
|||
body_html = MIMEText(''.join(body_text), 'html')
|
||||
msg_alt.attach(body_html)
|
||||
return msg
|
||||
|
||||
|
||||
def _build_html_msg(text, html, images):
|
||||
"""Build Multipart message with in-line images and rich html (UTF-8)."""
|
||||
_LOGGER.debug("Building html rich email")
|
||||
msg = MIMEMultipart('related')
|
||||
alternative = MIMEMultipart('alternative')
|
||||
alternative.attach(MIMEText(text, _charset='utf-8'))
|
||||
alternative.attach(MIMEText(html, ATTR_HTML, _charset='utf-8'))
|
||||
msg.attach(alternative)
|
||||
|
||||
for atch_num, atch_name in enumerate(images):
|
||||
name = os.path.basename(atch_name)
|
||||
try:
|
||||
with open(atch_name, 'rb') as attachment_file:
|
||||
attachment = MIMEImage(attachment_file.read(), filename=name)
|
||||
msg.attach(attachment)
|
||||
attachment.add_header('Content-ID', '<{}>'.format(name))
|
||||
except FileNotFoundError:
|
||||
_LOGGER.warning('Attachment %s [#%s] not found. Skipping',
|
||||
atch_name, atch_num)
|
||||
return msg
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue