No longer allow invalid slugs or extra keys (#24176)

* No longer allow slugs
* Lint
* Remove HASchema
* Lint
* Fix tests

parent b3d8f8620c
commit 59ce31f44f

6 changed files with 8 additions and 275 deletions
@@ -17,7 +17,6 @@ from homeassistant.util.logging import AsyncHandler
 from homeassistant.util.package import async_get_user_site, is_virtual_env
 from homeassistant.util.yaml import clear_secret_cache
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers import config_validation as cv
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -101,51 +100,6 @@ async def async_from_config_dict(config: Dict[str, Any],
             "upgrade Python.", "Python version", "python_version"
         )
 
-    # TEMP: warn users for invalid slugs
-    # Remove after 0.94 or 1.0
-    if cv.INVALID_SLUGS_FOUND or cv.INVALID_ENTITY_IDS_FOUND:
-        msg = []
-
-        if cv.INVALID_ENTITY_IDS_FOUND:
-            msg.append(
-                "Your configuration contains invalid entity ID references. "
-                "Please find and update the following. "
-                "This will become a breaking change."
-            )
-            msg.append('\n'.join('- {} -> {}'.format(*item)
-                                 for item
-                                 in cv.INVALID_ENTITY_IDS_FOUND.items()))
-
-        if cv.INVALID_SLUGS_FOUND:
-            msg.append(
-                "Your configuration contains invalid slugs. "
-                "Please find and update the following. "
-                "This will become a breaking change."
-            )
-            msg.append('\n'.join('- {} -> {}'.format(*item)
-                                 for item in cv.INVALID_SLUGS_FOUND.items()))
-
-        hass.components.persistent_notification.async_create(
-            '\n\n'.join(msg), "Config Warning", "config_warning"
-        )
-
-    # TEMP: warn users of invalid extra keys
-    # Remove after 0.92
-    if cv.INVALID_EXTRA_KEYS_FOUND:
-        msg = []
-        msg.append(
-            "Your configuration contains extra keys "
-            "that the platform does not support (but were silently "
-            "accepted before 0.88). Please find and remove the following."
-            "This will become a breaking change."
-        )
-        msg.append('\n'.join('- {}'.format(it)
-                             for it in cv.INVALID_EXTRA_KEYS_FOUND))
-
-        hass.components.persistent_notification.async_create(
-            '\n\n'.join(msg), "Config Warning", "config_warning"
-        )
-
     return hass
 
 
@@ -1,6 +1,5 @@
 """Helpers for config validation using voluptuous."""
 import inspect
-import json
 import logging
 import os
 import re
@@ -17,7 +16,7 @@ from pkg_resources import parse_version
 import homeassistant.util.dt as dt_util
 from homeassistant.const import (
     CONF_ABOVE, CONF_ALIAS, CONF_BELOW, CONF_CONDITION, CONF_ENTITY_ID,
-    CONF_ENTITY_NAMESPACE, CONF_NAME, CONF_PLATFORM, CONF_SCAN_INTERVAL,
+    CONF_ENTITY_NAMESPACE, CONF_PLATFORM, CONF_SCAN_INTERVAL,
     CONF_UNIT_SYSTEM_IMPERIAL, CONF_UNIT_SYSTEM_METRIC, CONF_VALUE_TEMPLATE,
     CONF_TIMEOUT, ENTITY_MATCH_ALL, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET,
     TEMP_CELSIUS, TEMP_FAHRENHEIT, WEEKDAYS, __version__)
@@ -29,13 +28,6 @@ from homeassistant.util import slugify as util_slugify
 # pylint: disable=invalid-name
 
 TIME_PERIOD_ERROR = "offset {} should be format 'HH:MM' or 'HH:MM:SS'"
-OLD_SLUG_VALIDATION = r'^[a-z0-9_]+$'
-OLD_ENTITY_ID_VALIDATION = r"^(\w+)\.(\w+)$"
-# Keep track of invalid slugs and entity ids found so we can create a
-# persistent notification. Rare temporary exception to use a global.
-INVALID_SLUGS_FOUND = {}
-INVALID_ENTITY_IDS_FOUND = {}
-INVALID_EXTRA_KEYS_FOUND = []
 
 
 # Home Assistant types
@@ -176,17 +168,6 @@ def entity_id(value: Any) -> str:
     value = string(value).lower()
     if valid_entity_id(value):
         return value
-    if re.match(OLD_ENTITY_ID_VALIDATION, value):
-        # To ease the breaking change, we allow old slugs for now
-        # Remove after 0.94 or 1.0
-        fixed = '.'.join(util_slugify(part) for part in value.split('.', 1))
-        INVALID_ENTITY_IDS_FOUND[value] = fixed
-        logging.getLogger(__name__).warning(
-            "Found invalid entity_id %s, please update with %s. This "
-            "will become a breaking change.",
-            value, fixed
-        )
-        return value
 
     raise vol.Invalid('Entity ID {} is an invalid entity id'.format(value))
 
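With the OLD_ENTITY_ID_VALIDATION fallback gone, entity_id validation is strict again. A minimal sketch of the resulting behaviour (not part of the diff; the sample IDs are borrowed from the test this commit deletes):

import voluptuous as vol
from homeassistant.helpers import config_validation as cv

schema = vol.Schema(cv.entity_id)

schema('light.kitchen')       # valid, passes as before
try:
    schema('hello.__world')   # formerly logged a warning and was accepted
except vol.Invalid as err:
    print(err)                # Entity ID hello.__world is an invalid entity id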
@@ -377,21 +358,7 @@ def schema_with_slug_keys(value_schema: Union[T, Callable]) -> Callable:
             raise vol.Invalid('expected dictionary')
 
         for key in value.keys():
-            try:
-                slug(key)
-            except vol.Invalid:
-                # To ease the breaking change, we allow old slugs for now
-                # Remove after 0.94 or 1.0
-                if re.match(OLD_SLUG_VALIDATION, key):
-                    fixed = util_slugify(key)
-                    INVALID_SLUGS_FOUND[key] = fixed
-                    logging.getLogger(__name__).warning(
-                        "Found invalid slug %s, please update with %s. This "
-                        "will be come a breaking change.",
-                        key, fixed
-                    )
-                else:
-                    raise
+            slug(key)
 
         return schema(value)
     return verify
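Likewise, schema_with_slug_keys no longer tolerates keys that only matched OLD_SLUG_VALIDATION. A rough sketch of the new behaviour (not part of the diff; the key values come from the deleted test):

import voluptuous as vol
from homeassistant.helpers import config_validation as cv

schema = cv.schema_with_slug_keys(str)

schema({'kitchen_light': 'on'})   # valid slug key
try:
    schema({'_world': 'yo'})      # leading underscore: previously warned, now rejected
except vol.Invalid as err:
    print(err)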
@@ -656,88 +623,7 @@ def key_dependency(key, dependency):
 
 
 # Schemas
-class HASchema(vol.Schema):
-    """Schema class that allows us to mark PREVENT_EXTRA errors as warnings."""
-
-    def __call__(self, data):
-        """Override __call__ to mark PREVENT_EXTRA as warning."""
-        try:
-            return super().__call__(data)
-        except vol.Invalid as orig_err:
-            if self.extra != vol.PREVENT_EXTRA:
-                raise
-
-            # orig_error is of type vol.MultipleInvalid (see super __call__)
-            assert isinstance(orig_err, vol.MultipleInvalid)
-            # pylint: disable=no-member
-            # If it fails with PREVENT_EXTRA, try with ALLOW_EXTRA
-            self.extra = vol.ALLOW_EXTRA
-            # In case it still fails the following will raise
-            try:
-                validated = super().__call__(data)
-            finally:
-                self.extra = vol.PREVENT_EXTRA
-
-            # This is a legacy config, print warning
-            extra_key_errs = [err.path[-1] for err in orig_err.errors
-                              if err.error_message == 'extra keys not allowed']
-
-            if not extra_key_errs:
-                # This should not happen (all errors should be extra key
-                # errors). Let's raise the original error anyway.
-                raise orig_err
-
-            WHITELIST = [
-                re.compile(CONF_NAME),
-                re.compile(CONF_PLATFORM),
-                re.compile('.*_topic'),
-            ]
-
-            msg = "Your configuration contains extra keys " \
-                  "that the platform does not support.\n" \
-                  "Please remove "
-            submsg = ', '.join('[{}]'.format(err) for err in
-                               extra_key_errs)
-            submsg += '. '
-
-            # Add file+line information, if available
-            if hasattr(data, '__config_file__'):
-                submsg += " (See {}, line {}). ".format(
-                    data.__config_file__, data.__line__)
-
-            # Add configuration source information, if available
-            if hasattr(data, '__configuration_source__'):
-                submsg += "\nConfiguration source: {}. ".format(
-                    data.__configuration_source__)
-            redacted_data = {}
-
-            # Print configuration causing the error, but filter any potentially
-            # sensitive data
-            for k, v in data.items():
-                if (any(regex.match(k) for regex in WHITELIST) or
-                        k in extra_key_errs):
-                    redacted_data[k] = v
-                else:
-                    redacted_data[k] = '<redacted>'
-            submsg += "\nOffending data: {}".format(
-                json.dumps(redacted_data))
-
-            msg += submsg
-            logging.getLogger(__name__).warning(msg)
-            INVALID_EXTRA_KEYS_FOUND.append(submsg)
-
-            # Return legacy validated config
-            return validated
-
-    def extend(self, schema, required=None, extra=None):
-        """Extend this schema and convert it to HASchema if necessary."""
-        ret = super().extend(schema, required=required, extra=extra)
-        if extra is not None:
-            return ret
-        return HASchema(ret.schema, required=required, extra=self.extra)
-
-
-PLATFORM_SCHEMA = HASchema({
+PLATFORM_SCHEMA = vol.Schema({
     vol.Required(CONF_PLATFORM): string,
     vol.Optional(CONF_ENTITY_NAMESPACE): string,
     vol.Optional(CONF_SCAN_INTERVAL): time_period
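Dropping HASchema makes PLATFORM_SCHEMA an ordinary vol.Schema again, and voluptuous defaults to PREVENT_EXTRA, so unknown platform keys now raise instead of being logged, whitelisted and accepted. A hedged sketch with a hypothetical platform config (assuming PLATFORM_SCHEMA keeps the default PREVENT_EXTRA behaviour, which is what the removed warning path worked around):

import voluptuous as vol
from homeassistant.helpers import config_validation as cv

conf = {'platform': 'whatever', 'invalid': 'extra'}  # hypothetical config
try:
    cv.PLATFORM_SCHEMA(conf)
except vol.Invalid as err:
    print(err)  # e.g. "extra keys not allowed @ data['invalid']"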
@@ -612,7 +612,6 @@ async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock):
         vacuum.DOMAIN: {
             'platform': 'mqtt',
             'name': 'test',
-            'state_topic': 'test-topic',
             'json_attributes_topic': 'attr-topic'
         }
     })
@@ -629,7 +628,6 @@ async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog):
         vacuum.DOMAIN: {
             'platform': 'mqtt',
             'name': 'test',
-            'state_topic': 'test-topic',
             'json_attributes_topic': 'attr-topic'
         }
     })
@@ -647,7 +645,6 @@ async def test_update_with_json_attrs_bad_json(hass, mqtt_mock, caplog):
         vacuum.DOMAIN: {
             'platform': 'mqtt',
             'name': 'test',
-            'state_topic': 'test-topic',
             'json_attributes_topic': 'attr-topic'
         }
     })
@@ -766,7 +763,6 @@ async def test_entity_device_info_update(hass, mqtt_mock):
     config = {
         'platform': 'mqtt',
         'name': 'Test 1',
-        'state_topic': 'test-topic',
         'command_topic': 'test-command-topic',
         'device': {
             'identifiers': ['helloworld'],
@@ -331,8 +331,7 @@ async def test_discovery_removal_vacuum(hass, mqtt_mock):
 
     data = (
         '{ "name": "Beer",'
-        ' "command_topic": "test_topic",'
-        ' "component": "state" }'
+        ' "command_topic": "test_topic"}'
    )
 
     async_fire_mqtt_message(hass, 'homeassistant/vacuum/bla/config',
@@ -357,13 +356,11 @@ async def test_discovery_broken(hass, mqtt_mock, caplog):
 
     data1 = (
         '{ "name": "Beer",'
-        ' "command_topic": "test_topic#",'
-        ' "component": "state" }'
+        ' "command_topic": "test_topic#"}'
     )
     data2 = (
         '{ "name": "Milk",'
-        ' "command_topic": "test_topic",'
-        ' "component": "state" }'
+        ' "command_topic": "test_topic"}'
     )
 
     async_fire_mqtt_message(hass, 'homeassistant/vacuum/bla/config',
@@ -391,13 +388,11 @@ async def test_discovery_update_vacuum(hass, mqtt_mock):
 
     data1 = (
         '{ "name": "Beer",'
-        ' "command_topic": "test_topic",'
-        '"component": "state" }'
+        ' "command_topic": "test_topic"}'
     )
     data2 = (
         '{ "name": "Milk",'
-        ' "command_topic": "test_topic",'
-        ' "component": "state"}'
+        ' "command_topic": "test_topic"}'
     )
 
     async_fire_mqtt_message(hass, 'homeassistant/vacuum/bla/config',
@@ -425,7 +420,6 @@ async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock):
         vacuum.DOMAIN: {
             'platform': 'mqtt',
             'name': 'test',
-            'state_topic': 'test-topic',
             'json_attributes_topic': 'attr-topic'
         }
     })
@@ -442,7 +436,6 @@ async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog):
         vacuum.DOMAIN: {
             'platform': 'mqtt',
             'name': 'test',
-            'state_topic': 'test-topic',
             'json_attributes_topic': 'attr-topic'
         }
     })
@@ -460,7 +453,6 @@ async def test_update_with_json_attrs_bad_json(hass, mqtt_mock, caplog):
         vacuum.DOMAIN: {
             'platform': 'mqtt',
             'name': 'test',
-            'state_topic': 'test-topic',
             'json_attributes_topic': 'attr-topic'
         }
     })
@@ -579,7 +571,6 @@ async def test_entity_device_info_update(hass, mqtt_mock):
     config = {
         'platform': 'mqtt',
         'name': 'Test 1',
-        'state_topic': 'test-topic',
         'command_topic': 'test-command-topic',
         'device': {
             'identifiers': ['helloworld'],
@@ -943,34 +943,6 @@ def test_comp_entity_ids():
             schema(invalid)
 
 
-def test_schema_with_slug_keys_allows_old_slugs(caplog):
-    """Test schema with slug keys allowing old slugs."""
-    schema = cv.schema_with_slug_keys(str)
-
-    with patch.dict(cv.INVALID_SLUGS_FOUND, clear=True):
-        for value in ('_world', 'wow__yeah'):
-            caplog.clear()
-            # Will raise if not allowing old slugs
-            schema({value: 'yo'})
-            assert "Found invalid slug {}".format(value) in caplog.text
-
-        assert len(cv.INVALID_SLUGS_FOUND) == 2
-
-
-def test_entity_id_allow_old_validation(caplog):
-    """Test schema allowing old entity_ids."""
-    schema = vol.Schema(cv.entity_id)
-
-    with patch.dict(cv.INVALID_ENTITY_IDS_FOUND, clear=True):
-        for value in ('hello.__world', 'great.wow__yeah'):
-            caplog.clear()
-            # Will raise if not allowing old entity ID
-            schema(value)
-            assert "Found invalid entity_id {}".format(value) in caplog.text
-
-        assert len(cv.INVALID_ENTITY_IDS_FOUND) == 2
-
-
 def test_uuid4_hex(caplog):
     """Test uuid validation."""
     schema = vol.Schema(cv.uuid4_hex)
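The two compatibility tests are deleted outright rather than rewritten. If equivalent coverage of the strict behaviour were wanted, a replacement could look roughly like this (a sketch, not part of this commit):

import pytest
import voluptuous as vol
import homeassistant.helpers.config_validation as cv


def test_schema_with_slug_keys_rejects_old_slugs():
    """Keys that only matched the old slug pattern now fail validation."""
    schema = cv.schema_with_slug_keys(str)

    for value in ('_world', 'wow__yeah'):
        with pytest.raises(vol.Invalid):
            schema({value: 'yo'})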
@@ -108,37 +108,6 @@ class TestSetup:
             'platform_conf.whatever',
             MockPlatform(platform_schema=platform_schema))
 
-        with assert_setup_component(1):
-            assert setup.setup_component(self.hass, 'platform_conf', {
-                'platform_conf': {
-                    'platform': 'whatever',
-                    'hello': 'world',
-                    'invalid': 'extra',
-                }
-            })
-            assert caplog.text.count('Your configuration contains '
-                                     'extra keys') == 1
-
-        self.hass.data.pop(setup.DATA_SETUP)
-        self.hass.config.components.remove('platform_conf')
-
-        with assert_setup_component(2):
-            assert setup.setup_component(self.hass, 'platform_conf', {
-                'platform_conf': {
-                    'platform': 'whatever',
-                    'hello': 'world',
-                },
-                'platform_conf 2': {
-                    'platform': 'whatever',
-                    'invalid': True
-                }
-            })
-            assert caplog.text.count('Your configuration contains '
-                                     'extra keys') == 2
-
-        self.hass.data.pop(setup.DATA_SETUP)
-        self.hass.config.components.remove('platform_conf')
-
         with assert_setup_component(0):
             assert setup.setup_component(self.hass, 'platform_conf', {
                 'platform_conf': {
@@ -206,21 +175,6 @@ class TestSetup:
             MockPlatform('whatever',
                          platform_schema=platform_schema))
 
-        with assert_setup_component(1):
-            assert setup.setup_component(self.hass, 'platform_conf', {
-                # fail: no extra keys allowed in platform schema
-                'platform_conf': {
-                    'platform': 'whatever',
-                    'hello': 'world',
-                    'invalid': 'extra',
-                }
-            })
-            assert caplog.text.count('Your configuration contains '
-                                     'extra keys') == 1
-
-        self.hass.data.pop(setup.DATA_SETUP)
-        self.hass.config.components.remove('platform_conf')
-
         with assert_setup_component(1):
             assert setup.setup_component(self.hass, 'platform_conf', {
                 # pass
@@ -235,9 +189,6 @@ class TestSetup:
                 }
             })
 
-        self.hass.data.pop(setup.DATA_SETUP)
-        self.hass.config.components.remove('platform_conf')
-
     def test_validate_platform_config_3(self, caplog):
         """Test fallback to component PLATFORM_SCHEMA."""
         component_schema = PLATFORM_SCHEMA_BASE.extend({
@@ -258,20 +209,6 @@ class TestSetup:
             MockPlatform('whatever',
                          platform_schema=platform_schema))
 
-        with assert_setup_component(1):
-            assert setup.setup_component(self.hass, 'platform_conf', {
-                'platform_conf': {
-                    'platform': 'whatever',
-                    'hello': 'world',
-                    'invalid': 'extra',
-                }
-            })
-            assert caplog.text.count('Your configuration contains '
-                                     'extra keys') == 1
-
-        self.hass.data.pop(setup.DATA_SETUP)
-        self.hass.config.components.remove('platform_conf')
-
         with assert_setup_component(1):
             assert setup.setup_component(self.hass, 'platform_conf', {
                 # pass
@@ -286,9 +223,6 @@ class TestSetup:
                 }
             })
 
-        self.hass.data.pop(setup.DATA_SETUP)
-        self.hass.config.components.remove('platform_conf')
-
     def test_validate_platform_config_4(self):
         """Test entity_namespace in PLATFORM_SCHEMA."""
         component_schema = PLATFORM_SCHEMA_BASE