Drop white blacklist pt1 (#37816)
parent ccbc3b5e39
commit 8ed1a29c82
21 changed files with 73 additions and 50 deletions
@@ -190,7 +190,7 @@ class TrustedNetworksLoginFlow(LoginFlow):
             ).async_validate_access(self._ip_address)

         except InvalidAuthError:
-            return self.async_abort(reason="not_whitelisted")
+            return self.async_abort(reason="not_allowed")

         if user_input is not None:
             return await self.async_finish(user_input)
|
|
@@ -40,7 +40,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
     if hass.config.is_allowed_path(file_path):
         async_add_entities([FileSensor(name, file_path, unit, value_template)], True)
     else:
-        _LOGGER.error("'%s' is not a whitelisted directory", file_path)
+        _LOGGER.error("'%s' is not an allowed directory", file_path)


 class FileSensor(Entity):
|
|
@@ -172,7 +172,7 @@ class LogiCam(Camera):
         filename.hass = self.hass
         stream_file = filename.async_render(variables={ATTR_ENTITY_ID: self.entity_id})

-        # Respect configured path whitelist.
+        # Respect configured allowed paths.
         if not self.hass.config.is_allowed_path(stream_file):
             _LOGGER.error("Can't write %s, no access to path!", stream_file)
             return
|
@@ -191,7 +191,7 @@ class LogiCam(Camera):
             variables={ATTR_ENTITY_ID: self.entity_id}
         )

-        # Respect configured path whitelist.
+        # Respect configured allowed paths.
         if not self.hass.config.is_allowed_path(snapshot_file):
             _LOGGER.error("Can't write %s, no access to path!", snapshot_file)
             return
|
|
@@ -74,10 +74,10 @@ def setup(hass, config):

     include = conf[CONF_INCLUDE]
     exclude = conf[CONF_EXCLUDE]
-    whitelist_e = set(include[CONF_ENTITIES])
-    whitelist_d = set(include[CONF_DOMAINS])
-    blacklist_e = set(exclude[CONF_ENTITIES])
-    blacklist_d = set(exclude[CONF_DOMAINS])
+    include_e = set(include[CONF_ENTITIES])
+    include_d = set(include[CONF_DOMAINS])
+    exclude_e = set(exclude[CONF_ENTITIES])
+    exclude_d = set(exclude[CONF_DOMAINS])

     client_args = {
         "org": conf[CONF_ORG],
|
@@ -94,13 +94,13 @@ def setup(hass, config):
         if (
             state is None
             or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE)
-            or state.entity_id in blacklist_e
-            or state.domain in blacklist_d
+            or state.entity_id in exclude_e
+            or state.domain in exclude_d
         ):
             return

-        if (whitelist_e and state.entity_id not in whitelist_e) or (
-            whitelist_d and state.domain not in whitelist_d
+        if (include_e and state.entity_id not in include_e) or (
+            include_d and state.domain not in include_d
         ):
             return
|
|
@@ -301,7 +301,7 @@ async def async_send_message(
             _LOGGER.info("Uploading file from path, %s", path)

             if not hass.config.is_allowed_path(path):
-                raise PermissionError("Could not access file. Not in whitelist")
+                raise PermissionError("Could not access file. Path not allowed")

             with open(path, "rb") as upfile:
                 _LOGGER.debug("Reading file %s", path)
||||||
|
@@ -20,6 +20,7 @@ from homeassistant.const import (
     ATTR_ASSUMED_STATE,
     ATTR_FRIENDLY_NAME,
     ATTR_HIDDEN,
+    CONF_ALLOWLIST_EXTERNAL_DIRS,
     CONF_ALLOWLIST_EXTERNAL_URLS,
     CONF_AUTH_MFA_MODULES,
     CONF_AUTH_PROVIDERS,
|
@@ -39,7 +40,7 @@ from homeassistant.const import (
     CONF_TYPE,
     CONF_UNIT_SYSTEM,
     CONF_UNIT_SYSTEM_IMPERIAL,
-    CONF_WHITELIST_EXTERNAL_DIRS,
+    LEGACY_CONF_WHITELIST_EXTERNAL_DIRS,
     TEMP_CELSIUS,
     __version__,
 )
|
@@ -183,7 +184,10 @@ CORE_CONFIG_SCHEMA = CUSTOMIZE_CONFIG_SCHEMA.extend(
         CONF_TIME_ZONE: cv.time_zone,
         vol.Optional(CONF_INTERNAL_URL): cv.url,
         vol.Optional(CONF_EXTERNAL_URL): cv.url,
-        vol.Optional(CONF_WHITELIST_EXTERNAL_DIRS): vol.All(
+        vol.Optional(CONF_ALLOWLIST_EXTERNAL_DIRS): vol.All(
+            cv.ensure_list, [vol.IsDir()]  # pylint: disable=no-value-for-parameter
+        ),
+        vol.Optional(LEGACY_CONF_WHITELIST_EXTERNAL_DIRS): vol.All(
             cv.ensure_list, [vol.IsDir()]  # pylint: disable=no-value-for-parameter
         ),
         vol.Optional(CONF_ALLOWLIST_EXTERNAL_URLS): vol.All(cv.ensure_list, [cv.url]),
|
@@ -500,9 +504,19 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: Dict) -> Non
     hac.set_time_zone(config[CONF_TIME_ZONE])

     # Init whitelist external dir
-    hac.whitelist_external_dirs = {hass.config.path("www")}
-    if CONF_WHITELIST_EXTERNAL_DIRS in config:
-        hac.whitelist_external_dirs.update(set(config[CONF_WHITELIST_EXTERNAL_DIRS]))
+    hac.allowlist_external_dirs = {hass.config.path("www")}
+    if CONF_ALLOWLIST_EXTERNAL_DIRS in config:
+        hac.allowlist_external_dirs.update(set(config[CONF_ALLOWLIST_EXTERNAL_DIRS]))

+    elif LEGACY_CONF_WHITELIST_EXTERNAL_DIRS in config:
+        _LOGGER.warning(
+            "Key %s has been replaced with %s. Please update your config",
+            CONF_ALLOWLIST_EXTERNAL_DIRS,
+            LEGACY_CONF_WHITELIST_EXTERNAL_DIRS,
+        )
+        hac.allowlist_external_dirs.update(
+            set(config[LEGACY_CONF_WHITELIST_EXTERNAL_DIRS])
+        )
+
     # Init whitelist external URL list – make sure to add / to every URL that doesn't
     # already have it so that we can properly test "path ownership"
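
Not part of this commit: a minimal pytest-style sketch of how the legacy-key shim above could be exercised. The hass and caplog fixtures, the config_util import, and the test name are assumptions that mirror the test hunks further down; the asserted warning text comes straight from the hunk above.

# Hypothetical sketch only (not in this commit); assumes Home Assistant's
# standard `hass` and `caplog` test fixtures.
import logging

from homeassistant import config as config_util


async def test_legacy_whitelist_key_still_accepted(hass, caplog):
    """The legacy whitelist_external_dirs key feeds allowlist_external_dirs."""
    with caplog.at_level(logging.WARNING):
        await config_util.async_process_ha_core_config(
            hass, {"whitelist_external_dirs": "/etc"}
        )

    # The legacy key is folded into the renamed attribute...
    assert "/etc" in hass.config.allowlist_external_dirs
    # ...and the deprecation warning added above names both keys.
    assert "whitelist_external_dirs" in caplog.text
    assert "allowlist_external_dirs" in caplog.text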
|
|
@@ -44,7 +44,6 @@ CONF_BASE = "base"
 CONF_BEFORE = "before"
 CONF_BELOW = "below"
 CONF_BINARY_SENSORS = "binary_sensors"
-CONF_BLACKLIST = "blacklist"
 CONF_BRIGHTNESS = "brightness"
 CONF_BROADCAST_ADDRESS = "broadcast_address"
 CONF_BROADCAST_PORT = "broadcast_port"
|
@@ -181,7 +180,8 @@ CONF_WEBHOOK_ID = "webhook_id"
 CONF_WEEKDAY = "weekday"
 CONF_WHILE = "while"
 CONF_WHITELIST = "whitelist"
-CONF_WHITELIST_EXTERNAL_DIRS = "whitelist_external_dirs"
+CONF_ALLOWLIST_EXTERNAL_DIRS = "allowlist_external_dirs"
+LEGACY_CONF_WHITELIST_EXTERNAL_DIRS = "whitelist_external_dirs"
 CONF_WHITE_VALUE = "white_value"
 CONF_XY = "xy"
 CONF_ZONE = "zone"
|
|
@@ -1320,7 +1320,7 @@ class Config:
         self.config_dir: Optional[str] = None

         # List of allowed external dirs to access
-        self.whitelist_external_dirs: Set[str] = set()
+        self.allowlist_external_dirs: Set[str] = set()

         # List of allowed external URLs that integrations may use
         self.allowlist_external_urls: Set[str] = set()
|
@@ -1370,9 +1370,9 @@ class Config:
         except (FileNotFoundError, RuntimeError, PermissionError):
             return False

-        for whitelisted_path in self.whitelist_external_dirs:
+        for allowed_path in self.allowlist_external_dirs:
             try:
-                thepath.relative_to(whitelisted_path)
+                thepath.relative_to(allowed_path)
                 return True
             except ValueError:
                 pass
|
@@ -1397,7 +1397,9 @@ class Config:
             "time_zone": time_zone,
             "components": self.components,
             "config_dir": self.config_dir,
-            "whitelist_external_dirs": self.whitelist_external_dirs,
+            # legacy, backwards compat
+            "whitelist_external_dirs": self.allowlist_external_dirs,
+            "allowlist_external_dirs": self.allowlist_external_dirs,
             "allowlist_external_urls": self.allowlist_external_urls,
             "version": __version__,
             "config_source": self.config_source,
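
An aside, not from this commit: while as_dict() emits both keys, a consumer can prefer the renamed key and fall back to the legacy one. Minimal sketch (assumes a running hass instance):

# Sketch only: prefer the renamed key, fall back to the legacy one while
# as_dict() still emits both for backwards compatibility.
config_dict = hass.config.as_dict()
allowed_dirs = config_dict.get(
    "allowlist_external_dirs", config_dict.get("whitelist_external_dirs", set())
)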
|
|
@@ -151,7 +151,7 @@ async def test_login_flow(manager, provider):
     flow = await provider.async_login_flow({"ip_address": ip_address("127.0.0.1")})
     step = await flow.async_step_init()
     assert step["type"] == "abort"
-    assert step["reason"] == "not_whitelisted"
+    assert step["reason"] == "not_allowed"

     # from trusted network, list users
     flow = await provider.async_login_flow({"ip_address": ip_address("192.168.0.1")})
|
@@ -190,7 +190,7 @@ async def test_trusted_users_login(manager_with_user, provider_with_user):
     )
     step = await flow.async_step_init()
     assert step["type"] == "abort"
-    assert step["reason"] == "not_whitelisted"
+    assert step["reason"] == "not_allowed"

     # from trusted network, list users intersect trusted_users
     flow = await provider_with_user.async_login_flow(
|
@@ -274,7 +274,7 @@ async def test_trusted_group_login(manager_with_user, provider_with_user):
     )
     step = await flow.async_step_init()
     assert step["type"] == "abort"
-    assert step["reason"] == "not_whitelisted"
+    assert step["reason"] == "not_allowed"

     # from trusted network, list users intersect trusted_users
     flow = await provider_with_user.async_login_flow(
|
@@ -313,7 +313,7 @@ async def test_bypass_login_flow(manager_bypass_login, provider_bypass_login):
     )
     step = await flow.async_step_init()
     assert step["type"] == "abort"
-    assert step["reason"] == "not_whitelisted"
+    assert step["reason"] == "not_allowed"

     # from trusted network, only one available user, bypass the login flow
     flow = await provider_bypass_login.async_login_flow(
|
|
@@ -212,6 +212,8 @@ async def test_api_get_config(hass, mock_api_client):
     result["components"] = set(result["components"])
     if "whitelist_external_dirs" in result:
         result["whitelist_external_dirs"] = set(result["whitelist_external_dirs"])
+    if "allowlist_external_dirs" in result:
+        result["allowlist_external_dirs"] = set(result["allowlist_external_dirs"])
     if "allowlist_external_urls" in result:
         result["allowlist_external_urls"] = set(result["allowlist_external_urls"])
|
|
@@ -23,7 +23,7 @@ class TestFileSensor(unittest.TestCase):
     def setup_method(self, method):
         """Set up things to be run when tests are started."""
         self.hass = get_test_home_assistant()
-        self.hass.config.whitelist_external_dirs = {TEST_DIR}
+        self.hass.config.allowlist_external_dirs = {TEST_DIR}

     def teardown_method(self, method):
         """Stop everything that was started."""
||||||
|
@@ -28,7 +28,7 @@ class TestFolderSensor(unittest.TestCase):
         self.hass = get_test_home_assistant()
         if not os.path.isdir(TEST_DIR):
             os.mkdir(TEST_DIR)
-        self.hass.config.whitelist_external_dirs = {TEST_DIR}
+        self.hass.config.allowlist_external_dirs = {TEST_DIR}

     def teardown_method(self, method):
         """Stop everything that was started."""
||||||
|
@@ -19,7 +19,7 @@ async def test_invalid_path_setup(hass):
 async def test_valid_path_setup(hass):
     """Test that a valid path is setup."""
     cwd = os.path.join(os.path.dirname(__file__))
-    hass.config.whitelist_external_dirs = {cwd}
+    hass.config.allowlist_external_dirs = {cwd}
     with patch.object(folder_watcher, "Watcher"):
         assert await async_setup_component(
             hass,
||||||
|
@@ -730,7 +730,7 @@ class TestComponentHistory(unittest.TestCase):
         ):
             # This state will be skipped only different in time
             set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt3)})
-            # This state will be skipped because domain blacklisted
+            # This state will be skipped because domain is excluded
             set_state(zone, "zoning")
             set_state(script_nc, "off")
             states[script_c].append(
||||||
|
@@ -188,7 +188,7 @@ async def _setup(hass, mock_influx_client, config_ext, get_write_api):
     config = {
         "influxdb": {
             "host": "host",
-            "exclude": {"entities": ["fake.blacklisted"], "domains": ["another_fake"]},
+            "exclude": {"entities": ["fake.excluded"], "domains": ["another_fake"]},
         }
     }
     config["influxdb"].update(config_ext)
||||||
|
@@ -54,7 +54,7 @@ def minio_client_event_fixture():

 async def test_minio_services(hass, caplog, minio_client):
     """Test Minio services."""
-    hass.config.whitelist_external_dirs = set("/test")
+    hass.config.allowlist_external_dirs = {"/test"}

     await async_setup_component(
         hass,
||||||
|
@@ -171,8 +171,8 @@ async def test_webhook_handle_get_config(hass, create_registrations, webhook_cli
     json = await resp.json()
     if "components" in json:
         json["components"] = set(json["components"])
-    if "whitelist_external_dirs" in json:
-        json["whitelist_external_dirs"] = set(json["whitelist_external_dirs"])
+    if "allowlist_external_dirs" in json:
+        json["allowlist_external_dirs"] = set(json["allowlist_external_dirs"])

     hass_config = hass.config.as_dict()
|
|
@@ -1228,8 +1228,8 @@ async def test_waypoint_import_simple(hass, context):
     assert wayp is not None


-async def test_waypoint_import_blacklist(hass, context):
-    """Test import of list of waypoints for blacklisted user."""
+async def test_waypoint_import_block(hass, context):
+    """Test import of list of waypoints for blocked user."""
     waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy()
     await send_message(hass, WAYPOINTS_TOPIC_BLOCKED, waypoints_message)
     # Check if it made it into states
||||||
|
@@ -234,6 +234,10 @@ async def test_get_config(hass, websocket_client):
         msg["result"]["whitelist_external_dirs"] = set(
             msg["result"]["whitelist_external_dirs"]
         )
+    if "allowlist_external_dirs" in msg["result"]:
+        msg["result"]["allowlist_external_dirs"] = set(
+            msg["result"]["allowlist_external_dirs"]
+        )
     if "allowlist_external_urls" in msg["result"]:
         msg["result"]["allowlist_external_urls"] = set(
             msg["result"]["allowlist_external_urls"]
|
|
@@ -353,7 +353,7 @@ async def test_loading_configuration_from_storage(hass, hass_storage):
         "version": 1,
     }
     await config_util.async_process_ha_core_config(
-        hass, {"whitelist_external_dirs": "/etc"}
+        hass, {"allowlist_external_dirs": "/etc"}
     )

     assert hass.config.latitude == 55
|
@@ -364,8 +364,8 @@ async def test_loading_configuration_from_storage(hass, hass_storage):
     assert hass.config.time_zone.zone == "Europe/Copenhagen"
     assert hass.config.external_url == "https://www.example.com"
     assert hass.config.internal_url == "http://example.local"
-    assert len(hass.config.whitelist_external_dirs) == 2
-    assert "/etc" in hass.config.whitelist_external_dirs
+    assert len(hass.config.allowlist_external_dirs) == 2
+    assert "/etc" in hass.config.allowlist_external_dirs
     assert hass.config.config_source == SOURCE_STORAGE
|
@@ -387,7 +387,7 @@ async def test_updating_configuration(hass, hass_storage):
     }
     hass_storage["core.config"] = dict(core_data)
     await config_util.async_process_ha_core_config(
-        hass, {"whitelist_external_dirs": "/etc"}
+        hass, {"allowlist_external_dirs": "/etc"}
     )
     await hass.config.async_update(latitude=50)
|
@@ -412,7 +412,7 @@ async def test_override_stored_configuration(hass, hass_storage):
         "version": 1,
     }
     await config_util.async_process_ha_core_config(
-        hass, {"latitude": 60, "whitelist_external_dirs": "/etc"}
+        hass, {"latitude": 60, "allowlist_external_dirs": "/etc"}
     )

     assert hass.config.latitude == 60
|
@@ -421,8 +421,8 @@ async def test_override_stored_configuration(hass, hass_storage):
     assert hass.config.location_name == "Home"
     assert hass.config.units.name == CONF_UNIT_SYSTEM_METRIC
     assert hass.config.time_zone.zone == "Europe/Copenhagen"
-    assert len(hass.config.whitelist_external_dirs) == 2
-    assert "/etc" in hass.config.whitelist_external_dirs
+    assert len(hass.config.allowlist_external_dirs) == 2
+    assert "/etc" in hass.config.allowlist_external_dirs
     assert hass.config.config_source == config_util.SOURCE_YAML
|
@@ -437,7 +437,7 @@ async def test_loading_configuration(hass):
             "name": "Huis",
             CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_IMPERIAL,
             "time_zone": "America/New_York",
-            "whitelist_external_dirs": "/etc",
+            "allowlist_external_dirs": "/etc",
             "external_url": "https://www.example.com",
             "internal_url": "http://example.local",
         },
|
@@ -451,8 +451,8 @@ async def test_loading_configuration(hass):
     assert hass.config.time_zone.zone == "America/New_York"
     assert hass.config.external_url == "https://www.example.com"
     assert hass.config.internal_url == "http://example.local"
-    assert len(hass.config.whitelist_external_dirs) == 2
-    assert "/etc" in hass.config.whitelist_external_dirs
+    assert len(hass.config.allowlist_external_dirs) == 2
+    assert "/etc" in hass.config.allowlist_external_dirs
     assert hass.config.config_source == config_util.SOURCE_YAML
|
|
@@ -915,6 +915,7 @@ class TestConfig(unittest.TestCase):
             "components": set(),
             "config_dir": "/test/ha-config",
             "whitelist_external_dirs": set(),
+            "allowlist_external_dirs": set(),
             "allowlist_external_urls": set(),
             "version": __version__,
             "config_source": "default",
|
@@ -931,7 +932,7 @@ class TestConfig(unittest.TestCase):
         with TemporaryDirectory() as tmp_dir:
             # The created dir is in /tmp. This is a symlink on OS X
             # causing this test to fail unless we resolve path first.
-            self.config.whitelist_external_dirs = {os.path.realpath(tmp_dir)}
+            self.config.allowlist_external_dirs = {os.path.realpath(tmp_dir)}

             test_file = os.path.join(tmp_dir, "test.jpg")
             with open(test_file, "w") as tmp_file:
|
@@ -941,7 +942,7 @@ class TestConfig(unittest.TestCase):
             for path in valid:
                 assert self.config.is_allowed_path(path)

-            self.config.whitelist_external_dirs = {"/home", "/var"}
+            self.config.allowlist_external_dirs = {"/home", "/var"}

             unvalid = [
                 "/hass/config/secure",
|
|