Drop white blacklist pt1 (#37816)

parent ccbc3b5e39
commit 8ed1a29c82

21 changed files with 73 additions and 50 deletions
@@ -190,7 +190,7 @@ class TrustedNetworksLoginFlow(LoginFlow):
             ).async_validate_access(self._ip_address)

         except InvalidAuthError:
-            return self.async_abort(reason="not_whitelisted")
+            return self.async_abort(reason="not_allowed")

         if user_input is not None:
             return await self.async_finish(user_input)
@@ -40,7 +40,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
     if hass.config.is_allowed_path(file_path):
         async_add_entities([FileSensor(name, file_path, unit, value_template)], True)
     else:
-        _LOGGER.error("'%s' is not a whitelisted directory", file_path)
+        _LOGGER.error("'%s' is not an allowed directory", file_path)


 class FileSensor(Entity):
@@ -172,7 +172,7 @@ class LogiCam(Camera):
         filename.hass = self.hass
         stream_file = filename.async_render(variables={ATTR_ENTITY_ID: self.entity_id})

-        # Respect configured path whitelist.
+        # Respect configured allowed paths.
         if not self.hass.config.is_allowed_path(stream_file):
             _LOGGER.error("Can't write %s, no access to path!", stream_file)
             return
@@ -191,7 +191,7 @@ class LogiCam(Camera):
             variables={ATTR_ENTITY_ID: self.entity_id}
         )

-        # Respect configured path whitelist.
+        # Respect configured allowed paths.
         if not self.hass.config.is_allowed_path(snapshot_file):
             _LOGGER.error("Can't write %s, no access to path!", snapshot_file)
             return
@@ -74,10 +74,10 @@ def setup(hass, config):

     include = conf[CONF_INCLUDE]
     exclude = conf[CONF_EXCLUDE]
-    whitelist_e = set(include[CONF_ENTITIES])
-    whitelist_d = set(include[CONF_DOMAINS])
-    blacklist_e = set(exclude[CONF_ENTITIES])
-    blacklist_d = set(exclude[CONF_DOMAINS])
+    include_e = set(include[CONF_ENTITIES])
+    include_d = set(include[CONF_DOMAINS])
+    exclude_e = set(exclude[CONF_ENTITIES])
+    exclude_d = set(exclude[CONF_DOMAINS])

     client_args = {
         "org": conf[CONF_ORG],
@@ -94,13 +94,13 @@ def setup(hass, config):
         if (
             state is None
             or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE)
-            or state.entity_id in blacklist_e
-            or state.domain in blacklist_d
+            or state.entity_id in exclude_e
+            or state.domain in exclude_d
         ):
             return

-        if (whitelist_e and state.entity_id not in whitelist_e) or (
-            whitelist_d and state.domain not in whitelist_d
+        if (include_e and state.entity_id not in include_e) or (
+            include_d and state.domain not in include_d
         ):
             return

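Note: the include/exclude check in this hunk reduces to a small predicate: excluded entities and domains are always dropped, and a non-empty include set acts as a filter. A minimal stand-alone sketch of that logic (the `State` stand-in, the `should_record` name, and the sample data are illustrative, not part of the integration; the unknown/unavailable-state check is omitted):

    # Illustrative sketch only, not the influxdb integration itself.
    from dataclasses import dataclass


    @dataclass
    class State:
        entity_id: str
        state: str

        @property
        def domain(self) -> str:
            return self.entity_id.split(".", 1)[0]


    def should_record(state, include_e, include_d, exclude_e, exclude_d) -> bool:
        """Apply the exclude sets first, then the optional include sets."""
        if state.entity_id in exclude_e or state.domain in exclude_d:
            return False
        # Empty include sets mean "record everything not excluded".
        if include_e and state.entity_id not in include_e:
            return False
        if include_d and state.domain not in include_d:
            return False
        return True


    # Exclusion wins even when the domain is included.
    print(should_record(State("light.porch", "on"), set(), {"light"}, {"light.porch"}, set()))  # False
    print(should_record(State("sensor.power", "230"), set(), set(), set(), set()))  # True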
@@ -301,7 +301,7 @@ async def async_send_message(
     _LOGGER.info("Uploading file from path, %s", path)

     if not hass.config.is_allowed_path(path):
-        raise PermissionError("Could not access file. Not in whitelist")
+        raise PermissionError("Could not access file. Path not allowed")

     with open(path, "rb") as upfile:
         _LOGGER.debug("Reading file %s", path)
@@ -20,6 +20,7 @@ from homeassistant.const import (
     ATTR_ASSUMED_STATE,
     ATTR_FRIENDLY_NAME,
     ATTR_HIDDEN,
+    CONF_ALLOWLIST_EXTERNAL_DIRS,
     CONF_ALLOWLIST_EXTERNAL_URLS,
     CONF_AUTH_MFA_MODULES,
     CONF_AUTH_PROVIDERS,
@@ -39,7 +40,7 @@ from homeassistant.const import (
     CONF_TYPE,
     CONF_UNIT_SYSTEM,
     CONF_UNIT_SYSTEM_IMPERIAL,
-    CONF_WHITELIST_EXTERNAL_DIRS,
+    LEGACY_CONF_WHITELIST_EXTERNAL_DIRS,
     TEMP_CELSIUS,
     __version__,
 )
@@ -183,7 +184,10 @@ CORE_CONFIG_SCHEMA = CUSTOMIZE_CONFIG_SCHEMA.extend(
         CONF_TIME_ZONE: cv.time_zone,
         vol.Optional(CONF_INTERNAL_URL): cv.url,
         vol.Optional(CONF_EXTERNAL_URL): cv.url,
-        vol.Optional(CONF_WHITELIST_EXTERNAL_DIRS): vol.All(
+        vol.Optional(CONF_ALLOWLIST_EXTERNAL_DIRS): vol.All(
             cv.ensure_list, [vol.IsDir()]  # pylint: disable=no-value-for-parameter
         ),
+        vol.Optional(LEGACY_CONF_WHITELIST_EXTERNAL_DIRS): vol.All(
+            cv.ensure_list, [vol.IsDir()]  # pylint: disable=no-value-for-parameter
+        ),
         vol.Optional(CONF_ALLOWLIST_EXTERNAL_URLS): vol.All(cv.ensure_list, [cv.url]),
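For reference, a stand-alone voluptuous sketch of a schema that accepts both the new and the legacy key, roughly as above (the simplified `ensure_list` helper stands in for Home Assistant's `cv.ensure_list`, and plain `str` replaces `vol.IsDir()`):

    # Sketch only: both keys validate to a list of directory strings.
    import voluptuous as vol


    def ensure_list(value):
        """Wrap a bare value in a list (simplified cv.ensure_list)."""
        if value is None:
            return []
        return value if isinstance(value, list) else [value]


    CONF_ALLOWLIST_EXTERNAL_DIRS = "allowlist_external_dirs"
    LEGACY_CONF_WHITELIST_EXTERNAL_DIRS = "whitelist_external_dirs"

    DIRS_SCHEMA = vol.Schema(
        {
            vol.Optional(CONF_ALLOWLIST_EXTERNAL_DIRS): vol.All(ensure_list, [str]),
            vol.Optional(LEGACY_CONF_WHITELIST_EXTERNAL_DIRS): vol.All(ensure_list, [str]),
        },
        extra=vol.ALLOW_EXTRA,
    )

    print(DIRS_SCHEMA({"whitelist_external_dirs": "/etc"}))
    # {'whitelist_external_dirs': ['/etc']}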
@@ -500,9 +504,19 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: Dict) -> None:
         hac.set_time_zone(config[CONF_TIME_ZONE])

     # Init whitelist external dir
-    hac.whitelist_external_dirs = {hass.config.path("www")}
-    if CONF_WHITELIST_EXTERNAL_DIRS in config:
-        hac.whitelist_external_dirs.update(set(config[CONF_WHITELIST_EXTERNAL_DIRS]))
+    hac.allowlist_external_dirs = {hass.config.path("www")}
+    if CONF_ALLOWLIST_EXTERNAL_DIRS in config:
+        hac.allowlist_external_dirs.update(set(config[CONF_ALLOWLIST_EXTERNAL_DIRS]))
+
+    elif LEGACY_CONF_WHITELIST_EXTERNAL_DIRS in config:
+        _LOGGER.warning(
+            "Key %s has been replaced with %s. Please update your config",
+            CONF_ALLOWLIST_EXTERNAL_DIRS,
+            LEGACY_CONF_WHITELIST_EXTERNAL_DIRS,
+        )
+        hac.allowlist_external_dirs.update(
+            set(config[LEGACY_CONF_WHITELIST_EXTERNAL_DIRS])
+        )

     # Init whitelist external URL list – make sure to add / to every URL that doesn't
     # already have it so that we can properly test "path ownership"
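The fallback introduced here follows the usual deprecation pattern: prefer the new key, accept the legacy key with a warning. A minimal sketch under those assumptions (the `resolve_allowed_dirs` helper is illustrative, and it logs the legacy key first, which may differ from the exact message arguments above):

    # Sketch of the "new key wins, legacy key warns" pattern.
    import logging

    _LOGGER = logging.getLogger(__name__)

    CONF_ALLOWLIST_EXTERNAL_DIRS = "allowlist_external_dirs"
    LEGACY_CONF_WHITELIST_EXTERNAL_DIRS = "whitelist_external_dirs"


    def resolve_allowed_dirs(config: dict, default_dir: str) -> set:
        """Return the allowed external dirs, honouring the deprecated key."""
        allowed = {default_dir}
        if CONF_ALLOWLIST_EXTERNAL_DIRS in config:
            allowed.update(config[CONF_ALLOWLIST_EXTERNAL_DIRS])
        elif LEGACY_CONF_WHITELIST_EXTERNAL_DIRS in config:
            _LOGGER.warning(
                "Key %s has been replaced with %s. Please update your config",
                LEGACY_CONF_WHITELIST_EXTERNAL_DIRS,
                CONF_ALLOWLIST_EXTERNAL_DIRS,
            )
            allowed.update(config[LEGACY_CONF_WHITELIST_EXTERNAL_DIRS])
        return allowed


    print(resolve_allowed_dirs({"whitelist_external_dirs": ["/etc"]}, "/config/www"))
    # {'/config/www', '/etc'}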
@@ -44,7 +44,6 @@ CONF_BASE = "base"
 CONF_BEFORE = "before"
 CONF_BELOW = "below"
 CONF_BINARY_SENSORS = "binary_sensors"
-CONF_BLACKLIST = "blacklist"
 CONF_BRIGHTNESS = "brightness"
 CONF_BROADCAST_ADDRESS = "broadcast_address"
 CONF_BROADCAST_PORT = "broadcast_port"
@@ -181,7 +180,8 @@ CONF_WEBHOOK_ID = "webhook_id"
 CONF_WEEKDAY = "weekday"
 CONF_WHILE = "while"
 CONF_WHITELIST = "whitelist"
-CONF_WHITELIST_EXTERNAL_DIRS = "whitelist_external_dirs"
+CONF_ALLOWLIST_EXTERNAL_DIRS = "allowlist_external_dirs"
+LEGACY_CONF_WHITELIST_EXTERNAL_DIRS = "whitelist_external_dirs"
 CONF_WHITE_VALUE = "white_value"
 CONF_XY = "xy"
 CONF_ZONE = "zone"
@@ -1320,7 +1320,7 @@ class Config:
         self.config_dir: Optional[str] = None

         # List of allowed external dirs to access
-        self.whitelist_external_dirs: Set[str] = set()
+        self.allowlist_external_dirs: Set[str] = set()

         # List of allowed external URLs that integrations may use
         self.allowlist_external_urls: Set[str] = set()
@@ -1370,9 +1370,9 @@ class Config:
         except (FileNotFoundError, RuntimeError, PermissionError):
             return False

-        for whitelisted_path in self.whitelist_external_dirs:
+        for allowed_path in self.allowlist_external_dirs:
             try:
-                thepath.relative_to(whitelisted_path)
+                thepath.relative_to(allowed_path)
                 return True
             except ValueError:
                 pass
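The check above hinges on `pathlib.Path.relative_to`, which raises `ValueError` when the candidate path is not inside the allowed directory. A simplified, self-contained sketch of the same idea (not the `Config` method itself; the exists/parent resolution of the real code is omitted):

    # Simplified sketch: a path is allowed if it resolves inside an allowed dir.
    from pathlib import Path


    def is_allowed_path(path: str, allowlist_external_dirs: set) -> bool:
        try:
            thepath = Path(path).resolve()
        except (RuntimeError, PermissionError):
            return False
        for allowed_path in allowlist_external_dirs:
            try:
                thepath.relative_to(allowed_path)
                return True
            except ValueError:
                # Not under this allowed dir, try the next one.
                continue
        return False


    print(is_allowed_path("/etc/hosts", {"/etc"}))      # usually True on Linux
    print(is_allowed_path("/etc/../root/x", {"/etc"}))  # False, resolves outside /etc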
@@ -1397,7 +1397,9 @@ class Config:
             "time_zone": time_zone,
             "components": self.components,
             "config_dir": self.config_dir,
-            "whitelist_external_dirs": self.whitelist_external_dirs,
+            # legacy, backwards compat
+            "whitelist_external_dirs": self.allowlist_external_dirs,
+            "allowlist_external_dirs": self.allowlist_external_dirs,
             "allowlist_external_urls": self.allowlist_external_urls,
             "version": __version__,
             "config_source": self.config_source,
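Because `as_dict` now publishes the same set under both keys, API consumers can migrate at their own pace. A hypothetical consumer-side snippet reading the new key with a legacy fallback:

    # Hypothetical consumer of the config API: prefer the new key.
    config_response = {
        "whitelist_external_dirs": ["/config/www"],  # legacy, backwards compat
        "allowlist_external_dirs": ["/config/www"],
    }

    allowed_dirs = set(
        config_response.get(
            "allowlist_external_dirs",
            config_response.get("whitelist_external_dirs", []),
        )
    )
    print(allowed_dirs)  # {'/config/www'}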
@@ -151,7 +151,7 @@ async def test_login_flow(manager, provider):
     flow = await provider.async_login_flow({"ip_address": ip_address("127.0.0.1")})
     step = await flow.async_step_init()
     assert step["type"] == "abort"
-    assert step["reason"] == "not_whitelisted"
+    assert step["reason"] == "not_allowed"

     # from trusted network, list users
     flow = await provider.async_login_flow({"ip_address": ip_address("192.168.0.1")})
@@ -190,7 +190,7 @@ async def test_trusted_users_login(manager_with_user, provider_with_user):
     )
     step = await flow.async_step_init()
     assert step["type"] == "abort"
-    assert step["reason"] == "not_whitelisted"
+    assert step["reason"] == "not_allowed"

     # from trusted network, list users intersect trusted_users
     flow = await provider_with_user.async_login_flow(
@@ -274,7 +274,7 @@ async def test_trusted_group_login(manager_with_user, provider_with_user):
     )
     step = await flow.async_step_init()
     assert step["type"] == "abort"
-    assert step["reason"] == "not_whitelisted"
+    assert step["reason"] == "not_allowed"

     # from trusted network, list users intersect trusted_users
     flow = await provider_with_user.async_login_flow(
@@ -313,7 +313,7 @@ async def test_bypass_login_flow(manager_bypass_login, provider_bypass_login):
     )
     step = await flow.async_step_init()
     assert step["type"] == "abort"
-    assert step["reason"] == "not_whitelisted"
+    assert step["reason"] == "not_allowed"

     # from trusted network, only one available user, bypass the login flow
     flow = await provider_bypass_login.async_login_flow(
@@ -212,6 +212,8 @@ async def test_api_get_config(hass, mock_api_client):
     result["components"] = set(result["components"])
     if "whitelist_external_dirs" in result:
         result["whitelist_external_dirs"] = set(result["whitelist_external_dirs"])
+    if "allowlist_external_dirs" in result:
+        result["allowlist_external_dirs"] = set(result["allowlist_external_dirs"])
     if "allowlist_external_urls" in result:
         result["allowlist_external_urls"] = set(result["allowlist_external_urls"])

@@ -23,7 +23,7 @@ class TestFileSensor(unittest.TestCase):
     def setup_method(self, method):
         """Set up things to be run when tests are started."""
         self.hass = get_test_home_assistant()
-        self.hass.config.whitelist_external_dirs = {TEST_DIR}
+        self.hass.config.allowlist_external_dirs = {TEST_DIR}

     def teardown_method(self, method):
         """Stop everything that was started."""
@@ -28,7 +28,7 @@ class TestFolderSensor(unittest.TestCase):
         self.hass = get_test_home_assistant()
         if not os.path.isdir(TEST_DIR):
             os.mkdir(TEST_DIR)
-        self.hass.config.whitelist_external_dirs = {TEST_DIR}
+        self.hass.config.allowlist_external_dirs = {TEST_DIR}

     def teardown_method(self, method):
         """Stop everything that was started."""
@@ -19,7 +19,7 @@ async def test_invalid_path_setup(hass):
 async def test_valid_path_setup(hass):
     """Test that a valid path is setup."""
     cwd = os.path.join(os.path.dirname(__file__))
-    hass.config.whitelist_external_dirs = {cwd}
+    hass.config.allowlist_external_dirs = {cwd}
     with patch.object(folder_watcher, "Watcher"):
         assert await async_setup_component(
             hass,
@@ -730,7 +730,7 @@ class TestComponentHistory(unittest.TestCase):
         ):
             # This state will be skipped only different in time
             set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt3)})
-            # This state will be skipped because domain blacklisted
+            # This state will be skipped because domain is excluded
             set_state(zone, "zoning")
             set_state(script_nc, "off")
             states[script_c].append(
@@ -188,7 +188,7 @@ async def _setup(hass, mock_influx_client, config_ext, get_write_api):
     config = {
         "influxdb": {
             "host": "host",
-            "exclude": {"entities": ["fake.blacklisted"], "domains": ["another_fake"]},
+            "exclude": {"entities": ["fake.excluded"], "domains": ["another_fake"]},
         }
     }
     config["influxdb"].update(config_ext)
@@ -54,7 +54,7 @@ def minio_client_event_fixture():

 async def test_minio_services(hass, caplog, minio_client):
     """Test Minio services."""
-    hass.config.whitelist_external_dirs = set("/test")
+    hass.config.allowlist_external_dirs = {"/test"}

     await async_setup_component(
         hass,
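Worth noting: the old test value `set("/test")` built a set of characters, so this fix changes behaviour as well as terminology. A quick illustration:

    # set() over a string yields its characters, not a one-element set.
    print(set("/test"))  # {'/', 't', 'e', 's'} in some order
    print({"/test"})     # {'/test'}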
@@ -171,8 +171,8 @@ async def test_webhook_handle_get_config(hass, create_registrations, webhook_client):
     json = await resp.json()
     if "components" in json:
         json["components"] = set(json["components"])
-    if "whitelist_external_dirs" in json:
-        json["whitelist_external_dirs"] = set(json["whitelist_external_dirs"])
+    if "allowlist_external_dirs" in json:
+        json["allowlist_external_dirs"] = set(json["allowlist_external_dirs"])

     hass_config = hass.config.as_dict()

@@ -1228,8 +1228,8 @@ async def test_waypoint_import_simple(hass, context):
     assert wayp is not None


-async def test_waypoint_import_blacklist(hass, context):
-    """Test import of list of waypoints for blacklisted user."""
+async def test_waypoint_import_block(hass, context):
+    """Test import of list of waypoints for blocked user."""
     waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy()
     await send_message(hass, WAYPOINTS_TOPIC_BLOCKED, waypoints_message)
     # Check if it made it into states
@@ -234,6 +234,10 @@ async def test_get_config(hass, websocket_client):
         msg["result"]["whitelist_external_dirs"] = set(
             msg["result"]["whitelist_external_dirs"]
         )
+    if "allowlist_external_dirs" in msg["result"]:
+        msg["result"]["allowlist_external_dirs"] = set(
+            msg["result"]["allowlist_external_dirs"]
+        )
     if "allowlist_external_urls" in msg["result"]:
         msg["result"]["allowlist_external_urls"] = set(
             msg["result"]["allowlist_external_urls"]
@@ -353,7 +353,7 @@ async def test_loading_configuration_from_storage(hass, hass_storage):
         "version": 1,
     }
     await config_util.async_process_ha_core_config(
-        hass, {"whitelist_external_dirs": "/etc"}
+        hass, {"allowlist_external_dirs": "/etc"}
     )

     assert hass.config.latitude == 55
@@ -364,8 +364,8 @@ async def test_loading_configuration_from_storage(hass, hass_storage):
     assert hass.config.time_zone.zone == "Europe/Copenhagen"
     assert hass.config.external_url == "https://www.example.com"
     assert hass.config.internal_url == "http://example.local"
-    assert len(hass.config.whitelist_external_dirs) == 2
-    assert "/etc" in hass.config.whitelist_external_dirs
+    assert len(hass.config.allowlist_external_dirs) == 2
+    assert "/etc" in hass.config.allowlist_external_dirs
     assert hass.config.config_source == SOURCE_STORAGE


@@ -387,7 +387,7 @@ async def test_updating_configuration(hass, hass_storage):
     }
     hass_storage["core.config"] = dict(core_data)
     await config_util.async_process_ha_core_config(
-        hass, {"whitelist_external_dirs": "/etc"}
+        hass, {"allowlist_external_dirs": "/etc"}
     )
     await hass.config.async_update(latitude=50)

@@ -412,7 +412,7 @@ async def test_override_stored_configuration(hass, hass_storage):
         "version": 1,
     }
     await config_util.async_process_ha_core_config(
-        hass, {"latitude": 60, "whitelist_external_dirs": "/etc"}
+        hass, {"latitude": 60, "allowlist_external_dirs": "/etc"}
     )

     assert hass.config.latitude == 60
@@ -421,8 +421,8 @@ async def test_override_stored_configuration(hass, hass_storage):
     assert hass.config.location_name == "Home"
     assert hass.config.units.name == CONF_UNIT_SYSTEM_METRIC
     assert hass.config.time_zone.zone == "Europe/Copenhagen"
-    assert len(hass.config.whitelist_external_dirs) == 2
-    assert "/etc" in hass.config.whitelist_external_dirs
+    assert len(hass.config.allowlist_external_dirs) == 2
+    assert "/etc" in hass.config.allowlist_external_dirs
     assert hass.config.config_source == config_util.SOURCE_YAML


@@ -437,7 +437,7 @@ async def test_loading_configuration(hass):
             "name": "Huis",
             CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_IMPERIAL,
             "time_zone": "America/New_York",
-            "whitelist_external_dirs": "/etc",
+            "allowlist_external_dirs": "/etc",
             "external_url": "https://www.example.com",
             "internal_url": "http://example.local",
         },
@@ -451,8 +451,8 @@ async def test_loading_configuration(hass):
     assert hass.config.time_zone.zone == "America/New_York"
     assert hass.config.external_url == "https://www.example.com"
     assert hass.config.internal_url == "http://example.local"
-    assert len(hass.config.whitelist_external_dirs) == 2
-    assert "/etc" in hass.config.whitelist_external_dirs
+    assert len(hass.config.allowlist_external_dirs) == 2
+    assert "/etc" in hass.config.allowlist_external_dirs
     assert hass.config.config_source == config_util.SOURCE_YAML


@@ -915,6 +915,7 @@ class TestConfig(unittest.TestCase):
             "components": set(),
             "config_dir": "/test/ha-config",
             "whitelist_external_dirs": set(),
+            "allowlist_external_dirs": set(),
             "allowlist_external_urls": set(),
             "version": __version__,
             "config_source": "default",
@@ -931,7 +932,7 @@ class TestConfig(unittest.TestCase):
         with TemporaryDirectory() as tmp_dir:
             # The created dir is in /tmp. This is a symlink on OS X
             # causing this test to fail unless we resolve path first.
-            self.config.whitelist_external_dirs = {os.path.realpath(tmp_dir)}
+            self.config.allowlist_external_dirs = {os.path.realpath(tmp_dir)}

             test_file = os.path.join(tmp_dir, "test.jpg")
             with open(test_file, "w") as tmp_file:
@@ -941,7 +942,7 @@ class TestConfig(unittest.TestCase):
             for path in valid:
                 assert self.config.is_allowed_path(path)

-            self.config.whitelist_external_dirs = {"/home", "/var"}
+            self.config.allowlist_external_dirs = {"/home", "/var"}

             unvalid = [
                 "/hass/config/secure",