Enable some more bandit checks (#30857)
* Enable B108 (hardcoded tmp dir), address findings
* Enable B602 (subprocess popen with shell), address findings
* Enable B604 (start process with shell), address findings
* Enable B306 (mktemp), B307 (eval), and B325 (tempnam), no issues to address
parent 6cf20fc7fa
commit 5e2ba2eb77

21 changed files with 110 additions and 91 deletions
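The B602/B604 findings below were resolved in one of two ways, summarized in this sketch. It is illustrative only: the helper names are invented here, and the second pattern assumes the heyu binary is installed.

import subprocess

def run_user_command(command):
    # Pattern 1: the shell is intentional (the command_line platforms run
    # user-configured command strings), so the finding is suppressed inline.
    return subprocess.call(command, shell=True) == 0  # nosec # shell by design

def get_unit_status(code):
    # Pattern 2: the shell was incidental, so the command string becomes an
    # argument list and shell=True is dropped (see the heyu change below).
    output = subprocess.check_output(["heyu", "onstate", code])
    return int(output.decode("utf-8")[0])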
@@ -91,7 +91,7 @@ class CommandCover(CoverDevice):
         """Execute the actual commands."""
         _LOGGER.info("Running command: %s", command)

-        success = subprocess.call(command, shell=True) == 0
+        success = subprocess.call(command, shell=True) == 0  # nosec # shell by design

         if not success:
             _LOGGER.error("Command failed: %s", command)
@@ -104,7 +104,9 @@ class CommandCover(CoverDevice):
         _LOGGER.info("Running state command: %s", command)

         try:
-            return_value = subprocess.check_output(command, shell=True)
+            return_value = subprocess.check_output(
+                command, shell=True  # nosec # shell by design
+            )
             return return_value.strip().decode("utf-8")
         except subprocess.CalledProcessError:
             _LOGGER.error("Command failed: %s", command)

@@ -33,7 +33,10 @@ class CommandLineNotificationService(BaseNotificationService):
         """Send a message to a command line."""
         try:
             proc = subprocess.Popen(
-                self.command, universal_newlines=True, stdin=subprocess.PIPE, shell=True
+                self.command,
+                universal_newlines=True,
+                stdin=subprocess.PIPE,
+                shell=True,  # nosec # shell by design
             )
             proc.communicate(input=message)
             if proc.returncode != 0:

@@ -168,15 +168,14 @@ class CommandSensorData:

         if rendered_args == args:
             # No template used. default behavior
-            shell = True
+            pass
         else:
             # Template used. Construct the string used in the shell
             command = str(" ".join([prog] + shlex.split(rendered_args)))
-            shell = True
         try:
             _LOGGER.debug("Running command: %s", command)
             return_value = subprocess.check_output(
-                command, shell=shell, timeout=self.timeout
+                command, shell=True, timeout=self.timeout  # nosec # shell by design
             )
             self.value = return_value.strip().decode("utf-8")
         except subprocess.CalledProcessError:

@@ -94,7 +94,7 @@ class CommandSwitch(SwitchDevice):
         """Execute the actual commands."""
         _LOGGER.info("Running command: %s", command)

-        success = subprocess.call(command, shell=True) == 0
+        success = subprocess.call(command, shell=True) == 0  # nosec # shell by design

         if not success:
             _LOGGER.error("Command failed: %s", command)
@@ -107,7 +107,9 @@ class CommandSwitch(SwitchDevice):
         _LOGGER.info("Running state command: %s", command)

         try:
-            return_value = subprocess.check_output(command, shell=True)
+            return_value = subprocess.check_output(
+                command, shell=True  # nosec # shell by design
+            )
             return return_value.strip().decode("utf-8")
         except subprocess.CalledProcessError:
             _LOGGER.error("Command failed: %s", command)
@@ -116,7 +118,7 @@ class CommandSwitch(SwitchDevice):
     def _query_state_code(command):
         """Execute state command for return code."""
         _LOGGER.info("Running state command: %s", command)
-        return subprocess.call(command, shell=True) == 0
+        return subprocess.call(command, shell=True) == 0  # nosec # shell by design

     @property
     def should_poll(self):

@@ -10,6 +10,7 @@ import voluptuous as vol
 from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME
 from homeassistant.helpers import discovery
 import homeassistant.helpers.config_validation as cv
+from homeassistant.helpers.storage import STORAGE_DIR

 _LOGGER = logging.getLogger(__name__)

@@ -54,7 +55,7 @@ CONFIG_SCHEMA = vol.Schema(
 def setup(hass, config):
     """Create the ViCare component."""
     conf = config[DOMAIN]
-    params = {"token_file": "/tmp/vicare_token.save"}
+    params = {"token_file": hass.config.path(STORAGE_DIR, "vicare_token.save")}
     if conf.get(CONF_CIRCUIT) is not None:
         params["circuit"] = conf[CONF_CIRCUIT]

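A note on the ViCare change: hass.config.path() joins its arguments onto the Home Assistant configuration directory, so the token file moves out of world-writable /tmp and into HA's own storage area. A minimal sketch of what the new path resolves to; the /config directory is assumed here for illustration only.

import os

STORAGE_DIR = ".storage"  # value of homeassistant.helpers.storage.STORAGE_DIR
config_dir = "/config"  # assumed configuration directory, illustration only

# Roughly what hass.config.path(STORAGE_DIR, "vicare_token.save") returns:
token_file = os.path.join(config_dir, STORAGE_DIR, "vicare_token.save")
print(token_file)  # /config/.storage/vicare_token.save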
@@ -34,7 +34,7 @@ def x10_command(command):

 def get_unit_status(code):
     """Get on/off status for given unit."""
-    output = check_output(f"heyu onstate {code}", shell=True)
+    output = check_output(["heyu", "onstate", code])
     return int(output.decode("utf-8")[0])

@@ -25,7 +25,7 @@ _LOGGER = logging.getLogger(__name__)

 DEFAULT_BRAND = "YI Home Camera"
 DEFAULT_PASSWORD = ""
-DEFAULT_PATH = "/tmp/sd/record"
+DEFAULT_PATH = "/tmp/sd/record"  # nosec
 DEFAULT_PORT = 21
 DEFAULT_USERNAME = "root"
 DEFAULT_ARGUMENTS = "-pred 1"

@@ -72,20 +72,20 @@ def main():
     if args.template != "integration":
         generate.generate(args.template, info)

-    pipe_null = "" if args.develop else "> /dev/null"
+    pipe_null = {} if args.develop else {"stdout": subprocess.DEVNULL}

     print("Running hassfest to pick up new information.")
-    subprocess.run(f"python -m script.hassfest {pipe_null}", shell=True)
+    subprocess.run(["python", "-m", "script.hassfest"], **pipe_null)
     print()

     print("Running gen_requirements_all to pick up new information.")
-    subprocess.run(f"python -m script.gen_requirements_all {pipe_null}", shell=True)
+    subprocess.run(["python", "-m", "script.gen_requirements_all"], **pipe_null)
     print()

     if args.develop:
         print("Running tests")
         print(f"$ pytest -vvv tests/components/{info.domain}")
-        subprocess.run(f"pytest -vvv tests/components/{info.domain}", shell=True)
+        subprocess.run(["pytest", "-vvv", f"tests/components/{info.domain}"])
         print()

     docs.print_relevant_docs(args.template, info)

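The scaffold change above swaps shell redirection ("> /dev/null") for a kwargs dict unpacked into subprocess.run, so output can be silenced without involving a shell. A standalone sketch of the pattern; the "python --version" command is only an example.

import subprocess

develop = False

# Suppress stdout via a keyword argument instead of a "> /dev/null" shell string.
quiet = {} if develop else {"stdout": subprocess.DEVNULL}
subprocess.run(["python", "--version"], **quiet)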
@@ -1,6 +1,9 @@
 # https://bandit.readthedocs.io/en/latest/config.html

 tests:
+  - B108
   - B306
+  - B307
   - B313
   - B314
   - B315
@@ -9,3 +12,6 @@ tests:
   - B318
   - B319
   - B320
+  - B325
+  - B602
+  - B604

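With the IDs above added to the tests: include-list, Bandit runs only the listed checks. Assuming the file sits at tests/bandit.yaml as in the Home Assistant repository, a scan can be invoked with something like `bandit -r homeassistant -c tests/bandit.yaml`; findings that are intentional are then suppressed per line with `# nosec`, as the diffs in this commit do.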
@@ -54,7 +54,7 @@ def mock_lj(hass):
     mock_lj.on_switch_pressed.side_effect = on_switch_pressed
     mock_lj.on_switch_released.side_effect = on_switch_released

-    config = {"litejet": {"port": "/tmp/this_will_be_mocked"}}
+    config = {"litejet": {"port": "/dev/serial/by-id/mock-litejet"}}
     assert hass.loop.run_until_complete(
         setup.async_setup_component(hass, litejet.DOMAIN, config)
     )

@@ -102,7 +102,7 @@ async def test_snapshot_service(hass, mock_camera):
     with patch(
         "homeassistant.components.camera.open", mopen, create=True
     ), patch.object(hass.config, "is_allowed_path", return_value=True):
-        common.async_snapshot(hass, "/tmp/bla")
+        common.async_snapshot(hass, "/test/snapshot.jpg")
         await hass.async_block_till_done()

     mock_write = mopen().write

@@ -44,7 +44,9 @@ def test_query_state_value(rs):
     result = rs._query_state_value("runme")
     assert "foo bar" == result
     assert mock_run.call_count == 1
-    assert mock_run.call_args == mock.call("runme", shell=True)
+    assert mock_run.call_args == mock.call(
+        "runme", shell=True,  # nosec # shell by design
+    )


 async def test_state_value(hass):

@@ -97,7 +97,7 @@ class TestBanSensor(unittest.TestCase):

     def test_single_ban(self):
         """Test that log is parsed correctly for single ban."""
-        log_parser = BanLogParser("/tmp")
+        log_parser = BanLogParser("/test/fail2ban.log")
         sensor = BanSensor("fail2ban", "jail_one", log_parser)
         assert sensor.name == "fail2ban jail_one"
         mock_fh = MockOpen(read_data=fake_log("single_ban"))
@@ -112,7 +112,7 @@ class TestBanSensor(unittest.TestCase):

     def test_ipv6_ban(self):
         """Test that log is parsed correctly for IPV6 bans."""
-        log_parser = BanLogParser("/tmp")
+        log_parser = BanLogParser("/test/fail2ban.log")
         sensor = BanSensor("fail2ban", "jail_one", log_parser)
         assert sensor.name == "fail2ban jail_one"
         mock_fh = MockOpen(read_data=fake_log("ipv6_ban"))
@@ -127,7 +127,7 @@ class TestBanSensor(unittest.TestCase):

     def test_multiple_ban(self):
         """Test that log is parsed correctly for multiple ban."""
-        log_parser = BanLogParser("/tmp")
+        log_parser = BanLogParser("/test/fail2ban.log")
         sensor = BanSensor("fail2ban", "jail_one", log_parser)
         assert sensor.name == "fail2ban jail_one"
         mock_fh = MockOpen(read_data=fake_log("multi_ban"))
@@ -148,7 +148,7 @@ class TestBanSensor(unittest.TestCase):

     def test_unban_all(self):
         """Test that log is parsed correctly when unbanning."""
-        log_parser = BanLogParser("/tmp")
+        log_parser = BanLogParser("/test/fail2ban.log")
         sensor = BanSensor("fail2ban", "jail_one", log_parser)
         assert sensor.name == "fail2ban jail_one"
         mock_fh = MockOpen(read_data=fake_log("unban_all"))
@@ -166,7 +166,7 @@ class TestBanSensor(unittest.TestCase):

     def test_unban_one(self):
         """Test that log is parsed correctly when unbanning one ip."""
-        log_parser = BanLogParser("/tmp")
+        log_parser = BanLogParser("/test/fail2ban.log")
         sensor = BanSensor("fail2ban", "jail_one", log_parser)
         assert sensor.name == "fail2ban jail_one"
         mock_fh = MockOpen(read_data=fake_log("unban_one"))
@@ -184,7 +184,7 @@ class TestBanSensor(unittest.TestCase):

     def test_multi_jail(self):
         """Test that log is parsed correctly when using multiple jails."""
-        log_parser = BanLogParser("/tmp")
+        log_parser = BanLogParser("/test/fail2ban.log")
         sensor1 = BanSensor("fail2ban", "jail_one", log_parser)
         sensor2 = BanSensor("fail2ban", "jail_two", log_parser)
         assert sensor1.name == "fail2ban jail_one"
@@ -205,7 +205,7 @@ class TestBanSensor(unittest.TestCase):

     def test_ban_active_after_update(self):
         """Test that ban persists after subsequent update."""
-        log_parser = BanLogParser("/tmp")
+        log_parser = BanLogParser("/test/fail2ban.log")
         sensor = BanSensor("fail2ban", "jail_one", log_parser)
         assert sensor.name == "fail2ban jail_one"
         mock_fh = MockOpen(read_data=fake_log("single_ban"))

@@ -50,7 +50,9 @@ class TestLiteJetLight(unittest.TestCase):
         self.mock_lj.on_load_deactivated.side_effect = on_load_deactivated

         assert setup.setup_component(
-            self.hass, litejet.DOMAIN, {"litejet": {"port": "/tmp/this_will_be_mocked"}}
+            self.hass,
+            litejet.DOMAIN,
+            {"litejet": {"port": "/dev/serial/by-id/mock-litejet"}},
         )
         self.hass.block_till_done()

@@ -37,7 +37,9 @@ class TestLiteJetScene(unittest.TestCase):
         self.mock_lj.get_scene_name.side_effect = get_scene_name

         assert setup.setup_component(
-            self.hass, litejet.DOMAIN, {"litejet": {"port": "/tmp/this_will_be_mocked"}}
+            self.hass,
+            litejet.DOMAIN,
+            {"litejet": {"port": "/dev/serial/by-id/mock-litejet"}},
         )
         self.hass.block_till_done()

@@ -48,7 +48,7 @@ class TestLiteJetSwitch(unittest.TestCase):
         self.mock_lj.on_switch_pressed.side_effect = on_switch_pressed
         self.mock_lj.on_switch_released.side_effect = on_switch_released

-        config = {"litejet": {"port": "/tmp/this_will_be_mocked"}}
+        config = {"litejet": {"port": "/dev/serial/by-id/mock-litejet"}}
         if method == self.test_include_switches_False:
             config["litejet"]["include_switches"] = False
         elif method != self.test_include_switches_unspecified:

@@ -55,7 +55,7 @@ def minio_client_event_fixture():

 async def test_minio_services(hass, caplog, minio_client):
     """Test Minio services."""
-    hass.config.whitelist_external_dirs = set("/tmp")
+    hass.config.whitelist_external_dirs = set("/test")

     await async_setup_component(
         hass,
@@ -80,22 +80,22 @@ async def test_minio_services(hass, caplog, minio_client):
     await hass.services.async_call(
         DOMAIN,
         "put",
-        {"file_path": "/tmp/some_file", "key": "some_key", "bucket": "some_bucket"},
+        {"file_path": "/test/some_file", "key": "some_key", "bucket": "some_bucket"},
         blocking=True,
     )
     assert minio_client.fput_object.call_args == call(
-        "some_bucket", "some_key", "/tmp/some_file"
+        "some_bucket", "some_key", "/test/some_file"
     )
     minio_client.reset_mock()

     await hass.services.async_call(
         DOMAIN,
         "get",
-        {"file_path": "/tmp/some_file", "key": "some_key", "bucket": "some_bucket"},
+        {"file_path": "/test/some_file", "key": "some_key", "bucket": "some_bucket"},
         blocking=True,
     )
     assert minio_client.fget_object.call_args == call(
-        "some_bucket", "some_key", "/tmp/some_file"
+        "some_bucket", "some_key", "/test/some_file"
     )
     minio_client.reset_mock()

@@ -137,7 +137,7 @@ def test_config_verify_ssl_but_no_ssl_enabled(hass, mock_session_send):
             CONF_HOST: "tomato-router",
             CONF_PORT: 1234,
             CONF_SSL: False,
-            CONF_VERIFY_SSL: "/tmp/tomato.crt",
+            CONF_VERIFY_SSL: "/test/tomato.crt",
             CONF_USERNAME: "foo",
             CONF_PASSWORD: "password",
             tomato.CONF_HTTP_ID: "1234567890",
@@ -171,7 +171,7 @@ def test_config_valid_verify_ssl_path(hass, mock_session_send):
             CONF_HOST: "tomato-router",
             CONF_PORT: 1234,
             CONF_SSL: True,
-            CONF_VERIFY_SSL: "/tmp/tomato.crt",
+            CONF_VERIFY_SSL: "/test/tomato.crt",
             CONF_USERNAME: "bar",
             CONF_PASSWORD: "foo",
             tomato.CONF_HTTP_ID: "0987654321",
@@ -189,7 +189,7 @@ def test_config_valid_verify_ssl_path(hass, mock_session_send):
     assert "exec=devlist" in result.req.body
     assert mock_session_send.call_count == 1
     assert mock_session_send.mock_calls[0] == mock.call(
-        result.req, timeout=3, verify="/tmp/tomato.crt"
+        result.req, timeout=3, verify="/test/tomato.crt"
     )

@@ -347,7 +347,7 @@ async def test_loading_configuration_from_storage(hass, hass_storage):
         "version": 1,
     }
     await config_util.async_process_ha_core_config(
-        hass, {"whitelist_external_dirs": "/tmp"}
+        hass, {"whitelist_external_dirs": "/etc"}
     )

     assert hass.config.latitude == 55
@@ -357,7 +357,7 @@ async def test_loading_configuration_from_storage(hass, hass_storage):
     assert hass.config.units.name == CONF_UNIT_SYSTEM_METRIC
     assert hass.config.time_zone.zone == "Europe/Copenhagen"
     assert len(hass.config.whitelist_external_dirs) == 2
-    assert "/tmp" in hass.config.whitelist_external_dirs
+    assert "/etc" in hass.config.whitelist_external_dirs
     assert hass.config.config_source == SOURCE_STORAGE


@@ -377,7 +377,7 @@ async def test_updating_configuration(hass, hass_storage):
     }
     hass_storage["core.config"] = dict(core_data)
     await config_util.async_process_ha_core_config(
-        hass, {"whitelist_external_dirs": "/tmp"}
+        hass, {"whitelist_external_dirs": "/etc"}
     )
     await hass.config.async_update(latitude=50)

@@ -402,7 +402,7 @@ async def test_override_stored_configuration(hass, hass_storage):
         "version": 1,
     }
     await config_util.async_process_ha_core_config(
-        hass, {"latitude": 60, "whitelist_external_dirs": "/tmp"}
+        hass, {"latitude": 60, "whitelist_external_dirs": "/etc"}
     )

     assert hass.config.latitude == 60
@@ -412,7 +412,7 @@ async def test_override_stored_configuration(hass, hass_storage):
     assert hass.config.units.name == CONF_UNIT_SYSTEM_METRIC
     assert hass.config.time_zone.zone == "Europe/Copenhagen"
     assert len(hass.config.whitelist_external_dirs) == 2
-    assert "/tmp" in hass.config.whitelist_external_dirs
+    assert "/etc" in hass.config.whitelist_external_dirs
     assert hass.config.config_source == config_util.SOURCE_YAML


@@ -427,7 +427,7 @@ async def test_loading_configuration(hass):
             "name": "Huis",
             CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_IMPERIAL,
             "time_zone": "America/New_York",
-            "whitelist_external_dirs": "/tmp",
+            "whitelist_external_dirs": "/etc",
         },
     )

@@ -438,7 +438,7 @@ async def test_loading_configuration(hass):
     assert hass.config.units.name == CONF_UNIT_SYSTEM_IMPERIAL
     assert hass.config.time_zone.zone == "America/New_York"
     assert len(hass.config.whitelist_external_dirs) == 2
-    assert "/tmp" in hass.config.whitelist_external_dirs
+    assert "/etc" in hass.config.whitelist_external_dirs
     assert hass.config.config_source == config_util.SOURCE_YAML

@@ -881,17 +881,17 @@ class TestConfig(unittest.TestCase):

     def test_path_with_file(self):
         """Test get_config_path method."""
-        self.config.config_dir = "/tmp/ha-config"
-        assert "/tmp/ha-config/test.conf" == self.config.path("test.conf")
+        self.config.config_dir = "/test/ha-config"
+        assert "/test/ha-config/test.conf" == self.config.path("test.conf")

     def test_path_with_dir_and_file(self):
         """Test get_config_path method."""
-        self.config.config_dir = "/tmp/ha-config"
-        assert "/tmp/ha-config/dir/test.conf" == self.config.path("dir", "test.conf")
+        self.config.config_dir = "/test/ha-config"
+        assert "/test/ha-config/dir/test.conf" == self.config.path("dir", "test.conf")

     def test_as_dict(self):
         """Test as dict."""
-        self.config.config_dir = "/tmp/ha-config"
+        self.config.config_dir = "/test/ha-config"
         expected = {
             "latitude": 0,
             "longitude": 0,
@@ -900,7 +900,7 @@ class TestConfig(unittest.TestCase):
             "location_name": "Home",
             "time_zone": "UTC",
             "components": set(),
-            "config_dir": "/tmp/ha-config",
+            "config_dir": "/test/ha-config",
             "whitelist_external_dirs": set(),
             "version": __version__,
             "config_source": "default",

@@ -97,10 +97,10 @@ def test_include_yaml():
 @patch("homeassistant.util.yaml.loader.os.walk")
 def test_include_dir_list(mock_walk):
     """Test include dir list yaml."""
-    mock_walk.return_value = [["/tmp", [], ["two.yaml", "one.yaml"]]]
+    mock_walk.return_value = [["/test", [], ["two.yaml", "one.yaml"]]]

-    with patch_yaml_files({"/tmp/one.yaml": "one", "/tmp/two.yaml": "two"}):
-        conf = "key: !include_dir_list /tmp"
+    with patch_yaml_files({"/test/one.yaml": "one", "/test/two.yaml": "two"}):
+        conf = "key: !include_dir_list /test"
         with io.StringIO(conf) as file:
             doc = yaml_loader.yaml.safe_load(file)
             assert doc["key"] == sorted(["one", "two"])
@@ -110,19 +110,19 @@ def test_include_dir_list(mock_walk):
 def test_include_dir_list_recursive(mock_walk):
     """Test include dir recursive list yaml."""
     mock_walk.return_value = [
-        ["/tmp", ["tmp2", ".ignore", "ignore"], ["zero.yaml"]],
-        ["/tmp/tmp2", [], ["one.yaml", "two.yaml"]],
-        ["/tmp/ignore", [], [".ignore.yaml"]],
+        ["/test", ["tmp2", ".ignore", "ignore"], ["zero.yaml"]],
+        ["/test/tmp2", [], ["one.yaml", "two.yaml"]],
+        ["/test/ignore", [], [".ignore.yaml"]],
     ]

     with patch_yaml_files(
         {
-            "/tmp/zero.yaml": "zero",
-            "/tmp/tmp2/one.yaml": "one",
-            "/tmp/tmp2/two.yaml": "two",
+            "/test/zero.yaml": "zero",
+            "/test/tmp2/one.yaml": "one",
+            "/test/tmp2/two.yaml": "two",
         }
     ):
-        conf = "key: !include_dir_list /tmp"
+        conf = "key: !include_dir_list /test"
         with io.StringIO(conf) as file:
             assert (
                 ".ignore" in mock_walk.return_value[0][1]
@@ -137,11 +137,11 @@ def test_include_dir_list_recursive(mock_walk):
 def test_include_dir_named(mock_walk):
     """Test include dir named yaml."""
     mock_walk.return_value = [
-        ["/tmp", [], ["first.yaml", "second.yaml", "secrets.yaml"]]
+        ["/test", [], ["first.yaml", "second.yaml", "secrets.yaml"]]
     ]

-    with patch_yaml_files({"/tmp/first.yaml": "one", "/tmp/second.yaml": "two"}):
-        conf = "key: !include_dir_named /tmp"
+    with patch_yaml_files({"/test/first.yaml": "one", "/test/second.yaml": "two"}):
+        conf = "key: !include_dir_named /test"
         correct = {"first": "one", "second": "two"}
         with io.StringIO(conf) as file:
             doc = yaml_loader.yaml.safe_load(file)
@@ -152,19 +152,19 @@ def test_include_dir_named(mock_walk):
 def test_include_dir_named_recursive(mock_walk):
     """Test include dir named yaml."""
     mock_walk.return_value = [
-        ["/tmp", ["tmp2", ".ignore", "ignore"], ["first.yaml"]],
-        ["/tmp/tmp2", [], ["second.yaml", "third.yaml"]],
-        ["/tmp/ignore", [], [".ignore.yaml"]],
+        ["/test", ["tmp2", ".ignore", "ignore"], ["first.yaml"]],
+        ["/test/tmp2", [], ["second.yaml", "third.yaml"]],
+        ["/test/ignore", [], [".ignore.yaml"]],
     ]

     with patch_yaml_files(
         {
-            "/tmp/first.yaml": "one",
-            "/tmp/tmp2/second.yaml": "two",
-            "/tmp/tmp2/third.yaml": "three",
+            "/test/first.yaml": "one",
+            "/test/tmp2/second.yaml": "two",
+            "/test/tmp2/third.yaml": "three",
         }
     ):
-        conf = "key: !include_dir_named /tmp"
+        conf = "key: !include_dir_named /test"
         correct = {"first": "one", "second": "two", "third": "three"}
         with io.StringIO(conf) as file:
             assert (
@@ -179,12 +179,12 @@ def test_include_dir_named_recursive(mock_walk):
 @patch("homeassistant.util.yaml.loader.os.walk")
 def test_include_dir_merge_list(mock_walk):
     """Test include dir merge list yaml."""
-    mock_walk.return_value = [["/tmp", [], ["first.yaml", "second.yaml"]]]
+    mock_walk.return_value = [["/test", [], ["first.yaml", "second.yaml"]]]

     with patch_yaml_files(
-        {"/tmp/first.yaml": "- one", "/tmp/second.yaml": "- two\n- three"}
+        {"/test/first.yaml": "- one", "/test/second.yaml": "- two\n- three"}
     ):
-        conf = "key: !include_dir_merge_list /tmp"
+        conf = "key: !include_dir_merge_list /test"
         with io.StringIO(conf) as file:
             doc = yaml_loader.yaml.safe_load(file)
             assert sorted(doc["key"]) == sorted(["one", "two", "three"])
@@ -194,19 +194,19 @@ def test_include_dir_merge_list(mock_walk):
 def test_include_dir_merge_list_recursive(mock_walk):
     """Test include dir merge list yaml."""
     mock_walk.return_value = [
-        ["/tmp", ["tmp2", ".ignore", "ignore"], ["first.yaml"]],
-        ["/tmp/tmp2", [], ["second.yaml", "third.yaml"]],
-        ["/tmp/ignore", [], [".ignore.yaml"]],
+        ["/test", ["tmp2", ".ignore", "ignore"], ["first.yaml"]],
+        ["/test/tmp2", [], ["second.yaml", "third.yaml"]],
+        ["/test/ignore", [], [".ignore.yaml"]],
     ]

     with patch_yaml_files(
         {
-            "/tmp/first.yaml": "- one",
-            "/tmp/tmp2/second.yaml": "- two",
-            "/tmp/tmp2/third.yaml": "- three\n- four",
+            "/test/first.yaml": "- one",
+            "/test/tmp2/second.yaml": "- two",
+            "/test/tmp2/third.yaml": "- three\n- four",
         }
     ):
-        conf = "key: !include_dir_merge_list /tmp"
+        conf = "key: !include_dir_merge_list /test"
         with io.StringIO(conf) as file:
             assert (
                 ".ignore" in mock_walk.return_value[0][1]
@@ -220,15 +220,15 @@ def test_include_dir_merge_list_recursive(mock_walk):
 @patch("homeassistant.util.yaml.loader.os.walk")
 def test_include_dir_merge_named(mock_walk):
     """Test include dir merge named yaml."""
-    mock_walk.return_value = [["/tmp", [], ["first.yaml", "second.yaml"]]]
+    mock_walk.return_value = [["/test", [], ["first.yaml", "second.yaml"]]]

     files = {
-        "/tmp/first.yaml": "key1: one",
-        "/tmp/second.yaml": "key2: two\nkey3: three",
+        "/test/first.yaml": "key1: one",
+        "/test/second.yaml": "key2: two\nkey3: three",
     }

     with patch_yaml_files(files):
-        conf = "key: !include_dir_merge_named /tmp"
+        conf = "key: !include_dir_merge_named /test"
         with io.StringIO(conf) as file:
             doc = yaml_loader.yaml.safe_load(file)
             assert doc["key"] == {"key1": "one", "key2": "two", "key3": "three"}
@@ -238,19 +238,19 @@ def test_include_dir_merge_named(mock_walk):
 def test_include_dir_merge_named_recursive(mock_walk):
     """Test include dir merge named yaml."""
     mock_walk.return_value = [
-        ["/tmp", ["tmp2", ".ignore", "ignore"], ["first.yaml"]],
-        ["/tmp/tmp2", [], ["second.yaml", "third.yaml"]],
-        ["/tmp/ignore", [], [".ignore.yaml"]],
+        ["/test", ["tmp2", ".ignore", "ignore"], ["first.yaml"]],
+        ["/test/tmp2", [], ["second.yaml", "third.yaml"]],
+        ["/test/ignore", [], [".ignore.yaml"]],
     ]

     with patch_yaml_files(
         {
-            "/tmp/first.yaml": "key1: one",
-            "/tmp/tmp2/second.yaml": "key2: two",
-            "/tmp/tmp2/third.yaml": "key3: three\nkey4: four",
+            "/test/first.yaml": "key1: one",
+            "/test/tmp2/second.yaml": "key2: two",
+            "/test/tmp2/third.yaml": "key3: three\nkey4: four",
         }
     ):
-        conf = "key: !include_dir_merge_named /tmp"
+        conf = "key: !include_dir_merge_named /test"
         with io.StringIO(conf) as file:
             assert (
                 ".ignore" in mock_walk.return_value[0][1]