Fix a collection of tests with missing asserts (#48127)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
parent f8755a52c2
commit 08870690a6
7 changed files with 23 additions and 25 deletions
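
The pattern fixed throughout is the same: each of these lines was a bare comparison, which Python evaluates to a bool and then discards, so the check never ran and the test passed regardless of the outcome. A minimal pytest-style illustration of the difference (not taken from this change; the names are made up):

def test_without_assert():
    value = 1 + 1
    value == 3  # evaluates to False, but the result is discarded -- the test still passes


def test_with_assert():
    value = 1 + 1
    assert value == 2  # a failing comparison now raises AssertionError and fails the test
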
@@ -128,8 +128,8 @@ async def test_forwarding_user_info(hassio_client, hass_admin_user, aioclient_mock
     assert len(aioclient_mock.mock_calls) == 1

     req_headers = aioclient_mock.mock_calls[0][-1]
-    req_headers["X-Hass-User-ID"] == hass_admin_user.id
-    req_headers["X-Hass-Is-Admin"] == "1"
+    assert req_headers["X-Hass-User-ID"] == hass_admin_user.id
+    assert req_headers["X-Hass-Is-Admin"] == "1"


 async def test_snapshot_upload_headers(hassio_client, aioclient_mock):
@@ -147,7 +147,7 @@ async def test_snapshot_upload_headers(hassio_client, aioclient_mock):
     assert len(aioclient_mock.mock_calls) == 1

     req_headers = aioclient_mock.mock_calls[0][-1]
-    req_headers["Content-Type"] == content_type
+    assert req_headers["Content-Type"] == content_type


 async def test_snapshot_download_headers(hassio_client, aioclient_mock):
@@ -168,7 +168,7 @@ async def test_snapshot_download_headers(hassio_client, aioclient_mock):

     assert len(aioclient_mock.mock_calls) == 1

-    resp.headers["Content-Disposition"] == content_disposition
+    assert resp.headers["Content-Disposition"] == content_disposition


 def test_need_auth(hass):

@@ -165,9 +165,9 @@ async def test_can_set_level(hass):
         logger.DOMAIN, "set_level", {f"{UNCONFIG_NS}.any": "debug"}, blocking=True
     )

-    logging.getLogger(UNCONFIG_NS).level == logging.NOTSET
-    logging.getLogger(f"{UNCONFIG_NS}.any").level == logging.DEBUG
-    logging.getLogger(UNCONFIG_NS).level == logging.NOTSET
+    assert logging.getLogger(UNCONFIG_NS).level == logging.NOTSET
+    assert logging.getLogger(f"{UNCONFIG_NS}.any").level == logging.DEBUG
+    assert logging.getLogger(UNCONFIG_NS).level == logging.NOTSET

     await hass.services.async_call(
         logger.DOMAIN, "set_default_level", {"level": "debug"}, blocking=True

@@ -364,9 +364,9 @@ async def test_purge_filtered_states(
         )
         assert states_sensor_excluded.count() == 0

-        session.query(States).get(71).old_state_id is None
-        session.query(States).get(72).old_state_id is None
-        session.query(States).get(73).old_state_id == 62  # should have been keeped
+        assert session.query(States).get(72).old_state_id is None
+        assert session.query(States).get(73).old_state_id is None
+        assert session.query(States).get(74).old_state_id == 62  # should have been kept


 async def test_purge_filtered_events(
@@ -550,9 +550,9 @@ async def test_purge_filtered_events_state_changed(
         assert events_purge.count() == 0
         assert states.count() == 3

-        session.query(States).get(61).old_state_id is None
-        session.query(States).get(62).old_state_id is None
-        session.query(States).get(63).old_state_id == 62  # should have been keeped
+        assert session.query(States).get(61).old_state_id is None
+        assert session.query(States).get(62).old_state_id is None
+        assert session.query(States).get(63).old_state_id == 62  # should have been kept


 async def _add_test_states(hass: HomeAssistantType, instance: recorder.Recorder):

@@ -88,8 +88,7 @@ def test_validate_or_move_away_sqlite_database_with_integrity_check(
     test_db_file = f"{test_dir}/broken.db"
     dburl = f"{SQLITE_URL_PREFIX}{test_db_file}"

-    util.validate_sqlite_database(test_db_file, db_integrity_check) is True
-
+    assert util.validate_sqlite_database(test_db_file, db_integrity_check) is False
     assert os.path.exists(test_db_file) is True
     assert (
         util.validate_or_move_away_sqlite_database(dburl, db_integrity_check) is False
@@ -125,8 +124,7 @@ def test_validate_or_move_away_sqlite_database_without_integrity_check(
     test_db_file = f"{test_dir}/broken.db"
     dburl = f"{SQLITE_URL_PREFIX}{test_db_file}"

-    util.validate_sqlite_database(test_db_file, db_integrity_check) is True
-
+    assert util.validate_sqlite_database(test_db_file, db_integrity_check) is False
     assert os.path.exists(test_db_file) is True
     assert (
         util.validate_or_move_away_sqlite_database(dburl, db_integrity_check) is False

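The two recorder util hunks above are the one spot where enabling the assert also meant correcting the expectation: the dormant check claimed the deliberately broken database would validate as True, which was never actually verified, while the enabled assert expects False. A tiny, hypothetical illustration of how a discarded identity comparison can hide a wrong expectation (validate() here is a stand-in, not the real util.validate_sqlite_database):

def validate(path):
    # Stand-in that behaves like validating a corrupt database: it fails.
    return False


def test_dormant_expectation():
    validate("broken.db") is True  # wrong expectation, silently discarded
    assert validate("broken.db") is False  # enabled check with the corrected expectation
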
@@ -903,7 +903,7 @@ async def test_dst(hass):

     await hass.async_block_till_done()
     state = hass.states.get(entity_id)
-    state.attributes["after"] == "2019-03-31T03:30:00+02:00"
-    state.attributes["before"] == "2019-03-31T03:40:00+02:00"
-    state.attributes["next_update"] == "2019-03-31T03:30:00+02:00"
+    assert state.attributes["after"] == "2019-03-31T03:30:00+02:00"
+    assert state.attributes["before"] == "2019-03-31T03:40:00+02:00"
+    assert state.attributes["next_update"] == "2019-03-31T03:30:00+02:00"
     assert state.state == STATE_OFF

@@ -944,7 +944,7 @@ async def test_master_state_with_template(hass):
     await hass.async_start()

     await hass.async_block_till_done()
-    hass.states.get("media_player.tv").state == STATE_ON
+    assert hass.states.get("media_player.tv").state == STATE_ON

     events = []

@@ -956,7 +956,7 @@ async def test_master_state_with_template(hass):
     hass.states.async_set("input_boolean.test", STATE_ON, context=context)
     await hass.async_block_till_done()

-    hass.states.get("media_player.tv").state == STATE_OFF
+    assert hass.states.get("media_player.tv").state == STATE_OFF
     assert events[0].context == context


@@ -987,12 +987,12 @@ async def test_reload(hass):
     await hass.async_start()

     await hass.async_block_till_done()
-    hass.states.get("media_player.tv").state == STATE_ON
+    assert hass.states.get("media_player.tv").state == STATE_ON

     hass.states.async_set("input_boolean.test", STATE_ON)
     await hass.async_block_till_done()

-    hass.states.get("media_player.tv").state == STATE_OFF
+    assert hass.states.get("media_player.tv").state == STATE_OFF

     hass.states.async_set("media_player.master_bedroom_2", STATE_OFF)
     hass.states.async_set(

@@ -573,7 +573,7 @@ async def test_loading_saving_data(hass, registry, area_registry):
     orig_kitchen_light_witout_suggested_area = registry.async_update_device(
         orig_kitchen_light.id, suggested_area=None
     )
-    orig_kitchen_light_witout_suggested_area.suggested_area is None
+    assert orig_kitchen_light_witout_suggested_area.suggested_area is None
     assert orig_kitchen_light_witout_suggested_area == new_kitchen_light

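Misses like these are easy to catch mechanically: a comparison used as a statement parses as an ast.Expr node wrapping an ast.Compare. The script below is a hypothetical stand-alone checker, not part of this PR or of Home Assistant's tooling (linters such as flake8-bugbear ship an equivalent "pointless comparison" warning); it reports every bare comparison in the files passed on the command line:

# missing_assert_scan.py -- hypothetical helper, not part of this change.
import ast
import sys


def find_bare_comparisons(source, filename="<string>"):
    """Return (line number, source text) pairs for comparisons used as bare statements."""
    hits = []
    for node in ast.walk(ast.parse(source, filename=filename)):
        # A bare `a == b` parses as Expr(value=Compare(...)); its result is discarded.
        if isinstance(node, ast.Expr) and isinstance(node.value, ast.Compare):
            hits.append((node.lineno, ast.get_source_segment(source, node) or ""))
    return hits


if __name__ == "__main__":
    for path in sys.argv[1:]:
        with open(path, encoding="utf-8") as handle:
            source = handle.read()
        for lineno, snippet in find_bare_comparisons(source, path):
            print(f"{path}:{lineno}: comparison without assert: {snippet}")

Running it prints one line per discarded comparison, e.g. python missing_assert_scan.py path/to/test_file.py (the path is illustrative).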