Avoid updating hassio addon data when there are no entities consuming it (#101382)
parent 7c8c063149
commit c0904c905d

4 changed files with 236 additions and 57 deletions
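
In short: before this change, every scheduled coordinator refresh fetched stats, changelogs, and info for every add-on, even when no loaded entity consumed the results. The coordinator now keeps a per-add-on registry of which update types (stats, changelog, info) at least one added entity needs, and scheduled refreshes skip the Supervisor endpoints nobody is listening to; the first refresh still fetches everything so add-on devices can be registered. The four touched files appear to be the hassio integration's coordinator module, its const and entity modules, and the hassio sensor tests (the original file headers were lost in this capture).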
@@ -2,6 +2,7 @@
 from __future__ import annotations

 import asyncio
+from collections import defaultdict
 from contextlib import suppress
 from datetime import datetime, timedelta
 import logging
@@ -29,6 +30,7 @@ from homeassistant.const import (
     Platform,
 )
 from homeassistant.core import (
+    CALLBACK_TYPE,
     DOMAIN as HASS_DOMAIN,
     HassJob,
     HomeAssistant,
@@ -55,6 +57,9 @@ from .addon_manager import AddonError, AddonInfo, AddonManager, AddonState # no
 from .addon_panel import async_setup_addon_panel
 from .auth import async_setup_auth_view
 from .const import (
+    ADDON_UPDATE_CHANGELOG,
+    ADDON_UPDATE_INFO,
+    ADDON_UPDATE_STATS,
     ATTR_ADDON,
     ATTR_ADDONS,
     ATTR_AUTO_UPDATE,
@@ -800,11 +805,16 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
         self.entry_id = config_entry.entry_id
         self.dev_reg = dev_reg
         self.is_hass_os = (get_info(self.hass) or {}).get("hassos") is not None
+        self._enabled_updates_by_addon: defaultdict[
+            str, dict[str, set[str]]
+        ] = defaultdict(lambda: defaultdict(set))

     async def _async_update_data(self) -> dict[str, Any]:
         """Update data via library."""
+        is_first_update = not self.data
+
         try:
-            await self.force_data_refresh()
+            await self.force_data_refresh(is_first_update)
         except HassioAPIError as err:
             raise UpdateFailed(f"Error on Supervisor API: {err}") from err

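The flag is computed before the refresh because a coordinator's `data` is only populated after the first successful update. A minimal sketch of the pattern, using a simplified stand-in rather than the real `DataUpdateCoordinator` base class:

```python
from typing import Any

class SketchCoordinator:
    """Simplified stand-in; not the integration's real class."""

    data: dict[str, Any] | None = None  # set by the base class after a refresh

    async def _async_update_data(self) -> dict[str, Any]:
        is_first_update = not self.data  # True only before the first success
        await self.force_data_refresh(is_first_update)
        return {"ok": True}

    async def force_data_refresh(self, first_update: bool) -> None:
        """Fetch everything on the first update, filtered sets afterwards."""
```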
@@ -848,7 +858,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
         new_data[DATA_KEY_HOST] = get_host_info(self.hass) or {}

         # If this is the initial refresh, register all addons and return the dict
-        if not self.data:
+        if is_first_update:
             async_register_addons_in_dev_reg(
                 self.entry_id, self.dev_reg, new_data[DATA_KEY_ADDONS].values()
             )
@@ -898,47 +908,75 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
         self.hass.data[DATA_SUPERVISOR_INFO] = await self.hassio.get_supervisor_info()
         await self.async_refresh()

-    async def force_data_refresh(self) -> None:
+    async def force_data_refresh(self, first_update: bool) -> None:
         """Force update of the addon info."""
+        data = self.hass.data
+        hassio = self.hassio
         (
-            self.hass.data[DATA_INFO],
-            self.hass.data[DATA_CORE_INFO],
-            self.hass.data[DATA_CORE_STATS],
-            self.hass.data[DATA_SUPERVISOR_INFO],
-            self.hass.data[DATA_SUPERVISOR_STATS],
-            self.hass.data[DATA_OS_INFO],
+            data[DATA_INFO],
+            data[DATA_CORE_INFO],
+            data[DATA_CORE_STATS],
+            data[DATA_SUPERVISOR_INFO],
+            data[DATA_SUPERVISOR_STATS],
+            data[DATA_OS_INFO],
         ) = await asyncio.gather(
-            self.hassio.get_info(),
-            self.hassio.get_core_info(),
-            self.hassio.get_core_stats(),
-            self.hassio.get_supervisor_info(),
-            self.hassio.get_supervisor_stats(),
-            self.hassio.get_os_info(),
+            hassio.get_info(),
+            hassio.get_core_info(),
+            hassio.get_core_stats(),
+            hassio.get_supervisor_info(),
+            hassio.get_supervisor_stats(),
+            hassio.get_os_info(),
         )

-        all_addons = self.hass.data[DATA_SUPERVISOR_INFO].get("addons", [])
-        started_addons = [
-            addon for addon in all_addons if addon[ATTR_STATE] == ATTR_STARTED
-        ]
-        stats_data = await asyncio.gather(
-            *[self._update_addon_stats(addon[ATTR_SLUG]) for addon in started_addons]
-        )
-        self.hass.data[DATA_ADDONS_STATS] = dict(stats_data)
-        self.hass.data[DATA_ADDONS_CHANGELOGS] = dict(
-            await asyncio.gather(
-                *[
-                    self._update_addon_changelog(addon[ATTR_SLUG])
-                    for addon in all_addons
-                ]
-            )
-        )
-        self.hass.data[DATA_ADDONS_INFO] = dict(
-            await asyncio.gather(
-                *[self._update_addon_info(addon[ATTR_SLUG]) for addon in all_addons]
-            )
-        )
+        _addon_data = data[DATA_SUPERVISOR_INFO].get("addons", [])
+        all_addons: list[str] = []
+        started_addons: list[str] = []
+        for addon in _addon_data:
+            slug = addon[ATTR_SLUG]
+            all_addons.append(slug)
+            if addon[ATTR_STATE] == ATTR_STARTED:
+                started_addons.append(slug)
+        #
+        # Update add-on info if it's the first update or
+        # there is at least one entity that needs the data.
+        #
+        # When entities are added they call async_enable_addon_updates
+        # to enable updates for the endpoints they need via
+        # async_added_to_hass. This ensures that we only update
+        # the data for the endpoints that are needed to avoid unnecessary
+        # API calls since otherwise we would fetch stats for all add-ons
+        # and throw them away.
+        #
+        enabled_updates_by_addon = self._enabled_updates_by_addon
+        for data_key, update_func, enabled_key, wanted_addons in (
+            (
+                DATA_ADDONS_STATS,
+                self._update_addon_stats,
+                ADDON_UPDATE_STATS,
+                started_addons,
+            ),
+            (
+                DATA_ADDONS_CHANGELOGS,
+                self._update_addon_changelog,
+                ADDON_UPDATE_CHANGELOG,
+                all_addons,
+            ),
+            (DATA_ADDONS_INFO, self._update_addon_info, ADDON_UPDATE_INFO, all_addons),
+        ):
+            data.setdefault(data_key, {}).update(
+                dict(
+                    await asyncio.gather(
+                        *[
+                            update_func(slug)
+                            for slug in wanted_addons
+                            if first_update
+                            or enabled_key in enabled_updates_by_addon[slug]
+                        ]
+                    )
+                )
+            )

-    async def _update_addon_stats(self, slug):
+    async def _update_addon_stats(self, slug: str) -> tuple[str, dict[str, Any] | None]:
         """Update single addon stats."""
         try:
             stats = await self.hassio.get_addon_stats(slug)
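The new loop is table-driven: each row pairs a storage key with its fetcher, its update type, and the candidate slugs, and the filtered gather only fires requests for slugs whose update type some entity has enabled (or on the first update). A runnable sketch of the same gating, with illustrative names and a plain dict in place of the coordinator's nested defaultdict:

```python
import asyncio

# Which update types entities have enabled, keyed by add-on slug (illustrative).
enabled_updates_by_addon: dict[str, set[str]] = {"core_ssh": {"stats"}}

async def fetch_stats(slug: str) -> tuple[str, dict | None]:
    """Stand-in for the Supervisor stats call for one add-on."""
    return (slug, {"cpu_percent": 1.0})

async def refresh(first_update: bool, started_addons: list[str]) -> dict:
    results = await asyncio.gather(
        *[
            fetch_stats(slug)
            for slug in started_addons
            # Skip slugs nobody is listening to, except on the first update.
            if first_update or "stats" in enabled_updates_by_addon.get(slug, set())
        ]
    )
    return dict(results)

# Scheduled poll: only "core_ssh" is fetched; "whoami" is skipped entirely.
print(asyncio.run(refresh(False, ["core_ssh", "whoami"])))
```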
@@ -947,7 +985,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
             _LOGGER.warning("Could not fetch stats for %s: %s", slug, err)
             return (slug, None)

-    async def _update_addon_changelog(self, slug):
+    async def _update_addon_changelog(self, slug: str) -> tuple[str, str | None]:
         """Return the changelog for an add-on."""
         try:
             changelog = await self.hassio.get_addon_changelog(slug)
@@ -956,7 +994,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
             _LOGGER.warning("Could not fetch changelog for %s: %s", slug, err)
             return (slug, None)

-    async def _update_addon_info(self, slug):
+    async def _update_addon_info(self, slug: str) -> tuple[str, dict[str, Any] | None]:
         """Return the info for an add-on."""
         try:
             info = await self.hassio.get_addon_info(slug)
@@ -965,6 +1003,22 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
             _LOGGER.warning("Could not fetch info for %s: %s", slug, err)
             return (slug, None)

+    @callback
+    def async_enable_addon_updates(
+        self, slug: str, entity_id: str, types: set[str]
+    ) -> CALLBACK_TYPE:
+        """Enable updates for an add-on."""
+        enabled_updates = self._enabled_updates_by_addon[slug]
+        for key in types:
+            enabled_updates[key].add(entity_id)
+
+        @callback
+        def _remove():
+            for key in types:
+                enabled_updates[key].remove(entity_id)
+
+        return _remove
+
     async def _async_refresh(
         self,
         log_failures: bool = True,
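`async_enable_addon_updates` returns its own undo: the entity registers interest and gets back a callback that removes exactly the entries it added, which Home Assistant entities typically hand to `async_on_remove`. A self-contained sketch of that contract, using module-level stand-ins rather than the coordinator itself:

```python
from collections import defaultdict
from typing import Callable

_enabled_updates_by_addon: defaultdict[str, dict[str, set[str]]] = defaultdict(
    lambda: defaultdict(set)
)

def enable_addon_updates(
    slug: str, entity_id: str, types: set[str]
) -> Callable[[], None]:
    """Record interest and return a remover that undoes it."""
    enabled_updates = _enabled_updates_by_addon[slug]
    for key in types:
        enabled_updates[key].add(entity_id)

    def _remove() -> None:
        for key in types:
            enabled_updates[key].remove(entity_id)

    return _remove

unsub = enable_addon_updates("core_ssh", "sensor.core_ssh_version", {"info"})
assert "sensor.core_ssh_version" in _enabled_updates_by_addon["core_ssh"]["info"]
unsub()  # what async_on_remove() invokes when the entity is removed
assert "sensor.core_ssh_version" not in _enabled_updates_by_addon["core_ssh"]["info"]
```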
@@ -82,6 +82,21 @@ PLACEHOLDER_KEY_COMPONENTS = "components"

 ISSUE_KEY_SYSTEM_DOCKER_CONFIG = "issue_system_docker_config"

+ADDON_UPDATE_STATS = "stats"
+ADDON_UPDATE_CHANGELOG = "changelog"
+ADDON_UPDATE_INFO = "info"
+
+# This is a mapping of which endpoint the key in the addon data
+# is obtained from so we know which endpoint to update when the
+# coordinator polls for updates.
+KEY_TO_UPDATE_TYPES: dict[str, set[str]] = {
+    ATTR_VERSION_LATEST: {ADDON_UPDATE_INFO, ADDON_UPDATE_CHANGELOG},
+    ATTR_MEMORY_PERCENT: {ADDON_UPDATE_STATS},
+    ATTR_CPU_PERCENT: {ADDON_UPDATE_STATS},
+    ATTR_VERSION: {ADDON_UPDATE_INFO},
+    ATTR_STATE: {ADDON_UPDATE_INFO},
+}
+

 class SupervisorEntityModel(StrEnum):
     """Supervisor entity model."""
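This hunk appears to be the integration's const module. The mapping translates an entity description key into the update types its data comes from, so the entity base class can declare exactly which endpoints must stay fresh. Assuming the usual hassio attribute values ("version_latest", "cpu_percent", and so on), the lookup behaves like this:

```python
ADDON_UPDATE_STATS = "stats"
ADDON_UPDATE_CHANGELOG = "changelog"
ADDON_UPDATE_INFO = "info"

KEY_TO_UPDATE_TYPES: dict[str, set[str]] = {
    "version_latest": {ADDON_UPDATE_INFO, ADDON_UPDATE_CHANGELOG},
    "cpu_percent": {ADDON_UPDATE_STATS},
}

# A "latest version" entity keeps both the info and changelog endpoints
# polled; a CPU sensor only keeps the stats endpoint alive.
assert KEY_TO_UPDATE_TYPES["version_latest"] == {"info", "changelog"}
assert KEY_TO_UPDATE_TYPES["cpu_percent"] == {"stats"}
```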
@@ -15,6 +15,7 @@ from .const import (
     DATA_KEY_HOST,
     DATA_KEY_OS,
     DATA_KEY_SUPERVISOR,
+    KEY_TO_UPDATE_TYPES,
 )

@@ -46,6 +47,16 @@ class HassioAddonEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
             in self.coordinator.data[DATA_KEY_ADDONS].get(self._addon_slug, {})
         )

+    async def async_added_to_hass(self) -> None:
+        """Subscribe to updates."""
+        await super().async_added_to_hass()
+        update_types = KEY_TO_UPDATE_TYPES[self.entity_description.key]
+        self.async_on_remove(
+            self.coordinator.async_enable_addon_updates(
+                self._addon_slug, self.entity_id, update_types
+            )
+        )
+

 class HassioOSEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
     """Base Entity for Hass.io OS."""
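These hunks appear to be the entity base-class module. Wiring the subscription through `async_added_to_hass` gives it a lifecycle for free: an entity that is disabled by default never runs `async_added_to_hass`, so it never registers interest and its endpoints are never polled; enabling or adding it registers its update types, and removing it runs the returned remover via `async_on_remove`.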
@@ -1,23 +1,63 @@
 """The tests for the hassio sensors."""
+from datetime import timedelta
 import os
 from unittest.mock import patch

 import pytest

-from homeassistant.components.hassio import DOMAIN
+from homeassistant.components.hassio import (
+    DOMAIN,
+    HASSIO_UPDATE_INTERVAL,
+    HassioAPIError,
+)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import entity_registry as er
 from homeassistant.setup import async_setup_component
+import homeassistant.util.dt as dt_util

-from tests.common import MockConfigEntry
+from tests.common import MockConfigEntry, async_fire_time_changed
 from tests.test_util.aiohttp import AiohttpClientMocker

 MOCK_ENVIRON = {"SUPERVISOR": "127.0.0.1", "SUPERVISOR_TOKEN": "abcdefgh"}


 @pytest.fixture(autouse=True)
-def mock_all(aioclient_mock, request):
+def mock_all(aioclient_mock: AiohttpClientMocker, request):
     """Mock all setup requests."""
+    _install_default_mocks(aioclient_mock)
+    _install_test_addon_stats_mock(aioclient_mock)
+
+
+def _install_test_addon_stats_mock(aioclient_mock: AiohttpClientMocker):
+    """Install mock to provide valid stats for the test addon."""
+    aioclient_mock.get(
+        "http://127.0.0.1/addons/test/stats",
+        json={
+            "result": "ok",
+            "data": {
+                "cpu_percent": 0.99,
+                "memory_usage": 182611968,
+                "memory_limit": 3977146368,
+                "memory_percent": 4.59,
+                "network_rx": 362570232,
+                "network_tx": 82374138,
+                "blk_read": 46010945536,
+                "blk_write": 15051526144,
+            },
+        },
+    )
+
+
+def _install_test_addon_stats_failure_mock(aioclient_mock: AiohttpClientMocker):
+    """Install mocks to raise an exception when fetching stats for the test addon."""
+    aioclient_mock.get(
+        "http://127.0.0.1/addons/test/stats",
+        exc=HassioAPIError,
+    )
+
+
+def _install_default_mocks(aioclient_mock: AiohttpClientMocker):
+    """Install default mocks."""
     aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"})
     aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"})
     aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"})
@@ -79,6 +119,7 @@ def mock_all(aioclient_mock, request):
                         "version_latest": "2.0.1",
                         "repository": "core",
                         "url": "https://github.com/home-assistant/addons/test",
+                        "icon": False,
                     },
                     {
                         "name": "test2",
@@ -90,27 +131,12 @@ def mock_all(aioclient_mock, request):
                         "version_latest": "3.2.0",
                         "repository": "core",
                         "url": "https://github.com",
+                        "icon": False,
                     },
                 ],
             },
         },
     )
-    aioclient_mock.get(
-        "http://127.0.0.1/addons/test/stats",
-        json={
-            "result": "ok",
-            "data": {
-                "cpu_percent": 0.99,
-                "memory_usage": 182611968,
-                "memory_limit": 3977146368,
-                "memory_percent": 4.59,
-                "network_rx": 362570232,
-                "network_tx": 82374138,
-                "blk_read": 46010945536,
-                "blk_write": 15051526144,
-            },
-        },
-    )
     aioclient_mock.get(
         "http://127.0.0.1/core/stats",
         json={
@@ -196,6 +222,7 @@ async def test_sensor(
     expected,
     aioclient_mock: AiohttpClientMocker,
     entity_registry: er.EntityRegistry,
+    caplog: pytest.LogCaptureFixture,
 ) -> None:
     """Test hassio OS and addons sensor."""
     config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
@@ -221,3 +248,75 @@ async def test_sensor(
     # Verify that the entity has the expected state.
     state = hass.states.get(entity_id)
     assert state.state == expected
+
+
+@pytest.mark.parametrize(
+    ("entity_id", "expected"),
+    [
+        ("sensor.test_cpu_percent", "0.99"),
+        ("sensor.test_memory_percent", "4.59"),
+    ],
+)
+async def test_stats_addon_sensor(
+    hass: HomeAssistant,
+    entity_id,
+    expected,
+    aioclient_mock: AiohttpClientMocker,
+    entity_registry: er.EntityRegistry,
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test stats addons sensor."""
+    config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
+    config_entry.add_to_hass(hass)
+
+    with patch.dict(os.environ, MOCK_ENVIRON):
+        result = await async_setup_component(
+            hass,
+            "hassio",
+            {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
+        )
+        assert result
+    await hass.async_block_till_done()
+
+    # Verify that the entity is disabled by default.
+    assert hass.states.get(entity_id) is None
+
+    aioclient_mock.clear_requests()
+    _install_default_mocks(aioclient_mock)
+    _install_test_addon_stats_failure_mock(aioclient_mock)
+
+    async_fire_time_changed(
+        hass, dt_util.utcnow() + HASSIO_UPDATE_INTERVAL + timedelta(seconds=1)
+    )
+    await hass.async_block_till_done()
+
+    assert "Could not fetch stats" not in caplog.text
+
+    aioclient_mock.clear_requests()
+    _install_default_mocks(aioclient_mock)
+    _install_test_addon_stats_mock(aioclient_mock)
+
+    async_fire_time_changed(
+        hass, dt_util.utcnow() + HASSIO_UPDATE_INTERVAL + timedelta(seconds=1)
+    )
+    await hass.async_block_till_done()
+
+    # Enable the entity.
+    entity_registry.async_update_entity(entity_id, disabled_by=None)
+    await hass.config_entries.async_reload(config_entry.entry_id)
+    await hass.async_block_till_done()
+
+    # Verify that the entity has the expected state.
+    state = hass.states.get(entity_id)
+    assert state.state == expected
+
+    aioclient_mock.clear_requests()
+    _install_default_mocks(aioclient_mock)
+    _install_test_addon_stats_failure_mock(aioclient_mock)
+
+    async_fire_time_changed(
+        hass, dt_util.utcnow() + HASSIO_UPDATE_INTERVAL + timedelta(seconds=1)
+    )
+    await hass.async_block_till_done()
+
+    assert "Could not fetch stats" in caplog.text
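The new test exercises the gating end to end: while the stats sensors are still disabled by default, a scheduled poll against a failing /addons/test/stats endpoint logs no "Could not fetch stats" warning, because the endpoint is never called; after the entity is enabled and the config entry reloaded, the sensor reports the mocked value; and once the entity is live, a later failing poll does log the warning.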