Convert Feedreader to use an update coordinator (#118007)

Michael 2024-05-27 14:48:41 +02:00 committed by GitHub
parent 97f6b578c8
commit a24d97d79d
4 changed files with 251 additions and 387 deletions
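For orientation, here is a minimal sketch of the DataUpdateCoordinator pattern this commit adopts: a subclass overrides _async_update_data, setup awaits async_refresh, and a temporary no-op listener keeps the coordinator polling until real entities exist. The class and function names below are placeholders; only the homeassistant imports and coordinator calls mirror the diff that follows.

from datetime import timedelta
import logging

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


class ExampleCoordinator(DataUpdateCoordinator[None]):
    """Poll a source on a fixed interval via the shared coordinator machinery."""

    def __init__(self, hass: HomeAssistant, url: str) -> None:
        super().__init__(
            hass=hass,
            logger=_LOGGER,
            name=f"example {url}",
            update_interval=timedelta(hours=1),
        )
        self._url = url

    async def _async_update_data(self) -> None:
        """Called by the coordinator on every update_interval tick."""
        # The real integration fetches the feed and fires events here.


async def async_setup_example(hass: HomeAssistant) -> bool:
    """Wire up one coordinator, mirroring async_setup in the diff below."""
    coordinator = ExampleCoordinator(hass, "http://example.local/feed.xml")
    await coordinator.async_refresh()
    # Coordinators without listeners stop scheduling updates, hence the
    # temporary no-op listener used by this commit.
    coordinator.async_add_listener(lambda: None)
    return True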

homeassistant/components/feedreader/__init__.py

@@ -2,37 +2,25 @@
from __future__ import annotations
from calendar import timegm
from datetime import datetime, timedelta
from logging import getLogger
import os
import pickle
from time import gmtime, struct_time
import asyncio
from datetime import timedelta
import feedparser
import voluptuous as vol
from homeassistant.const import CONF_SCAN_INTERVAL, EVENT_HOMEASSISTANT_START
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.const import CONF_SCAN_INTERVAL
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.storage import Store
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util
_LOGGER = getLogger(__name__)
from .const import DOMAIN
from .coordinator import FeedReaderCoordinator, StoredData
CONF_URLS = "urls"
CONF_MAX_ENTRIES = "max_entries"
DEFAULT_MAX_ENTRIES = 20
DEFAULT_SCAN_INTERVAL = timedelta(hours=1)
DELAY_SAVE = 30
DOMAIN = "feedreader"
EVENT_FEEDREADER = "feedreader"
STORAGE_VERSION = 1
CONFIG_SCHEMA = vol.Schema(
{
@@ -58,240 +46,17 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
scan_interval: timedelta = config[DOMAIN][CONF_SCAN_INTERVAL]
max_entries: int = config[DOMAIN][CONF_MAX_ENTRIES]
old_data_file = hass.config.path(f"{DOMAIN}.pickle")
storage = StoredData(hass, old_data_file)
storage = StoredData(hass)
await storage.async_setup()
feeds = [
FeedManager(hass, url, scan_interval, max_entries, storage) for url in urls
FeedReaderCoordinator(hass, url, scan_interval, max_entries, storage)
for url in urls
]
for feed in feeds:
feed.async_setup()
await asyncio.gather(*[feed.async_refresh() for feed in feeds])
# workaround because coordinators without listeners won't update
# can be removed when we have entities to update
[feed.async_add_listener(lambda: None) for feed in feeds]
return True
class FeedManager:
"""Abstraction over Feedparser module."""
def __init__(
self,
hass: HomeAssistant,
url: str,
scan_interval: timedelta,
max_entries: int,
storage: StoredData,
) -> None:
"""Initialize the FeedManager object, poll as per scan interval."""
self._hass = hass
self._url = url
self._scan_interval = scan_interval
self._max_entries = max_entries
self._feed: feedparser.FeedParserDict | None = None
self._firstrun = True
self._storage = storage
self._last_entry_timestamp: struct_time | None = None
self._has_published_parsed = False
self._has_updated_parsed = False
self._event_type = EVENT_FEEDREADER
self._feed_id = url
@callback
def async_setup(self) -> None:
"""Set up the feed manager."""
self._hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, self._async_update)
async_track_time_interval(
self._hass, self._async_update, self._scan_interval, cancel_on_shutdown=True
)
def _log_no_entries(self) -> None:
"""Send no entries log at debug level."""
_LOGGER.debug("No new entries to be published in feed %s", self._url)
async def _async_update(self, _: datetime | Event) -> None:
"""Update the feed and publish new entries to the event bus."""
last_entry_timestamp = await self._hass.async_add_executor_job(self._update)
if last_entry_timestamp:
self._storage.async_put_timestamp(self._feed_id, last_entry_timestamp)
def _update(self) -> struct_time | None:
"""Update the feed and publish new entries to the event bus."""
_LOGGER.debug("Fetching new data from feed %s", self._url)
self._feed = feedparser.parse(
self._url,
etag=None if not self._feed else self._feed.get("etag"),
modified=None if not self._feed else self._feed.get("modified"),
)
if not self._feed:
_LOGGER.error("Error fetching feed data from %s", self._url)
return None
# The 'bozo' flag really only indicates that there was an issue
# during the initial parsing of the XML, but it doesn't indicate
# whether this is an unrecoverable error. In this case the
# feedparser lib is trying a less strict parsing approach.
# If an error is detected here, log warning message but continue
# processing the feed entries if present.
if self._feed.bozo != 0:
_LOGGER.warning(
"Possible issue parsing feed %s: %s",
self._url,
self._feed.bozo_exception,
)
# Using etag and modified, if there's no new data available,
# the entries list will be empty
_LOGGER.debug(
"%s entri(es) available in feed %s",
len(self._feed.entries),
self._url,
)
if not self._feed.entries:
self._log_no_entries()
return None
self._filter_entries()
self._publish_new_entries()
_LOGGER.debug("Fetch from feed %s completed", self._url)
if (
self._has_published_parsed or self._has_updated_parsed
) and self._last_entry_timestamp:
return self._last_entry_timestamp
return None
def _filter_entries(self) -> None:
"""Filter the entries provided and return the ones to keep."""
assert self._feed is not None
if len(self._feed.entries) > self._max_entries:
_LOGGER.debug(
"Processing only the first %s entries in feed %s",
self._max_entries,
self._url,
)
self._feed.entries = self._feed.entries[0 : self._max_entries]
def _update_and_fire_entry(self, entry: feedparser.FeedParserDict) -> None:
"""Update last_entry_timestamp and fire entry."""
# Check if the entry has an updated or published date.
# Start from an updated date because generally `updated` > `published`.
if "updated_parsed" in entry and entry.updated_parsed:
# We are lucky, `updated_parsed` data available, let's make use of
# it to publish only new available entries since the last run
self._has_updated_parsed = True
self._last_entry_timestamp = max(
entry.updated_parsed, self._last_entry_timestamp
)
elif "published_parsed" in entry and entry.published_parsed:
# We are lucky, `published_parsed` data available, let's make use of
# it to publish only new available entries since the last run
self._has_published_parsed = True
self._last_entry_timestamp = max(
entry.published_parsed, self._last_entry_timestamp
)
else:
self._has_updated_parsed = False
self._has_published_parsed = False
_LOGGER.debug(
"No updated_parsed or published_parsed info available for entry %s",
entry,
)
entry.update({"feed_url": self._url})
self._hass.bus.fire(self._event_type, entry)
_LOGGER.debug("New event fired for entry %s", entry.get("link"))
def _publish_new_entries(self) -> None:
"""Publish new entries to the event bus."""
assert self._feed is not None
new_entry_count = 0
self._last_entry_timestamp = self._storage.get_timestamp(self._feed_id)
if self._last_entry_timestamp:
self._firstrun = False
else:
# Set last entry timestamp as epoch time if not available
self._last_entry_timestamp = dt_util.utc_from_timestamp(0).timetuple()
# locally cache self._last_entry_timestamp so that entries published at identical times can be processed
last_entry_timestamp = self._last_entry_timestamp
for entry in self._feed.entries:
if (
self._firstrun
or (
"published_parsed" in entry
and entry.published_parsed > last_entry_timestamp
)
or (
"updated_parsed" in entry
and entry.updated_parsed > last_entry_timestamp
)
):
self._update_and_fire_entry(entry)
new_entry_count += 1
else:
_LOGGER.debug("Already processed entry %s", entry.get("link"))
if new_entry_count == 0:
self._log_no_entries()
else:
_LOGGER.debug("%d entries published in feed %s", new_entry_count, self._url)
self._firstrun = False
class StoredData:
"""Represent a data storage."""
def __init__(self, hass: HomeAssistant, legacy_data_file: str) -> None:
"""Initialize data storage."""
self._legacy_data_file = legacy_data_file
self._data: dict[str, struct_time] = {}
self._hass = hass
self._store: Store[dict[str, str]] = Store(hass, STORAGE_VERSION, DOMAIN)
async def async_setup(self) -> None:
"""Set up storage."""
if not os.path.exists(self._store.path):
# Remove the legacy store loading after deprecation period.
data = await self._hass.async_add_executor_job(self._legacy_fetch_data)
else:
if (store_data := await self._store.async_load()) is None:
return
# Make sure that dst is set to 0, by using gmtime() on the timestamp.
data = {
feed_id: gmtime(datetime.fromisoformat(timestamp_string).timestamp())
for feed_id, timestamp_string in store_data.items()
}
self._data = data
def _legacy_fetch_data(self) -> dict[str, struct_time]:
"""Fetch data stored in pickle file."""
_LOGGER.debug("Fetching data from legacy file %s", self._legacy_data_file)
try:
with open(self._legacy_data_file, "rb") as myfile:
return pickle.load(myfile) or {}
except FileNotFoundError:
pass
except (OSError, pickle.PickleError) as err:
_LOGGER.error(
"Error loading data from pickled file %s: %s",
self._legacy_data_file,
err,
)
return {}
def get_timestamp(self, feed_id: str) -> struct_time | None:
"""Return stored timestamp for given feed id."""
return self._data.get(feed_id)
@callback
def async_put_timestamp(self, feed_id: str, timestamp: struct_time) -> None:
"""Update timestamp for given feed id."""
self._data[feed_id] = timestamp
self._store.async_delay_save(self._async_save_data, DELAY_SAVE)
@callback
def _async_save_data(self) -> dict[str, str]:
"""Save feed data to storage."""
return {
feed_id: dt_util.utc_from_timestamp(timegm(struct_utc)).isoformat()
for feed_id, struct_utc in self._data.items()
}

homeassistant/components/feedreader/const.py

@@ -0,0 +1,3 @@
"""Constants for RSS/Atom feeds."""
DOMAIN = "feedreader"

homeassistant/components/feedreader/coordinator.py

@@ -0,0 +1,199 @@
"""Data update coordinator for RSS/Atom feeds."""
from __future__ import annotations
from calendar import timegm
from datetime import datetime, timedelta
from logging import getLogger
from time import gmtime, struct_time
import feedparser
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.storage import Store
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.util import dt as dt_util
from .const import DOMAIN
DELAY_SAVE = 30
EVENT_FEEDREADER = "feedreader"
STORAGE_VERSION = 1
_LOGGER = getLogger(__name__)
class FeedReaderCoordinator(DataUpdateCoordinator[None]):
"""Abstraction over Feedparser module."""
def __init__(
self,
hass: HomeAssistant,
url: str,
scan_interval: timedelta,
max_entries: int,
storage: StoredData,
) -> None:
"""Initialize the FeedManager object, poll as per scan interval."""
super().__init__(
hass=hass,
logger=_LOGGER,
name=f"{DOMAIN} {url}",
update_interval=scan_interval,
)
self._url = url
self._max_entries = max_entries
self._feed: feedparser.FeedParserDict | None = None
self._storage = storage
self._last_entry_timestamp: struct_time | None = None
self._event_type = EVENT_FEEDREADER
self._feed_id = url
@callback
def _log_no_entries(self) -> None:
"""Send no entries log at debug level."""
_LOGGER.debug("No new entries to be published in feed %s", self._url)
def _fetch_feed(self) -> feedparser.FeedParserDict:
"""Fetch the feed data."""
return feedparser.parse(
self._url,
etag=None if not self._feed else self._feed.get("etag"),
modified=None if not self._feed else self._feed.get("modified"),
)
async def _async_update_data(self) -> None:
"""Update the feed and publish new entries to the event bus."""
_LOGGER.debug("Fetching new data from feed %s", self._url)
self._feed = await self.hass.async_add_executor_job(self._fetch_feed)
if not self._feed:
_LOGGER.error("Error fetching feed data from %s", self._url)
return None
# The 'bozo' flag really only indicates that there was an issue
# during the initial parsing of the XML, but it doesn't indicate
# whether this is an unrecoverable error. In this case the
# feedparser lib is trying a less strict parsing approach.
# If an error is detected here, log warning message but continue
# processing the feed entries if present.
if self._feed.bozo != 0:
_LOGGER.warning(
"Possible issue parsing feed %s: %s",
self._url,
self._feed.bozo_exception,
)
# Using etag and modified, if there's no new data available,
# the entries list will be empty
_LOGGER.debug(
"%s entri(es) available in feed %s",
len(self._feed.entries),
self._url,
)
if not self._feed.entries:
self._log_no_entries()
return None
self._filter_entries()
self._publish_new_entries()
_LOGGER.debug("Fetch from feed %s completed", self._url)
if self._last_entry_timestamp:
self._storage.async_put_timestamp(self._feed_id, self._last_entry_timestamp)
@callback
def _filter_entries(self) -> None:
"""Filter the entries provided and return the ones to keep."""
assert self._feed is not None
if len(self._feed.entries) > self._max_entries:
_LOGGER.debug(
"Processing only the first %s entries in feed %s",
self._max_entries,
self._url,
)
self._feed.entries = self._feed.entries[0 : self._max_entries]
@callback
def _update_and_fire_entry(self, entry: feedparser.FeedParserDict) -> None:
"""Update last_entry_timestamp and fire entry."""
# Check if the entry has an updated or published date.
# Start from an updated date because generally `updated` > `published`.
if time_stamp := entry.get("updated_parsed") or entry.get("published_parsed"):
self._last_entry_timestamp = time_stamp
else:
_LOGGER.debug(
"No updated_parsed or published_parsed info available for entry %s",
entry,
)
entry["feed_url"] = self._url
self.hass.bus.async_fire(self._event_type, entry)
_LOGGER.debug("New event fired for entry %s", entry.get("link"))
@callback
def _publish_new_entries(self) -> None:
"""Publish new entries to the event bus."""
assert self._feed is not None
new_entry_count = 0
firstrun = False
self._last_entry_timestamp = self._storage.get_timestamp(self._feed_id)
if not self._last_entry_timestamp:
firstrun = True
# Set last entry timestamp as epoch time if not available
self._last_entry_timestamp = dt_util.utc_from_timestamp(0).timetuple()
# locally cache self._last_entry_timestamp so that entries published at identical times can be processed
last_entry_timestamp = self._last_entry_timestamp
for entry in self._feed.entries:
if firstrun or (
(
time_stamp := entry.get("updated_parsed")
or entry.get("published_parsed")
)
and time_stamp > last_entry_timestamp
):
self._update_and_fire_entry(entry)
new_entry_count += 1
else:
_LOGGER.debug("Already processed entry %s", entry.get("link"))
if new_entry_count == 0:
self._log_no_entries()
else:
_LOGGER.debug("%d entries published in feed %s", new_entry_count, self._url)
class StoredData:
"""Represent a data storage."""
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize data storage."""
self._data: dict[str, struct_time] = {}
self.hass = hass
self._store: Store[dict[str, str]] = Store(hass, STORAGE_VERSION, DOMAIN)
async def async_setup(self) -> None:
"""Set up storage."""
if (store_data := await self._store.async_load()) is None:
return
# Make sure that dst is set to 0, by using gmtime() on the timestamp.
self._data = {
feed_id: gmtime(datetime.fromisoformat(timestamp_string).timestamp())
for feed_id, timestamp_string in store_data.items()
}
def get_timestamp(self, feed_id: str) -> struct_time | None:
"""Return stored timestamp for given feed id."""
return self._data.get(feed_id)
@callback
def async_put_timestamp(self, feed_id: str, timestamp: struct_time) -> None:
"""Update timestamp for given feed id."""
self._data[feed_id] = timestamp
self._store.async_delay_save(self._async_save_data, DELAY_SAVE)
@callback
def _async_save_data(self) -> dict[str, str]:
"""Save feed data to storage."""
return {
feed_id: dt_util.utc_from_timestamp(timegm(struct_utc)).isoformat()
for feed_id, struct_utc in self._data.items()
}

tests/components/feedreader/test_init.py

@@ -1,23 +1,15 @@
"""The tests for the feedreader component."""
from collections.abc import Generator
from datetime import datetime, timedelta
import pickle
from time import gmtime
from typing import Any
from unittest import mock
from unittest.mock import MagicMock, mock_open, patch
from unittest.mock import patch
import pytest
from homeassistant.components import feedreader
from homeassistant.components.feedreader import (
CONF_MAX_ENTRIES,
CONF_URLS,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
EVENT_FEEDREADER,
)
from homeassistant.components.feedreader import CONF_MAX_ENTRIES, CONF_URLS
from homeassistant.components.feedreader.const import DOMAIN
from homeassistant.components.feedreader.coordinator import EVENT_FEEDREADER
from homeassistant.const import CONF_SCAN_INTERVAL, EVENT_HOMEASSISTANT_START
from homeassistant.core import Event, HomeAssistant
from homeassistant.setup import async_setup_component
@@ -26,11 +18,11 @@ import homeassistant.util.dt as dt_util
from tests.common import async_capture_events, async_fire_time_changed, load_fixture
URL = "http://some.rss.local/rss_feed.xml"
VALID_CONFIG_1 = {feedreader.DOMAIN: {CONF_URLS: [URL]}}
VALID_CONFIG_2 = {feedreader.DOMAIN: {CONF_URLS: [URL], CONF_SCAN_INTERVAL: 60}}
VALID_CONFIG_3 = {feedreader.DOMAIN: {CONF_URLS: [URL], CONF_MAX_ENTRIES: 100}}
VALID_CONFIG_4 = {feedreader.DOMAIN: {CONF_URLS: [URL], CONF_MAX_ENTRIES: 5}}
VALID_CONFIG_5 = {feedreader.DOMAIN: {CONF_URLS: [URL], CONF_MAX_ENTRIES: 1}}
VALID_CONFIG_1 = {DOMAIN: {CONF_URLS: [URL]}}
VALID_CONFIG_2 = {DOMAIN: {CONF_URLS: [URL], CONF_SCAN_INTERVAL: 60}}
VALID_CONFIG_3 = {DOMAIN: {CONF_URLS: [URL], CONF_MAX_ENTRIES: 100}}
VALID_CONFIG_4 = {DOMAIN: {CONF_URLS: [URL], CONF_MAX_ENTRIES: 5}}
VALID_CONFIG_5 = {DOMAIN: {CONF_URLS: [URL], CONF_MAX_ENTRIES: 1}}
def load_fixture_bytes(src: str) -> bytes:
@@ -81,105 +73,36 @@ async def fixture_events(hass: HomeAssistant) -> list[Event]:
return async_capture_events(hass, EVENT_FEEDREADER)
@pytest.fixture(name="storage")
def fixture_storage(request: pytest.FixtureRequest) -> Generator[None, None, None]:
"""Set up the test storage environment."""
if request.param == "legacy_storage":
with patch("os.path.exists", return_value=False):
yield
elif request.param == "json_storage":
with patch("os.path.exists", return_value=True):
yield
else:
raise RuntimeError("Invalid storage fixture")
@pytest.fixture(name="legacy_storage_open")
def fixture_legacy_storage_open() -> Generator[MagicMock, None, None]:
"""Mock builtins.open for feedreader storage."""
with patch(
"homeassistant.components.feedreader.open",
mock_open(),
create=True,
) as open_mock:
yield open_mock
@pytest.fixture(name="legacy_storage_load", autouse=True)
def fixture_legacy_storage_load(
legacy_storage_open,
) -> Generator[MagicMock, None, None]:
"""Mock builtins.open for feedreader storage."""
with patch(
"homeassistant.components.feedreader.pickle.load", return_value={}
) as pickle_load:
yield pickle_load
async def test_setup_one_feed(hass: HomeAssistant) -> None:
"""Test the general setup of this component."""
assert await async_setup_component(hass, DOMAIN, VALID_CONFIG_1)
async def test_setup_no_feeds(hass: HomeAssistant) -> None:
"""Test config with no urls."""
assert not await async_setup_component(
hass, feedreader.DOMAIN, {feedreader.DOMAIN: {CONF_URLS: []}}
)
assert not await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_URLS: []}})
@pytest.mark.parametrize(
("open_error", "load_error"),
[
(FileNotFoundError("No file"), None),
(OSError("Boom"), None),
(None, pickle.PickleError("Bad data")),
],
)
async def test_legacy_storage_error(
hass: HomeAssistant,
legacy_storage_open: MagicMock,
legacy_storage_load: MagicMock,
open_error: Exception | None,
load_error: Exception | None,
) -> None:
"""Test legacy storage error."""
legacy_storage_open.side_effect = open_error
legacy_storage_load.side_effect = load_error
with patch(
"homeassistant.components.feedreader.async_track_time_interval"
) as track_method:
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_1)
await hass.async_block_till_done()
track_method.assert_called_once_with(
hass, mock.ANY, DEFAULT_SCAN_INTERVAL, cancel_on_shutdown=True
)
@pytest.mark.parametrize("storage", ["legacy_storage", "json_storage"], indirect=True)
async def test_storage_data_loading(
hass: HomeAssistant,
events: list[Event],
feed_one_event: bytes,
legacy_storage_load: MagicMock,
hass_storage: dict[str, Any],
storage: None,
) -> None:
"""Test loading existing storage data."""
storage_data: dict[str, str] = {URL: "2018-04-30T05:10:00+00:00"}
hass_storage[feedreader.DOMAIN] = {
hass_storage[DOMAIN] = {
"version": 1,
"minor_version": 1,
"key": feedreader.DOMAIN,
"key": DOMAIN,
"data": storage_data,
}
legacy_storage_data = {
URL: gmtime(datetime.fromisoformat(storage_data[URL]).timestamp())
}
legacy_storage_load.return_value = legacy_storage_data
with patch(
"feedparser.http.get",
return_value=feed_one_event,
):
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_2)
assert await async_setup_component(hass, DOMAIN, VALID_CONFIG_2)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
@@ -202,9 +125,9 @@ async def test_storage_data_writing(
"feedparser.http.get",
return_value=feed_one_event,
),
patch("homeassistant.components.feedreader.DELAY_SAVE", new=0),
patch("homeassistant.components.feedreader.coordinator.DELAY_SAVE", new=0),
):
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_2)
assert await async_setup_component(hass, DOMAIN, VALID_CONFIG_2)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
@@ -213,39 +136,12 @@ async def test_storage_data_writing(
assert len(events) == 1
# storage data updated
assert hass_storage[feedreader.DOMAIN]["data"] == storage_data
@pytest.mark.parametrize("storage", ["legacy_storage", "json_storage"], indirect=True)
async def test_setup_one_feed(hass: HomeAssistant, storage: None) -> None:
"""Test the general setup of this component."""
with patch(
"homeassistant.components.feedreader.async_track_time_interval"
) as track_method:
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_1)
await hass.async_block_till_done()
track_method.assert_called_once_with(
hass, mock.ANY, DEFAULT_SCAN_INTERVAL, cancel_on_shutdown=True
)
async def test_setup_scan_interval(hass: HomeAssistant) -> None:
"""Test the setup of this component with scan interval."""
with patch(
"homeassistant.components.feedreader.async_track_time_interval"
) as track_method:
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_2)
await hass.async_block_till_done()
track_method.assert_called_once_with(
hass, mock.ANY, timedelta(seconds=60), cancel_on_shutdown=True
)
assert hass_storage[DOMAIN]["data"] == storage_data
async def test_setup_max_entries(hass: HomeAssistant) -> None:
"""Test the setup of this component with max entries."""
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_3)
assert await async_setup_component(hass, DOMAIN, VALID_CONFIG_3)
await hass.async_block_till_done()
@@ -255,7 +151,7 @@ async def test_feed(hass: HomeAssistant, events, feed_one_event) -> None:
"feedparser.http.get",
return_value=feed_one_event,
):
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_2)
assert await async_setup_component(hass, DOMAIN, VALID_CONFIG_2)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
@@ -278,7 +174,7 @@ async def test_atom_feed(hass: HomeAssistant, events, feed_atom_event) -> None:
"feedparser.http.get",
return_value=feed_atom_event,
):
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_5)
assert await async_setup_component(hass, DOMAIN, VALID_CONFIG_5)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
@@ -305,13 +201,13 @@ async def test_feed_identical_timestamps(
return_value=feed_identically_timed_events,
),
patch(
"homeassistant.components.feedreader.StoredData.get_timestamp",
"homeassistant.components.feedreader.coordinator.StoredData.get_timestamp",
return_value=gmtime(
datetime.fromisoformat("1970-01-01T00:00:00.0+0000").timestamp()
),
),
):
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_2)
assert await async_setup_component(hass, DOMAIN, VALID_CONFIG_2)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
@@ -365,10 +261,11 @@ async def test_feed_updates(
feed_two_event,
]
with patch("feedparser.http.get", side_effect=side_effect):
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_2)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
with patch(
"homeassistant.components.feedreader.coordinator.feedparser.http.get",
side_effect=side_effect,
):
assert await async_setup_component(hass, DOMAIN, VALID_CONFIG_2)
await hass.async_block_till_done()
assert len(events) == 1
@@ -393,7 +290,7 @@ async def test_feed_default_max_length(
) -> None:
"""Test long feed beyond the default 20 entry limit."""
with patch("feedparser.http.get", return_value=feed_21_events):
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_2)
assert await async_setup_component(hass, DOMAIN, VALID_CONFIG_2)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
@@ -404,7 +301,7 @@ async def test_feed_default_max_length(
async def test_feed_max_length(hass: HomeAssistant, events, feed_21_events) -> None:
"""Test long feed beyond a configured 5 entry limit."""
with patch("feedparser.http.get", return_value=feed_21_events):
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_4)
assert await async_setup_component(hass, DOMAIN, VALID_CONFIG_4)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
@@ -417,7 +314,7 @@ async def test_feed_without_publication_date_and_title(
) -> None:
"""Test simple feed with entry without publication date and title."""
with patch("feedparser.http.get", return_value=feed_three_events):
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_2)
assert await async_setup_component(hass, DOMAIN, VALID_CONFIG_2)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
@@ -432,7 +329,7 @@ async def test_feed_with_unrecognized_publication_date(
with patch(
"feedparser.http.get", return_value=load_fixture_bytes("feedreader4.xml")
):
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_2)
assert await async_setup_component(hass, DOMAIN, VALID_CONFIG_2)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
@@ -444,7 +341,7 @@ async def test_feed_invalid_data(hass: HomeAssistant, events) -> None:
"""Test feed with invalid data."""
invalid_data = bytes("INVALID DATA", "utf-8")
with patch("feedparser.http.get", return_value=invalid_data):
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_2)
assert await async_setup_component(hass, DOMAIN, VALID_CONFIG_2)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
@@ -459,7 +356,7 @@ async def test_feed_parsing_failed(
assert "Error fetching feed data" not in caplog.text
with patch("feedparser.parse", return_value=None):
assert await async_setup_component(hass, feedreader.DOMAIN, VALID_CONFIG_2)
assert await async_setup_component(hass, DOMAIN, VALID_CONFIG_2)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
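Since polling is now driven by the coordinator's update_interval rather than async_track_time_interval, a follow-up refresh can be exercised in tests by advancing time with async_fire_time_changed. A hypothetical test along those lines, reusing the URL and the events/feed_one_event fixtures from the module above:

from datetime import timedelta
from unittest.mock import patch

from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util

from tests.common import async_fire_time_changed

CONFIG = {"feedreader": {"urls": ["http://some.rss.local/rss_feed.xml"]}}


async def test_second_poll(hass: HomeAssistant, events, feed_one_event) -> None:
    """Set up one feed, then force the coordinator's next refresh an hour later."""
    with patch("feedparser.http.get", return_value=feed_one_event):
        assert await async_setup_component(hass, "feedreader", CONFIG)
        await hass.async_block_till_done()
        assert len(events) == 1

        # Jump past the default one-hour update_interval so the
        # DataUpdateCoordinator schedules another _async_update_data call.
        async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=1, minutes=1))
        await hass.async_block_till_done()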