Type feedreader strictly (#76707)

* Type feedreader strictly
* Run hassfest

parent 0f792eb92e
commit d0986c7650

3 changed files with 46 additions and 21 deletions
.strict-typing

@@ -98,6 +98,7 @@ homeassistant.components.energy.*
 homeassistant.components.evil_genius_labs.*
 homeassistant.components.fan.*
 homeassistant.components.fastdotcom.*
+homeassistant.components.feedreader.*
 homeassistant.components.file_upload.*
 homeassistant.components.filesize.*
 homeassistant.components.fitbit.*
homeassistant/components/feedreader/__init__.py

@@ -1,9 +1,13 @@
 """Support for RSS/Atom feeds."""
+from __future__ import annotations
+
 from datetime import datetime, timedelta
 from logging import getLogger
 from os.path import exists
 import pickle
 from threading import Lock
+from time import struct_time
+from typing import cast
 
 import feedparser
 import voluptuous as vol
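Note on the import hunk above: `from __future__ import annotations` postpones annotation evaluation (PEP 563), which is what lets the module use PEP 604 unions such as `struct_time | None` in annotations even on Python versions older than 3.10. A minimal, self-contained sketch of the idiom (the helper below is hypothetical, not part of the component):

    from __future__ import annotations

    from time import localtime, struct_time


    def newer(a: struct_time | None, b: struct_time | None) -> struct_time | None:
        """Hypothetical helper: return the later of two optional timestamps."""
        if a is None:
            return b
        if b is None:
            return a
        return max(a, b)  # struct_time compares like a tuple


    print(newer(None, localtime()))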
@@ -44,9 +48,9 @@ CONFIG_SCHEMA = vol.Schema(
 def setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the Feedreader component."""
-    urls = config[DOMAIN][CONF_URLS]
-    scan_interval = config[DOMAIN].get(CONF_SCAN_INTERVAL)
-    max_entries = config[DOMAIN].get(CONF_MAX_ENTRIES)
+    urls: list[str] = config[DOMAIN][CONF_URLS]
+    scan_interval: timedelta = config[DOMAIN][CONF_SCAN_INTERVAL]
+    max_entries: int = config[DOMAIN][CONF_MAX_ENTRIES]
     data_file = hass.config.path(f"{DOMAIN}.pickle")
     storage = StoredData(data_file)
     feeds = [
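The switch from `config[DOMAIN].get(...)` to plain indexing above matters for typing: `.get()` yields an Optional value, while indexing is safe here presumably because the component's CONFIG_SCHEMA supplies defaults for these keys. A minimal sketch of that voluptuous behavior, with an illustrative schema rather than the component's actual one:

    from datetime import timedelta

    import voluptuous as vol

    # Illustrative schema; the real CONFIG_SCHEMA lives in the component.
    SCHEMA = vol.Schema(
        {
            vol.Required("urls"): [str],
            vol.Optional("max_entries", default=20): int,
            vol.Optional("scan_interval", default=timedelta(hours=1)): timedelta,
        }
    )

    validated = SCHEMA({"urls": ["https://example.com/feed.xml"]})
    # Defaults are injected during validation, so plain indexing never misses:
    assert validated["max_entries"] == 20
    assert validated["scan_interval"] == timedelta(hours=1)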
@@ -58,16 +62,23 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
 class FeedManager:
     """Abstraction over Feedparser module."""
 
-    def __init__(self, url, scan_interval, max_entries, hass, storage):
+    def __init__(
+        self,
+        url: str,
+        scan_interval: timedelta,
+        max_entries: int,
+        hass: HomeAssistant,
+        storage: StoredData,
+    ) -> None:
         """Initialize the FeedManager object, poll as per scan interval."""
         self._url = url
         self._scan_interval = scan_interval
         self._max_entries = max_entries
-        self._feed = None
+        self._feed: feedparser.FeedParserDict | None = None
         self._hass = hass
         self._firstrun = True
         self._storage = storage
-        self._last_entry_timestamp = None
+        self._last_entry_timestamp: struct_time | None = None
         self._last_update_successful = False
         self._has_published_parsed = False
         self._has_updated_parsed = False
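The `__init__` hunk above uses the standard pattern for attributes that are populated later: declare them as `X | None` up front so mypy tracks both states. A minimal sketch with illustrative names:

    from __future__ import annotations

    from datetime import timedelta


    class Poller:
        """Hypothetical class mirroring the annotation pattern above."""

        def __init__(self, url: str, interval: timedelta) -> None:
            self.url = url
            self.interval = interval
            self.last_result: str | None = None  # filled in after the first poll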
@@ -76,23 +87,23 @@ class FeedManager:
         hass.bus.listen_once(EVENT_HOMEASSISTANT_START, lambda _: self._update())
         self._init_regular_updates(hass)
 
-    def _log_no_entries(self):
+    def _log_no_entries(self) -> None:
         """Send no entries log at debug level."""
         _LOGGER.debug("No new entries to be published in feed %s", self._url)
 
-    def _init_regular_updates(self, hass):
+    def _init_regular_updates(self, hass: HomeAssistant) -> None:
         """Schedule regular updates at the top of the clock."""
         track_time_interval(hass, lambda now: self._update(), self._scan_interval)
 
     @property
-    def last_update_successful(self):
+    def last_update_successful(self) -> bool:
         """Return True if the last feed update was successful."""
         return self._last_update_successful
 
-    def _update(self):
+    def _update(self) -> None:
         """Update the feed and publish new entries to the event bus."""
         _LOGGER.info("Fetching new data from feed %s", self._url)
-        self._feed = feedparser.parse(
+        self._feed: feedparser.FeedParserDict = feedparser.parse(  # type: ignore[no-redef]
             self._url,
             etag=None if not self._feed else self._feed.get("etag"),
             modified=None if not self._feed else self._feed.get("modified"),
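About the `# type: ignore[no-redef]` in the `_update` hunk: `self._feed` is already annotated in `__init__`, and annotating the same attribute a second time is a mypy error (`no-redef`), so the narrower re-annotation carries a targeted ignore. A standalone sketch of the pattern (names are illustrative):

    from __future__ import annotations


    class Cache:
        def __init__(self) -> None:
            self.data: dict[str, str] | None = None

        def refresh(self) -> None:
            # Re-annotating an existing attribute triggers mypy's no-redef
            # error, hence the targeted ignore on the narrower annotation.
            self.data: dict[str, str] = {"key": "value"}  # type: ignore[no-redef]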
@@ -125,15 +136,16 @@ class FeedManager:
                 self._publish_new_entries()
                 if self._has_published_parsed or self._has_updated_parsed:
                     self._storage.put_timestamp(
-                        self._feed_id, self._last_entry_timestamp
+                        self._feed_id, cast(struct_time, self._last_entry_timestamp)
                     )
             else:
                 self._log_no_entries()
         self._last_update_successful = True
         _LOGGER.info("Fetch from feed %s completed", self._url)
 
-    def _filter_entries(self):
+    def _filter_entries(self) -> None:
         """Filter the entries provided and return the ones to keep."""
+        assert self._feed is not None
         if len(self._feed.entries) > self._max_entries:
             _LOGGER.debug(
                 "Processing only the first %s entries in feed %s",
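The `cast(struct_time, ...)` added above is purely a type-checker construct: `typing.cast` does nothing at runtime and simply tells mypy to treat the value as the given type (here justified by the `_has_published_parsed or _has_updated_parsed` guard). A minimal sketch:

    from __future__ import annotations

    from time import localtime, struct_time
    from typing import cast

    maybe_ts: struct_time | None = localtime()
    ts = cast(struct_time, maybe_ts)  # no runtime check; mypy-only assertion
    print(ts.tm_year)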
@@ -142,7 +154,7 @@ class FeedManager:
             )
             self._feed.entries = self._feed.entries[0 : self._max_entries]
 
-    def _update_and_fire_entry(self, entry):
+    def _update_and_fire_entry(self, entry: feedparser.FeedParserDict) -> None:
         """Update last_entry_timestamp and fire entry."""
         # Check if the entry has a published or updated date.
         if "published_parsed" in entry and entry.published_parsed:
@@ -169,8 +181,9 @@ class FeedManager:
         entry.update({"feed_url": self._url})
         self._hass.bus.fire(self._event_type, entry)
 
-    def _publish_new_entries(self):
+    def _publish_new_entries(self) -> None:
         """Publish new entries to the event bus."""
+        assert self._feed is not None
         new_entries = False
         self._last_entry_timestamp = self._storage.get_timestamp(self._feed_id)
         if self._last_entry_timestamp:
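The `assert self._feed is not None` lines added here and in `_filter_entries` serve the type checker as much as runtime safety: an assert against None narrows `FeedParserDict | None` to `FeedParserDict` for the rest of the method. A minimal sketch with illustrative names:

    from __future__ import annotations


    class Feed:
        def __init__(self) -> None:
            self.entries: list[str] | None = None

        def count(self) -> int:
            # Narrows list[str] | None to list[str]; raises AssertionError
            # if called before entries are populated.
            assert self.entries is not None
            return len(self.entries)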
@@ -202,15 +215,15 @@ class FeedManager:
 class StoredData:
     """Abstraction over pickle data storage."""
 
-    def __init__(self, data_file):
+    def __init__(self, data_file: str) -> None:
         """Initialize pickle data storage."""
         self._data_file = data_file
         self._lock = Lock()
         self._cache_outdated = True
-        self._data = {}
+        self._data: dict[str, struct_time] = {}
         self._fetch_data()
 
-    def _fetch_data(self):
+    def _fetch_data(self) -> None:
         """Fetch data stored into pickle file."""
         if self._cache_outdated and exists(self._data_file):
             try:
@@ -223,20 +236,21 @@ class StoredData:
                     "Error loading data from pickled file %s", self._data_file
                 )
 
-    def get_timestamp(self, feed_id):
+    def get_timestamp(self, feed_id: str) -> struct_time | None:
         """Return stored timestamp for given feed id (usually the url)."""
         self._fetch_data()
         return self._data.get(feed_id)
 
-    def put_timestamp(self, feed_id, timestamp):
+    def put_timestamp(self, feed_id: str, timestamp: struct_time) -> None:
         """Update timestamp for given feed id (usually the url)."""
         self._fetch_data()
         with self._lock, open(self._data_file, "wb") as myfile:
             self._data.update({feed_id: timestamp})
             _LOGGER.debug(
-                "Overwriting feed %s timestamp in storage file %s",
+                "Overwriting feed %s timestamp in storage file %s: %s",
                 feed_id,
                 self._data_file,
+                timestamp,
             )
             try:
                 pickle.dump(self._data, myfile)
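For context on the `dict[str, struct_time]` annotation: StoredData persists a feed-id-to-timestamp mapping via pickle, and such a dict survives a dump/load round trip unchanged. A minimal sketch (the file name below is hypothetical; the component derives its path via `hass.config.path`):

    from __future__ import annotations

    import pickle
    from time import localtime, struct_time

    data: dict[str, struct_time] = {"https://example.com/feed.xml": localtime()}
    with open("feedreader_demo.pickle", "wb") as fh:
        pickle.dump(data, fh)
    with open("feedreader_demo.pickle", "rb") as fh:
        assert pickle.load(fh) == data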
mypy.ini

@@ -739,6 +739,16 @@ disallow_untyped_defs = true
 warn_return_any = true
 warn_unreachable = true
 
+[mypy-homeassistant.components.feedreader.*]
+check_untyped_defs = true
+disallow_incomplete_defs = true
+disallow_subclassing_any = true
+disallow_untyped_calls = true
+disallow_untyped_decorators = true
+disallow_untyped_defs = true
+warn_return_any = true
+warn_unreachable = true
+
 [mypy-homeassistant.components.file_upload.*]
 check_untyped_defs = true
 disallow_incomplete_defs = true
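With the new `[mypy-homeassistant.components.feedreader.*]` section in place, strict checks apply to the whole package; for example, `disallow_untyped_defs` rejects any function without annotations. A hedged illustration (hypothetical code, not from the component):

    def fetch(url):  # mypy: error: Function is missing a type annotation
        return url


    def fetch_typed(url: str) -> str:  # accepted under disallow_untyped_defs
        return url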