Add config flow to geo_json_events (#87062)
* migrated geo_json_events integration to config flow
* improve test coverage
* code reformatting
* fix tests
* fix entity manager
* changes after review
* improve test coverage and fixed form
* remove unused code
* remove commented out code
* changes after review
* make title prettier
* fixed tests
* simplified code
* changes after review
* fix test
* push deprecation out
* changes after review (x6)
* removed scan interval from user flow and import flow
This commit is contained in:
parent
0c0d59d3e2
commit
649557ed2e
14 changed files with 632 additions and 265 deletions
|
@ -1 +1,57 @@
|
|||
"""The geo_json_events component."""
|
||||
"""The GeoJSON events component."""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_registry import (
|
||||
async_entries_for_config_entry,
|
||||
async_get,
|
||||
)
|
||||
|
||||
from .const import DOMAIN, PLATFORMS
|
||||
from .manager import GeoJsonFeedEntityManager
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Set up the GeoJSON events component as config entry.

    Creates one feed entity manager for the entry, stores it in ``hass.data``,
    removes entities left over from a previous run, forwards the setup to all
    platforms and finally initializes the manager.
    """
    domain_data = hass.data.setdefault(DOMAIN, {})
    # One feed entity manager per config entry, shared by all platforms.
    entity_manager = GeoJsonFeedEntityManager(hass, config_entry)
    domain_data[config_entry.entry_id] = entity_manager
    _LOGGER.debug("Feed entity manager added for %s", config_entry.entry_id)
    # Entities from a previous run cannot be matched against freshly fetched
    # feed data, so drop them before the platforms are set up.
    await remove_orphaned_entities(hass, config_entry.entry_id)
    await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
    await entity_manager.async_init()
    return True
|
||||
|
||||
|
||||
async def remove_orphaned_entities(hass: HomeAssistant, entry_id: str) -> None:
    """Remove orphaned geo_location entities.

    This is needed because when fetching data from the external feed this integration is
    determining which entities need to be added, updated or removed by comparing the
    current with the previous data. After a restart of Home Assistant the integration
    has no previous data to compare against, and thus all entities managed by this
    integration are removed after startup.

    Args:
        hass: Home Assistant instance.
        entry_id: Config entry id whose registered entities are inspected.
    """
    entity_registry = async_get(hass)
    # async_entries_for_config_entry returns a (possibly empty) list, never
    # None, so the previous `is not None` guard was dead code — iterate
    # over the result directly.
    for entry in async_entries_for_config_entry(entity_registry, entry_id):
        if entry.domain == Platform.GEO_LOCATION:
            _LOGGER.debug("Removing orphaned entry %s", entry.entity_id)
            entity_registry.async_remove(entry.entity_id)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload the GeoJSON events config entry."""
    unloaded = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    if unloaded:
        # Drop the manager from hass.data and cancel its listeners/refresh.
        manager: GeoJsonFeedEntityManager = hass.data[DOMAIN].pop(entry.entry_id)
        await manager.async_stop()
    return unloaded
|
||||
|
|
105
homeassistant/components/geo_json_events/config_flow.py
Normal file
105
homeassistant/components/geo_json_events/config_flow.py
Normal file
|
@ -0,0 +1,105 @@
|
|||
"""Config flow to configure the GeoJSON events integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.const import (
|
||||
CONF_LATITUDE,
|
||||
CONF_LOCATION,
|
||||
CONF_LONGITUDE,
|
||||
CONF_RADIUS,
|
||||
CONF_URL,
|
||||
UnitOfLength,
|
||||
)
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
from homeassistant.helpers import config_validation as cv, selector
|
||||
from homeassistant.util.unit_conversion import DistanceConverter
|
||||
|
||||
from .const import DEFAULT_RADIUS_IN_KM, DEFAULT_RADIUS_IN_M, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)

# Schema for the user step: the feed URL plus a location picker that also
# lets the user draw a radius on the map.
_LOCATION_SELECTOR = selector.LocationSelector(
    selector.LocationSelectorConfig(radius=True, icon="")
)

DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_URL): cv.string,
        vol.Required(CONF_LOCATION): _LOCATION_SELECTOR,
    }
)
|
||||
|
||||
|
||||
class GeoJsonEventsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a GeoJSON events config flow."""

    def _create_entry(
        self, url: str, latitude: float, longitude: float, radius_in_km: float
    ) -> FlowResult:
        """Abort if an entry for this feed exists, otherwise create one.

        A feed is considered a duplicate when URL, latitude and longitude all
        match an existing entry; the radius is deliberately not part of the
        duplicate check.
        """
        self._async_abort_entries_match(
            {
                CONF_URL: url,
                CONF_LATITUDE: latitude,
                CONF_LONGITUDE: longitude,
            }
        )
        return self.async_create_entry(
            title=f"{url} ({latitude}, {longitude})",
            data={
                CONF_URL: url,
                CONF_LATITUDE: latitude,
                CONF_LONGITUDE: longitude,
                CONF_RADIUS: radius_in_km,
            },
        )

    async def async_step_import(self, import_config: dict[str, Any]) -> FlowResult:
        """Import a config entry from configuration.yaml."""
        home = self.hass.config
        # YAML allowed latitude/longitude/radius to be omitted; fall back to
        # the home coordinates and the default radius (already in km).
        return self._create_entry(
            import_config[CONF_URL],
            import_config.get(CONF_LATITUDE, home.latitude),
            import_config.get(CONF_LONGITUDE, home.longitude),
            import_config.get(CONF_RADIUS, DEFAULT_RADIUS_IN_KM),
        )

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Handle the start of the config flow."""
        if not user_input:
            # First visit: show the form pre-filled with the home coordinates
            # and the default radius (the location selector works in meters).
            defaults: Mapping[str, Any] = {
                CONF_LOCATION: {
                    CONF_LATITUDE: self.hass.config.latitude,
                    CONF_LONGITUDE: self.hass.config.longitude,
                    CONF_RADIUS: DEFAULT_RADIUS_IN_M,
                }
            }
            return self.async_show_form(
                step_id="user",
                data_schema=self.add_suggested_values_to_schema(DATA_SCHEMA, defaults),
            )

        location: dict[str, Any] = user_input[CONF_LOCATION]
        # The selector reports the radius in meters; the entry stores km.
        radius_in_km = DistanceConverter.convert(
            location[CONF_RADIUS],
            UnitOfLength.METERS,
            UnitOfLength.KILOMETERS,
        )
        return self._create_entry(
            user_input[CONF_URL],
            location[CONF_LATITUDE],
            location[CONF_LONGITUDE],
            radius_in_km,
        )
|
|
@ -4,11 +4,16 @@ from __future__ import annotations
|
|||
from datetime import timedelta
|
||||
from typing import Final
|
||||
|
||||
from homeassistant.const import Platform
|
||||
|
||||
DOMAIN: Final = "geo_json_events"
|
||||
|
||||
PLATFORMS: Final = [Platform.GEO_LOCATION]
|
||||
|
||||
ATTR_EXTERNAL_ID: Final = "external_id"
|
||||
DEFAULT_RADIUS_IN_KM: Final = 20.0
|
||||
DEFAULT_SCAN_INTERVAL: Final = timedelta(minutes=5)
|
||||
DEFAULT_RADIUS_IN_M: Final = 20000.0
|
||||
DEFAULT_UPDATE_INTERVAL: Final = timedelta(seconds=300)
|
||||
SOURCE: Final = "geo_json_events"
|
||||
|
||||
SIGNAL_DELETE_ENTITY: Final = "geo_json_events_delete_{}"
|
||||
|
|
|
@ -2,42 +2,41 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aio_geojson_generic_client import GenericFeedManager
|
||||
from aio_geojson_generic_client.feed_entry import GenericFeedEntry
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.geo_location import PLATFORM_SCHEMA, GeolocationEvent
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_LATITUDE,
|
||||
CONF_LONGITUDE,
|
||||
CONF_RADIUS,
|
||||
CONF_SCAN_INTERVAL,
|
||||
CONF_URL,
|
||||
EVENT_HOMEASSISTANT_START,
|
||||
UnitOfLength,
|
||||
)
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import GeoJsonFeedEntityManager
|
||||
from .const import (
|
||||
ATTR_EXTERNAL_ID,
|
||||
DEFAULT_RADIUS_IN_KM,
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
DOMAIN,
|
||||
SIGNAL_DELETE_ENTITY,
|
||||
SIGNAL_UPDATE_ENTITY,
|
||||
SOURCE,
|
||||
)
|
||||
from .manager import GeoJsonFeedEntityManager
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Deprecated.
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_URL): cv.string,
|
||||
|
@ -48,28 +47,15 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
|||
)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
|
||||
) -> None:
|
||||
"""Set up the GeoJSON Events platform."""
|
||||
url: str = config[CONF_URL]
|
||||
scan_interval: timedelta = config.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
|
||||
coordinates: tuple[float, float] = (
|
||||
config.get(CONF_LATITUDE, hass.config.latitude),
|
||||
config.get(CONF_LONGITUDE, hass.config.longitude),
|
||||
)
|
||||
radius_in_km: float = config[CONF_RADIUS]
|
||||
# Initialize the entity manager.
|
||||
manager = GeoJsonFeedEntityManager(
|
||||
hass, scan_interval, coordinates, url, radius_in_km
|
||||
)
|
||||
manager: GeoJsonFeedEntityManager = hass.data[DOMAIN][entry.entry_id]
|
||||
|
||||
@callback
|
||||
def async_add_geolocation(
|
||||
feed_manager: GenericFeedManager,
|
||||
feed_manager: GeoJsonFeedEntityManager,
|
||||
external_id: str,
|
||||
) -> None:
|
||||
"""Add geolocation entity from feed."""
|
||||
|
@ -77,15 +63,36 @@ async def async_setup_platform(
|
|||
_LOGGER.debug("Adding geolocation %s", new_entity)
|
||||
async_add_entities([new_entity], True)
|
||||
|
||||
async_dispatcher_connect(hass, manager.signal_new_entity, async_add_geolocation)
|
||||
manager.listeners.append(
|
||||
async_dispatcher_connect(hass, manager.signal_new_entity, async_add_geolocation)
|
||||
)
|
||||
# Do not wait for update here so that the setup can be completed and because an
|
||||
# update will fetch data from the feed via HTTP and then process that data.
|
||||
entry.async_create_task(hass, manager.async_update())
|
||||
_LOGGER.debug("Geolocation setup done")
|
||||
|
||||
await manager.async_init()
|
||||
|
||||
async def start_feed_manager(event: Event) -> None:
|
||||
"""Start feed manager."""
|
||||
await manager.async_update()
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_feed_manager)
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the GeoJSON Events platform."""
|
||||
async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_yaml",
|
||||
breaks_in_ha_version="2023.8.0",
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
)
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_IMPORT}, data=config
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class GeoJsonLocationEvent(GeolocationEvent):
|
||||
|
@ -95,10 +102,15 @@ class GeoJsonLocationEvent(GeolocationEvent):
|
|||
_attr_source = SOURCE
|
||||
_attr_unit_of_measurement = UnitOfLength.KILOMETERS
|
||||
|
||||
def __init__(self, feed_manager: GenericFeedManager, external_id: str) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
feed_manager: GeoJsonFeedEntityManager,
|
||||
external_id: str,
|
||||
) -> None:
|
||||
"""Initialize entity with data from feed entry."""
|
||||
self._feed_manager = feed_manager
|
||||
self._external_id = external_id
|
||||
self._attr_unique_id = f"{feed_manager.entry_id}_{external_id}"
|
||||
self._remove_signal_delete: Callable[[], None]
|
||||
self._remove_signal_update: Callable[[], None]
|
||||
|
||||
|
|
|
@ -1,18 +1,26 @@
|
|||
"""Entity manager for generic GeoJSON events."""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from collections.abc import Callable
|
||||
from datetime import datetime
|
||||
import logging
|
||||
|
||||
from aio_geojson_generic_client import GenericFeedManager
|
||||
from aio_geojson_generic_client.feed_entry import GenericFeedEntry
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_URL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
|
||||
from .const import DOMAIN, SIGNAL_DELETE_ENTITY, SIGNAL_UPDATE_ENTITY
|
||||
from .const import (
|
||||
DEFAULT_UPDATE_INTERVAL,
|
||||
DOMAIN,
|
||||
SIGNAL_DELETE_ENTITY,
|
||||
SIGNAL_UPDATE_ENTITY,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@ -23,27 +31,28 @@ class GeoJsonFeedEntityManager:
|
|||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
scan_interval: timedelta,
|
||||
coordinates: tuple[float, float],
|
||||
url: str,
|
||||
radius_in_km: float,
|
||||
config_entry: ConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize the GeoJSON Feed Manager."""
|
||||
|
||||
self._hass = hass
|
||||
self._hass: HomeAssistant = hass
|
||||
self.entry_id: str = config_entry.entry_id
|
||||
websession = aiohttp_client.async_get_clientsession(hass)
|
||||
self._feed_manager = GenericFeedManager(
|
||||
self._feed_manager: GenericFeedManager = GenericFeedManager(
|
||||
websession,
|
||||
self._generate_entity,
|
||||
self._update_entity,
|
||||
self._remove_entity,
|
||||
coordinates,
|
||||
url,
|
||||
filter_radius=radius_in_km,
|
||||
(
|
||||
config_entry.data[CONF_LATITUDE],
|
||||
config_entry.data[CONF_LONGITUDE],
|
||||
),
|
||||
config_entry.data[CONF_URL],
|
||||
filter_radius=config_entry.data[CONF_RADIUS],
|
||||
)
|
||||
self._scan_interval = scan_interval
|
||||
self.signal_new_entity = (
|
||||
f"{DOMAIN}_new_geolocation_{coordinates}-{url}-{radius_in_km}"
|
||||
self._track_time_remove_callback: Callable[[], None] | None = None
|
||||
self.listeners: list[Callable[[], None]] = []
|
||||
self.signal_new_entity: str = (
|
||||
f"{DOMAIN}_new_geolocation_{config_entry.entry_id}"
|
||||
)
|
||||
|
||||
async def async_init(self) -> None:
|
||||
|
@ -54,7 +63,10 @@ class GeoJsonFeedEntityManager:
|
|||
await self.async_update()
|
||||
|
||||
# Trigger updates at regular intervals.
|
||||
async_track_time_interval(self._hass, update, self._scan_interval)
|
||||
self._track_time_remove_callback = async_track_time_interval(
|
||||
self._hass, update, DEFAULT_UPDATE_INTERVAL
|
||||
)
|
||||
|
||||
_LOGGER.debug("Feed entity manager initialized")
|
||||
|
||||
async def async_update(self) -> None:
|
||||
|
@ -62,6 +74,15 @@ class GeoJsonFeedEntityManager:
|
|||
await self._feed_manager.update()
|
||||
_LOGGER.debug("Feed entity manager updated")
|
||||
|
||||
async def async_stop(self) -> None:
|
||||
"""Stop this feed entity manager from refreshing."""
|
||||
for unsub_dispatcher in self.listeners:
|
||||
unsub_dispatcher()
|
||||
self.listeners = []
|
||||
if self._track_time_remove_callback:
|
||||
self._track_time_remove_callback()
|
||||
_LOGGER.debug("Feed entity manager stopped")
|
||||
|
||||
def get_entry(self, external_id: str) -> GenericFeedEntry | None:
|
||||
"""Get feed entry by external id."""
|
||||
return self._feed_manager.feed_entries.get(external_id)
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
"domain": "geo_json_events",
|
||||
"name": "GeoJSON",
|
||||
"codeowners": ["@exxamalte"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/geo_json_events",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
|
|
22
homeassistant/components/geo_json_events/strings.json
Normal file
22
homeassistant/components/geo_json_events/strings.json
Normal file
|
@ -0,0 +1,22 @@
|
|||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"title": "Fill in your GeoJSON feed details.",
|
||||
"data": {
|
||||
"url": "[%key:common::config_flow::data::url%]",
|
||||
"location": "[%key:common::config_flow::data::location%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_yaml": {
|
||||
"title": "The GeoJSON feed YAML configuration is being removed",
|
||||
"description": "Configuring a GeoJSON feed using YAML is being removed.\n\nYour existing YAML configuration has been imported into the UI automatically.\n\nRemove the GeoJSON feed YAML configuration from your configuration.yaml file and restart Home Assistant to fix this issue."
|
||||
}
|
||||
}
|
||||
}
|
|
@ -153,6 +153,7 @@ FLOWS = {
|
|||
"garages_amsterdam",
|
||||
"gdacs",
|
||||
"generic",
|
||||
"geo_json_events",
|
||||
"geocaching",
|
||||
"geofency",
|
||||
"geonetnz_quakes",
|
||||
|
|
|
@ -1893,7 +1893,7 @@
|
|||
"geo_json_events": {
|
||||
"name": "GeoJSON",
|
||||
"integration_type": "service",
|
||||
"config_flow": false,
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling"
|
||||
},
|
||||
"geo_rss_events": {
|
||||
|
|
|
@ -1 +1,17 @@
|
|||
"""Tests for the geo_json_events component."""
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
|
||||
def _generate_mock_feed_entry(
|
||||
external_id: str,
|
||||
title: str,
|
||||
distance_to_home: float,
|
||||
coordinates: tuple[float, float],
|
||||
) -> MagicMock:
|
||||
"""Construct a mock feed entry for testing purposes."""
|
||||
feed_entry = MagicMock()
|
||||
feed_entry.external_id = external_id
|
||||
feed_entry.title = title
|
||||
feed_entry.distance_to_home = distance_to_home
|
||||
feed_entry.coordinates = coordinates
|
||||
return feed_entry
|
||||
|
|
37
tests/components/geo_json_events/conftest.py
Normal file
37
tests/components/geo_json_events/conftest.py
Normal file
|
@ -0,0 +1,37 @@
|
|||
"""Configuration for GeoJSON Events tests."""
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.geo_json_events import DOMAIN
|
||||
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_URL
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
URL = "http://geo.json.local/geo_json_events.json"
|
||||
|
||||
|
||||
@pytest.fixture
def config_entry() -> MockConfigEntry:
    """Create a mock GeoJSON Events config entry."""
    entry_title = f"{URL}, -41.2, 174.7"
    entry_data = {
        CONF_URL: URL,
        CONF_LATITUDE: -41.2,
        CONF_LONGITUDE: 174.7,
        CONF_RADIUS: 25.0,
    }
    return MockConfigEntry(
        domain=DOMAIN,
        data=entry_data,
        title=entry_title,
        unique_id=entry_title,
    )
|
||||
|
||||
|
||||
@pytest.fixture
def mock_setup_entry() -> Generator[AsyncMock, None, None]:
    """Mock geo_json_events entry setup."""
    setup_target = "homeassistant.components.geo_json_events.async_setup_entry"
    with patch(setup_target, return_value=True) as setup_mock:
        yield setup_mock
|
125
tests/components/geo_json_events/test_config_flow.py
Normal file
125
tests/components/geo_json_events/test_config_flow.py
Normal file
|
@ -0,0 +1,125 @@
|
|||
"""Define tests for the GeoJSON Events config flow."""
|
||||
from datetime import timedelta
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant import config_entries, data_entry_flow
|
||||
from homeassistant.components.geo_json_events import DOMAIN
|
||||
from homeassistant.const import (
|
||||
CONF_LATITUDE,
|
||||
CONF_LOCATION,
|
||||
CONF_LONGITUDE,
|
||||
CONF_RADIUS,
|
||||
CONF_SCAN_INTERVAL,
|
||||
CONF_URL,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
from tests.components.geo_json_events.conftest import URL
|
||||
|
||||
pytestmark = pytest.mark.usefixtures("mock_setup_entry")
|
||||
|
||||
|
||||
async def test_duplicate_error_user(
    hass: HomeAssistant, config_entry: MockConfigEntry
) -> None:
    """Test that errors are shown when duplicates are added."""
    config_entry.add_to_hass(hass)

    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert flow["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert flow["step_id"] == "user"

    # Submitting the same URL/coordinates as the existing entry must abort.
    duplicate_input = {
        CONF_URL: URL,
        CONF_LOCATION: {
            CONF_LATITUDE: -41.2,
            CONF_LONGITUDE: 174.7,
            CONF_RADIUS: 25.0,
        },
    }
    flow = await hass.config_entries.flow.async_configure(
        flow["flow_id"], user_input=duplicate_input
    )
    assert flow["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert flow["reason"] == "already_configured"
|
||||
|
||||
|
||||
async def test_duplicate_error_import(
    hass: HomeAssistant, config_entry: MockConfigEntry
) -> None:
    """Test that errors are shown when duplicates are added."""
    config_entry.add_to_hass(hass)

    # Importing the same URL/coordinates as the existing entry must abort.
    import_data = {
        CONF_URL: URL,
        CONF_LATITUDE: -41.2,
        CONF_LONGITUDE: 174.7,
        CONF_RADIUS: 25,
    }
    outcome = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_IMPORT},
        data=import_data,
    )
    assert outcome["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert outcome["reason"] == "already_configured"
|
||||
|
||||
|
||||
async def test_step_import(hass: HomeAssistant) -> None:
    """Test that the import step works."""
    import_data = {
        CONF_URL: URL,
        CONF_LATITUDE: -41.2,
        CONF_LONGITUDE: 174.7,
        CONF_RADIUS: 25,
        # This custom scan interval will not be carried over into the configuration.
        CONF_SCAN_INTERVAL: timedelta(minutes=4),
    }
    outcome = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_IMPORT},
        data=import_data,
    )
    assert outcome["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert (
        outcome["title"] == "http://geo.json.local/geo_json_events.json (-41.2, 174.7)"
    )
    # The scan interval must not appear in the created entry's data.
    assert outcome["data"] == {
        CONF_URL: URL,
        CONF_LATITUDE: -41.2,
        CONF_LONGITUDE: 174.7,
        CONF_RADIUS: 25,
    }
|
||||
|
||||
|
||||
async def test_step_user(hass: HomeAssistant) -> None:
    """Test that the user step works."""
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert flow["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert flow["step_id"] == "user"

    # The selector supplies the radius in meters (25000 m == 25 km).
    user_input = {
        CONF_URL: URL,
        CONF_LOCATION: {
            CONF_LATITUDE: -41.2,
            CONF_LONGITUDE: 174.7,
            CONF_RADIUS: 25000.0,
        },
    }
    flow = await hass.config_entries.flow.async_configure(
        flow["flow_id"], user_input=user_input
    )
    assert flow["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert (
        flow["title"] == "http://geo.json.local/geo_json_events.json (-41.2, 174.7)"
    )
    assert flow["data"] == {
        CONF_URL: URL,
        CONF_LATITUDE: -41.2,
        CONF_LONGITUDE: 174.7,
        CONF_RADIUS: 25.0,
    }
|
|
@ -1,267 +1,157 @@
|
|||
"""The tests for the geojson platform."""
|
||||
from unittest.mock import ANY, MagicMock, call, patch
|
||||
from datetime import timedelta
|
||||
from unittest.mock import ANY, call, patch
|
||||
|
||||
from aio_geojson_generic_client import GenericFeed
|
||||
from freezegun import freeze_time
|
||||
|
||||
from homeassistant.components import geo_location
|
||||
from homeassistant.components.geo_json_events.const import (
|
||||
ATTR_EXTERNAL_ID,
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
DEFAULT_UPDATE_INTERVAL,
|
||||
)
|
||||
from homeassistant.components.geo_location import (
|
||||
ATTR_SOURCE,
|
||||
DOMAIN as GEO_LOCATION_DOMAIN,
|
||||
)
|
||||
from homeassistant.components.geo_location import ATTR_SOURCE
|
||||
from homeassistant.const import (
|
||||
ATTR_FRIENDLY_NAME,
|
||||
ATTR_LATITUDE,
|
||||
ATTR_LONGITUDE,
|
||||
ATTR_UNIT_OF_MEASUREMENT,
|
||||
CONF_LATITUDE,
|
||||
CONF_LONGITUDE,
|
||||
CONF_RADIUS,
|
||||
CONF_SCAN_INTERVAL,
|
||||
CONF_URL,
|
||||
EVENT_HOMEASSISTANT_START,
|
||||
UnitOfLength,
|
||||
LENGTH_KILOMETERS,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.dispatcher import DATA_DISPATCHER
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.setup import async_setup_component
|
||||
import homeassistant.util.dt as dt_util
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from tests.common import assert_setup_component, async_fire_time_changed
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed
|
||||
from tests.components.geo_json_events import _generate_mock_feed_entry
|
||||
from tests.components.geo_json_events.conftest import URL
|
||||
|
||||
URL = "http://geo.json.local/geo_json_events.json"
|
||||
CONFIG = {
|
||||
geo_location.DOMAIN: [
|
||||
{"platform": "geo_json_events", CONF_URL: URL, CONF_RADIUS: 200}
|
||||
]
|
||||
}
|
||||
|
||||
CONFIG_WITH_CUSTOM_LOCATION = {
|
||||
geo_location.DOMAIN: [
|
||||
CONFIG_LEGACY = {
|
||||
GEO_LOCATION_DOMAIN: [
|
||||
{
|
||||
"platform": "geo_json_events",
|
||||
CONF_URL: URL,
|
||||
CONF_RADIUS: 200,
|
||||
CONF_LATITUDE: 15.1,
|
||||
CONF_LONGITUDE: 25.2,
|
||||
CONF_RADIUS: 190,
|
||||
CONF_SCAN_INTERVAL: timedelta(minutes=2),
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
def _generate_mock_feed_entry(external_id, title, distance_to_home, coordinates):
|
||||
"""Construct a mock feed entry for testing purposes."""
|
||||
feed_entry = MagicMock()
|
||||
feed_entry.external_id = external_id
|
||||
feed_entry.title = title
|
||||
feed_entry.distance_to_home = distance_to_home
|
||||
feed_entry.coordinates = coordinates
|
||||
return feed_entry
|
||||
|
||||
|
||||
async def test_setup(hass: HomeAssistant) -> None:
|
||||
"""Test the general setup of the platform."""
|
||||
async def test_setup_as_legacy_platform(hass: HomeAssistant) -> None:
|
||||
"""Test the setup with YAML legacy configuration."""
|
||||
# Set up some mock feed entries for this test.
|
||||
mock_entry_1 = _generate_mock_feed_entry("1234", "Title 1", 15.5, (-31.0, 150.0))
|
||||
mock_entry_2 = _generate_mock_feed_entry("2345", "Title 2", 20.5, (-31.1, 150.1))
|
||||
mock_entry_3 = _generate_mock_feed_entry("3456", "Title 3", 25.5, (-31.2, 150.2))
|
||||
mock_entry_4 = _generate_mock_feed_entry("4567", "Title 4", 12.5, (-31.3, 150.3))
|
||||
|
||||
# Patching 'utcnow' to gain more control over the timed update.
|
||||
utcnow = dt_util.utcnow()
|
||||
with freeze_time(utcnow), patch(
|
||||
"aio_geojson_client.feed.GeoJsonFeed.update"
|
||||
) as mock_feed_update:
|
||||
mock_feed_update.return_value = (
|
||||
"OK",
|
||||
[mock_entry_1, mock_entry_2, mock_entry_3],
|
||||
)
|
||||
with assert_setup_component(1, geo_location.DOMAIN):
|
||||
assert await async_setup_component(hass, geo_location.DOMAIN, CONFIG)
|
||||
await hass.async_block_till_done()
|
||||
# Artificially trigger update.
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
# Collect events.
|
||||
await hass.async_block_till_done()
|
||||
|
||||
all_states = hass.states.async_all()
|
||||
assert len(all_states) == 3
|
||||
|
||||
state = hass.states.get("geo_location.title_1")
|
||||
assert state is not None
|
||||
assert state.name == "Title 1"
|
||||
assert state.attributes == {
|
||||
ATTR_EXTERNAL_ID: "1234",
|
||||
ATTR_LATITUDE: -31.0,
|
||||
ATTR_LONGITUDE: 150.0,
|
||||
ATTR_FRIENDLY_NAME: "Title 1",
|
||||
ATTR_UNIT_OF_MEASUREMENT: UnitOfLength.KILOMETERS,
|
||||
ATTR_SOURCE: "geo_json_events",
|
||||
}
|
||||
assert round(abs(float(state.state) - 15.5), 7) == 0
|
||||
|
||||
state = hass.states.get("geo_location.title_2")
|
||||
assert state is not None
|
||||
assert state.name == "Title 2"
|
||||
assert state.attributes == {
|
||||
ATTR_EXTERNAL_ID: "2345",
|
||||
ATTR_LATITUDE: -31.1,
|
||||
ATTR_LONGITUDE: 150.1,
|
||||
ATTR_FRIENDLY_NAME: "Title 2",
|
||||
ATTR_UNIT_OF_MEASUREMENT: UnitOfLength.KILOMETERS,
|
||||
ATTR_SOURCE: "geo_json_events",
|
||||
}
|
||||
assert round(abs(float(state.state) - 20.5), 7) == 0
|
||||
|
||||
state = hass.states.get("geo_location.title_3")
|
||||
assert state is not None
|
||||
assert state.name == "Title 3"
|
||||
assert state.attributes == {
|
||||
ATTR_EXTERNAL_ID: "3456",
|
||||
ATTR_LATITUDE: -31.2,
|
||||
ATTR_LONGITUDE: 150.2,
|
||||
ATTR_FRIENDLY_NAME: "Title 3",
|
||||
ATTR_UNIT_OF_MEASUREMENT: UnitOfLength.KILOMETERS,
|
||||
ATTR_SOURCE: "geo_json_events",
|
||||
}
|
||||
assert round(abs(float(state.state) - 25.5), 7) == 0
|
||||
|
||||
# Simulate an update - one existing, one new entry,
|
||||
# one outdated entry
|
||||
mock_feed_update.return_value = (
|
||||
"OK",
|
||||
[mock_entry_1, mock_entry_4, mock_entry_3],
|
||||
)
|
||||
async_fire_time_changed(hass, utcnow + DEFAULT_SCAN_INTERVAL)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
all_states = hass.states.async_all()
|
||||
assert len(all_states) == 3
|
||||
|
||||
# Simulate an update - empty data, but successful update,
|
||||
# so no changes to entities.
|
||||
mock_feed_update.return_value = "OK_NO_DATA", None
|
||||
async_fire_time_changed(hass, utcnow + 2 * DEFAULT_SCAN_INTERVAL)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
all_states = hass.states.async_all()
|
||||
assert len(all_states) == 3
|
||||
|
||||
# Simulate an update - empty data, removes all entities
|
||||
mock_feed_update.return_value = "ERROR", None
|
||||
async_fire_time_changed(hass, utcnow + 3 * DEFAULT_SCAN_INTERVAL)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
all_states = hass.states.async_all()
|
||||
assert len(all_states) == 0
|
||||
|
||||
|
||||
async def test_setup_with_custom_location(hass: HomeAssistant) -> None:
|
||||
"""Test the setup with a custom location."""
|
||||
# Set up some mock feed entries for this test.
|
||||
mock_entry_1 = _generate_mock_feed_entry("1234", "Title 1", 2000.5, (-31.1, 150.1))
|
||||
mock_entry_1 = _generate_mock_feed_entry("1234", "Title 1", 20.5, (-31.1, 150.1))
|
||||
|
||||
with patch(
|
||||
"aio_geojson_generic_client.feed_manager.GenericFeed",
|
||||
wraps=GenericFeed,
|
||||
) as mock_feed, patch(
|
||||
"aio_geojson_client.feed.GeoJsonFeed.update"
|
||||
) as mock_feed_update:
|
||||
mock_feed_update.return_value = "OK", [mock_entry_1]
|
||||
"aio_geojson_client.feed.GeoJsonFeed.update",
|
||||
return_value=("OK", [mock_entry_1]),
|
||||
):
|
||||
assert await async_setup_component(hass, GEO_LOCATION_DOMAIN, CONFIG_LEGACY)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
with assert_setup_component(1, geo_location.DOMAIN):
|
||||
assert await async_setup_component(
|
||||
hass, geo_location.DOMAIN, CONFIG_WITH_CUSTOM_LOCATION
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
assert len(hass.states.async_entity_ids(GEO_LOCATION_DOMAIN)) == 1
|
||||
|
||||
# Artificially trigger update.
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
# Collect events.
|
||||
await hass.async_block_till_done()
|
||||
|
||||
all_states = hass.states.async_all()
|
||||
assert len(all_states) == 1
|
||||
|
||||
assert mock_feed.call_args == call(
|
||||
ANY, (15.1, 25.2), URL, filter_radius=200.0
|
||||
)
|
||||
assert mock_feed.call_args == call(ANY, ANY, URL, filter_radius=190.0)
|
||||
|
||||
|
||||
async def test_setup_race_condition(hass: HomeAssistant) -> None:
|
||||
"""Test a particular race condition experienced."""
|
||||
# 1. Feed returns 1 entry -> Feed manager creates 1 entity.
|
||||
# 2. Feed returns error -> Feed manager removes 1 entity.
|
||||
# However, this stayed on and kept listening for dispatcher signals.
|
||||
# 3. Feed returns 1 entry -> Feed manager creates 1 entity.
|
||||
# 4. Feed returns 1 entry -> Feed manager updates 1 entity.
|
||||
# Internally, the previous entity is updating itself, too.
|
||||
# 5. Feed returns error -> Feed manager removes 1 entity.
|
||||
# There are now 2 entities trying to remove themselves from HA, but
|
||||
# the second attempt fails of course.
|
||||
|
||||
# Set up some mock feed entries for this test.
|
||||
async def test_entity_lifecycle(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test entity lifecycle.."""
|
||||
config_entry.add_to_hass(hass)
|
||||
# Set up a mock feed entries for this test.
|
||||
mock_entry_1 = _generate_mock_feed_entry("1234", "Title 1", 15.5, (-31.0, 150.0))
|
||||
delete_signal = "geo_json_events_delete_1234"
|
||||
update_signal = "geo_json_events_update_1234"
|
||||
mock_entry_2 = _generate_mock_feed_entry("2345", "Title 2", 20.5, (-31.1, 150.1))
|
||||
mock_entry_3 = _generate_mock_feed_entry("3456", "Title 3", 25.5, (-31.2, 150.2))
|
||||
mock_entry_4 = _generate_mock_feed_entry("4567", "Title 4", 12.5, (-31.3, 150.3))
|
||||
|
||||
# Patching 'utcnow' to gain more control over the timed update.
|
||||
utcnow = dt_util.utcnow()
|
||||
with freeze_time(utcnow), patch(
|
||||
"aio_geojson_client.feed.GeoJsonFeed.update"
|
||||
) as mock_feed_update, assert_setup_component(1, geo_location.DOMAIN):
|
||||
assert await async_setup_component(hass, geo_location.DOMAIN, CONFIG)
|
||||
) as mock_feed_update:
|
||||
mock_feed_update.return_value = "OK", [mock_entry_1, mock_entry_2, mock_entry_3]
|
||||
|
||||
# Load config entry.
|
||||
assert await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
mock_feed_update.return_value = "OK", [mock_entry_1]
|
||||
# 3 geolocation and 1 sensor entities
|
||||
assert len(hass.states.async_entity_ids(GEO_LOCATION_DOMAIN)) == 3
|
||||
assert len(entity_registry.entities) == 3
|
||||
|
||||
# Artificially trigger update.
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
# Collect events.
|
||||
state = hass.states.get(f"{GEO_LOCATION_DOMAIN}.title_1")
|
||||
assert state is not None
|
||||
assert state.name == "Title 1"
|
||||
assert state.attributes == {
|
||||
ATTR_EXTERNAL_ID: "1234",
|
||||
ATTR_LATITUDE: -31.0,
|
||||
ATTR_LONGITUDE: 150.0,
|
||||
ATTR_FRIENDLY_NAME: "Title 1",
|
||||
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
|
||||
ATTR_SOURCE: "geo_json_events",
|
||||
}
|
||||
assert round(abs(float(state.state) - 15.5), 7) == 0
|
||||
|
||||
state = hass.states.get(f"{GEO_LOCATION_DOMAIN}.title_2")
|
||||
assert state is not None
|
||||
assert state.name == "Title 2"
|
||||
assert state.attributes == {
|
||||
ATTR_EXTERNAL_ID: "2345",
|
||||
ATTR_LATITUDE: -31.1,
|
||||
ATTR_LONGITUDE: 150.1,
|
||||
ATTR_FRIENDLY_NAME: "Title 2",
|
||||
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
|
||||
ATTR_SOURCE: "geo_json_events",
|
||||
}
|
||||
assert round(abs(float(state.state) - 20.5), 7) == 0
|
||||
|
||||
state = hass.states.get(f"{GEO_LOCATION_DOMAIN}.title_3")
|
||||
assert state is not None
|
||||
assert state.name == "Title 3"
|
||||
assert state.attributes == {
|
||||
ATTR_EXTERNAL_ID: "3456",
|
||||
ATTR_LATITUDE: -31.2,
|
||||
ATTR_LONGITUDE: 150.2,
|
||||
ATTR_FRIENDLY_NAME: "Title 3",
|
||||
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
|
||||
ATTR_SOURCE: "geo_json_events",
|
||||
}
|
||||
assert round(abs(float(state.state) - 25.5), 7) == 0
|
||||
|
||||
# Simulate an update - two existing, one new entry,
|
||||
# one outdated entry
|
||||
mock_feed_update.return_value = (
|
||||
"OK",
|
||||
[mock_entry_1, mock_entry_4, mock_entry_3],
|
||||
)
|
||||
async_fire_time_changed(hass, utcnow + DEFAULT_UPDATE_INTERVAL)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
all_states = hass.states.async_all()
|
||||
assert len(all_states) == 1
|
||||
assert len(hass.data[DATA_DISPATCHER][delete_signal]) == 1
|
||||
assert len(hass.data[DATA_DISPATCHER][update_signal]) == 1
|
||||
assert len(hass.states.async_entity_ids(GEO_LOCATION_DOMAIN)) == 3
|
||||
|
||||
# Simulate an update - empty data, but successful update,
|
||||
# so no changes to entities.
|
||||
mock_feed_update.return_value = "OK_NO_DATA", None
|
||||
async_fire_time_changed(hass, utcnow + 2 * DEFAULT_UPDATE_INTERVAL)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(hass.states.async_entity_ids(GEO_LOCATION_DOMAIN)) == 3
|
||||
|
||||
# Simulate an update - empty data, removes all entities
|
||||
mock_feed_update.return_value = "ERROR", None
|
||||
async_fire_time_changed(hass, utcnow + DEFAULT_SCAN_INTERVAL)
|
||||
async_fire_time_changed(hass, utcnow + 3 * DEFAULT_UPDATE_INTERVAL)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
all_states = hass.states.async_all()
|
||||
assert len(all_states) == 0
|
||||
assert len(hass.data[DATA_DISPATCHER][delete_signal]) == 0
|
||||
assert len(hass.data[DATA_DISPATCHER][update_signal]) == 0
|
||||
|
||||
# Simulate an update - 1 entry
|
||||
mock_feed_update.return_value = "OK", [mock_entry_1]
|
||||
async_fire_time_changed(hass, utcnow + 2 * DEFAULT_SCAN_INTERVAL)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
all_states = hass.states.async_all()
|
||||
assert len(all_states) == 1
|
||||
assert len(hass.data[DATA_DISPATCHER][delete_signal]) == 1
|
||||
assert len(hass.data[DATA_DISPATCHER][update_signal]) == 1
|
||||
|
||||
# Simulate an update - 1 entry
|
||||
mock_feed_update.return_value = "OK", [mock_entry_1]
|
||||
async_fire_time_changed(hass, utcnow + 3 * DEFAULT_SCAN_INTERVAL)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
all_states = hass.states.async_all()
|
||||
assert len(all_states) == 1
|
||||
assert len(hass.data[DATA_DISPATCHER][delete_signal]) == 1
|
||||
assert len(hass.data[DATA_DISPATCHER][update_signal]) == 1
|
||||
|
||||
# Simulate an update - empty data, removes all entities
|
||||
mock_feed_update.return_value = "ERROR", None
|
||||
async_fire_time_changed(hass, utcnow + 4 * DEFAULT_SCAN_INTERVAL)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
all_states = hass.states.async_all()
|
||||
assert len(all_states) == 0
|
||||
# Ensure that delete and update signal targets are now empty.
|
||||
assert len(hass.data[DATA_DISPATCHER][delete_signal]) == 0
|
||||
assert len(hass.data[DATA_DISPATCHER][update_signal]) == 0
|
||||
assert len(hass.states.async_entity_ids(GEO_LOCATION_DOMAIN)) == 0
|
||||
|
|
76
tests/components/geo_json_events/test_init.py
Normal file
76
tests/components/geo_json_events/test_init.py
Normal file
|
@ -0,0 +1,76 @@
|
|||
"""Define tests for the GeoJSON Events general setup."""
|
||||
from unittest.mock import patch
|
||||
|
||||
from homeassistant.components.geo_json_events.const import DOMAIN
|
||||
from homeassistant.components.geo_location import DOMAIN as GEO_LOCATION_DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
from tests.components.geo_json_events import _generate_mock_feed_entry
|
||||
|
||||
|
||||
async def test_component_unload_config_entry(
|
||||
hass: HomeAssistant, config_entry: MockConfigEntry
|
||||
) -> None:
|
||||
"""Test that loading and unloading of a config entry works."""
|
||||
config_entry.add_to_hass(hass)
|
||||
with patch(
|
||||
"aio_geojson_generic_client.GenericFeedManager.update"
|
||||
) as mock_feed_manager_update:
|
||||
# Load config entry.
|
||||
assert await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
assert mock_feed_manager_update.call_count == 1
|
||||
assert hass.data[DOMAIN][config_entry.entry_id] is not None
|
||||
# Unload config entry.
|
||||
assert await hass.config_entries.async_unload(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
assert hass.data[DOMAIN].get(config_entry.entry_id) is None
|
||||
|
||||
|
||||
async def test_remove_orphaned_entities(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test removing orphaned geolocation entities."""
|
||||
config_entry.add_to_hass(hass)
|
||||
|
||||
entity_registry.async_get_or_create(
|
||||
GEO_LOCATION_DOMAIN, "geo_json_events", "1", config_entry=config_entry
|
||||
)
|
||||
entity_registry.async_get_or_create(
|
||||
GEO_LOCATION_DOMAIN, "geo_json_events", "2", config_entry=config_entry
|
||||
)
|
||||
entity_registry.async_get_or_create(
|
||||
GEO_LOCATION_DOMAIN, "geo_json_events", "3", config_entry=config_entry
|
||||
)
|
||||
|
||||
# There should now be 3 "orphaned" entries available which will be removed
|
||||
# when the component is set up.
|
||||
entries = er.async_entries_for_config_entry(entity_registry, config_entry.entry_id)
|
||||
assert len(entries) == 3
|
||||
|
||||
# Set up a mock feed entry for this test.
|
||||
mock_entry_1 = _generate_mock_feed_entry(
|
||||
"1234",
|
||||
"Title 1",
|
||||
15.5,
|
||||
(38.0, -3.0),
|
||||
)
|
||||
|
||||
with patch(
|
||||
"aio_geojson_client.feed.GeoJsonFeed.update",
|
||||
return_value=("OK", [mock_entry_1]),
|
||||
):
|
||||
assert await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# 1 geolocation entity.
|
||||
entries = er.async_entries_for_config_entry(
|
||||
entity_registry, config_entry.entry_id
|
||||
)
|
||||
assert len(entries) == 1
|
||||
|
||||
assert len(hass.states.async_entity_ids(GEO_LOCATION_DOMAIN)) == 1
|
Loading…
Add table
Reference in a new issue