Scrape move yaml config to integration key (#74325)

* Scrape take 2

* cleanup

* new entity name

* Fix name, add tests

* Use FlowResultType

* Add test abort

* issue

* hassfest

* Remove not needed test

* clean

* Remove config entry and implement datacoordinator

* fix codeowners

* fix codeowners

* codeowners reset

* Fix coordinator

* Remove test config_flow

* Fix tests

* hassfest

* reset config flow

* reset strings

* reset sensor

* next version

* Reconfig

* Adjust sensor

* cleanup sensor

* cleanup init

* Fix tests

* coverage

* Guard against empty sensor

* naming

* Remove coverage

* Review comments

* Remove print

* Move sensor check
This commit is contained in:
G Johansson 2022-10-30 13:02:11 +01:00 committed by GitHub
parent 06773efcbd
commit 662aee17a6
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
8 changed files with 498 additions and 114 deletions

View file

@@ -34,6 +34,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.template import Template
from homeassistant.helpers.template_entity import (
TEMPLATE_SENSOR_BASE_SCHEMA,
@@ -42,7 +43,7 @@ from homeassistant.helpers.template_entity import (
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import CONF_INDEX, CONF_SELECT, DEFAULT_NAME, DEFAULT_VERIFY_SSL
from .const import CONF_INDEX, CONF_SELECT, DEFAULT_NAME, DEFAULT_VERIFY_SSL, DOMAIN
from .coordinator import ScrapeCoordinator
_LOGGER = logging.getLogger(__name__)
@@ -82,25 +83,40 @@ async def async_setup_platform(
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Web scrape sensor."""
resource_config = vol.Schema(RESOURCE_SCHEMA, extra=vol.REMOVE_EXTRA)(config)
rest = create_rest_data_from_config(hass, resource_config)
if discovery_info is None:
async_create_issue(
hass,
DOMAIN,
"moved_yaml",
breaks_in_ha_version="2022.12.0",
is_fixable=False,
severity=IssueSeverity.WARNING,
translation_key="moved_yaml",
)
resource_config = vol.Schema(RESOURCE_SCHEMA, extra=vol.REMOVE_EXTRA)(config)
rest = create_rest_data_from_config(hass, resource_config)
coordinator = ScrapeCoordinator(hass, rest, SCAN_INTERVAL)
await coordinator.async_refresh()
if coordinator.data is None:
raise PlatformNotReady
coordinator = ScrapeCoordinator(hass, rest, SCAN_INTERVAL)
await coordinator.async_refresh()
if coordinator.data is None:
raise PlatformNotReady
sensor_config = vol.Schema(
TEMPLATE_SENSOR_BASE_SCHEMA.schema, extra=vol.REMOVE_EXTRA
)(config)
sensor_config = config
template_config = vol.Schema(
TEMPLATE_SENSOR_BASE_SCHEMA.schema, extra=vol.REMOVE_EXTRA
)(sensor_config)
name: str = config[CONF_NAME]
unique_id: str | None = config.get(CONF_UNIQUE_ID)
else:
coordinator = discovery_info["coordinator"]
sensor_config = discovery_info["config"]
template_config = sensor_config
select: str | None = config.get(CONF_SELECT)
attr: str | None = config.get(CONF_ATTRIBUTE)
index: int = config[CONF_INDEX]
value_template: Template | None = config.get(CONF_VALUE_TEMPLATE)
name: str = template_config[CONF_NAME]
unique_id: str | None = template_config.get(CONF_UNIQUE_ID)
select: str | None = sensor_config.get(CONF_SELECT)
attr: str | None = sensor_config.get(CONF_ATTRIBUTE)
index: int = sensor_config[CONF_INDEX]
value_template: Template | None = sensor_config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
value_template.hass = hass
@@ -110,7 +126,7 @@ async def async_setup_platform(
ScrapeSensor(
hass,
coordinator,
sensor_config,
template_config,
name,
unique_id,
select,