Remove airvisual_pro code from airvisual (#84254)

parent 1a476258c7
commit 0ccac69ce1

6 changed files with 56 additions and 306 deletions
homeassistant/components/airvisual/__init__.py
@@ -1,4 +1,4 @@
-"""The airvisual component."""
+"""The AirVisual component."""
 from __future__ import annotations
 
 import asyncio
@@ -7,10 +7,13 @@ from datetime import timedelta
 from math import ceil
 from typing import Any
 
-from pyairvisual import CloudAPI, NodeSamba
-from pyairvisual.cloud_api import InvalidKeyError, KeyExpiredError, UnauthorizedError
+from pyairvisual.cloud_api import (
+    CloudAPI,
+    InvalidKeyError,
+    KeyExpiredError,
+    UnauthorizedError,
+)
 from pyairvisual.errors import AirVisualError
-from pyairvisual.node import NodeProError
 
 from homeassistant.components import automation
 from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
@@ -19,7 +22,6 @@ from homeassistant.const import (
     CONF_IP_ADDRESS,
     CONF_LATITUDE,
     CONF_LONGITUDE,
-    CONF_PASSWORD,
     CONF_SHOW_ON_MAP,
     CONF_STATE,
     Platform,
@@ -30,7 +32,6 @@ from homeassistant.helpers import (
     aiohttp_client,
     config_validation as cv,
     device_registry as dr,
-    entity_registry,
 )
 from homeassistant.helpers.entity import EntityDescription
 from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
@@ -204,108 +205,54 @@ def _standardize_geography_config_entry(
     hass.config_entries.async_update_entry(entry, **entry_updates)
 
 
-@callback
-def _standardize_node_pro_config_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
-    """Ensure that Node/Pro config entries have appropriate properties."""
-    entry_updates: dict[str, Any] = {}
-
-    if CONF_INTEGRATION_TYPE not in entry.data:
-        # If the config entry data doesn't contain the integration type, add it:
-        entry_updates["data"] = {
-            **entry.data,
-            CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_NODE_PRO,
-        }
-
-    if not entry_updates:
-        return
-
-    hass.config_entries.async_update_entry(entry, **entry_updates)
-
-
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up AirVisual as config entry."""
-    if CONF_API_KEY in entry.data:
-        _standardize_geography_config_entry(hass, entry)
-
-        websession = aiohttp_client.async_get_clientsession(hass)
-        cloud_api = CloudAPI(entry.data[CONF_API_KEY], session=websession)
-
-        async def async_update_data() -> dict[str, Any]:
-            """Get new data from the API."""
-            if CONF_CITY in entry.data:
-                api_coro = cloud_api.air_quality.city(
-                    entry.data[CONF_CITY],
-                    entry.data[CONF_STATE],
-                    entry.data[CONF_COUNTRY],
-                )
-            else:
-                api_coro = cloud_api.air_quality.nearest_city(
-                    entry.data[CONF_LATITUDE],
-                    entry.data[CONF_LONGITUDE],
-                )
-
-            try:
-                return await api_coro
-            except (InvalidKeyError, KeyExpiredError, UnauthorizedError) as ex:
-                raise ConfigEntryAuthFailed from ex
-            except AirVisualError as err:
-                raise UpdateFailed(f"Error while retrieving data: {err}") from err
-
-        coordinator = DataUpdateCoordinator(
-            hass,
-            LOGGER,
-            name=async_get_geography_id(entry.data),
-            # We give a placeholder update interval in order to create the coordinator;
-            # then, below, we use the coordinator's presence (along with any other
-            # coordinators using the same API key) to calculate an actual, leveled
-            # update interval:
-            update_interval=timedelta(minutes=5),
-            update_method=async_update_data,
-        )
-
-        # Only geography-based entries have options:
-        entry.async_on_unload(entry.add_update_listener(async_reload_entry))
-    else:
-        # Remove outdated air_quality entities from the entity registry if they exist:
-        ent_reg = entity_registry.async_get(hass)
-        for entity_entry in [
-            e
-            for e in ent_reg.entities.values()
-            if e.config_entry_id == entry.entry_id
-            and e.entity_id.startswith("air_quality")
-        ]:
-            LOGGER.debug(
-                'Removing deprecated air_quality entity: "%s"', entity_entry.entity_id
-            )
-            ent_reg.async_remove(entity_entry.entity_id)
-
-        _standardize_node_pro_config_entry(hass, entry)
-
-        async def async_update_data() -> dict[str, Any]:
-            """Get new data from the API."""
-            try:
-                async with NodeSamba(
-                    entry.data[CONF_IP_ADDRESS], entry.data[CONF_PASSWORD]
-                ) as node:
-                    return await node.async_get_latest_measurements()
-            except NodeProError as err:
-                raise UpdateFailed(f"Error while retrieving data: {err}") from err
-
-        coordinator = DataUpdateCoordinator(
-            hass,
-            LOGGER,
-            name="Node/Pro data",
-            update_interval=DEFAULT_NODE_PRO_UPDATE_INTERVAL,
-            update_method=async_update_data,
-        )
+    _standardize_geography_config_entry(hass, entry)
+
+    websession = aiohttp_client.async_get_clientsession(hass)
+    cloud_api = CloudAPI(entry.data[CONF_API_KEY], session=websession)
+
+    async def async_update_data() -> dict[str, Any]:
+        """Get new data from the API."""
+        if CONF_CITY in entry.data:
+            api_coro = cloud_api.air_quality.city(
+                entry.data[CONF_CITY],
+                entry.data[CONF_STATE],
+                entry.data[CONF_COUNTRY],
+            )
+        else:
+            api_coro = cloud_api.air_quality.nearest_city(
+                entry.data[CONF_LATITUDE],
+                entry.data[CONF_LONGITUDE],
+            )
+
+        try:
+            return await api_coro
+        except (InvalidKeyError, KeyExpiredError, UnauthorizedError) as ex:
+            raise ConfigEntryAuthFailed from ex
+        except AirVisualError as err:
+            raise UpdateFailed(f"Error while retrieving data: {err}") from err
+
+    coordinator = DataUpdateCoordinator(
+        hass,
+        LOGGER,
+        name=async_get_geography_id(entry.data),
+        # We give a placeholder update interval in order to create the coordinator;
+        # then, below, we use the coordinator's presence (along with any other
+        # coordinators using the same API key) to calculate an actual, leveled
+        # update interval:
+        update_interval=timedelta(minutes=5),
+        update_method=async_update_data,
+    )
+
+    entry.async_on_unload(entry.add_update_listener(async_reload_entry))
 
     await coordinator.async_config_entry_first_refresh()
     hass.data.setdefault(DOMAIN, {})
     hass.data[DOMAIN][entry.entry_id] = coordinator
 
     # Reassess the interval between 2 server requests
-    if CONF_API_KEY in entry.data:
-        async_sync_geo_coordinator_update_intervals(hass, entry.data[CONF_API_KEY])
+    async_sync_geo_coordinator_update_intervals(hass, entry.data[CONF_API_KEY])
 
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
homeassistant/components/airvisual/config_flow.py
@@ -5,25 +5,22 @@ import asyncio
 from collections.abc import Mapping
 from typing import Any
 
-from pyairvisual import CloudAPI, NodeSamba
 from pyairvisual.cloud_api import (
+    CloudAPI,
     InvalidKeyError,
     KeyExpiredError,
     NotFoundError,
     UnauthorizedError,
 )
 from pyairvisual.errors import AirVisualError
-from pyairvisual.node import NodeProError
 import voluptuous as vol
 
 from homeassistant import config_entries
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
     CONF_API_KEY,
-    CONF_IP_ADDRESS,
     CONF_LATITUDE,
     CONF_LONGITUDE,
-    CONF_PASSWORD,
     CONF_SHOW_ON_MAP,
     CONF_STATE,
 )
@@ -43,7 +40,6 @@ from .const import (
     DOMAIN,
     INTEGRATION_TYPE_GEOGRAPHY_COORDS,
     INTEGRATION_TYPE_GEOGRAPHY_NAME,
-    INTEGRATION_TYPE_NODE_PRO,
     LOGGER,
 )
 
@@ -55,9 +51,6 @@ GEOGRAPHY_NAME_SCHEMA = API_KEY_DATA_SCHEMA.extend(
         vol.Required(CONF_COUNTRY): cv.string,
     }
 )
-NODE_PRO_SCHEMA = vol.Schema(
-    {vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_PASSWORD): cv.string}
-)
 PICK_INTEGRATION_TYPE_SCHEMA = vol.Schema(
     {
         vol.Required("type"): vol.In(
@@ -205,34 +198,6 @@ class AirVisualFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
             user_input, INTEGRATION_TYPE_GEOGRAPHY_NAME
         )
 
-    async def async_step_node_pro(
-        self, user_input: dict[str, str] | None = None
-    ) -> FlowResult:
-        """Handle the initialization of the integration with a Node/Pro."""
-        if not user_input:
-            return self.async_show_form(step_id="node_pro", data_schema=NODE_PRO_SCHEMA)
-
-        await self._async_set_unique_id(user_input[CONF_IP_ADDRESS])
-
-        node = NodeSamba(user_input[CONF_IP_ADDRESS], user_input[CONF_PASSWORD])
-
-        try:
-            await node.async_connect()
-        except NodeProError as err:
-            LOGGER.error("Error connecting to Node/Pro unit: %s", err)
-            return self.async_show_form(
-                step_id="node_pro",
-                data_schema=NODE_PRO_SCHEMA,
-                errors={CONF_IP_ADDRESS: "cannot_connect"},
-            )
-
-        await node.async_disconnect()
-
-        return self.async_create_entry(
-            title=f"Node/Pro ({user_input[CONF_IP_ADDRESS]})",
-            data={**user_input, CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_NODE_PRO},
-        )
-
     async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> FlowResult:
         """Handle configuration by re-auth."""
         self._entry_data_for_reauth = entry_data
@@ -265,6 +230,4 @@ class AirVisualFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
 
         if user_input["type"] == INTEGRATION_TYPE_GEOGRAPHY_COORDS:
             return await self.async_step_geography_by_coords()
-        if user_input["type"] == INTEGRATION_TYPE_GEOGRAPHY_NAME:
-            return await self.async_step_geography_by_name()
-        return await self.async_step_node_pro()
+        return await self.async_step_geography_by_name()
homeassistant/components/airvisual/manifest.json
@@ -4,6 +4,7 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/airvisual",
   "requirements": ["pyairvisual==2022.12.1"],
+  "dependencies": ["airvisual_pro"],
   "codeowners": ["@bachya"],
   "iot_class": "cloud_polling",
   "loggers": ["pyairvisual", "pysmb"],
homeassistant/components/airvisual/sensor.py
@@ -23,19 +23,12 @@ from homeassistant.const import (
     TEMP_CELSIUS,
 )
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers.entity import DeviceInfo, EntityCategory
+from homeassistant.helpers.entity import EntityCategory
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
 
 from . import AirVisualEntity
-from .const import (
-    CONF_CITY,
-    CONF_COUNTRY,
-    CONF_INTEGRATION_TYPE,
-    DOMAIN,
-    INTEGRATION_TYPE_GEOGRAPHY_COORDS,
-    INTEGRATION_TYPE_GEOGRAPHY_NAME,
-)
+from .const import CONF_CITY, CONF_COUNTRY, DOMAIN
 
 ATTR_CITY = "city"
 ATTR_COUNTRY = "country"
@@ -193,24 +186,11 @@ async def async_setup_entry(
 ) -> None:
     """Set up AirVisual sensors based on a config entry."""
     coordinator = hass.data[DOMAIN][entry.entry_id]
-
-    sensors: list[AirVisualGeographySensor | AirVisualNodeProSensor]
-    if entry.data[CONF_INTEGRATION_TYPE] in (
-        INTEGRATION_TYPE_GEOGRAPHY_COORDS,
-        INTEGRATION_TYPE_GEOGRAPHY_NAME,
-    ):
-        sensors = [
-            AirVisualGeographySensor(coordinator, entry, description, locale)
-            for locale in GEOGRAPHY_SENSOR_LOCALES
-            for description in GEOGRAPHY_SENSOR_DESCRIPTIONS
-        ]
-    else:
-        sensors = [
-            AirVisualNodeProSensor(coordinator, entry, description)
-            for description in NODE_PRO_SENSOR_DESCRIPTIONS
-        ]
-
-    async_add_entities(sensors, True)
+    async_add_entities(
+        AirVisualGeographySensor(coordinator, entry, description, locale)
+        for locale in GEOGRAPHY_SENSOR_LOCALES
+        for description in GEOGRAPHY_SENSOR_DESCRIPTIONS
+    )
 
 
 class AirVisualGeographySensor(AirVisualEntity, SensorEntity):
@@ -295,67 +275,3 @@ class AirVisualGeographySensor(AirVisualEntity, SensorEntity):
             self._attr_extra_state_attributes["long"] = longitude
         self._attr_extra_state_attributes.pop(ATTR_LATITUDE, None)
         self._attr_extra_state_attributes.pop(ATTR_LONGITUDE, None)
-
-
-class AirVisualNodeProSensor(AirVisualEntity, SensorEntity):
-    """Define an AirVisual sensor related to a Node/Pro unit."""
-
-    _attr_has_entity_name = True
-
-    def __init__(
-        self,
-        coordinator: DataUpdateCoordinator,
-        entry: ConfigEntry,
-        description: SensorEntityDescription,
-    ) -> None:
-        """Initialize."""
-        super().__init__(coordinator, entry, description)
-
-        self._attr_unique_id = f"{coordinator.data['serial_number']}_{description.key}"
-
-    @property
-    def device_info(self) -> DeviceInfo:
-        """Return device registry information for this entity."""
-        return DeviceInfo(
-            identifiers={(DOMAIN, self.coordinator.data["serial_number"])},
-            manufacturer="AirVisual",
-            model=f'{self.coordinator.data["status"]["model"]}',
-            name=self.coordinator.data["settings"]["node_name"],
-            sw_version=(
-                f'Version {self.coordinator.data["status"]["system_version"]}'
-                f'{self.coordinator.data["status"]["app_version"]}'
-            ),
-        )
-
-    @callback
-    def update_from_latest_data(self) -> None:
-        """Update the entity from the latest data."""
-        if self.entity_description.key == SENSOR_KIND_AQI:
-            if self.coordinator.data["settings"]["is_aqi_usa"]:
-                self._attr_native_value = self.coordinator.data["measurements"][
-                    "aqi_us"
-                ]
-            else:
-                self._attr_native_value = self.coordinator.data["measurements"][
-                    "aqi_cn"
-                ]
-        elif self.entity_description.key == SENSOR_KIND_BATTERY_LEVEL:
-            self._attr_native_value = self.coordinator.data["status"]["battery"]
-        elif self.entity_description.key == SENSOR_KIND_CO2:
-            self._attr_native_value = self.coordinator.data["measurements"].get("co2")
-        elif self.entity_description.key == SENSOR_KIND_HUMIDITY:
-            self._attr_native_value = self.coordinator.data["measurements"].get(
-                "humidity"
-            )
-        elif self.entity_description.key == SENSOR_KIND_PM_0_1:
-            self._attr_native_value = self.coordinator.data["measurements"].get("pm0_1")
-        elif self.entity_description.key == SENSOR_KIND_PM_1_0:
-            self._attr_native_value = self.coordinator.data["measurements"].get("pm1_0")
-        elif self.entity_description.key == SENSOR_KIND_PM_2_5:
-            self._attr_native_value = self.coordinator.data["measurements"].get("pm2_5")
-        elif self.entity_description.key == SENSOR_KIND_TEMPERATURE:
-            self._attr_native_value = self.coordinator.data["measurements"].get(
-                "temperature_C"
-            )
-        elif self.entity_description.key == SENSOR_KIND_VOC:
-            self._attr_native_value = self.coordinator.data["measurements"].get("voc")
homeassistant/components/airvisual/strings.json
@@ -20,14 +20,6 @@
           "state": "state"
         }
       },
-      "node_pro": {
-        "title": "Configure an AirVisual Node/Pro",
-        "description": "Monitor a personal AirVisual unit. The password can be retrieved from the unit's UI.",
-        "data": {
-          "ip_address": "[%key:common::config_flow::data::host%]",
-          "password": "[%key:common::config_flow::data::password%]"
-        }
-      },
       "reauth_confirm": {
         "title": "Re-authenticate AirVisual",
         "data": {
@@ -46,7 +38,7 @@
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
     },
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_location%] or Node/Pro ID is already registered.",
+      "already_configured": "[%key:common::config_flow::abort::already_configured_location%]",
       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
     }
   },
tests/components/airvisual/test_config_flow.py
@@ -8,7 +8,6 @@ from pyairvisual.cloud_api import (
     UnauthorizedError,
 )
 from pyairvisual.errors import AirVisualError
-from pyairvisual.node import NodeProError
 import pytest
 
 from homeassistant import data_entry_flow
@@ -38,36 +37,12 @@ from homeassistant.setup import async_setup_component
 from tests.common import MockConfigEntry
 
 
-@pytest.mark.parametrize(
-    "config,data,unique_id",
-    [
-        (
-            {
-                CONF_API_KEY: "abcde12345",
-                CONF_LATITUDE: 51.528308,
-                CONF_LONGITUDE: -0.3817765,
-            },
-            {
-                "type": INTEGRATION_TYPE_GEOGRAPHY_COORDS,
-            },
-            "51.528308, -0.3817765",
-        ),
-        (
-            {
-                CONF_IP_ADDRESS: "192.168.1.100",
-                CONF_PASSWORD: "12345",
-            },
-            {
-                "type": INTEGRATION_TYPE_NODE_PRO,
-            },
-            "192.168.1.100",
-        ),
-    ],
-)
-async def test_duplicate_error(hass, config, config_entry, data):
+async def test_duplicate_error(hass, config, config_entry, data, setup_airvisual):
    """Test that errors are shown when duplicate entries are added."""
    result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": SOURCE_USER}, data=data
+        DOMAIN,
+        context={"source": SOURCE_USER},
+        data={"type": INTEGRATION_TYPE_GEOGRAPHY_COORDS},
    )
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=config
@@ -134,15 +109,6 @@ async def test_duplicate_error(hass, config, config_entry, data):
             {"base": "unknown"},
             INTEGRATION_TYPE_GEOGRAPHY_NAME,
         ),
-        (
-            {
-                CONF_IP_ADDRESS: "192.168.1.100",
-                CONF_PASSWORD: "my_password",
-            },
-            NodeProError,
-            {CONF_IP_ADDRESS: "cannot_connect"},
-            INTEGRATION_TYPE_NODE_PRO,
-        ),
     ],
 )
 async def test_errors(hass, data, exc, errors, integration_type):
@@ -317,32 +283,6 @@ async def test_step_geography_by_name(hass, config, setup_airvisual):
     }
 
 
-@pytest.mark.parametrize(
-    "config",
-    [
-        {
-            CONF_IP_ADDRESS: "192.168.1.100",
-            CONF_PASSWORD: "my_password",
-        }
-    ],
-)
-async def test_step_node_pro(hass, config, setup_airvisual):
-    """Test the Node/Pro step."""
-    result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": SOURCE_USER}, data={"type": "AirVisual Node/Pro"}
-    )
-    result = await hass.config_entries.flow.async_configure(
-        result["flow_id"], user_input=config
-    )
-    assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY
-    assert result["title"] == "Node/Pro (192.168.1.100)"
-    assert result["data"] == {
-        CONF_IP_ADDRESS: "192.168.1.100",
-        CONF_PASSWORD: "my_password",
-        CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_NODE_PRO,
-    }
-
-
 async def test_step_reauth(hass, config_entry, setup_airvisual):
     """Test that the reauth step works."""
     result = await hass.config_entries.flow.async_init(
@@ -395,12 +335,3 @@ async def test_step_user(hass):
 
     assert result["type"] == data_entry_flow.FlowResultType.FORM
     assert result["step_id"] == "geography_by_name"
-
-    result = await hass.config_entries.flow.async_init(
-        DOMAIN,
-        context={"source": SOURCE_USER},
-        data={"type": INTEGRATION_TYPE_NODE_PRO},
-    )
-
-    assert result["type"] == data_entry_flow.FlowResultType.FORM
-    assert result["step_id"] == "node_pro"
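For reference, the cloud-only request path that remains after this change can be exercised on its own roughly as follows. This is a minimal sketch, not code from the commit: it assumes pyairvisual==2022.12.1 (as pinned in the manifest above) plus aiohttp, the API key is a placeholder, and the coordinates are the sample values used in the test fixtures above.

"""Minimal sketch of the retained cloud-only AirVisual lookup (not from the commit)."""
import asyncio

from aiohttp import ClientSession
from pyairvisual.cloud_api import (
    CloudAPI,
    InvalidKeyError,
    KeyExpiredError,
    UnauthorizedError,
)
from pyairvisual.errors import AirVisualError


async def main() -> None:
    """Fetch nearest-city data the same way async_update_data does above."""
    async with ClientSession() as session:
        # Placeholder API key; Home Assistant reads this from the config entry.
        cloud_api = CloudAPI("YOUR_API_KEY", session=session)
        try:
            # Mirrors the nearest_city branch kept in async_setup_entry.
            data = await cloud_api.air_quality.nearest_city(51.528308, -0.3817765)
        except (InvalidKeyError, KeyExpiredError, UnauthorizedError):
            # The integration raises ConfigEntryAuthFailed here (reauth flow).
            print("API key was rejected")
        except AirVisualError as err:
            # The integration wraps this in UpdateFailed for the coordinator.
            print(f"Error while retrieving data: {err}")
        else:
            print(data)


if __name__ == "__main__":
    asyncio.run(main())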