Merge branch 'dev' of github.com:home-assistant/core into hassio_stats_coordinator
This commit is contained in:
commit
7360ea0677
659 changed files with 21759 additions and 9227 deletions
|
@ -226,6 +226,7 @@ omit =
|
|||
homeassistant/components/dublin_bus_transport/sensor.py
|
||||
homeassistant/components/dunehd/__init__.py
|
||||
homeassistant/components/dunehd/media_player.py
|
||||
homeassistant/components/dwd_weather_warnings/const.py
|
||||
homeassistant/components/dwd_weather_warnings/sensor.py
|
||||
homeassistant/components/dweet/*
|
||||
homeassistant/components/ebox/sensor.py
|
||||
|
@ -1438,7 +1439,6 @@ omit =
|
|||
homeassistant/components/xbox/media_player.py
|
||||
homeassistant/components/xbox/remote.py
|
||||
homeassistant/components/xbox/sensor.py
|
||||
homeassistant/components/xbox_live/sensor.py
|
||||
homeassistant/components/xeoma/camera.py
|
||||
homeassistant/components/xiaomi/camera.py
|
||||
homeassistant/components/xiaomi_aqara/__init__.py
|
||||
|
|
6
.github/workflows/ci.yaml
vendored
6
.github/workflows/ci.yaml
vendored
|
@ -40,7 +40,9 @@ env:
|
|||
# - 10.6.10 is the version currently shipped with the Add-on (as of 31 Jan 2023)
|
||||
# 10.10 is the latest short-term-support
|
||||
# - 10.10.3 is the latest (as of 6 Feb 2023)
|
||||
MARIADB_VERSIONS: "['mariadb:10.3.32','mariadb:10.6.10','mariadb:10.10.3']"
|
||||
# mysql 8.0.32 does not always behave the same as MariaDB
|
||||
# and some queries that work on MariaDB do not work on MySQL
|
||||
MARIADB_VERSIONS: "['mariadb:10.3.32','mariadb:10.6.10','mariadb:10.10.3','mysql:8.0.32']"
|
||||
# 12 is the oldest supported version
|
||||
# - 12.14 is the latest (as of 9 Feb 2023)
|
||||
# 15 is the latest version
|
||||
|
@ -434,6 +436,7 @@ jobs:
|
|||
shell: bash
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
shopt -s globstar
|
||||
pre-commit run --hook-stage manual prettier --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*}
|
||||
|
||||
- name: Register check executables problem matcher
|
||||
|
@ -1091,6 +1094,7 @@ jobs:
|
|||
needs:
|
||||
- info
|
||||
- pytest
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
repos:
|
||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||
rev: v0.0.256
|
||||
rev: v0.0.260
|
||||
hooks:
|
||||
- id: ruff
|
||||
args:
|
||||
- --fix
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 23.1.0
|
||||
rev: 23.3.0
|
||||
hooks:
|
||||
- id: black
|
||||
args:
|
||||
|
|
|
@ -137,6 +137,7 @@ homeassistant.components.hardkernel.*
|
|||
homeassistant.components.hardware.*
|
||||
homeassistant.components.here_travel_time.*
|
||||
homeassistant.components.history.*
|
||||
homeassistant.components.homeassistant.exposed_entities
|
||||
homeassistant.components.homeassistant.triggers.event
|
||||
homeassistant.components.homeassistant_alerts.*
|
||||
homeassistant.components.homeassistant_hardware.*
|
||||
|
|
17
CODEOWNERS
17
CODEOWNERS
|
@ -80,6 +80,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/android_ip_webcam/ @engrbm87
|
||||
/homeassistant/components/androidtv/ @JeffLIrion @ollo69
|
||||
/tests/components/androidtv/ @JeffLIrion @ollo69
|
||||
/homeassistant/components/androidtv_remote/ @tronikos
|
||||
/tests/components/androidtv_remote/ @tronikos
|
||||
/homeassistant/components/anthemav/ @hyralex
|
||||
/tests/components/anthemav/ @hyralex
|
||||
/homeassistant/components/apache_kafka/ @bachya
|
||||
|
@ -215,8 +217,6 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/conversation/ @home-assistant/core @synesthesiam
|
||||
/homeassistant/components/coolmaster/ @OnFreund
|
||||
/tests/components/coolmaster/ @OnFreund
|
||||
/homeassistant/components/coronavirus/ @home-assistant/core
|
||||
/tests/components/coronavirus/ @home-assistant/core
|
||||
/homeassistant/components/counter/ @fabaff
|
||||
/tests/components/counter/ @fabaff
|
||||
/homeassistant/components/cover/ @home-assistant/core
|
||||
|
@ -228,8 +228,6 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/cups/ @fabaff
|
||||
/homeassistant/components/daikin/ @fredrike
|
||||
/tests/components/daikin/ @fredrike
|
||||
/homeassistant/components/darksky/ @fabaff
|
||||
/tests/components/darksky/ @fabaff
|
||||
/homeassistant/components/debugpy/ @frenck
|
||||
/tests/components/debugpy/ @frenck
|
||||
/homeassistant/components/deconz/ @Kane610
|
||||
|
@ -283,7 +281,7 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/dsmr_reader/ @depl0y @glodenox
|
||||
/homeassistant/components/dunehd/ @bieniu
|
||||
/tests/components/dunehd/ @bieniu
|
||||
/homeassistant/components/dwd_weather_warnings/ @runningman84 @stephan192 @Hummel95
|
||||
/homeassistant/components/dwd_weather_warnings/ @runningman84 @stephan192 @Hummel95 @andarotajo
|
||||
/homeassistant/components/dynalite/ @ziv1234
|
||||
/tests/components/dynalite/ @ziv1234
|
||||
/homeassistant/components/eafm/ @Jc2k
|
||||
|
@ -979,6 +977,8 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/repairs/ @home-assistant/core
|
||||
/tests/components/repairs/ @home-assistant/core
|
||||
/homeassistant/components/repetier/ @MTrab @ShadowBr0ther
|
||||
/homeassistant/components/rest/ @epenet
|
||||
/tests/components/rest/ @epenet
|
||||
/homeassistant/components/rflink/ @javicalle
|
||||
/tests/components/rflink/ @javicalle
|
||||
/homeassistant/components/rfxtrx/ @danielhiversen @elupus @RobBie1221
|
||||
|
@ -1157,8 +1157,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/stookwijzer/ @fwestenberg
|
||||
/homeassistant/components/stream/ @hunterjm @uvjustin @allenporter
|
||||
/tests/components/stream/ @hunterjm @uvjustin @allenporter
|
||||
/homeassistant/components/stt/ @pvizeli
|
||||
/tests/components/stt/ @pvizeli
|
||||
/homeassistant/components/stt/ @home-assistant/core @pvizeli
|
||||
/tests/components/stt/ @home-assistant/core @pvizeli
|
||||
/homeassistant/components/subaru/ @G-Two
|
||||
/tests/components/subaru/ @G-Two
|
||||
/homeassistant/components/suez_water/ @ooii
|
||||
|
@ -1303,8 +1303,6 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/version/ @ludeeus
|
||||
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey
|
||||
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey
|
||||
/homeassistant/components/vicare/ @oischinger
|
||||
/tests/components/vicare/ @oischinger
|
||||
/homeassistant/components/vilfo/ @ManneW
|
||||
/tests/components/vilfo/ @ManneW
|
||||
/homeassistant/components/vivotek/ @HarlemSquirrel
|
||||
|
@ -1367,7 +1365,6 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/ws66i/ @ssaenger
|
||||
/homeassistant/components/xbox/ @hunterjm
|
||||
/tests/components/xbox/ @hunterjm
|
||||
/homeassistant/components/xbox_live/ @MartinHjelmare
|
||||
/homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
|
||||
/tests/components/xiaomi_aqara/ @danielhiversen @syssi
|
||||
/homeassistant/components/xiaomi_ble/ @Jc2k @Ernst79
|
||||
|
|
|
@ -239,6 +239,7 @@ async def load_registries(hass: core.HomeAssistant) -> None:
|
|||
|
||||
# Load the registries and cache the result of platform.uname().processor
|
||||
entity.async_setup(hass)
|
||||
template.async_setup(hass)
|
||||
await asyncio.gather(
|
||||
area_registry.async_load(hass),
|
||||
device_registry.async_load(hass),
|
||||
|
|
|
@ -10,7 +10,6 @@
|
|||
"microsoft_face",
|
||||
"microsoft",
|
||||
"msteams",
|
||||
"xbox",
|
||||
"xbox_live"
|
||||
"xbox"
|
||||
]
|
||||
}
|
||||
|
|
|
@ -17,7 +17,6 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
|||
from homeassistant.helpers.device_registry import DeviceEntryType
|
||||
from homeassistant.helpers.entity import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||
|
||||
from .const import ATTR_FORECAST, CONF_FORECAST, DOMAIN, MANUFACTURER
|
||||
|
||||
|
@ -116,11 +115,7 @@ class AccuWeatherDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
|||
async with timeout(10):
|
||||
current = await self.accuweather.async_get_current_conditions()
|
||||
forecast = (
|
||||
await self.accuweather.async_get_forecast(
|
||||
metric=self.hass.config.units is METRIC_SYSTEM
|
||||
)
|
||||
if self.forecast
|
||||
else {}
|
||||
await self.accuweather.async_get_forecast() if self.forecast else {}
|
||||
)
|
||||
except (
|
||||
ApiError,
|
||||
|
|
|
@ -20,7 +20,6 @@ from homeassistant.components.weather import (
|
|||
ATTR_CONDITION_WINDY,
|
||||
)
|
||||
|
||||
API_IMPERIAL: Final = "Imperial"
|
||||
API_METRIC: Final = "Metric"
|
||||
ATTRIBUTION: Final = "Data provided by AccuWeather"
|
||||
ATTR_CATEGORY: Final = "Category"
|
||||
|
|
|
@ -26,11 +26,9 @@ from homeassistant.core import HomeAssistant, callback
|
|||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||
|
||||
from . import AccuWeatherDataUpdateCoordinator
|
||||
from .const import (
|
||||
API_IMPERIAL,
|
||||
API_METRIC,
|
||||
ATTR_CATEGORY,
|
||||
ATTR_DIRECTION,
|
||||
|
@ -51,7 +49,7 @@ PARALLEL_UPDATES = 1
|
|||
class AccuWeatherSensorDescriptionMixin:
|
||||
"""Mixin for AccuWeather sensor."""
|
||||
|
||||
value_fn: Callable[[dict[str, Any], str], StateType]
|
||||
value_fn: Callable[[dict[str, Any]], StateType]
|
||||
|
||||
|
||||
@dataclass
|
||||
|
@ -61,8 +59,6 @@ class AccuWeatherSensorDescription(
|
|||
"""Class describing AccuWeather sensor entities."""
|
||||
|
||||
attr_fn: Callable[[dict[str, Any]], dict[str, StateType]] = lambda _: {}
|
||||
metric_unit: str | None = None
|
||||
us_customary_unit: str | None = None
|
||||
|
||||
|
||||
FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
|
@ -72,7 +68,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
name="Cloud cover day",
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_fn=lambda data, _: cast(int, data),
|
||||
value_fn=lambda data: cast(int, data),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="CloudCoverNight",
|
||||
|
@ -80,7 +76,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
name="Cloud cover night",
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_fn=lambda data, _: cast(int, data),
|
||||
value_fn=lambda data: cast(int, data),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="Grass",
|
||||
|
@ -88,7 +84,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
name="Grass pollen",
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
|
||||
value_fn=lambda data, _: cast(int, data[ATTR_VALUE]),
|
||||
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
|
||||
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
|
@ -96,7 +92,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
icon="mdi:weather-partly-cloudy",
|
||||
name="Hours of sun",
|
||||
native_unit_of_measurement=UnitOfTime.HOURS,
|
||||
value_fn=lambda data, _: cast(float, data),
|
||||
value_fn=lambda data: cast(float, data),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="Mold",
|
||||
|
@ -104,7 +100,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
name="Mold pollen",
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
|
||||
value_fn=lambda data, _: cast(int, data[ATTR_VALUE]),
|
||||
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
|
||||
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
|
@ -112,7 +108,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
icon="mdi:vector-triangle",
|
||||
name="Ozone",
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data, _: cast(int, data[ATTR_VALUE]),
|
||||
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
|
||||
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
|
@ -121,56 +117,52 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
name="Ragweed pollen",
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data, _: cast(int, data[ATTR_VALUE]),
|
||||
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
|
||||
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="RealFeelTemperatureMax",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
name="RealFeel temperature max",
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="RealFeelTemperatureMin",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
name="RealFeel temperature min",
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="RealFeelTemperatureShadeMax",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
name="RealFeel temperature shade max",
|
||||
entity_registry_enabled_default=False,
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="RealFeelTemperatureShadeMin",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
name="RealFeel temperature shade min",
|
||||
entity_registry_enabled_default=False,
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="ThunderstormProbabilityDay",
|
||||
icon="mdi:weather-lightning",
|
||||
name="Thunderstorm probability day",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_fn=lambda data, _: cast(int, data),
|
||||
value_fn=lambda data: cast(int, data),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="ThunderstormProbabilityNight",
|
||||
icon="mdi:weather-lightning",
|
||||
name="Thunderstorm probability night",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_fn=lambda data, _: cast(int, data),
|
||||
value_fn=lambda data: cast(int, data),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="Tree",
|
||||
|
@ -178,7 +170,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
name="Tree pollen",
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data, _: cast(int, data[ATTR_VALUE]),
|
||||
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
|
||||
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
|
@ -186,7 +178,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
icon="mdi:weather-sunny",
|
||||
name="UV index",
|
||||
native_unit_of_measurement=UV_INDEX,
|
||||
value_fn=lambda data, _: cast(int, data[ATTR_VALUE]),
|
||||
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
|
||||
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
|
@ -194,9 +186,8 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
device_class=SensorDeviceClass.WIND_SPEED,
|
||||
name="Wind gust day",
|
||||
entity_registry_enabled_default=False,
|
||||
metric_unit=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
us_customary_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
|
@ -204,27 +195,24 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
device_class=SensorDeviceClass.WIND_SPEED,
|
||||
name="Wind gust night",
|
||||
entity_registry_enabled_default=False,
|
||||
metric_unit=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
us_customary_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="WindDay",
|
||||
device_class=SensorDeviceClass.WIND_SPEED,
|
||||
name="Wind day",
|
||||
metric_unit=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
us_customary_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="WindNight",
|
||||
device_class=SensorDeviceClass.WIND_SPEED,
|
||||
name="Wind night",
|
||||
metric_unit=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
us_customary_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
|
||||
),
|
||||
)
|
||||
|
@ -236,9 +224,8 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
name="Apparent temperature",
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="Ceiling",
|
||||
|
@ -246,9 +233,8 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
icon="mdi:weather-fog",
|
||||
name="Cloud ceiling",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
metric_unit=UnitOfLength.METERS,
|
||||
us_customary_unit=UnitOfLength.FEET,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfLength.METERS,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
|
@ -258,7 +244,7 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_fn=lambda data, _: cast(int, data),
|
||||
value_fn=lambda data: cast(int, data),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="DewPoint",
|
||||
|
@ -266,18 +252,16 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
name="Dew point",
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="RealFeelTemperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
name="RealFeel temperature",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="RealFeelTemperatureShade",
|
||||
|
@ -285,18 +269,16 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
name="RealFeel temperature shade",
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="Precipitation",
|
||||
device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
|
||||
name="Precipitation",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
metric_unit=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
||||
us_customary_unit=UnitOfVolumetricFlux.INCHES_PER_HOUR,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
attr_fn=lambda data: {"type": data["PrecipitationType"]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
|
@ -306,7 +288,7 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
name="Pressure tendency",
|
||||
options=["falling", "rising", "steady"],
|
||||
translation_key="pressure_tendency",
|
||||
value_fn=lambda data, _: cast(str, data["LocalizedText"]).lower(),
|
||||
value_fn=lambda data: cast(str, data["LocalizedText"]).lower(),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="UVIndex",
|
||||
|
@ -314,7 +296,7 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
name="UV index",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UV_INDEX,
|
||||
value_fn=lambda data, _: cast(int, data),
|
||||
value_fn=lambda data: cast(int, data),
|
||||
attr_fn=lambda data: {ATTR_LEVEL: data["UVIndexText"]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
|
@ -323,9 +305,8 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
name="Wet bulb temperature",
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="WindChillTemperature",
|
||||
|
@ -333,18 +314,16 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
name="Wind chill temperature",
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="Wind",
|
||||
device_class=SensorDeviceClass.WIND_SPEED,
|
||||
name="Wind",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
metric_unit=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
us_customary_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
value_fn=lambda data, unit: cast(float, data[ATTR_SPEED][unit][ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
value_fn=lambda data: cast(float, data[ATTR_SPEED][API_METRIC][ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="WindGust",
|
||||
|
@ -352,9 +331,8 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
name="Wind gust",
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
metric_unit=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
us_customary_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
value_fn=lambda data, unit: cast(float, data[ATTR_SPEED][unit][ATTR_VALUE]),
|
||||
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
value_fn=lambda data: cast(float, data[ATTR_SPEED][API_METRIC][ATTR_VALUE]),
|
||||
),
|
||||
)
|
||||
|
||||
|
@ -374,7 +352,7 @@ async def async_setup_entry(
|
|||
# Some air quality/allergy sensors are only available for certain
|
||||
# locations.
|
||||
sensors.extend(
|
||||
AccuWeatherForecastSensor(coordinator, description, forecast_day=day)
|
||||
AccuWeatherSensor(coordinator, description, forecast_day=day)
|
||||
for day in range(MAX_FORECAST_DAYS + 1)
|
||||
for description in FORECAST_SENSOR_TYPES
|
||||
if description.key in coordinator.data[ATTR_FORECAST][0]
|
||||
|
@ -413,34 +391,27 @@ class AccuWeatherSensor(
|
|||
self._attr_unique_id = (
|
||||
f"{coordinator.location_key}-{description.key}".lower()
|
||||
)
|
||||
self._attr_native_unit_of_measurement = description.native_unit_of_measurement
|
||||
if self.coordinator.hass.config.units is METRIC_SYSTEM:
|
||||
self._unit_system = API_METRIC
|
||||
if metric_unit := description.metric_unit:
|
||||
self._attr_native_unit_of_measurement = metric_unit
|
||||
else:
|
||||
self._unit_system = API_IMPERIAL
|
||||
if us_customary_unit := description.us_customary_unit:
|
||||
self._attr_native_unit_of_measurement = us_customary_unit
|
||||
self._attr_device_info = coordinator.device_info
|
||||
if forecast_day is not None:
|
||||
self.forecast_day = forecast_day
|
||||
self.forecast_day = forecast_day
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state."""
|
||||
return self.entity_description.value_fn(self._sensor_data, self._unit_system)
|
||||
return self.entity_description.value_fn(self._sensor_data)
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes."""
|
||||
if self.forecast_day is not None:
|
||||
return self.entity_description.attr_fn(self._sensor_data)
|
||||
|
||||
return self.entity_description.attr_fn(self.coordinator.data)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle data update."""
|
||||
self._sensor_data = _get_sensor_data(
|
||||
self.coordinator.data, self.entity_description.key
|
||||
self.coordinator.data, self.entity_description.key, self.forecast_day
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
@ -458,20 +429,3 @@ def _get_sensor_data(
|
|||
return sensors["PrecipitationSummary"]["PastHour"]
|
||||
|
||||
return sensors[kind]
|
||||
|
||||
|
||||
class AccuWeatherForecastSensor(AccuWeatherSensor):
|
||||
"""Define an AccuWeather forecast entity."""
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes."""
|
||||
return self.entity_description.attr_fn(self._sensor_data)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle data update."""
|
||||
self._sensor_data = _get_sensor_data(
|
||||
self.coordinator.data, self.entity_description.key, self.forecast_day
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
|
|
@ -28,17 +28,9 @@ from homeassistant.core import HomeAssistant
|
|||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from homeassistant.util.dt import utc_from_timestamp
|
||||
from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||
|
||||
from . import AccuWeatherDataUpdateCoordinator
|
||||
from .const import (
|
||||
API_IMPERIAL,
|
||||
API_METRIC,
|
||||
ATTR_FORECAST,
|
||||
ATTRIBUTION,
|
||||
CONDITION_CLASSES,
|
||||
DOMAIN,
|
||||
)
|
||||
from .const import API_METRIC, ATTR_FORECAST, ATTRIBUTION, CONDITION_CLASSES, DOMAIN
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
|
@ -66,20 +58,11 @@ class AccuWeatherEntity(
|
|||
# Coordinator data is used also for sensors which don't have units automatically
|
||||
# converted, hence the weather entity's native units follow the configured unit
|
||||
# system
|
||||
if coordinator.hass.config.units is METRIC_SYSTEM:
|
||||
self._attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
|
||||
self._attr_native_pressure_unit = UnitOfPressure.HPA
|
||||
self._attr_native_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
self._attr_native_visibility_unit = UnitOfLength.KILOMETERS
|
||||
self._attr_native_wind_speed_unit = UnitOfSpeed.KILOMETERS_PER_HOUR
|
||||
self._unit_system = API_METRIC
|
||||
else:
|
||||
self._unit_system = API_IMPERIAL
|
||||
self._attr_native_precipitation_unit = UnitOfPrecipitationDepth.INCHES
|
||||
self._attr_native_pressure_unit = UnitOfPressure.INHG
|
||||
self._attr_native_temperature_unit = UnitOfTemperature.FAHRENHEIT
|
||||
self._attr_native_visibility_unit = UnitOfLength.MILES
|
||||
self._attr_native_wind_speed_unit = UnitOfSpeed.MILES_PER_HOUR
|
||||
self._attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
|
||||
self._attr_native_pressure_unit = UnitOfPressure.HPA
|
||||
self._attr_native_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
self._attr_native_visibility_unit = UnitOfLength.KILOMETERS
|
||||
self._attr_native_wind_speed_unit = UnitOfSpeed.KILOMETERS_PER_HOUR
|
||||
self._attr_unique_id = coordinator.location_key
|
||||
self._attr_attribution = ATTRIBUTION
|
||||
self._attr_device_info = coordinator.device_info
|
||||
|
@ -99,16 +82,12 @@ class AccuWeatherEntity(
|
|||
@property
|
||||
def native_temperature(self) -> float:
|
||||
"""Return the temperature."""
|
||||
return cast(
|
||||
float, self.coordinator.data["Temperature"][self._unit_system]["Value"]
|
||||
)
|
||||
return cast(float, self.coordinator.data["Temperature"][API_METRIC]["Value"])
|
||||
|
||||
@property
|
||||
def native_pressure(self) -> float:
|
||||
"""Return the pressure."""
|
||||
return cast(
|
||||
float, self.coordinator.data["Pressure"][self._unit_system]["Value"]
|
||||
)
|
||||
return cast(float, self.coordinator.data["Pressure"][API_METRIC]["Value"])
|
||||
|
||||
@property
|
||||
def humidity(self) -> int:
|
||||
|
@ -118,9 +97,7 @@ class AccuWeatherEntity(
|
|||
@property
|
||||
def native_wind_speed(self) -> float:
|
||||
"""Return the wind speed."""
|
||||
return cast(
|
||||
float, self.coordinator.data["Wind"]["Speed"][self._unit_system]["Value"]
|
||||
)
|
||||
return cast(float, self.coordinator.data["Wind"]["Speed"][API_METRIC]["Value"])
|
||||
|
||||
@property
|
||||
def wind_bearing(self) -> int:
|
||||
|
@ -130,9 +107,7 @@ class AccuWeatherEntity(
|
|||
@property
|
||||
def native_visibility(self) -> float:
|
||||
"""Return the visibility."""
|
||||
return cast(
|
||||
float, self.coordinator.data["Visibility"][self._unit_system]["Value"]
|
||||
)
|
||||
return cast(float, self.coordinator.data["Visibility"][API_METRIC]["Value"])
|
||||
|
||||
@property
|
||||
def ozone(self) -> int | None:
|
||||
|
|
|
@ -5,6 +5,8 @@ import logging
|
|||
from typing import Any
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
ATTR_TARGET_TEMP_HIGH,
|
||||
ATTR_TARGET_TEMP_LOW,
|
||||
FAN_AUTO,
|
||||
FAN_HIGH,
|
||||
FAN_LOW,
|
||||
|
@ -32,18 +34,10 @@ ADVANTAGE_AIR_HVAC_MODES = {
|
|||
"cool": HVACMode.COOL,
|
||||
"vent": HVACMode.FAN_ONLY,
|
||||
"dry": HVACMode.DRY,
|
||||
"myauto": HVACMode.AUTO,
|
||||
"myauto": HVACMode.HEAT_COOL,
|
||||
}
|
||||
HASS_HVAC_MODES = {v: k for k, v in ADVANTAGE_AIR_HVAC_MODES.items()}
|
||||
|
||||
AC_HVAC_MODES = [
|
||||
HVACMode.OFF,
|
||||
HVACMode.COOL,
|
||||
HVACMode.HEAT,
|
||||
HVACMode.FAN_ONLY,
|
||||
HVACMode.DRY,
|
||||
]
|
||||
|
||||
ADVANTAGE_AIR_FAN_MODES = {
|
||||
"autoAA": FAN_AUTO,
|
||||
"low": FAN_LOW,
|
||||
|
@ -53,7 +47,14 @@ ADVANTAGE_AIR_FAN_MODES = {
|
|||
HASS_FAN_MODES = {v: k for k, v in ADVANTAGE_AIR_FAN_MODES.items()}
|
||||
FAN_SPEEDS = {FAN_LOW: 30, FAN_MEDIUM: 60, FAN_HIGH: 100}
|
||||
|
||||
ZONE_HVAC_MODES = [HVACMode.OFF, HVACMode.HEAT_COOL]
|
||||
ADVANTAGE_AIR_AUTOFAN = "aaAutoFanModeEnabled"
|
||||
ADVANTAGE_AIR_MYZONE = "MyZone"
|
||||
ADVANTAGE_AIR_MYAUTO = "MyAuto"
|
||||
ADVANTAGE_AIR_MYAUTO_ENABLED = "myAutoModeEnabled"
|
||||
ADVANTAGE_AIR_MYTEMP = "MyTemp"
|
||||
ADVANTAGE_AIR_MYTEMP_ENABLED = "climateControlModeEnabled"
|
||||
ADVANTAGE_AIR_HEAT_TARGET = "myAutoHeatTargetTemp"
|
||||
ADVANTAGE_AIR_COOL_TARGET = "myAutoCoolTargetTemp"
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
@ -75,7 +76,7 @@ async def async_setup_entry(
|
|||
entities.append(AdvantageAirAC(instance, ac_key))
|
||||
for zone_key, zone in ac_device["zones"].items():
|
||||
# Only add zone climate control when zone is in temperature control
|
||||
if zone["type"] != 0:
|
||||
if zone["type"] > 0:
|
||||
entities.append(AdvantageAirZone(instance, ac_key, zone_key))
|
||||
async_add_entities(entities)
|
||||
|
||||
|
@ -83,24 +84,56 @@ async def async_setup_entry(
|
|||
class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
|
||||
"""AdvantageAir AC unit."""
|
||||
|
||||
_attr_fan_modes = [FAN_LOW, FAN_MEDIUM, FAN_HIGH]
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_target_temperature_step = PRECISION_WHOLE
|
||||
_attr_max_temp = 32
|
||||
_attr_min_temp = 16
|
||||
_attr_fan_modes = [FAN_AUTO, FAN_LOW, FAN_MEDIUM, FAN_HIGH]
|
||||
_attr_hvac_modes = AC_HVAC_MODES
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.FAN_MODE
|
||||
)
|
||||
|
||||
def __init__(self, instance: dict[str, Any], ac_key: str) -> None:
|
||||
"""Initialize an AdvantageAir AC unit."""
|
||||
super().__init__(instance, ac_key)
|
||||
if self._ac.get("myAutoModeEnabled"):
|
||||
self._attr_hvac_modes = AC_HVAC_MODES + [HVACMode.AUTO]
|
||||
|
||||
# Set supported features and HVAC modes based on current operating mode
|
||||
if self._ac.get(ADVANTAGE_AIR_MYAUTO_ENABLED):
|
||||
# MyAuto
|
||||
self._attr_supported_features = (
|
||||
ClimateEntityFeature.FAN_MODE
|
||||
| ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
| ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
|
||||
)
|
||||
self._attr_hvac_modes = [
|
||||
HVACMode.OFF,
|
||||
HVACMode.COOL,
|
||||
HVACMode.HEAT,
|
||||
HVACMode.FAN_ONLY,
|
||||
HVACMode.DRY,
|
||||
HVACMode.HEAT_COOL,
|
||||
]
|
||||
elif self._ac.get(ADVANTAGE_AIR_MYTEMP_ENABLED):
|
||||
# MyTemp
|
||||
self._attr_supported_features = ClimateEntityFeature.FAN_MODE
|
||||
self._attr_hvac_modes = [HVACMode.OFF, HVACMode.COOL, HVACMode.HEAT]
|
||||
|
||||
else:
|
||||
# MyZone
|
||||
self._attr_supported_features = (
|
||||
ClimateEntityFeature.FAN_MODE | ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
)
|
||||
self._attr_hvac_modes = [
|
||||
HVACMode.OFF,
|
||||
HVACMode.COOL,
|
||||
HVACMode.HEAT,
|
||||
HVACMode.FAN_ONLY,
|
||||
HVACMode.DRY,
|
||||
]
|
||||
|
||||
# Add "ezfan" mode if supported
|
||||
if self._ac.get(ADVANTAGE_AIR_AUTOFAN):
|
||||
self._attr_fan_modes += [FAN_AUTO]
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float:
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the current target temperature."""
|
||||
return self._ac["setTemp"]
|
||||
|
||||
|
@ -116,6 +149,16 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
|
|||
"""Return the current fan modes."""
|
||||
return ADVANTAGE_AIR_FAN_MODES.get(self._ac["fan"])
|
||||
|
||||
@property
|
||||
def target_temperature_high(self) -> float | None:
|
||||
"""Return the temperature cool mode is enabled."""
|
||||
return self._ac.get(ADVANTAGE_AIR_COOL_TARGET)
|
||||
|
||||
@property
|
||||
def target_temperature_low(self) -> float | None:
|
||||
"""Return the temperature heat mode is enabled."""
|
||||
return self._ac.get(ADVANTAGE_AIR_HEAT_TARGET)
|
||||
|
||||
async def async_turn_on(self) -> None:
|
||||
"""Set the HVAC State to on."""
|
||||
await self.aircon(
|
||||
|
@ -166,27 +209,37 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
|
|||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set the Temperature."""
|
||||
temp = kwargs.get(ATTR_TEMPERATURE)
|
||||
await self.aircon({self.ac_key: {"info": {"setTemp": temp}}})
|
||||
if ATTR_TEMPERATURE in kwargs:
|
||||
await self.aircon(
|
||||
{self.ac_key: {"info": {"setTemp": kwargs[ATTR_TEMPERATURE]}}}
|
||||
)
|
||||
if ATTR_TARGET_TEMP_LOW in kwargs and ATTR_TARGET_TEMP_HIGH in kwargs:
|
||||
await self.aircon(
|
||||
{
|
||||
self.ac_key: {
|
||||
"info": {
|
||||
ADVANTAGE_AIR_COOL_TARGET: kwargs[ATTR_TARGET_TEMP_HIGH],
|
||||
ADVANTAGE_AIR_HEAT_TARGET: kwargs[ATTR_TARGET_TEMP_LOW],
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity):
|
||||
"""AdvantageAir Zone control."""
|
||||
"""AdvantageAir MyTemp Zone control."""
|
||||
|
||||
_attr_hvac_modes = [HVACMode.OFF, HVACMode.HEAT_COOL]
|
||||
_attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_target_temperature_step = PRECISION_WHOLE
|
||||
_attr_max_temp = 32
|
||||
_attr_min_temp = 16
|
||||
_attr_hvac_modes = ZONE_HVAC_MODES
|
||||
_attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
|
||||
def __init__(self, instance: dict[str, Any], ac_key: str, zone_key: str) -> None:
|
||||
"""Initialize an AdvantageAir Zone control."""
|
||||
super().__init__(instance, ac_key, zone_key)
|
||||
self._attr_name = self._zone["name"]
|
||||
self._attr_unique_id = (
|
||||
f'{self.coordinator.data["system"]["rid"]}-{ac_key}-{zone_key}'
|
||||
)
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
|
@ -196,7 +249,7 @@ class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity):
|
|||
return HVACMode.OFF
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float:
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
return self._zone["measuredTemp"]
|
||||
|
||||
|
|
|
@ -68,7 +68,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
|||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_CAQI,
|
||||
icon="mdi:air-filter",
|
||||
name=ATTR_API_CAQI,
|
||||
translation_key="caqi",
|
||||
native_unit_of_measurement="CAQI",
|
||||
suggested_display_precision=0,
|
||||
attrs=lambda data: {
|
||||
|
@ -80,7 +80,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
|||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_PM1,
|
||||
device_class=SensorDeviceClass.PM1,
|
||||
name="PM1.0",
|
||||
translation_key="pm1",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
|
@ -88,7 +88,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
|||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_PM25,
|
||||
device_class=SensorDeviceClass.PM25,
|
||||
name="PM2.5",
|
||||
translation_key="pm25",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
|
@ -100,7 +100,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
|||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_PM10,
|
||||
device_class=SensorDeviceClass.PM10,
|
||||
name=ATTR_API_PM10,
|
||||
translation_key="pm10",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
|
@ -112,7 +112,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
|||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_HUMIDITY,
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
name=ATTR_API_HUMIDITY.capitalize(),
|
||||
translation_key="humidity",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
|
@ -120,7 +120,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
|||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_PRESSURE,
|
||||
device_class=SensorDeviceClass.PRESSURE,
|
||||
name=ATTR_API_PRESSURE.capitalize(),
|
||||
translation_key="pressure",
|
||||
native_unit_of_measurement=UnitOfPressure.HPA,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
|
@ -128,14 +128,14 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
|||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_TEMPERATURE,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
name=ATTR_API_TEMPERATURE.capitalize(),
|
||||
translation_key="temperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_CO,
|
||||
name="Carbon monoxide",
|
||||
translation_key="co",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
|
@ -147,7 +147,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
|||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_NO2,
|
||||
device_class=SensorDeviceClass.NITROGEN_DIOXIDE,
|
||||
name="Nitrogen dioxide",
|
||||
translation_key="no2",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
|
@ -159,7 +159,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
|||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_SO2,
|
||||
device_class=SensorDeviceClass.SULPHUR_DIOXIDE,
|
||||
name="Sulphur dioxide",
|
||||
translation_key="so2",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
|
@ -171,7 +171,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
|||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_O3,
|
||||
device_class=SensorDeviceClass.OZONE,
|
||||
name="Ozone",
|
||||
translation_key="o3",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
|
|
|
@ -26,5 +26,42 @@
|
|||
"requests_remaining": "Remaining allowed requests",
|
||||
"requests_per_day": "Allowed requests per day"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"caqi": {
|
||||
"name": "Common air quality index"
|
||||
},
|
||||
"pm1": {
|
||||
"name": "[%key:component::sensor::entity_component::pm1::name%]"
|
||||
},
|
||||
"pm25": {
|
||||
"name": "[%key:component::sensor::entity_component::pm25::name%]"
|
||||
},
|
||||
"pm10": {
|
||||
"name": "[%key:component::sensor::entity_component::pm10::name%]"
|
||||
},
|
||||
"humidity": {
|
||||
"name": "[%key:component::sensor::entity_component::humidity::name%]"
|
||||
},
|
||||
"pressure": {
|
||||
"name": "[%key:component::sensor::entity_component::pressure::name%]"
|
||||
},
|
||||
"temperature": {
|
||||
"name": "[%key:component::sensor::entity_component::temperature::name%]"
|
||||
},
|
||||
"co": {
|
||||
"name": "[%key:component::sensor::entity_component::carbon_monoxide::name%]"
|
||||
},
|
||||
"no2": {
|
||||
"name": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]"
|
||||
},
|
||||
"so2": {
|
||||
"name": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]"
|
||||
},
|
||||
"o3": {
|
||||
"name": "[%key:component::sensor::entity_component::ozone::name%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -380,7 +380,6 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|||
)
|
||||
else:
|
||||
entry.version = version
|
||||
hass.config_entries.async_update_entry(entry)
|
||||
|
||||
LOGGER.info("Migration to version %s successful", version)
|
||||
|
||||
|
|
|
@ -117,7 +117,6 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|||
en_reg.async_clear_config_entry(entry.entry_id)
|
||||
|
||||
version = entry.version = 2
|
||||
hass.config_entries.async_update_entry(entry)
|
||||
|
||||
LOGGER.info("Migration to version %s successful", version)
|
||||
|
||||
|
|
|
@ -7,5 +7,5 @@
|
|||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aioambient"],
|
||||
"requirements": ["aioambient==2021.11.0"]
|
||||
"requirements": ["aioambient==2023.04.0"]
|
||||
}
|
||||
|
|
|
@ -19,7 +19,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
|||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
from .const import ATTRIBUTION, CONF_STATION_ID, SCAN_INTERVAL
|
||||
from .const import CONF_STATION_ID, SCAN_INTERVAL
|
||||
|
||||
_LOGGER: Final = logging.getLogger(__name__)
|
||||
|
||||
|
@ -54,6 +54,8 @@ async def async_setup_platform(
|
|||
class AmpioSmogQuality(AirQualityEntity):
|
||||
"""Implementation of an Ampio Smog air quality entity."""
|
||||
|
||||
_attr_attribution = "Data provided by Ampio"
|
||||
|
||||
def __init__(
|
||||
self, api: AmpioSmogMapData, station_id: str, name: str | None
|
||||
) -> None:
|
||||
|
@ -82,11 +84,6 @@ class AmpioSmogQuality(AirQualityEntity):
|
|||
"""Return the particulate matter 10 level."""
|
||||
return self._ampio.api.pm10 # type: ignore[no-any-return]
|
||||
|
||||
@property
|
||||
def attribution(self) -> str:
|
||||
"""Return the attribution."""
|
||||
return ATTRIBUTION
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Get the latest data from the AmpioMap API."""
|
||||
await self._ampio.async_update()
|
||||
|
|
|
@ -2,6 +2,5 @@
|
|||
from datetime import timedelta
|
||||
from typing import Final
|
||||
|
||||
ATTRIBUTION: Final = "Data provided by Ampio"
|
||||
CONF_STATION_ID: Final = "station_id"
|
||||
SCAN_INTERVAL: Final = timedelta(minutes=10)
|
||||
|
|
|
@ -28,7 +28,7 @@ async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool:
|
|||
|
||||
# Send every day
|
||||
async_track_time_interval(
|
||||
hass, analytics.send_analytics, INTERVAL, "analytics daily"
|
||||
hass, analytics.send_analytics, INTERVAL, name="analytics daily"
|
||||
)
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_schedule)
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
"""Support for functionality to interact with Android TV/Fire TV devices."""
|
||||
"""Support for functionality to interact with Android/Fire TV devices."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
|
@ -135,11 +135,11 @@ async def async_connect_androidtv(
|
|||
if not aftv.available:
|
||||
# Determine the name that will be used for the device in the log
|
||||
if config[CONF_DEVICE_CLASS] == DEVICE_ANDROIDTV:
|
||||
device_name = "Android TV device"
|
||||
device_name = "Android device"
|
||||
elif config[CONF_DEVICE_CLASS] == DEVICE_FIRETV:
|
||||
device_name = "Fire TV device"
|
||||
else:
|
||||
device_name = "Android TV / Fire TV device"
|
||||
device_name = "Android / Fire TV device"
|
||||
|
||||
error_message = f"Could not connect to {device_name} at {address} {adb_log}"
|
||||
return None, error_message
|
||||
|
@ -148,7 +148,7 @@ async def async_connect_androidtv(
|
|||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Android TV platform."""
|
||||
"""Set up Android Debug Bridge platform."""
|
||||
|
||||
state_det_rules = entry.options.get(CONF_STATE_DETECTION_RULES)
|
||||
if CONF_ADB_SERVER_IP not in entry.data:
|
||||
|
@ -167,7 +167,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|||
raise ConfigEntryNotReady(error_message)
|
||||
|
||||
async def async_close_connection(event):
|
||||
"""Close Android TV connection on HA Stop."""
|
||||
"""Close Android Debug Bridge connection on HA Stop."""
|
||||
await aftv.adb_close()
|
||||
|
||||
entry.async_on_unload(
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
"""Config flow to configure the Android TV integration."""
|
||||
"""Config flow to configure the Android Debug Bridge integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
@ -114,13 +114,14 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
async def _async_check_connection(
|
||||
self, user_input: dict[str, Any]
|
||||
) -> tuple[str | None, str | None]:
|
||||
"""Attempt to connect the Android TV."""
|
||||
"""Attempt to connect the Android device."""
|
||||
|
||||
try:
|
||||
aftv, error_message = await async_connect_androidtv(self.hass, user_input)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
_LOGGER.exception(
|
||||
"Unknown error connecting with Android TV at %s", user_input[CONF_HOST]
|
||||
"Unknown error connecting with Android device at %s",
|
||||
user_input[CONF_HOST],
|
||||
)
|
||||
return RESULT_UNKNOWN, None
|
||||
|
||||
|
@ -130,7 +131,7 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
dev_prop = aftv.device_properties
|
||||
_LOGGER.info(
|
||||
"Android TV at %s: %s = %r, %s = %r",
|
||||
"Android device at %s: %s = %r, %s = %r",
|
||||
user_input[CONF_HOST],
|
||||
PROP_ETHMAC,
|
||||
dev_prop.get(PROP_ETHMAC),
|
||||
|
@ -184,7 +185,7 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
|
||||
class OptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
"""Handle an option flow for Android TV."""
|
||||
"""Handle an option flow for Android Debug Bridge."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
"""Android TV component constants."""
|
||||
"""Android Debug Bridge component constants."""
|
||||
DOMAIN = "androidtv"
|
||||
|
||||
ANDROID_DEV = DOMAIN
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"domain": "androidtv",
|
||||
"name": "Android TV",
|
||||
"name": "Android Debug Bridge",
|
||||
"codeowners": ["@JeffLIrion", "@ollo69"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/androidtv",
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
"""Support for functionality to interact with Android TV / Fire TV devices."""
|
||||
"""Support for functionality to interact with Android / Fire TV devices."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable, Coroutine
|
||||
|
@ -87,7 +87,7 @@ async def async_setup_entry(
|
|||
entry: ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Android TV entity."""
|
||||
"""Set up the Android Debug Bridge entity."""
|
||||
aftv = hass.data[DOMAIN][entry.entry_id][ANDROID_DEV]
|
||||
device_class = aftv.DEVICE_CLASS
|
||||
device_type = (
|
||||
|
@ -201,7 +201,7 @@ def adb_decorator(
|
|||
|
||||
|
||||
class ADBDevice(MediaPlayerEntity):
|
||||
"""Representation of an Android TV or Fire TV device."""
|
||||
"""Representation of an Android or Fire TV device."""
|
||||
|
||||
_attr_device_class = MediaPlayerDeviceClass.TV
|
||||
|
||||
|
@ -214,7 +214,7 @@ class ADBDevice(MediaPlayerEntity):
|
|||
entry_id,
|
||||
entry_data,
|
||||
):
|
||||
"""Initialize the Android TV / Fire TV device."""
|
||||
"""Initialize the Android / Fire TV device."""
|
||||
self.aftv = aftv
|
||||
self._attr_name = name
|
||||
self._attr_unique_id = unique_id
|
||||
|
@ -384,7 +384,7 @@ class ADBDevice(MediaPlayerEntity):
|
|||
|
||||
@adb_decorator()
|
||||
async def adb_command(self, command):
|
||||
"""Send an ADB command to an Android TV / Fire TV device."""
|
||||
"""Send an ADB command to an Android / Fire TV device."""
|
||||
if key := KEYS.get(command):
|
||||
await self.aftv.adb_shell(f"input keyevent {key}")
|
||||
return
|
||||
|
@ -422,13 +422,13 @@ class ADBDevice(MediaPlayerEntity):
|
|||
persistent_notification.async_create(
|
||||
self.hass,
|
||||
msg,
|
||||
title="Android TV",
|
||||
title="Android Debug Bridge",
|
||||
)
|
||||
_LOGGER.info("%s", msg)
|
||||
|
||||
@adb_decorator()
|
||||
async def service_download(self, device_path, local_path):
|
||||
"""Download a file from your Android TV / Fire TV device to your Home Assistant instance."""
|
||||
"""Download a file from your Android / Fire TV device to your Home Assistant instance."""
|
||||
if not self.hass.config.is_allowed_path(local_path):
|
||||
_LOGGER.warning("'%s' is not secure to load data from!", local_path)
|
||||
return
|
||||
|
@ -437,7 +437,7 @@ class ADBDevice(MediaPlayerEntity):
|
|||
|
||||
@adb_decorator()
|
||||
async def service_upload(self, device_path, local_path):
|
||||
"""Upload a file from your Home Assistant instance to an Android TV / Fire TV device."""
|
||||
"""Upload a file from your Home Assistant instance to an Android / Fire TV device."""
|
||||
if not self.hass.config.is_allowed_path(local_path):
|
||||
_LOGGER.warning("'%s' is not secure to load data from!", local_path)
|
||||
return
|
||||
|
@ -446,7 +446,7 @@ class ADBDevice(MediaPlayerEntity):
|
|||
|
||||
|
||||
class AndroidTVDevice(ADBDevice):
|
||||
"""Representation of an Android TV device."""
|
||||
"""Representation of an Android device."""
|
||||
|
||||
_attr_supported_features = (
|
||||
MediaPlayerEntityFeature.PAUSE
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
# Describes the format for available Android TV and Fire TV services
|
||||
# Describes the format for available Android and Fire TV services
|
||||
|
||||
adb_command:
|
||||
name: ADB command
|
||||
description: Send an ADB command to an Android TV / Fire TV device.
|
||||
description: Send an ADB command to an Android / Fire TV device.
|
||||
target:
|
||||
entity:
|
||||
integration: androidtv
|
||||
|
@ -17,7 +17,7 @@ adb_command:
|
|||
text:
|
||||
download:
|
||||
name: Download
|
||||
description: Download a file from your Android TV / Fire TV device to your Home Assistant instance.
|
||||
description: Download a file from your Android / Fire TV device to your Home Assistant instance.
|
||||
target:
|
||||
entity:
|
||||
integration: androidtv
|
||||
|
@ -25,7 +25,7 @@ download:
|
|||
fields:
|
||||
device_path:
|
||||
name: Device path
|
||||
description: The filepath on the Android TV / Fire TV device.
|
||||
description: The filepath on the Android / Fire TV device.
|
||||
required: true
|
||||
example: "/storage/emulated/0/Download/example.txt"
|
||||
selector:
|
||||
|
@ -39,7 +39,7 @@ download:
|
|||
text:
|
||||
upload:
|
||||
name: Upload
|
||||
description: Upload a file from your Home Assistant instance to an Android TV / Fire TV device.
|
||||
description: Upload a file from your Home Assistant instance to an Android / Fire TV device.
|
||||
target:
|
||||
entity:
|
||||
integration: androidtv
|
||||
|
@ -47,7 +47,7 @@ upload:
|
|||
fields:
|
||||
device_path:
|
||||
name: Device path
|
||||
description: The filepath on the Android TV / Fire TV device.
|
||||
description: The filepath on the Android / Fire TV device.
|
||||
required: true
|
||||
example: "/storage/emulated/0/Download/example.txt"
|
||||
selector:
|
||||
|
|
|
@ -38,7 +38,7 @@
|
|||
}
|
||||
},
|
||||
"apps": {
|
||||
"title": "Configure Android TV Apps",
|
||||
"title": "Configure Android Apps",
|
||||
"description": "Configure application id {app_id}",
|
||||
"data": {
|
||||
"app_name": "Application Name",
|
||||
|
@ -47,7 +47,7 @@
|
|||
}
|
||||
},
|
||||
"rules": {
|
||||
"title": "Configure Android TV state detection rules",
|
||||
"title": "Configure Android state detection rules",
|
||||
"description": "Configure detection rule for application id {rule_id}",
|
||||
"data": {
|
||||
"rule_id": "Application ID",
|
||||
|
|
67
homeassistant/components/androidtv_remote/__init__.py
Normal file
67
homeassistant/components/androidtv_remote/__init__.py
Normal file
|
@ -0,0 +1,67 @@
|
|||
"""The Android TV Remote integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from androidtvremote2 import (
|
||||
AndroidTVRemote,
|
||||
CannotConnect,
|
||||
ConnectionClosed,
|
||||
InvalidAuth,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP, Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
|
||||
from .const import DOMAIN
|
||||
from .helpers import create_api
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.REMOTE]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Android TV Remote from a config entry."""
|
||||
|
||||
api = create_api(hass, entry.data[CONF_HOST])
|
||||
try:
|
||||
await api.async_connect()
|
||||
except InvalidAuth as exc:
|
||||
# The Android TV is hard reset or the certificate and key files were deleted.
|
||||
raise ConfigEntryAuthFailed from exc
|
||||
except (CannotConnect, ConnectionClosed) as exc:
|
||||
# The Android TV is network unreachable. Raise exception and let Home Assistant retry
|
||||
# later. If device gets a new IP address the zeroconf flow will update the config.
|
||||
raise ConfigEntryNotReady from exc
|
||||
|
||||
def reauth_needed() -> None:
|
||||
"""Start a reauth flow if Android TV is hard reset while reconnecting."""
|
||||
entry.async_start_reauth(hass)
|
||||
|
||||
# Start a task (canceled in disconnect) to keep reconnecting if device becomes
|
||||
# network unreachable. If device gets a new IP address the zeroconf flow will
|
||||
# update the config entry data and reload the config entry.
|
||||
api.keep_reconnecting(reauth_needed)
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = api
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
@callback
|
||||
def on_hass_stop(event) -> None:
|
||||
"""Stop push updates when hass stops."""
|
||||
api.disconnect()
|
||||
|
||||
entry.async_on_unload(
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
|
||||
api: AndroidTVRemote = hass.data[DOMAIN].pop(entry.entry_id)
|
||||
api.disconnect()
|
||||
|
||||
return unload_ok
|
187
homeassistant/components/androidtv_remote/config_flow.py
Normal file
187
homeassistant/components/androidtv_remote/config_flow.py
Normal file
|
@ -0,0 +1,187 @@
|
|||
"""Config flow for Android TV Remote integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
from typing import Any
|
||||
|
||||
from androidtvremote2 import (
|
||||
AndroidTVRemote,
|
||||
CannotConnect,
|
||||
ConnectionClosed,
|
||||
InvalidAuth,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components import zeroconf
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
|
||||
from .const import DOMAIN
|
||||
from .helpers import create_api
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required("host"): str,
|
||||
}
|
||||
)
|
||||
|
||||
STEP_PAIR_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required("pin"): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class AndroidTVRemoteConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Android TV Remote."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize a new AndroidTVRemoteConfigFlow."""
|
||||
self.api: AndroidTVRemote | None = None
|
||||
self.reauth_entry: config_entries.ConfigEntry | None = None
|
||||
self.host: str | None = None
|
||||
self.name: str | None = None
|
||||
self.mac: str | None = None
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> FlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
self.host = user_input["host"]
|
||||
assert self.host
|
||||
api = create_api(self.hass, self.host)
|
||||
try:
|
||||
self.name, self.mac = await api.async_get_name_and_mac()
|
||||
assert self.mac
|
||||
await self.async_set_unique_id(format_mac(self.mac))
|
||||
self._abort_if_unique_id_configured(updates={CONF_HOST: self.host})
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
# Likely invalid IP address or device is network unreachable. Stay
|
||||
# in the user step allowing the user to enter a different host.
|
||||
errors["base"] = "cannot_connect"
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def _async_start_pair(self) -> FlowResult:
|
||||
"""Start pairing with the Android TV. Navigate to the pair flow to enter the PIN shown on screen."""
|
||||
assert self.host
|
||||
self.api = create_api(self.hass, self.host)
|
||||
await self.api.async_generate_cert_if_missing()
|
||||
await self.api.async_start_pairing()
|
||||
return await self.async_step_pair()
|
||||
|
||||
async def async_step_pair(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> FlowResult:
|
||||
"""Handle the pair step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
try:
|
||||
pin = user_input["pin"]
|
||||
assert self.api
|
||||
await self.api.async_finish_pairing(pin)
|
||||
if self.reauth_entry:
|
||||
await self.hass.config_entries.async_reload(
|
||||
self.reauth_entry.entry_id
|
||||
)
|
||||
return self.async_abort(reason="reauth_successful")
|
||||
assert self.name
|
||||
return self.async_create_entry(
|
||||
title=self.name,
|
||||
data={
|
||||
CONF_HOST: self.host,
|
||||
CONF_NAME: self.name,
|
||||
CONF_MAC: self.mac,
|
||||
},
|
||||
)
|
||||
except InvalidAuth:
|
||||
# Invalid PIN. Stay in the pair step allowing the user to enter
|
||||
# a different PIN.
|
||||
errors["base"] = "invalid_auth"
|
||||
except ConnectionClosed:
|
||||
# Either user canceled pairing on the Android TV itself (most common)
|
||||
# or device doesn't respond to the specified host (device was unplugged,
|
||||
# network was unplugged, or device got a new IP address).
|
||||
# Attempt to pair again.
|
||||
try:
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
# Device doesn't respond to the specified host. Abort.
|
||||
# If we are in the user flow we could go back to the user step to allow
|
||||
# them to enter a new IP address but we cannot do that for the zeroconf
|
||||
# flow. Simpler to abort for both flows.
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
return self.async_show_form(
|
||||
step_id="pair",
|
||||
data_schema=STEP_PAIR_DATA_SCHEMA,
|
||||
description_placeholders={CONF_NAME: self.name},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: zeroconf.ZeroconfServiceInfo
|
||||
) -> FlowResult:
|
||||
"""Handle zeroconf discovery."""
|
||||
self.host = discovery_info.host
|
||||
self.name = discovery_info.name.removesuffix("._androidtvremote2._tcp.local.")
|
||||
self.mac = discovery_info.properties.get("bt")
|
||||
assert self.mac
|
||||
await self.async_set_unique_id(format_mac(self.mac))
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={CONF_HOST: self.host, CONF_NAME: self.name}
|
||||
)
|
||||
self.context.update({"title_placeholders": {CONF_NAME: self.name}})
|
||||
return await self.async_step_zeroconf_confirm()
|
||||
|
||||
async def async_step_zeroconf_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> FlowResult:
|
||||
"""Handle a flow initiated by zeroconf."""
|
||||
if user_input is not None:
|
||||
try:
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
# Device became network unreachable after discovery.
|
||||
# Abort and let discovery find it again later.
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
return self.async_show_form(
|
||||
step_id="zeroconf_confirm",
|
||||
description_placeholders={CONF_NAME: self.name},
|
||||
)
|
||||
|
||||
async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> FlowResult:
|
||||
"""Handle configuration by re-auth."""
|
||||
self.host = entry_data[CONF_HOST]
|
||||
self.name = entry_data[CONF_NAME]
|
||||
self.mac = entry_data[CONF_MAC]
|
||||
self.reauth_entry = self.hass.config_entries.async_get_entry(
|
||||
self.context["entry_id"]
|
||||
)
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> FlowResult:
|
||||
"""Dialog that informs the user that reauth is required."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
try:
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
# Device is network unreachable. Abort.
|
||||
errors["base"] = "cannot_connect"
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
description_placeholders={CONF_NAME: self.name},
|
||||
errors=errors,
|
||||
)
|
6
homeassistant/components/androidtv_remote/const.py
Normal file
6
homeassistant/components/androidtv_remote/const.py
Normal file
|
@ -0,0 +1,6 @@
|
|||
"""Constants for the Android TV Remote integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Final
|
||||
|
||||
DOMAIN: Final = "androidtv_remote"
|
29
homeassistant/components/androidtv_remote/diagnostics.py
Normal file
29
homeassistant/components/androidtv_remote/diagnostics.py
Normal file
|
@ -0,0 +1,29 @@
|
|||
"""Diagnostics support for Android TV Remote."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from androidtvremote2 import AndroidTVRemote
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
TO_REDACT = {CONF_HOST, CONF_MAC}
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: ConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
api: AndroidTVRemote = hass.data[DOMAIN].pop(entry.entry_id)
|
||||
return async_redact_data(
|
||||
{
|
||||
"api_device_info": api.device_info,
|
||||
"config_entry_data": entry.data,
|
||||
},
|
||||
TO_REDACT,
|
||||
)
|
18
homeassistant/components/androidtv_remote/helpers.py
Normal file
18
homeassistant/components/androidtv_remote/helpers.py
Normal file
|
@ -0,0 +1,18 @@
|
|||
"""Helper functions for Android TV Remote integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from androidtvremote2 import AndroidTVRemote
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.storage import STORAGE_DIR
|
||||
|
||||
|
||||
def create_api(hass: HomeAssistant, host: str) -> AndroidTVRemote:
|
||||
"""Create an AndroidTVRemote instance."""
|
||||
return AndroidTVRemote(
|
||||
client_name="Home Assistant",
|
||||
certfile=hass.config.path(STORAGE_DIR, "androidtv_remote_cert.pem"),
|
||||
keyfile=hass.config.path(STORAGE_DIR, "androidtv_remote_key.pem"),
|
||||
host=host,
|
||||
loop=hass.loop,
|
||||
)
|
13
homeassistant/components/androidtv_remote/manifest.json
Normal file
13
homeassistant/components/androidtv_remote/manifest.json
Normal file
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"domain": "androidtv_remote",
|
||||
"name": "Android TV Remote",
|
||||
"codeowners": ["@tronikos"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/androidtv_remote",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["androidtvremote2"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["androidtvremote2==0.0.7"],
|
||||
"zeroconf": ["_androidtvremote2._tcp.local."]
|
||||
}
|
154
homeassistant/components/androidtv_remote/remote.py
Normal file
154
homeassistant/components/androidtv_remote/remote.py
Normal file
|
@ -0,0 +1,154 @@
|
|||
"""Remote control support for Android TV Remote."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Iterable
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from androidtvremote2 import AndroidTVRemote, ConnectionClosed
|
||||
|
||||
from homeassistant.components.remote import (
|
||||
ATTR_ACTIVITY,
|
||||
ATTR_DELAY_SECS,
|
||||
ATTR_HOLD_SECS,
|
||||
ATTR_NUM_REPEATS,
|
||||
DEFAULT_DELAY_SECS,
|
||||
DEFAULT_HOLD_SECS,
|
||||
DEFAULT_NUM_REPEATS,
|
||||
RemoteEntity,
|
||||
RemoteEntityFeature,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
|
||||
from homeassistant.helpers.entity import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Android TV remote entity based on a config entry."""
|
||||
api: AndroidTVRemote = hass.data[DOMAIN][config_entry.entry_id]
|
||||
async_add_entities([AndroidTVRemoteEntity(api, config_entry)])
|
||||
|
||||
|
||||
class AndroidTVRemoteEntity(RemoteEntity):
|
||||
"""Representation of an Android TV Remote."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(self, api: AndroidTVRemote, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize device."""
|
||||
self._api = api
|
||||
self._host = config_entry.data[CONF_HOST]
|
||||
self._name = config_entry.data[CONF_NAME]
|
||||
self._attr_unique_id = config_entry.unique_id
|
||||
self._attr_supported_features = RemoteEntityFeature.ACTIVITY
|
||||
self._attr_is_on = api.is_on
|
||||
self._attr_current_activity = api.current_app
|
||||
device_info = api.device_info
|
||||
assert config_entry.unique_id
|
||||
assert device_info
|
||||
self._attr_device_info = DeviceInfo(
|
||||
connections={(CONNECTION_NETWORK_MAC, config_entry.data[CONF_MAC])},
|
||||
identifiers={(DOMAIN, config_entry.unique_id)},
|
||||
name=self._name,
|
||||
manufacturer=device_info["manufacturer"],
|
||||
model=device_info["model"],
|
||||
)
|
||||
|
||||
@callback
|
||||
def is_on_updated(is_on: bool) -> None:
|
||||
self._attr_is_on = is_on
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def current_app_updated(current_app: str) -> None:
|
||||
self._attr_current_activity = current_app
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def is_available_updated(is_available: bool) -> None:
|
||||
if is_available:
|
||||
_LOGGER.info(
|
||||
"Reconnected to %s at %s",
|
||||
self._name,
|
||||
self._host,
|
||||
)
|
||||
else:
|
||||
_LOGGER.warning(
|
||||
"Disconnected from %s at %s",
|
||||
self._name,
|
||||
self._host,
|
||||
)
|
||||
self._attr_available = is_available
|
||||
self.async_write_ha_state()
|
||||
|
||||
api.add_is_on_updated_callback(is_on_updated)
|
||||
api.add_current_app_updated_callback(current_app_updated)
|
||||
api.add_is_available_updated_callback(is_available_updated)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the Android TV on."""
|
||||
if not self.is_on:
|
||||
self._send_key_command("POWER")
|
||||
activity = kwargs.get(ATTR_ACTIVITY, "")
|
||||
if activity:
|
||||
self._send_launch_app_command(activity)
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the Android TV off."""
|
||||
if self.is_on:
|
||||
self._send_key_command("POWER")
|
||||
|
||||
async def async_send_command(self, command: Iterable[str], **kwargs: Any) -> None:
|
||||
"""Send commands to one device."""
|
||||
num_repeats = kwargs.get(ATTR_NUM_REPEATS, DEFAULT_NUM_REPEATS)
|
||||
delay_secs = kwargs.get(ATTR_DELAY_SECS, DEFAULT_DELAY_SECS)
|
||||
hold_secs = kwargs.get(ATTR_HOLD_SECS, DEFAULT_HOLD_SECS)
|
||||
|
||||
for _ in range(num_repeats):
|
||||
for single_command in command:
|
||||
if hold_secs:
|
||||
self._send_key_command(single_command, "START_LONG")
|
||||
await asyncio.sleep(hold_secs)
|
||||
self._send_key_command(single_command, "END_LONG")
|
||||
else:
|
||||
self._send_key_command(single_command, "SHORT")
|
||||
await asyncio.sleep(delay_secs)
|
||||
|
||||
def _send_key_command(self, key_code: str, direction: str = "SHORT") -> None:
|
||||
"""Send a key press to Android TV.
|
||||
|
||||
This does not block; it buffers the data and arranges for it to be sent out asynchronously.
|
||||
"""
|
||||
try:
|
||||
self._api.send_key_command(key_code, direction)
|
||||
except ConnectionClosed as exc:
|
||||
raise HomeAssistantError(
|
||||
"Connection to Android TV device is closed"
|
||||
) from exc
|
||||
|
||||
def _send_launch_app_command(self, app_link: str) -> None:
|
||||
"""Launch an app on Android TV.
|
||||
|
||||
This does not block; it buffers the data and arranges for it to be sent out asynchronously.
|
||||
"""
|
||||
try:
|
||||
self._api.send_launch_app_command(app_link)
|
||||
except ConnectionClosed as exc:
|
||||
raise HomeAssistantError(
|
||||
"Connection to Android TV device is closed"
|
||||
) from exc
|
38
homeassistant/components/androidtv_remote/strings.json
Normal file
38
homeassistant/components/androidtv_remote/strings.json
Normal file
|
@ -0,0 +1,38 @@
|
|||
{
|
||||
"config": {
|
||||
"flow_title": "{name}",
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Enter the IP address of the Android TV you want to add to Home Assistant. It will turn on and a pairing code will be displayed on it that you will need to enter in the next screen.",
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]"
|
||||
}
|
||||
},
|
||||
"zeroconf_confirm": {
|
||||
"title": "Discovered Android TV",
|
||||
"description": "Do you want to add the Android TV ({name}) to Home Assistant? It will turn on and a pairing code will be displayed on it that you will need to enter in the next screen."
|
||||
},
|
||||
"pair": {
|
||||
"description": "Enter the pairing code displayed on the Android TV ({name}).",
|
||||
"data": {
|
||||
"pin": "[%key:common::config_flow::data::pin%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
"description": "You need to pair again with the Android TV ({name})."
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -75,7 +75,7 @@ class AuthorizationServer:
|
|||
token_url: str
|
||||
|
||||
|
||||
class ApplicationCredentialsStorageCollection(collection.StorageCollection):
|
||||
class ApplicationCredentialsStorageCollection(collection.DictStorageCollection):
|
||||
"""Application credential collection stored in storage."""
|
||||
|
||||
CREATE_SCHEMA = vol.Schema(CREATE_FIELDS)
|
||||
|
@ -94,7 +94,7 @@ class ApplicationCredentialsStorageCollection(collection.StorageCollection):
|
|||
return f"{info[CONF_DOMAIN]}.{info[CONF_CLIENT_ID]}"
|
||||
|
||||
async def _update_data(
|
||||
self, data: dict[str, str], update_data: dict[str, str]
|
||||
self, item: dict[str, str], update_data: dict[str, str]
|
||||
) -> dict[str, str]:
|
||||
"""Return a new updated data object."""
|
||||
raise ValueError("Updates not supported")
|
||||
|
@ -144,7 +144,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||
id_manager = collection.IDManager()
|
||||
storage_collection = ApplicationCredentialsStorageCollection(
|
||||
Store(hass, STORAGE_VERSION, STORAGE_KEY),
|
||||
logging.getLogger(f"{__name__}.storage_collection"),
|
||||
id_manager,
|
||||
)
|
||||
await storage_collection.async_load()
|
||||
|
|
|
@ -38,7 +38,10 @@ class AugustSubscriberMixin:
|
|||
def _async_setup_listeners(self):
|
||||
"""Create interval and stop listeners."""
|
||||
self._unsub_interval = async_track_time_interval(
|
||||
self._hass, self._async_refresh, self._update_interval, "august refresh"
|
||||
self._hass,
|
||||
self._async_refresh,
|
||||
self._update_interval,
|
||||
name="august refresh",
|
||||
)
|
||||
|
||||
@callback
|
||||
|
|
|
@ -51,7 +51,6 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
|
|||
if config_entry.version != 3:
|
||||
# Home Assistant 2023.2
|
||||
config_entry.version = 3
|
||||
hass.config_entries.async_update_entry(config_entry)
|
||||
|
||||
_LOGGER.info("Migration to version %s successful", config_entry.version)
|
||||
|
||||
|
|
|
@ -23,6 +23,8 @@ from homeassistant.util.json import json_loads_object
|
|||
|
||||
from .const import DOMAIN, EXCLUDE_FROM_BACKUP, LOGGER
|
||||
|
||||
BUF_SIZE = 2**20 * 4 # 4MB
|
||||
|
||||
|
||||
@dataclass
|
||||
class Backup:
|
||||
|
@ -99,7 +101,7 @@ class BackupManager:
|
|||
backups: dict[str, Backup] = {}
|
||||
for backup_path in self.backup_dir.glob("*.tar"):
|
||||
try:
|
||||
with tarfile.open(backup_path, "r:") as backup_file:
|
||||
with tarfile.open(backup_path, "r:", bufsize=BUF_SIZE) as backup_file:
|
||||
if data_file := backup_file.extractfile("./backup.json"):
|
||||
data = json_loads_object(data_file.read())
|
||||
backup = Backup(
|
||||
|
@ -227,7 +229,7 @@ class BackupManager:
|
|||
self.backup_dir.mkdir()
|
||||
|
||||
with TemporaryDirectory() as tmp_dir, SecureTarFile(
|
||||
tar_file_path, "w", gzip=False
|
||||
tar_file_path, "w", gzip=False, bufsize=BUF_SIZE
|
||||
) as tar_file:
|
||||
tmp_dir_path = Path(tmp_dir)
|
||||
save_json(
|
||||
|
@ -237,6 +239,7 @@ class BackupManager:
|
|||
with SecureTarFile(
|
||||
tmp_dir_path.joinpath("./homeassistant.tar.gz").as_posix(),
|
||||
"w",
|
||||
bufsize=BUF_SIZE,
|
||||
) as core_tar:
|
||||
atomic_contents_add(
|
||||
tar_file=core_tar,
|
||||
|
|
|
@ -7,5 +7,5 @@
|
|||
"integration_type": "system",
|
||||
"iot_class": "calculated",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["securetar==2022.2.0"]
|
||||
"requirements": ["securetar==2023.3.0"]
|
||||
}
|
||||
|
|
|
@ -84,7 +84,7 @@ class BleBoxConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
|||
) -> FlowResult:
|
||||
"""Handle zeroconf discovery."""
|
||||
hass = self.hass
|
||||
ipaddress = host_port(discovery_info.__dict__)
|
||||
ipaddress = (discovery_info.host, discovery_info.port)
|
||||
self.device_config["host"] = discovery_info.host
|
||||
self.device_config["port"] = discovery_info.port
|
||||
|
||||
|
|
|
@ -101,7 +101,7 @@ class BaseHaScanner(ABC):
|
|||
self.hass,
|
||||
self._async_scanner_watchdog,
|
||||
SCANNER_WATCHDOG_INTERVAL,
|
||||
f"{self.name} Bluetooth scanner watchdog",
|
||||
name=f"{self.name} Bluetooth scanner watchdog",
|
||||
)
|
||||
|
||||
@hass_callback
|
||||
|
@ -230,7 +230,7 @@ class BaseHaRemoteScanner(BaseHaScanner):
|
|||
self.hass,
|
||||
self._async_expire_devices,
|
||||
timedelta(seconds=30),
|
||||
f"{self.name} Bluetooth scanner device expire",
|
||||
name=f"{self.name} Bluetooth scanner device expire",
|
||||
)
|
||||
cancel_stop = self.hass.bus.async_listen(
|
||||
EVENT_HOMEASSISTANT_STOP, self._async_save_history
|
||||
|
@ -345,12 +345,27 @@ class BaseHaRemoteScanner(BaseHaScanner):
|
|||
tx_power=NO_RSSI_VALUE if tx_power is None else tx_power,
|
||||
platform_data=(),
|
||||
)
|
||||
device = BLEDevice(
|
||||
address=address,
|
||||
name=local_name,
|
||||
details=self._details | details,
|
||||
rssi=rssi, # deprecated, will be removed in newer bleak
|
||||
)
|
||||
if prev_discovery:
|
||||
#
|
||||
# Bleak updates the BLEDevice via create_or_update_device.
|
||||
# We need to do the same to ensure integrations that already
|
||||
# have the BLEDevice object get the updated details when they
|
||||
# change.
|
||||
#
|
||||
# https://github.com/hbldh/bleak/blob/222618b7747f0467dbb32bd3679f8cfaa19b1668/bleak/backends/scanner.py#L203
|
||||
#
|
||||
device = prev_device
|
||||
device.name = local_name
|
||||
device.details = self._details | details
|
||||
# pylint: disable-next=protected-access
|
||||
device._rssi = rssi # deprecated, will be removed in newer bleak
|
||||
else:
|
||||
device = BLEDevice(
|
||||
address=address,
|
||||
name=local_name,
|
||||
details=self._details | details,
|
||||
rssi=rssi, # deprecated, will be removed in newer bleak
|
||||
)
|
||||
self._discovered_device_advertisement_datas[address] = (
|
||||
device,
|
||||
advertisement_data,
|
||||
|
|
|
@ -276,7 +276,7 @@ class BluetoothManager:
|
|||
self.hass,
|
||||
self._async_check_unavailable,
|
||||
timedelta(seconds=UNAVAILABLE_TRACK_SECONDS),
|
||||
"Bluetooth manager unavailable tracking",
|
||||
name="Bluetooth manager unavailable tracking",
|
||||
)
|
||||
|
||||
@hass_callback
|
||||
|
|
|
@ -10,9 +10,10 @@ from .wrappers import HaBleakClientWrapper, HaBleakScannerWrapper
|
|||
|
||||
ORIGINAL_BLEAK_SCANNER = bleak.BleakScanner
|
||||
ORIGINAL_BLEAK_CLIENT = bleak.BleakClient
|
||||
ORIGINAL_BLEAK_RETRY_CONNECTOR_CLIENT = (
|
||||
ORIGINAL_BLEAK_RETRY_CONNECTOR_CLIENT_WITH_SERVICE_CACHE = (
|
||||
bleak_retry_connector.BleakClientWithServiceCache
|
||||
)
|
||||
ORIGINAL_BLEAK_RETRY_CONNECTOR_CLIENT = bleak_retry_connector.BleakClient
|
||||
|
||||
|
||||
def install_multiple_bleak_catcher() -> None:
|
||||
|
@ -23,6 +24,7 @@ def install_multiple_bleak_catcher() -> None:
|
|||
bleak.BleakScanner = HaBleakScannerWrapper # type: ignore[misc, assignment]
|
||||
bleak.BleakClient = HaBleakClientWrapper # type: ignore[misc]
|
||||
bleak_retry_connector.BleakClientWithServiceCache = HaBleakClientWithServiceCache # type: ignore[misc,assignment] # noqa: E501
|
||||
bleak_retry_connector.BleakClient = HaBleakClientWrapper # type: ignore[misc] # noqa: E501
|
||||
|
||||
|
||||
def uninstall_multiple_bleak_catcher() -> None:
|
||||
|
@ -30,6 +32,9 @@ def uninstall_multiple_bleak_catcher() -> None:
|
|||
bleak.BleakScanner = ORIGINAL_BLEAK_SCANNER # type: ignore[misc]
|
||||
bleak.BleakClient = ORIGINAL_BLEAK_CLIENT # type: ignore[misc]
|
||||
bleak_retry_connector.BleakClientWithServiceCache = ( # type: ignore[misc]
|
||||
ORIGINAL_BLEAK_RETRY_CONNECTOR_CLIENT_WITH_SERVICE_CACHE
|
||||
)
|
||||
bleak_retry_connector.BleakClient = ( # type: ignore[misc]
|
||||
ORIGINAL_BLEAK_RETRY_CONNECTOR_CLIENT
|
||||
)
|
||||
|
||||
|
|
|
@ -70,6 +70,7 @@ async def async_setup_scanner( # noqa: C901
|
|||
yaml_path = hass.config.path(YAML_DEVICES)
|
||||
devs_to_track: set[str] = set()
|
||||
devs_no_track: set[str] = set()
|
||||
devs_advertise_time: dict[str, float] = {}
|
||||
devs_track_battery = {}
|
||||
interval: timedelta = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
|
||||
# if track new devices is true discover new devices
|
||||
|
@ -178,6 +179,7 @@ async def async_setup_scanner( # noqa: C901
|
|||
"""Update from a ble callback."""
|
||||
mac = service_info.address
|
||||
if mac in devs_to_track:
|
||||
devs_advertise_time[mac] = service_info.time
|
||||
now = dt_util.utcnow()
|
||||
hass.async_create_task(async_see_device(mac, service_info.name))
|
||||
if (
|
||||
|
@ -205,7 +207,9 @@ async def async_setup_scanner( # noqa: C901
|
|||
# there have been no callbacks because the RSSI or
|
||||
# other properties have not changed.
|
||||
for service_info in bluetooth.async_discovered_service_info(hass, False):
|
||||
_async_update_ble(service_info, bluetooth.BluetoothChange.ADVERTISEMENT)
|
||||
# Only call _async_update_ble if the advertisement time has changed
|
||||
if service_info.time != devs_advertise_time.get(service_info.address):
|
||||
_async_update_ble(service_info, bluetooth.BluetoothChange.ADVERTISEMENT)
|
||||
|
||||
cancels = [
|
||||
bluetooth.async_register_callback(
|
||||
|
|
|
@ -41,6 +41,7 @@ PLATFORMS = [
|
|||
Platform.DEVICE_TRACKER,
|
||||
Platform.LOCK,
|
||||
Platform.NOTIFY,
|
||||
Platform.SELECT,
|
||||
Platform.SENSOR,
|
||||
]
|
||||
|
||||
|
|
139
homeassistant/components/bmw_connected_drive/select.py
Normal file
139
homeassistant/components/bmw_connected_drive/select.py
Normal file
|
@ -0,0 +1,139 @@
|
|||
"""Select platform for BMW."""
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from bimmer_connected.vehicle import MyBMWVehicle
|
||||
from bimmer_connected.vehicle.charging_profile import ChargingMode
|
||||
|
||||
from homeassistant.components.select import SelectEntity, SelectEntityDescription
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import PERCENTAGE, UnitOfElectricCurrent
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import BMWBaseEntity
|
||||
from .const import DOMAIN
|
||||
from .coordinator import BMWDataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class BMWRequiredKeysMixin:
|
||||
"""Mixin for required keys."""
|
||||
|
||||
current_option: Callable[[MyBMWVehicle], str]
|
||||
remote_service: Callable[[MyBMWVehicle, str], Coroutine[Any, Any, Any]]
|
||||
|
||||
|
||||
@dataclass
|
||||
class BMWSelectEntityDescription(SelectEntityDescription, BMWRequiredKeysMixin):
|
||||
"""Describes BMW sensor entity."""
|
||||
|
||||
is_available: Callable[[MyBMWVehicle], bool] = lambda _: False
|
||||
dynamic_options: Callable[[MyBMWVehicle], list[str]] | None = None
|
||||
|
||||
|
||||
SELECT_TYPES: dict[str, BMWSelectEntityDescription] = {
|
||||
# --- Generic ---
|
||||
"target_soc": BMWSelectEntityDescription(
|
||||
key="target_soc",
|
||||
name="Target SoC",
|
||||
is_available=lambda v: v.is_remote_set_target_soc_enabled,
|
||||
options=[str(i * 5 + 20) for i in range(17)],
|
||||
current_option=lambda v: str(v.fuel_and_battery.charging_target),
|
||||
remote_service=lambda v, o: v.remote_services.trigger_charging_settings_update(
|
||||
target_soc=int(o)
|
||||
),
|
||||
icon="mdi:battery-charging-medium",
|
||||
unit_of_measurement=PERCENTAGE,
|
||||
),
|
||||
"ac_limit": BMWSelectEntityDescription(
|
||||
key="ac_limit",
|
||||
name="AC Charging Limit",
|
||||
is_available=lambda v: v.is_remote_set_ac_limit_enabled,
|
||||
dynamic_options=lambda v: [
|
||||
str(lim) for lim in v.charging_profile.ac_available_limits # type: ignore[union-attr]
|
||||
],
|
||||
current_option=lambda v: str(v.charging_profile.ac_current_limit), # type: ignore[union-attr]
|
||||
remote_service=lambda v, o: v.remote_services.trigger_charging_settings_update(
|
||||
ac_limit=int(o)
|
||||
),
|
||||
icon="mdi:current-ac",
|
||||
unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
),
|
||||
"charging_mode": BMWSelectEntityDescription(
|
||||
key="charging_mode",
|
||||
name="Charging Mode",
|
||||
is_available=lambda v: v.is_charging_plan_supported,
|
||||
options=[c.value for c in ChargingMode if c != ChargingMode.UNKNOWN],
|
||||
current_option=lambda v: str(v.charging_profile.charging_mode.value), # type: ignore[union-attr]
|
||||
remote_service=lambda v, o: v.remote_services.trigger_charging_profile_update(
|
||||
charging_mode=ChargingMode(o)
|
||||
),
|
||||
icon="mdi:vector-point-select",
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the MyBMW lock from config entry."""
|
||||
coordinator: BMWDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id]
|
||||
|
||||
entities: list[BMWSelect] = []
|
||||
|
||||
for vehicle in coordinator.account.vehicles:
|
||||
if not coordinator.read_only:
|
||||
entities.extend(
|
||||
[
|
||||
BMWSelect(coordinator, vehicle, description)
|
||||
for description in SELECT_TYPES.values()
|
||||
if description.is_available(vehicle)
|
||||
]
|
||||
)
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class BMWSelect(BMWBaseEntity, SelectEntity):
|
||||
"""Representation of BMW select entity."""
|
||||
|
||||
entity_description: BMWSelectEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: BMWDataUpdateCoordinator,
|
||||
vehicle: MyBMWVehicle,
|
||||
description: BMWSelectEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize an BMW select."""
|
||||
super().__init__(coordinator, vehicle)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{vehicle.vin}-{description.key}"
|
||||
if description.dynamic_options:
|
||||
self._attr_options = description.dynamic_options(vehicle)
|
||||
self._attr_current_option = description.current_option(vehicle)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
_LOGGER.debug(
|
||||
"Updating select '%s' of %s", self.entity_description.key, self.vehicle.name
|
||||
)
|
||||
self._attr_current_option = self.entity_description.current_option(self.vehicle)
|
||||
super()._handle_coordinator_update()
|
||||
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Update to the vehicle."""
|
||||
_LOGGER.debug(
|
||||
"Executing '%s' on vehicle '%s' to value '%s'",
|
||||
self.entity_description.key,
|
||||
self.vehicle.vin,
|
||||
option,
|
||||
)
|
||||
await self.entity_description.remote_service(self.vehicle, option)
|
|
@ -17,9 +17,9 @@ from homeassistant.const import (
|
|||
ATTR_SW_VERSION,
|
||||
ATTR_VIA_DEVICE,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.core import CALLBACK_TYPE, callback
|
||||
from homeassistant.helpers.entity import DeviceInfo, Entity
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
|
||||
from .const import DOMAIN
|
||||
from .utils import BondDevice, BondHub
|
||||
|
@ -27,6 +27,7 @@ from .utils import BondDevice, BondHub
|
|||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_FALLBACK_SCAN_INTERVAL = timedelta(seconds=10)
|
||||
_BPUP_ALIVE_SCAN_INTERVAL = timedelta(seconds=60)
|
||||
|
||||
|
||||
class BondEntity(Entity):
|
||||
|
@ -65,6 +66,7 @@ class BondEntity(Entity):
|
|||
self._attr_name = device.name
|
||||
self._attr_assumed_state = self._hub.is_bridge and not self._device.trust_state
|
||||
self._apply_state()
|
||||
self._bpup_polling_fallback: CALLBACK_TYPE | None = None
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
|
@ -100,12 +102,13 @@ class BondEntity(Entity):
|
|||
return device_info
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Fetch assumed state of the cover from the hub using API."""
|
||||
"""Perform a manual update from API."""
|
||||
await self._async_update_from_api()
|
||||
|
||||
@callback
|
||||
def _async_update_if_bpup_not_alive(self, now: datetime) -> None:
|
||||
"""Fetch via the API if BPUP is not alive."""
|
||||
self._async_schedule_bpup_alive_or_poll()
|
||||
if (
|
||||
self.hass.is_stopping
|
||||
or self._bpup_subs.alive
|
||||
|
@ -172,16 +175,22 @@ class BondEntity(Entity):
|
|||
"""Subscribe to BPUP and start polling."""
|
||||
await super().async_added_to_hass()
|
||||
self._bpup_subs.subscribe(self._device_id, self._async_bpup_callback)
|
||||
self.async_on_remove(
|
||||
async_track_time_interval(
|
||||
self.hass,
|
||||
self._async_update_if_bpup_not_alive,
|
||||
_FALLBACK_SCAN_INTERVAL,
|
||||
f"Bond {self.entity_id} fallback polling",
|
||||
)
|
||||
self._async_schedule_bpup_alive_or_poll()
|
||||
|
||||
@callback
|
||||
def _async_schedule_bpup_alive_or_poll(self) -> None:
|
||||
"""Schedule the BPUP alive or poll."""
|
||||
alive = self._bpup_subs.alive
|
||||
self._bpup_polling_fallback = async_call_later(
|
||||
self.hass,
|
||||
_BPUP_ALIVE_SCAN_INTERVAL if alive else _FALLBACK_SCAN_INTERVAL,
|
||||
self._async_update_if_bpup_not_alive,
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Unsubscribe from BPUP data on remove."""
|
||||
await super().async_will_remove_from_hass()
|
||||
self._bpup_subs.unsubscribe(self._device_id, self._async_bpup_callback)
|
||||
if self._bpup_polling_fallback:
|
||||
self._bpup_polling_fallback()
|
||||
self._bpup_polling_fallback = None
|
||||
|
|
|
@ -36,14 +36,14 @@ class BraviaTVButtonDescription(
|
|||
BUTTONS: tuple[BraviaTVButtonDescription, ...] = (
|
||||
BraviaTVButtonDescription(
|
||||
key="reboot",
|
||||
name="Reboot",
|
||||
translation_key="restart",
|
||||
device_class=ButtonDeviceClass.RESTART,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
press_action=lambda coordinator: coordinator.async_reboot_device(),
|
||||
),
|
||||
BraviaTVButtonDescription(
|
||||
key="terminate_apps",
|
||||
name="Terminate apps",
|
||||
translation_key="terminate_apps",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
press_action=lambda coordinator: coordinator.async_terminate_apps(),
|
||||
),
|
||||
|
|
|
@ -44,5 +44,15 @@
|
|||
"not_bravia_device": "The device is not a Bravia TV.",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"button": {
|
||||
"restart": {
|
||||
"name": "[%key:component::button::entity_component::restart::name%]"
|
||||
},
|
||||
"terminate_apps": {
|
||||
"name": "Terminate apps"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -25,61 +25,61 @@ from .entity import BroadlinkEntity
|
|||
SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
SensorEntityDescription(
|
||||
key="temperature",
|
||||
name="Temperature",
|
||||
translation_key="temperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="air_quality",
|
||||
name="Air quality",
|
||||
translation_key="air_quality",
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="humidity",
|
||||
name="Humidity",
|
||||
translation_key="humidity",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="light",
|
||||
name="Light",
|
||||
translation_key="light",
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="noise",
|
||||
name="Noise",
|
||||
translation_key="noise",
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="power",
|
||||
name="Current power",
|
||||
translation_key="power",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="volt",
|
||||
name="Voltage",
|
||||
translation_key="voltage",
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="current",
|
||||
name="Current",
|
||||
translation_key="current",
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="overload",
|
||||
name="Overload",
|
||||
translation_key="overload",
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="totalconsum",
|
||||
name="Total consumption",
|
||||
translation_key="total_consumption",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
|
|
|
@ -43,5 +43,39 @@
|
|||
"invalid_host": "[%key:common::config_flow::error::invalid_host%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"temperature": {
|
||||
"name": "[%key:component::sensor::entity_component::temperature::name%]"
|
||||
},
|
||||
"air_quality": {
|
||||
"name": "[%key:component::sensor::entity_component::aqi::name%]"
|
||||
},
|
||||
"humidity": {
|
||||
"name": "[%key:component::sensor::entity_component::humidity::name%]"
|
||||
},
|
||||
"light": {
|
||||
"name": "[%key:component::sensor::entity_component::illuminance::name%]"
|
||||
},
|
||||
"noise": {
|
||||
"name": "Noise"
|
||||
},
|
||||
"power": {
|
||||
"name": "[%key:component::sensor::entity_component::power::name%]"
|
||||
},
|
||||
"voltage": {
|
||||
"name": "[%key:component::sensor::entity_component::voltage::name%]"
|
||||
},
|
||||
"current": {
|
||||
"name": "[%key:component::sensor::entity_component::current::name%]"
|
||||
},
|
||||
"overload": {
|
||||
"name": "Overload"
|
||||
},
|
||||
"total_consumption": {
|
||||
"name": "Total consumption"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -53,14 +53,14 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="status",
|
||||
icon="mdi:printer",
|
||||
name="Status",
|
||||
translation_key="status",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value=lambda data: data.status,
|
||||
),
|
||||
BrotherSensorEntityDescription(
|
||||
key="page_counter",
|
||||
icon="mdi:file-document-outline",
|
||||
name="Page counter",
|
||||
translation_key="page_counter",
|
||||
native_unit_of_measurement=UNIT_PAGES,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -69,7 +69,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="bw_counter",
|
||||
icon="mdi:file-document-outline",
|
||||
name="B/W counter",
|
||||
translation_key="bw_pages",
|
||||
native_unit_of_measurement=UNIT_PAGES,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -78,7 +78,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="color_counter",
|
||||
icon="mdi:file-document-outline",
|
||||
name="Color counter",
|
||||
translation_key="color_pages",
|
||||
native_unit_of_measurement=UNIT_PAGES,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -87,7 +87,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="duplex_unit_pages_counter",
|
||||
icon="mdi:file-document-outline",
|
||||
name="Duplex unit pages counter",
|
||||
translation_key="duplex_unit_page_counter",
|
||||
native_unit_of_measurement=UNIT_PAGES,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -96,7 +96,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="drum_remaining_life",
|
||||
icon="mdi:chart-donut",
|
||||
name="Drum remaining life",
|
||||
translation_key="drum_remaining_life",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -105,7 +105,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="drum_remaining_pages",
|
||||
icon="mdi:chart-donut",
|
||||
name="Drum remaining pages",
|
||||
translation_key="drum_remaining_pages",
|
||||
native_unit_of_measurement=UNIT_PAGES,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -114,7 +114,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="drum_counter",
|
||||
icon="mdi:chart-donut",
|
||||
name="Drum counter",
|
||||
translation_key="drum_page_counter",
|
||||
native_unit_of_measurement=UNIT_PAGES,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -123,7 +123,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="black_drum_remaining_life",
|
||||
icon="mdi:chart-donut",
|
||||
name="Black drum remaining life",
|
||||
translation_key="black_drum_remaining_life",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -132,7 +132,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="black_drum_remaining_pages",
|
||||
icon="mdi:chart-donut",
|
||||
name="Black drum remaining pages",
|
||||
translation_key="black_drum_remaining_pages",
|
||||
native_unit_of_measurement=UNIT_PAGES,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -141,7 +141,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="black_drum_counter",
|
||||
icon="mdi:chart-donut",
|
||||
name="Black drum counter",
|
||||
translation_key="black_drum_page_counter",
|
||||
native_unit_of_measurement=UNIT_PAGES,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -150,7 +150,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="cyan_drum_remaining_life",
|
||||
icon="mdi:chart-donut",
|
||||
name="Cyan drum remaining life",
|
||||
translation_key="cyan_drum_remaining_life",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -159,7 +159,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="cyan_drum_remaining_pages",
|
||||
icon="mdi:chart-donut",
|
||||
name="Cyan drum remaining pages",
|
||||
translation_key="cyan_drum_remaining_pages",
|
||||
native_unit_of_measurement=UNIT_PAGES,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -168,7 +168,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="cyan_drum_counter",
|
||||
icon="mdi:chart-donut",
|
||||
name="Cyan drum counter",
|
||||
translation_key="cyan_drum_page_counter",
|
||||
native_unit_of_measurement=UNIT_PAGES,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -177,7 +177,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="magenta_drum_remaining_life",
|
||||
icon="mdi:chart-donut",
|
||||
name="Magenta drum remaining life",
|
||||
translation_key="magenta_drum_remaining_life",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -186,7 +186,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="magenta_drum_remaining_pages",
|
||||
icon="mdi:chart-donut",
|
||||
name="Magenta drum remaining pages",
|
||||
translation_key="magenta_drum_remaining_pages",
|
||||
native_unit_of_measurement=UNIT_PAGES,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -195,7 +195,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="magenta_drum_counter",
|
||||
icon="mdi:chart-donut",
|
||||
name="Magenta drum counter",
|
||||
translation_key="magenta_drum_page_counter",
|
||||
native_unit_of_measurement=UNIT_PAGES,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -204,7 +204,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="yellow_drum_remaining_life",
|
||||
icon="mdi:chart-donut",
|
||||
name="Yellow drum remaining life",
|
||||
translation_key="yellow_drum_remaining_life",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -213,7 +213,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="yellow_drum_remaining_pages",
|
||||
icon="mdi:chart-donut",
|
||||
name="Yellow drum remaining pages",
|
||||
translation_key="yellow_drum_remaining_pages",
|
||||
native_unit_of_measurement=UNIT_PAGES,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -222,7 +222,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="yellow_drum_counter",
|
||||
icon="mdi:chart-donut",
|
||||
name="Yellow drum counter",
|
||||
translation_key="yellow_drum_page_counter",
|
||||
native_unit_of_measurement=UNIT_PAGES,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -231,7 +231,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="belt_unit_remaining_life",
|
||||
icon="mdi:current-ac",
|
||||
name="Belt unit remaining life",
|
||||
translation_key="belt_unit_remaining_life",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -240,7 +240,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="fuser_remaining_life",
|
||||
icon="mdi:water-outline",
|
||||
name="Fuser remaining life",
|
||||
translation_key="fuser_remaining_life",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -249,7 +249,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="laser_remaining_life",
|
||||
icon="mdi:spotlight-beam",
|
||||
name="Laser remaining life",
|
||||
translation_key="laser_remaining_life",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -258,7 +258,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="pf_kit_1_remaining_life",
|
||||
icon="mdi:printer-3d",
|
||||
name="PF Kit 1 remaining life",
|
||||
translation_key="pf_kit_1_remaining_life",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -267,7 +267,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="pf_kit_mp_remaining_life",
|
||||
icon="mdi:printer-3d",
|
||||
name="PF Kit MP remaining life",
|
||||
translation_key="pf_kit_mp_remaining_life",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -276,7 +276,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="black_toner_remaining",
|
||||
icon="mdi:printer-3d-nozzle",
|
||||
name="Black toner remaining",
|
||||
translation_key="black_toner_remaining",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -285,7 +285,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="cyan_toner_remaining",
|
||||
icon="mdi:printer-3d-nozzle",
|
||||
name="Cyan toner remaining",
|
||||
translation_key="cyan_toner_remaining",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -294,7 +294,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="magenta_toner_remaining",
|
||||
icon="mdi:printer-3d-nozzle",
|
||||
name="Magenta toner remaining",
|
||||
translation_key="magenta_toner_remaining",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -303,7 +303,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="yellow_toner_remaining",
|
||||
icon="mdi:printer-3d-nozzle",
|
||||
name="Yellow toner remaining",
|
||||
translation_key="yellow_toner_remaining",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -312,7 +312,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="black_ink_remaining",
|
||||
icon="mdi:printer-3d-nozzle",
|
||||
name="Black ink remaining",
|
||||
translation_key="black_ink_remaining",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -321,7 +321,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="cyan_ink_remaining",
|
||||
icon="mdi:printer-3d-nozzle",
|
||||
name="Cyan ink remaining",
|
||||
translation_key="cyan_ink_remaining",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -330,7 +330,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="magenta_ink_remaining",
|
||||
icon="mdi:printer-3d-nozzle",
|
||||
name="Magenta ink remaining",
|
||||
translation_key="magenta_ink_remaining",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -339,7 +339,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
BrotherSensorEntityDescription(
|
||||
key="yellow_ink_remaining",
|
||||
icon="mdi:printer-3d-nozzle",
|
||||
name="Yellow ink remaining",
|
||||
translation_key="yellow_ink_remaining",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
@ -347,7 +347,7 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
|||
),
|
||||
BrotherSensorEntityDescription(
|
||||
key="uptime",
|
||||
name="Uptime",
|
||||
translation_key="last_restart",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
|
|
|
@ -25,5 +25,111 @@
|
|||
"unsupported_model": "This printer model is not supported.",
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"status": {
|
||||
"name": "Status"
|
||||
},
|
||||
"page_counter": {
|
||||
"name": "Page counter"
|
||||
},
|
||||
"bw_pages": {
|
||||
"name": "B/W pages"
|
||||
},
|
||||
"color_pages": {
|
||||
"name": "Color pages"
|
||||
},
|
||||
"duplex_unit_page_counter": {
|
||||
"name": "Duplex unit page counter"
|
||||
},
|
||||
"drum_remaining_life": {
|
||||
"name": "Drum remaining life"
|
||||
},
|
||||
"drum_remaining_pages": {
|
||||
"name": "Drum remaining pages"
|
||||
},
|
||||
"drum_page_counter": {
|
||||
"name": "Drum page counter"
|
||||
},
|
||||
"black_drum_remaining_life": {
|
||||
"name": "Black drum remaining life"
|
||||
},
|
||||
"black_drum_remaining_pages": {
|
||||
"name": "Black drum remaining pages"
|
||||
},
|
||||
"black_drum_page_counter": {
|
||||
"name": "Black drum page counter"
|
||||
},
|
||||
"cyan_drum_remaining_life": {
|
||||
"name": "Cyan drum remaining life"
|
||||
},
|
||||
"cyan_drum_remaining_pages": {
|
||||
"name": "Cyan drum remaining pages"
|
||||
},
|
||||
"cyan_drum_page_counter": {
|
||||
"name": "Cyan drum page counter"
|
||||
},
|
||||
"magenta_drum_remaining_life": {
|
||||
"name": "Magenta drum remaining life"
|
||||
},
|
||||
"magenta_drum_remaining_pages": {
|
||||
"name": "Magenta drum remaining pages"
|
||||
},
|
||||
"magenta_drum_page_counter": {
|
||||
"name": "Magenta drum page counter"
|
||||
},
|
||||
"yellow_drum_remaining_life": {
|
||||
"name": "Yellow drum remaining life"
|
||||
},
|
||||
"yellow_drum_remaining_pages": {
|
||||
"name": "Yellow drum remaining pages"
|
||||
},
|
||||
"yellow_drum_page_counter": {
|
||||
"name": "Yellow drum page counter"
|
||||
},
|
||||
"belt_unit_remaining_life": {
|
||||
"name": "Belt unit remaining life"
|
||||
},
|
||||
"fuser_remaining_life": {
|
||||
"name": "Fuser remaining life"
|
||||
},
|
||||
"laser_remaining_life": {
|
||||
"name": "Laser remaining life"
|
||||
},
|
||||
"pf_kit_1_remaining_life": {
|
||||
"name": "PF Kit 1 remaining life"
|
||||
},
|
||||
"pf_kit_mp_remaining_life": {
|
||||
"name": "PF Kit MP remaining life"
|
||||
},
|
||||
"black_toner_remaining": {
|
||||
"name": "Black toner remaining"
|
||||
},
|
||||
"cyan_toner_remaining": {
|
||||
"name": "Cyan toner remaining"
|
||||
},
|
||||
"magenta_toner_remaining": {
|
||||
"name": "Magenta toner remaining"
|
||||
},
|
||||
"yellow_toner_remaining": {
|
||||
"name": "Yellow toner remaining"
|
||||
},
|
||||
"black_ink_remaining": {
|
||||
"name": "Black ink remaining"
|
||||
},
|
||||
"cyan_ink_remaining": {
|
||||
"name": "Cyan ink remaining"
|
||||
},
|
||||
"magenta_ink_remaining": {
|
||||
"name": "Magenta ink remaining"
|
||||
},
|
||||
"yellow_ink_remaining": {
|
||||
"name": "Yellow ink remaining"
|
||||
},
|
||||
"last_restart": {
|
||||
"name": "Last restart"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -380,7 +380,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||
entity.async_write_ha_state()
|
||||
|
||||
unsub = async_track_time_interval(
|
||||
hass, update_tokens, TOKEN_CHANGE_INTERVAL, "Camera update tokens"
|
||||
hass, update_tokens, TOKEN_CHANGE_INTERVAL, name="Camera update tokens"
|
||||
)
|
||||
|
||||
@callback
|
||||
|
|
|
@ -14,6 +14,6 @@
|
|||
"documentation": "https://www.home-assistant.io/integrations/cast",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["casttube", "pychromecast"],
|
||||
"requirements": ["pychromecast==13.0.6"],
|
||||
"requirements": ["pychromecast==13.0.7"],
|
||||
"zeroconf": ["_googlecast._tcp.local."]
|
||||
}
|
||||
|
|
|
@ -18,7 +18,7 @@ from homeassistant.const import (
|
|||
EVENT_HOMEASSISTANT_STOP,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.core import HassJob, HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, entityfilter
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
@ -311,7 +311,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||
async_call_later(
|
||||
hass=hass,
|
||||
delay=timedelta(hours=STARTUP_REPAIR_DELAY),
|
||||
action=async_startup_repairs,
|
||||
action=HassJob(
|
||||
async_startup_repairs, "cloud startup repairs", cancel_on_shutdown=True
|
||||
),
|
||||
)
|
||||
|
||||
return True
|
||||
|
|
|
@ -20,6 +20,11 @@ from homeassistant.components.alexa import (
|
|||
errors as alexa_errors,
|
||||
state_report as alexa_state_report,
|
||||
)
|
||||
from homeassistant.components.homeassistant.exposed_entities import (
|
||||
async_get_assistant_settings,
|
||||
async_listen_entity_updates,
|
||||
async_should_expose,
|
||||
)
|
||||
from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES
|
||||
from homeassistant.core import HomeAssistant, callback, split_entity_id
|
||||
from homeassistant.helpers import entity_registry as er, start
|
||||
|
@ -30,16 +35,17 @@ from homeassistant.util.dt import utcnow
|
|||
from .const import (
|
||||
CONF_ENTITY_CONFIG,
|
||||
CONF_FILTER,
|
||||
PREF_ALEXA_DEFAULT_EXPOSE,
|
||||
PREF_ALEXA_ENTITY_CONFIGS,
|
||||
DOMAIN as CLOUD_DOMAIN,
|
||||
PREF_ALEXA_REPORT_STATE,
|
||||
PREF_ENABLE_ALEXA,
|
||||
PREF_SHOULD_EXPOSE,
|
||||
)
|
||||
from .prefs import CloudPreferences
|
||||
from .prefs import ALEXA_SETTINGS_VERSION, CloudPreferences
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CLOUD_ALEXA = f"{CLOUD_DOMAIN}.{ALEXA_DOMAIN}"
|
||||
|
||||
# Time to wait when entity preferences have changed before syncing it to
|
||||
# the cloud.
|
||||
SYNC_DELAY = 1
|
||||
|
@ -64,7 +70,7 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
|
|||
self._cloud = cloud
|
||||
self._token = None
|
||||
self._token_valid = None
|
||||
self._cur_entity_prefs = prefs.alexa_entity_configs
|
||||
self._cur_entity_prefs = async_get_assistant_settings(hass, CLOUD_ALEXA)
|
||||
self._alexa_sync_unsub: Callable[[], None] | None = None
|
||||
self._endpoint = None
|
||||
|
||||
|
@ -115,10 +121,31 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
|
|||
"""Return an identifier for the user that represents this config."""
|
||||
return self._cloud_user
|
||||
|
||||
def _migrate_alexa_entity_settings_v1(self):
|
||||
"""Migrate alexa entity settings to entity registry options."""
|
||||
if not self._config[CONF_FILTER].empty_filter:
|
||||
# Don't migrate if there's a YAML config
|
||||
return
|
||||
|
||||
entity_registry = er.async_get(self.hass)
|
||||
|
||||
for entity_id, entry in entity_registry.entities.items():
|
||||
if CLOUD_ALEXA in entry.options:
|
||||
continue
|
||||
options = {"should_expose": self._should_expose_legacy(entity_id)}
|
||||
entity_registry.async_update_entity_options(entity_id, CLOUD_ALEXA, options)
|
||||
|
||||
async def async_initialize(self):
|
||||
"""Initialize the Alexa config."""
|
||||
await super().async_initialize()
|
||||
|
||||
if self._prefs.alexa_settings_version != ALEXA_SETTINGS_VERSION:
|
||||
if self._prefs.alexa_settings_version < 2:
|
||||
self._migrate_alexa_entity_settings_v1()
|
||||
await self._prefs.async_update(
|
||||
alexa_settings_version=ALEXA_SETTINGS_VERSION
|
||||
)
|
||||
|
||||
async def hass_started(hass):
|
||||
if self.enabled and ALEXA_DOMAIN not in self.hass.config.components:
|
||||
await async_setup_component(self.hass, ALEXA_DOMAIN, {})
|
||||
|
@ -126,19 +153,19 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
|
|||
start.async_at_start(self.hass, hass_started)
|
||||
|
||||
self._prefs.async_listen_updates(self._async_prefs_updated)
|
||||
async_listen_entity_updates(
|
||||
self.hass, CLOUD_ALEXA, self._async_exposed_entities_updated
|
||||
)
|
||||
self.hass.bus.async_listen(
|
||||
er.EVENT_ENTITY_REGISTRY_UPDATED,
|
||||
self._handle_entity_registry_updated,
|
||||
)
|
||||
|
||||
def should_expose(self, entity_id):
|
||||
def _should_expose_legacy(self, entity_id):
|
||||
"""If an entity should be exposed."""
|
||||
if entity_id in CLOUD_NEVER_EXPOSED_ENTITIES:
|
||||
return False
|
||||
|
||||
if not self._config[CONF_FILTER].empty_filter:
|
||||
return self._config[CONF_FILTER](entity_id)
|
||||
|
||||
entity_configs = self._prefs.alexa_entity_configs
|
||||
entity_config = entity_configs.get(entity_id, {})
|
||||
entity_expose = entity_config.get(PREF_SHOULD_EXPOSE)
|
||||
|
@ -160,6 +187,15 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
|
|||
|
||||
return not auxiliary_entity and split_entity_id(entity_id)[0] in default_expose
|
||||
|
||||
def should_expose(self, entity_id):
|
||||
"""If an entity should be exposed."""
|
||||
if not self._config[CONF_FILTER].empty_filter:
|
||||
if entity_id in CLOUD_NEVER_EXPOSED_ENTITIES:
|
||||
return False
|
||||
return self._config[CONF_FILTER](entity_id)
|
||||
|
||||
return async_should_expose(self.hass, CLOUD_ALEXA, entity_id)
|
||||
|
||||
@callback
|
||||
def async_invalidate_access_token(self):
|
||||
"""Invalidate access token."""
|
||||
|
@ -233,32 +269,30 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
|
|||
if not any(
|
||||
key in updated_prefs
|
||||
for key in (
|
||||
PREF_ALEXA_DEFAULT_EXPOSE,
|
||||
PREF_ALEXA_ENTITY_CONFIGS,
|
||||
PREF_ALEXA_REPORT_STATE,
|
||||
PREF_ENABLE_ALEXA,
|
||||
)
|
||||
):
|
||||
return
|
||||
|
||||
# If we update just entity preferences, delay updating
|
||||
# as we might update more
|
||||
if updated_prefs == {PREF_ALEXA_ENTITY_CONFIGS}:
|
||||
if self._alexa_sync_unsub:
|
||||
self._alexa_sync_unsub()
|
||||
|
||||
self._alexa_sync_unsub = async_call_later(
|
||||
self.hass, SYNC_DELAY, self._sync_prefs
|
||||
)
|
||||
return
|
||||
|
||||
await self.async_sync_entities()
|
||||
|
||||
@callback
|
||||
def _async_exposed_entities_updated(self) -> None:
|
||||
"""Handle updated preferences."""
|
||||
# Delay updating as we might update more
|
||||
if self._alexa_sync_unsub:
|
||||
self._alexa_sync_unsub()
|
||||
|
||||
self._alexa_sync_unsub = async_call_later(
|
||||
self.hass, SYNC_DELAY, self._sync_prefs
|
||||
)
|
||||
|
||||
async def _sync_prefs(self, _now):
|
||||
"""Sync the updated preferences to Alexa."""
|
||||
self._alexa_sync_unsub = None
|
||||
old_prefs = self._cur_entity_prefs
|
||||
new_prefs = self._prefs.alexa_entity_configs
|
||||
new_prefs = async_get_assistant_settings(self.hass, CLOUD_ALEXA)
|
||||
|
||||
seen = set()
|
||||
to_update = []
|
||||
|
|
|
@ -19,6 +19,8 @@ PREF_USERNAME = "username"
|
|||
PREF_REMOTE_DOMAIN = "remote_domain"
|
||||
PREF_ALEXA_DEFAULT_EXPOSE = "alexa_default_expose"
|
||||
PREF_GOOGLE_DEFAULT_EXPOSE = "google_default_expose"
|
||||
PREF_ALEXA_SETTINGS_VERSION = "alexa_settings_version"
|
||||
PREF_GOOGLE_SETTINGS_VERSION = "google_settings_version"
|
||||
PREF_TTS_DEFAULT_VOICE = "tts_default_voice"
|
||||
DEFAULT_TTS_DEFAULT_VOICE = ("en-US", "female")
|
||||
DEFAULT_DISABLE_2FA = False
|
||||
|
|
|
@ -9,6 +9,10 @@ from hass_nabucasa.google_report_state import ErrorResponse
|
|||
|
||||
from homeassistant.components.google_assistant import DOMAIN as GOOGLE_DOMAIN
|
||||
from homeassistant.components.google_assistant.helpers import AbstractConfig
|
||||
from homeassistant.components.homeassistant.exposed_entities import (
|
||||
async_listen_entity_updates,
|
||||
async_should_expose,
|
||||
)
|
||||
from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES
|
||||
from homeassistant.core import (
|
||||
CoreState,
|
||||
|
@ -22,14 +26,18 @@ from homeassistant.setup import async_setup_component
|
|||
|
||||
from .const import (
|
||||
CONF_ENTITY_CONFIG,
|
||||
CONF_FILTER,
|
||||
DEFAULT_DISABLE_2FA,
|
||||
DOMAIN as CLOUD_DOMAIN,
|
||||
PREF_DISABLE_2FA,
|
||||
PREF_SHOULD_EXPOSE,
|
||||
)
|
||||
from .prefs import CloudPreferences
|
||||
from .prefs import GOOGLE_SETTINGS_VERSION, CloudPreferences
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CLOUD_GOOGLE = f"{CLOUD_DOMAIN}.{GOOGLE_DOMAIN}"
|
||||
|
||||
|
||||
class CloudGoogleConfig(AbstractConfig):
|
||||
"""HA Cloud Configuration for Google Assistant."""
|
||||
|
@ -48,8 +56,6 @@ class CloudGoogleConfig(AbstractConfig):
|
|||
self._user = cloud_user
|
||||
self._prefs = prefs
|
||||
self._cloud = cloud
|
||||
self._cur_entity_prefs = self._prefs.google_entity_configs
|
||||
self._cur_default_expose = self._prefs.google_default_expose
|
||||
self._sync_entities_lock = asyncio.Lock()
|
||||
|
||||
@property
|
||||
|
@ -89,10 +95,35 @@ class CloudGoogleConfig(AbstractConfig):
|
|||
"""Return Cloud User account."""
|
||||
return self._user
|
||||
|
||||
def _migrate_google_entity_settings_v1(self):
|
||||
"""Migrate Google entity settings to entity registry options."""
|
||||
if not self._config[CONF_FILTER].empty_filter:
|
||||
# Don't migrate if there's a YAML config
|
||||
return
|
||||
|
||||
entity_registry = er.async_get(self.hass)
|
||||
|
||||
for entity_id, entry in entity_registry.entities.items():
|
||||
if CLOUD_GOOGLE in entry.options:
|
||||
continue
|
||||
options = {"should_expose": self._should_expose_legacy(entity_id)}
|
||||
if _2fa_disabled := (self._2fa_disabled_legacy(entity_id) is not None):
|
||||
options[PREF_DISABLE_2FA] = _2fa_disabled
|
||||
entity_registry.async_update_entity_options(
|
||||
entity_id, CLOUD_GOOGLE, options
|
||||
)
|
||||
|
||||
async def async_initialize(self):
|
||||
"""Perform async initialization of config."""
|
||||
await super().async_initialize()
|
||||
|
||||
if self._prefs.google_settings_version != GOOGLE_SETTINGS_VERSION:
|
||||
if self._prefs.google_settings_version < 2:
|
||||
self._migrate_google_entity_settings_v1()
|
||||
await self._prefs.async_update(
|
||||
google_settings_version=GOOGLE_SETTINGS_VERSION
|
||||
)
|
||||
|
||||
async def hass_started(hass):
|
||||
if self.enabled and GOOGLE_DOMAIN not in self.hass.config.components:
|
||||
await async_setup_component(self.hass, GOOGLE_DOMAIN, {})
|
||||
|
@ -109,7 +140,9 @@ class CloudGoogleConfig(AbstractConfig):
|
|||
await self.async_disconnect_agent_user(agent_user_id)
|
||||
|
||||
self._prefs.async_listen_updates(self._async_prefs_updated)
|
||||
|
||||
async_listen_entity_updates(
|
||||
self.hass, CLOUD_GOOGLE, self._async_exposed_entities_updated
|
||||
)
|
||||
self.hass.bus.async_listen(
|
||||
er.EVENT_ENTITY_REGISTRY_UPDATED,
|
||||
self._handle_entity_registry_updated,
|
||||
|
@ -123,14 +156,11 @@ class CloudGoogleConfig(AbstractConfig):
|
|||
"""If a state object should be exposed."""
|
||||
return self._should_expose_entity_id(state.entity_id)
|
||||
|
||||
def _should_expose_entity_id(self, entity_id):
|
||||
def _should_expose_legacy(self, entity_id):
|
||||
"""If an entity ID should be exposed."""
|
||||
if entity_id in CLOUD_NEVER_EXPOSED_ENTITIES:
|
||||
return False
|
||||
|
||||
if not self._config["filter"].empty_filter:
|
||||
return self._config["filter"](entity_id)
|
||||
|
||||
entity_configs = self._prefs.google_entity_configs
|
||||
entity_config = entity_configs.get(entity_id, {})
|
||||
entity_expose = entity_config.get(PREF_SHOULD_EXPOSE)
|
||||
|
@ -154,6 +184,15 @@ class CloudGoogleConfig(AbstractConfig):
|
|||
|
||||
return not auxiliary_entity and split_entity_id(entity_id)[0] in default_expose
|
||||
|
||||
def _should_expose_entity_id(self, entity_id):
|
||||
"""If an entity should be exposed."""
|
||||
if not self._config[CONF_FILTER].empty_filter:
|
||||
if entity_id in CLOUD_NEVER_EXPOSED_ENTITIES:
|
||||
return False
|
||||
return self._config[CONF_FILTER](entity_id)
|
||||
|
||||
return async_should_expose(self.hass, CLOUD_GOOGLE, entity_id)
|
||||
|
||||
@property
|
||||
def agent_user_id(self):
|
||||
"""Return Agent User Id to use for query responses."""
|
||||
|
@ -168,11 +207,23 @@ class CloudGoogleConfig(AbstractConfig):
|
|||
"""Get agent user ID making request."""
|
||||
return self.agent_user_id
|
||||
|
||||
def should_2fa(self, state):
|
||||
def _2fa_disabled_legacy(self, entity_id):
|
||||
"""If an entity should be checked for 2FA."""
|
||||
entity_configs = self._prefs.google_entity_configs
|
||||
entity_config = entity_configs.get(state.entity_id, {})
|
||||
return not entity_config.get(PREF_DISABLE_2FA, DEFAULT_DISABLE_2FA)
|
||||
entity_config = entity_configs.get(entity_id, {})
|
||||
return entity_config.get(PREF_DISABLE_2FA)
|
||||
|
||||
def should_2fa(self, state):
|
||||
"""If an entity should be checked for 2FA."""
|
||||
entity_registry = er.async_get(self.hass)
|
||||
|
||||
registry_entry = entity_registry.async_get(state.entity_id)
|
||||
if not registry_entry:
|
||||
# Handle the entity has been removed
|
||||
return False
|
||||
|
||||
assistant_options = registry_entry.options.get(CLOUD_GOOGLE, {})
|
||||
return not assistant_options.get(PREF_DISABLE_2FA, DEFAULT_DISABLE_2FA)
|
||||
|
||||
async def async_report_state(self, message, agent_user_id: str):
|
||||
"""Send a state report to Google."""
|
||||
|
@ -218,14 +269,6 @@ class CloudGoogleConfig(AbstractConfig):
|
|||
# So when we change it, we need to sync all entities.
|
||||
sync_entities = True
|
||||
|
||||
# If entity prefs are the same or we have filter in config.yaml,
|
||||
# don't sync.
|
||||
elif (
|
||||
self._cur_entity_prefs is not prefs.google_entity_configs
|
||||
or self._cur_default_expose is not prefs.google_default_expose
|
||||
) and self._config["filter"].empty_filter:
|
||||
self.async_schedule_google_sync_all()
|
||||
|
||||
if self.enabled and not self.is_local_sdk_active:
|
||||
self.async_enable_local_sdk()
|
||||
sync_entities = True
|
||||
|
@ -233,12 +276,14 @@ class CloudGoogleConfig(AbstractConfig):
|
|||
self.async_disable_local_sdk()
|
||||
sync_entities = True
|
||||
|
||||
self._cur_entity_prefs = prefs.google_entity_configs
|
||||
self._cur_default_expose = prefs.google_default_expose
|
||||
|
||||
if sync_entities and self.hass.is_running:
|
||||
await self.async_sync_entities_all()
|
||||
|
||||
@callback
|
||||
def _async_exposed_entities_updated(self) -> None:
|
||||
"""Handle updated preferences."""
|
||||
self.async_schedule_google_sync_all()
|
||||
|
||||
@callback
|
||||
def _handle_entity_registry_updated(self, event: Event) -> None:
|
||||
"""Handle when entity registry updated."""
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
"""The HTTP api to control the cloud integration."""
|
||||
import asyncio
|
||||
from collections.abc import Mapping
|
||||
import dataclasses
|
||||
from functools import wraps
|
||||
from http import HTTPStatus
|
||||
|
@ -22,22 +23,24 @@ from homeassistant.components.alexa import (
|
|||
from homeassistant.components.google_assistant import helpers as google_helpers
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.util.location import async_detect_location_info
|
||||
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
PREF_ALEXA_DEFAULT_EXPOSE,
|
||||
PREF_ALEXA_REPORT_STATE,
|
||||
PREF_DISABLE_2FA,
|
||||
PREF_ENABLE_ALEXA,
|
||||
PREF_ENABLE_GOOGLE,
|
||||
PREF_GOOGLE_DEFAULT_EXPOSE,
|
||||
PREF_GOOGLE_REPORT_STATE,
|
||||
PREF_GOOGLE_SECURE_DEVICES_PIN,
|
||||
PREF_TTS_DEFAULT_VOICE,
|
||||
REQUEST_TIMEOUT,
|
||||
)
|
||||
from .google_config import CLOUD_GOOGLE
|
||||
from .repairs import async_manage_legacy_subscription_issue
|
||||
from .subscription import async_subscription_info
|
||||
|
||||
|
@ -66,11 +69,11 @@ async def async_setup(hass):
|
|||
websocket_api.async_register_command(hass, websocket_remote_connect)
|
||||
websocket_api.async_register_command(hass, websocket_remote_disconnect)
|
||||
|
||||
websocket_api.async_register_command(hass, google_assistant_get)
|
||||
websocket_api.async_register_command(hass, google_assistant_list)
|
||||
websocket_api.async_register_command(hass, google_assistant_update)
|
||||
|
||||
websocket_api.async_register_command(hass, alexa_list)
|
||||
websocket_api.async_register_command(hass, alexa_update)
|
||||
websocket_api.async_register_command(hass, alexa_sync)
|
||||
|
||||
websocket_api.async_register_command(hass, thingtalk_convert)
|
||||
|
@ -350,8 +353,6 @@ async def websocket_subscription(
|
|||
vol.Optional(PREF_ENABLE_ALEXA): bool,
|
||||
vol.Optional(PREF_ALEXA_REPORT_STATE): bool,
|
||||
vol.Optional(PREF_GOOGLE_REPORT_STATE): bool,
|
||||
vol.Optional(PREF_ALEXA_DEFAULT_EXPOSE): [str],
|
||||
vol.Optional(PREF_GOOGLE_DEFAULT_EXPOSE): [str],
|
||||
vol.Optional(PREF_GOOGLE_SECURE_DEVICES_PIN): vol.Any(None, str),
|
||||
vol.Optional(PREF_TTS_DEFAULT_VOICE): vol.All(
|
||||
vol.Coerce(tuple), vol.In(MAP_VOICE)
|
||||
|
@ -523,6 +524,54 @@ async def websocket_remote_disconnect(
|
|||
connection.send_result(msg["id"], await _account_data(hass, cloud))
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@_require_cloud_login
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
"type": "cloud/google_assistant/entities/get",
|
||||
"entity_id": str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@_ws_handle_cloud_errors
|
||||
async def google_assistant_get(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Get data for a single google assistant entity."""
|
||||
cloud = hass.data[DOMAIN]
|
||||
gconf = await cloud.client.get_google_config()
|
||||
entity_registry = er.async_get(hass)
|
||||
entity_id: str = msg["entity_id"]
|
||||
state = hass.states.get(entity_id)
|
||||
|
||||
if not entity_registry.async_is_registered(entity_id) or not state:
|
||||
connection.send_error(
|
||||
msg["id"],
|
||||
websocket_api.const.ERR_NOT_FOUND,
|
||||
f"{entity_id} unknown or not in the entity registry",
|
||||
)
|
||||
return
|
||||
|
||||
entity = google_helpers.GoogleEntity(hass, gconf, state)
|
||||
if entity_id in CLOUD_NEVER_EXPOSED_ENTITIES or not entity.is_supported():
|
||||
connection.send_error(
|
||||
msg["id"],
|
||||
websocket_api.const.ERR_NOT_SUPPORTED,
|
||||
f"{entity_id} not supported by Google assistant",
|
||||
)
|
||||
return
|
||||
|
||||
result = {
|
||||
"entity_id": entity.entity_id,
|
||||
"traits": [trait.name for trait in entity.traits()],
|
||||
"might_2fa": entity.might_2fa_traits(),
|
||||
}
|
||||
|
||||
connection.send_result(msg["id"], result)
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@_require_cloud_login
|
||||
@websocket_api.websocket_command({"type": "cloud/google_assistant/entities"})
|
||||
|
@ -536,11 +585,14 @@ async def google_assistant_list(
|
|||
"""List all google assistant entities."""
|
||||
cloud = hass.data[DOMAIN]
|
||||
gconf = await cloud.client.get_google_config()
|
||||
entity_registry = er.async_get(hass)
|
||||
entities = google_helpers.async_get_entities(hass, gconf)
|
||||
|
||||
result = []
|
||||
|
||||
for entity in entities:
|
||||
if not entity_registry.async_is_registered(entity.entity_id):
|
||||
continue
|
||||
result.append(
|
||||
{
|
||||
"entity_id": entity.entity_id,
|
||||
|
@ -558,8 +610,7 @@ async def google_assistant_list(
|
|||
{
|
||||
"type": "cloud/google_assistant/entities/update",
|
||||
"entity_id": str,
|
||||
vol.Optional("should_expose"): vol.Any(None, bool),
|
||||
vol.Optional("disable_2fa"): bool,
|
||||
vol.Optional(PREF_DISABLE_2FA): bool,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
|
@ -569,17 +620,30 @@ async def google_assistant_update(
|
|||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Update google assistant config."""
|
||||
cloud = hass.data[DOMAIN]
|
||||
changes = dict(msg)
|
||||
changes.pop("type")
|
||||
changes.pop("id")
|
||||
"""Update google assistant entity config."""
|
||||
entity_registry = er.async_get(hass)
|
||||
entity_id: str = msg["entity_id"]
|
||||
|
||||
await cloud.client.prefs.async_update_google_entity_config(**changes)
|
||||
if not (registry_entry := entity_registry.async_get(entity_id)):
|
||||
connection.send_error(
|
||||
msg["id"],
|
||||
websocket_api.const.ERR_NOT_ALLOWED,
|
||||
f"can't configure {entity_id}",
|
||||
)
|
||||
return
|
||||
|
||||
connection.send_result(
|
||||
msg["id"], cloud.client.prefs.google_entity_configs.get(msg["entity_id"])
|
||||
disable_2fa = msg[PREF_DISABLE_2FA]
|
||||
assistant_options: Mapping[str, Any]
|
||||
if (
|
||||
assistant_options := registry_entry.options.get(CLOUD_GOOGLE, {})
|
||||
) and assistant_options.get(PREF_DISABLE_2FA) == disable_2fa:
|
||||
return
|
||||
|
||||
assistant_options = assistant_options | {PREF_DISABLE_2FA: disable_2fa}
|
||||
entity_registry.async_update_entity_options(
|
||||
entity_id, CLOUD_GOOGLE, assistant_options
|
||||
)
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
|
@ -595,11 +659,14 @@ async def alexa_list(
|
|||
"""List all alexa entities."""
|
||||
cloud = hass.data[DOMAIN]
|
||||
alexa_config = await cloud.client.get_alexa_config()
|
||||
entity_registry = er.async_get(hass)
|
||||
entities = alexa_entities.async_get_entities(hass, alexa_config)
|
||||
|
||||
result = []
|
||||
|
||||
for entity in entities:
|
||||
if not entity_registry.async_is_registered(entity.entity_id):
|
||||
continue
|
||||
result.append(
|
||||
{
|
||||
"entity_id": entity.entity_id,
|
||||
|
@ -611,35 +678,6 @@ async def alexa_list(
|
|||
connection.send_result(msg["id"], result)
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@_require_cloud_login
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
"type": "cloud/alexa/entities/update",
|
||||
"entity_id": str,
|
||||
vol.Optional("should_expose"): vol.Any(None, bool),
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@_ws_handle_cloud_errors
|
||||
async def alexa_update(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Update alexa entity config."""
|
||||
cloud = hass.data[DOMAIN]
|
||||
changes = dict(msg)
|
||||
changes.pop("type")
|
||||
changes.pop("id")
|
||||
|
||||
await cloud.client.prefs.async_update_alexa_entity_config(**changes)
|
||||
|
||||
connection.send_result(
|
||||
msg["id"], cloud.client.prefs.alexa_entity_configs.get(msg["entity_id"])
|
||||
)
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@_require_cloud_login
|
||||
@websocket_api.websocket_command({"type": "cloud/alexa/sync"})
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
"name": "Home Assistant Cloud",
|
||||
"after_dependencies": ["google_assistant", "alexa"],
|
||||
"codeowners": ["@home-assistant/cloud"],
|
||||
"dependencies": ["http", "webhook"],
|
||||
"dependencies": ["homeassistant", "http", "webhook"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/cloud",
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
"""Preference management for cloud."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.auth.const import GROUP_ID_ADMIN
|
||||
from homeassistant.auth.models import User
|
||||
from homeassistant.components import webhook
|
||||
|
@ -18,9 +20,9 @@ from .const import (
|
|||
PREF_ALEXA_DEFAULT_EXPOSE,
|
||||
PREF_ALEXA_ENTITY_CONFIGS,
|
||||
PREF_ALEXA_REPORT_STATE,
|
||||
PREF_ALEXA_SETTINGS_VERSION,
|
||||
PREF_CLOUD_USER,
|
||||
PREF_CLOUDHOOKS,
|
||||
PREF_DISABLE_2FA,
|
||||
PREF_ENABLE_ALEXA,
|
||||
PREF_ENABLE_GOOGLE,
|
||||
PREF_ENABLE_REMOTE,
|
||||
|
@ -29,14 +31,33 @@ from .const import (
|
|||
PREF_GOOGLE_LOCAL_WEBHOOK_ID,
|
||||
PREF_GOOGLE_REPORT_STATE,
|
||||
PREF_GOOGLE_SECURE_DEVICES_PIN,
|
||||
PREF_GOOGLE_SETTINGS_VERSION,
|
||||
PREF_REMOTE_DOMAIN,
|
||||
PREF_SHOULD_EXPOSE,
|
||||
PREF_TTS_DEFAULT_VOICE,
|
||||
PREF_USERNAME,
|
||||
)
|
||||
|
||||
STORAGE_KEY = DOMAIN
|
||||
STORAGE_VERSION = 1
|
||||
STORAGE_VERSION_MINOR = 2
|
||||
|
||||
ALEXA_SETTINGS_VERSION = 2
|
||||
GOOGLE_SETTINGS_VERSION = 2
|
||||
|
||||
|
||||
class CloudPreferencesStore(Store):
|
||||
"""Store entity registry data."""
|
||||
|
||||
async def _async_migrate_func(
|
||||
self, old_major_version: int, old_minor_version: int, old_data: dict[str, Any]
|
||||
) -> dict[str, Any]:
|
||||
"""Migrate to the new version."""
|
||||
if old_major_version == 1:
|
||||
if old_minor_version < 2:
|
||||
old_data.setdefault(PREF_ALEXA_SETTINGS_VERSION, 1)
|
||||
old_data.setdefault(PREF_GOOGLE_SETTINGS_VERSION, 1)
|
||||
|
||||
return old_data
|
||||
|
||||
|
||||
class CloudPreferences:
|
||||
|
@ -45,7 +66,9 @@ class CloudPreferences:
|
|||
def __init__(self, hass):
|
||||
"""Initialize cloud prefs."""
|
||||
self._hass = hass
|
||||
self._store = Store(hass, STORAGE_VERSION, STORAGE_KEY)
|
||||
self._store = CloudPreferencesStore(
|
||||
hass, STORAGE_VERSION, STORAGE_KEY, minor_version=STORAGE_VERSION_MINOR
|
||||
)
|
||||
self._prefs = None
|
||||
self._listeners = []
|
||||
self.last_updated: set[str] = set()
|
||||
|
@ -79,14 +102,12 @@ class CloudPreferences:
|
|||
google_secure_devices_pin=UNDEFINED,
|
||||
cloudhooks=UNDEFINED,
|
||||
cloud_user=UNDEFINED,
|
||||
google_entity_configs=UNDEFINED,
|
||||
alexa_entity_configs=UNDEFINED,
|
||||
alexa_report_state=UNDEFINED,
|
||||
google_report_state=UNDEFINED,
|
||||
alexa_default_expose=UNDEFINED,
|
||||
google_default_expose=UNDEFINED,
|
||||
tts_default_voice=UNDEFINED,
|
||||
remote_domain=UNDEFINED,
|
||||
alexa_settings_version=UNDEFINED,
|
||||
google_settings_version=UNDEFINED,
|
||||
):
|
||||
"""Update user preferences."""
|
||||
prefs = {**self._prefs}
|
||||
|
@ -98,12 +119,10 @@ class CloudPreferences:
|
|||
(PREF_GOOGLE_SECURE_DEVICES_PIN, google_secure_devices_pin),
|
||||
(PREF_CLOUDHOOKS, cloudhooks),
|
||||
(PREF_CLOUD_USER, cloud_user),
|
||||
(PREF_GOOGLE_ENTITY_CONFIGS, google_entity_configs),
|
||||
(PREF_ALEXA_ENTITY_CONFIGS, alexa_entity_configs),
|
||||
(PREF_ALEXA_REPORT_STATE, alexa_report_state),
|
||||
(PREF_GOOGLE_REPORT_STATE, google_report_state),
|
||||
(PREF_ALEXA_DEFAULT_EXPOSE, alexa_default_expose),
|
||||
(PREF_GOOGLE_DEFAULT_EXPOSE, google_default_expose),
|
||||
(PREF_ALEXA_SETTINGS_VERSION, alexa_settings_version),
|
||||
(PREF_GOOGLE_SETTINGS_VERSION, google_settings_version),
|
||||
(PREF_TTS_DEFAULT_VOICE, tts_default_voice),
|
||||
(PREF_REMOTE_DOMAIN, remote_domain),
|
||||
):
|
||||
|
@ -112,53 +131,6 @@ class CloudPreferences:
|
|||
|
||||
await self._save_prefs(prefs)
|
||||
|
||||
async def async_update_google_entity_config(
|
||||
self,
|
||||
*,
|
||||
entity_id,
|
||||
disable_2fa=UNDEFINED,
|
||||
should_expose=UNDEFINED,
|
||||
):
|
||||
"""Update config for a Google entity."""
|
||||
entities = self.google_entity_configs
|
||||
entity = entities.get(entity_id, {})
|
||||
|
||||
changes = {}
|
||||
for key, value in (
|
||||
(PREF_DISABLE_2FA, disable_2fa),
|
||||
(PREF_SHOULD_EXPOSE, should_expose),
|
||||
):
|
||||
if value is not UNDEFINED:
|
||||
changes[key] = value
|
||||
|
||||
if not changes:
|
||||
return
|
||||
|
||||
updated_entity = {**entity, **changes}
|
||||
|
||||
updated_entities = {**entities, entity_id: updated_entity}
|
||||
await self.async_update(google_entity_configs=updated_entities)
|
||||
|
||||
async def async_update_alexa_entity_config(
|
||||
self, *, entity_id, should_expose=UNDEFINED
|
||||
):
|
||||
"""Update config for an Alexa entity."""
|
||||
entities = self.alexa_entity_configs
|
||||
entity = entities.get(entity_id, {})
|
||||
|
||||
changes = {}
|
||||
for key, value in ((PREF_SHOULD_EXPOSE, should_expose),):
|
||||
if value is not UNDEFINED:
|
||||
changes[key] = value
|
||||
|
||||
if not changes:
|
||||
return
|
||||
|
||||
updated_entity = {**entity, **changes}
|
||||
|
||||
updated_entities = {**entities, entity_id: updated_entity}
|
||||
await self.async_update(alexa_entity_configs=updated_entities)
|
||||
|
||||
async def async_set_username(self, username) -> bool:
|
||||
"""Set the username that is logged in."""
|
||||
# Logging out.
|
||||
|
@ -186,14 +158,12 @@ class CloudPreferences:
|
|||
"""Return dictionary version."""
|
||||
return {
|
||||
PREF_ALEXA_DEFAULT_EXPOSE: self.alexa_default_expose,
|
||||
PREF_ALEXA_ENTITY_CONFIGS: self.alexa_entity_configs,
|
||||
PREF_ALEXA_REPORT_STATE: self.alexa_report_state,
|
||||
PREF_CLOUDHOOKS: self.cloudhooks,
|
||||
PREF_ENABLE_ALEXA: self.alexa_enabled,
|
||||
PREF_ENABLE_GOOGLE: self.google_enabled,
|
||||
PREF_ENABLE_REMOTE: self.remote_enabled,
|
||||
PREF_GOOGLE_DEFAULT_EXPOSE: self.google_default_expose,
|
||||
PREF_GOOGLE_ENTITY_CONFIGS: self.google_entity_configs,
|
||||
PREF_GOOGLE_REPORT_STATE: self.google_report_state,
|
||||
PREF_GOOGLE_SECURE_DEVICES_PIN: self.google_secure_devices_pin,
|
||||
PREF_TTS_DEFAULT_VOICE: self.tts_default_voice,
|
||||
|
@ -235,6 +205,11 @@ class CloudPreferences:
|
|||
"""Return Alexa Entity configurations."""
|
||||
return self._prefs.get(PREF_ALEXA_ENTITY_CONFIGS, {})
|
||||
|
||||
@property
|
||||
def alexa_settings_version(self):
|
||||
"""Return version of Alexa settings."""
|
||||
return self._prefs[PREF_ALEXA_SETTINGS_VERSION]
|
||||
|
||||
@property
|
||||
def google_enabled(self):
|
||||
"""Return if Google is enabled."""
|
||||
|
@ -255,6 +230,11 @@ class CloudPreferences:
|
|||
"""Return Google Entity configurations."""
|
||||
return self._prefs.get(PREF_GOOGLE_ENTITY_CONFIGS, {})
|
||||
|
||||
@property
|
||||
def google_settings_version(self):
|
||||
"""Return version of Google settings."""
|
||||
return self._prefs[PREF_GOOGLE_SETTINGS_VERSION]
|
||||
|
||||
@property
|
||||
def google_local_webhook_id(self):
|
||||
"""Return Google webhook ID to receive local messages."""
|
||||
|
@ -319,6 +299,7 @@ class CloudPreferences:
|
|||
return {
|
||||
PREF_ALEXA_DEFAULT_EXPOSE: DEFAULT_EXPOSED_DOMAINS,
|
||||
PREF_ALEXA_ENTITY_CONFIGS: {},
|
||||
PREF_ALEXA_SETTINGS_VERSION: ALEXA_SETTINGS_VERSION,
|
||||
PREF_CLOUD_USER: None,
|
||||
PREF_CLOUDHOOKS: {},
|
||||
PREF_ENABLE_ALEXA: True,
|
||||
|
@ -326,6 +307,7 @@ class CloudPreferences:
|
|||
PREF_ENABLE_REMOTE: False,
|
||||
PREF_GOOGLE_DEFAULT_EXPOSE: DEFAULT_EXPOSED_DOMAINS,
|
||||
PREF_GOOGLE_ENTITY_CONFIGS: {},
|
||||
PREF_GOOGLE_SETTINGS_VERSION: GOOGLE_SETTINGS_VERSION,
|
||||
PREF_GOOGLE_LOCAL_WEBHOOK_ID: webhook.async_generate_id(),
|
||||
PREF_GOOGLE_SECURE_DEVICES_PIN: None,
|
||||
PREF_REMOTE_DOMAIN: None,
|
||||
|
|
|
@ -4,11 +4,16 @@ from hass_nabucasa import Cloud
|
|||
from hass_nabucasa.voice import MAP_VOICE, AudioOutput, VoiceError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.tts import CONF_LANG, PLATFORM_SCHEMA, Provider
|
||||
from homeassistant.components.tts import (
|
||||
ATTR_AUDIO_OUTPUT,
|
||||
CONF_LANG,
|
||||
PLATFORM_SCHEMA,
|
||||
Provider,
|
||||
)
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
CONF_GENDER = "gender"
|
||||
ATTR_GENDER = "gender"
|
||||
|
||||
SUPPORT_LANGUAGES = list({key[0] for key in MAP_VOICE})
|
||||
|
||||
|
@ -18,8 +23,8 @@ def validate_lang(value):
|
|||
if (lang := value.get(CONF_LANG)) is None:
|
||||
return value
|
||||
|
||||
if (gender := value.get(CONF_GENDER)) is None:
|
||||
gender = value[CONF_GENDER] = next(
|
||||
if (gender := value.get(ATTR_GENDER)) is None:
|
||||
gender = value[ATTR_GENDER] = next(
|
||||
(chk_gender for chk_lang, chk_gender in MAP_VOICE if chk_lang == lang), None
|
||||
)
|
||||
|
||||
|
@ -33,7 +38,7 @@ PLATFORM_SCHEMA = vol.All(
|
|||
PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_LANG): str,
|
||||
vol.Optional(CONF_GENDER): str,
|
||||
vol.Optional(ATTR_GENDER): str,
|
||||
}
|
||||
),
|
||||
validate_lang,
|
||||
|
@ -49,7 +54,7 @@ async def async_get_engine(hass, config, discovery_info=None):
|
|||
gender = None
|
||||
else:
|
||||
language = config[CONF_LANG]
|
||||
gender = config[CONF_GENDER]
|
||||
gender = config[ATTR_GENDER]
|
||||
|
||||
return CloudProvider(cloud, language, gender)
|
||||
|
||||
|
@ -87,12 +92,15 @@ class CloudProvider(Provider):
|
|||
@property
|
||||
def supported_options(self):
|
||||
"""Return list of supported options like voice, emotion."""
|
||||
return [CONF_GENDER]
|
||||
return [ATTR_GENDER, ATTR_AUDIO_OUTPUT]
|
||||
|
||||
@property
|
||||
def default_options(self):
|
||||
"""Return a dict include default options."""
|
||||
return {CONF_GENDER: self._gender}
|
||||
return {
|
||||
ATTR_GENDER: self._gender,
|
||||
ATTR_AUDIO_OUTPUT: AudioOutput.MP3,
|
||||
}
|
||||
|
||||
async def async_get_tts_audio(self, message, language, options=None):
|
||||
"""Load TTS from NabuCasa Cloud."""
|
||||
|
@ -101,10 +109,10 @@ class CloudProvider(Provider):
|
|||
data = await self.cloud.voice.process_tts(
|
||||
message,
|
||||
language,
|
||||
gender=options[CONF_GENDER],
|
||||
output=AudioOutput.MP3,
|
||||
gender=options[ATTR_GENDER],
|
||||
output=options[ATTR_AUDIO_OUTPUT],
|
||||
)
|
||||
except VoiceError:
|
||||
return (None, None)
|
||||
|
||||
return ("mp3", data)
|
||||
return (str(options[ATTR_AUDIO_OUTPUT]), data)
|
||||
|
|
|
@ -35,13 +35,13 @@ class CO2SensorEntityDescription(SensorEntityDescription):
|
|||
SENSORS = (
|
||||
CO2SensorEntityDescription(
|
||||
key="carbonIntensity",
|
||||
name="CO2 intensity",
|
||||
translation_key="carbon_intensity",
|
||||
unique_id="co2intensity",
|
||||
# No unit, it's extracted from response.
|
||||
),
|
||||
CO2SensorEntityDescription(
|
||||
key="fossilFuelPercentage",
|
||||
name="Grid fossil fuel percentage",
|
||||
translation_key="fossil_fuel_percentage",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
),
|
||||
)
|
||||
|
|
|
@ -30,5 +30,11 @@
|
|||
"unknown": "[%key:common::config_flow::error::unknown%]",
|
||||
"api_ratelimit": "API Ratelimit exceeded"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"carbon_intensity": { "name": "CO2 intensity" },
|
||||
"fossil_fuel_percentage": { "name": "Grid fossil fuel percentage" }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -137,8 +137,11 @@ class CommandSensor(SensorEntity):
|
|||
_LOGGER.warning("Unable to parse output as JSON: %s", value)
|
||||
else:
|
||||
_LOGGER.warning("Empty reply found when expecting JSON data")
|
||||
if self._value_template is None:
|
||||
self._attr_native_value = None
|
||||
return
|
||||
|
||||
elif self._value_template is not None:
|
||||
if self._value_template is not None:
|
||||
self._attr_native_value = (
|
||||
self._value_template.async_render_with_possible_json_value(
|
||||
value,
|
||||
|
|
|
@ -4,7 +4,7 @@ from __future__ import annotations
|
|||
import asyncio
|
||||
import logging
|
||||
import re
|
||||
from typing import Any
|
||||
from typing import Any, TypedDict
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
|
@ -20,6 +20,15 @@ from homeassistant.loader import bind_hass
|
|||
from .agent import AbstractConversationAgent, ConversationInput, ConversationResult
|
||||
from .default_agent import DefaultAgent
|
||||
|
||||
__all__ = [
|
||||
"DOMAIN",
|
||||
"async_converse",
|
||||
"async_get_agent_info",
|
||||
"async_set_agent",
|
||||
"async_unset_agent",
|
||||
"async_setup",
|
||||
]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_TEXT = "text"
|
||||
|
@ -270,6 +279,31 @@ class ConversationProcessView(http.HomeAssistantView):
|
|||
return self.json(result.as_dict())
|
||||
|
||||
|
||||
class AgentInfo(TypedDict):
|
||||
"""Dictionary holding agent info."""
|
||||
|
||||
id: str
|
||||
name: str
|
||||
|
||||
|
||||
@core.callback
|
||||
def async_get_agent_info(
|
||||
hass: core.HomeAssistant,
|
||||
agent_id: str | None = None,
|
||||
) -> AgentInfo | None:
|
||||
"""Get information on the agent or None if not found."""
|
||||
manager = _get_agent_manager(hass)
|
||||
|
||||
if agent_id is None:
|
||||
agent_id = manager.default_agent
|
||||
|
||||
for agent_info in manager.async_get_agent_info():
|
||||
if agent_info["id"] == agent_id:
|
||||
return agent_info
|
||||
|
||||
return None
|
||||
|
||||
|
||||
async def async_converse(
|
||||
hass: core.HomeAssistant,
|
||||
text: str,
|
||||
|
@ -332,12 +366,15 @@ class AgentManager:
|
|||
|
||||
return self._builtin_agent
|
||||
|
||||
if agent_id not in self._agents:
|
||||
raise ValueError(f"Agent {agent_id} not found")
|
||||
|
||||
return self._agents[agent_id]
|
||||
|
||||
@core.callback
|
||||
def async_get_agent_info(self) -> list[dict[str, Any]]:
|
||||
def async_get_agent_info(self) -> list[AgentInfo]:
|
||||
"""List all agents."""
|
||||
agents = [
|
||||
agents: list[AgentInfo] = [
|
||||
{
|
||||
"id": AgentManager.HOME_ASSISTANT_AGENT,
|
||||
"name": "Home Assistant",
|
||||
|
|
|
@ -1,88 +0,0 @@
|
|||
"""The Coronavirus integration."""
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
import async_timeout
|
||||
import coronavirus
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import (
|
||||
aiohttp_client,
|
||||
entity_registry as er,
|
||||
update_coordinator,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Coronavirus component."""
|
||||
# Make sure coordinator is initialized.
|
||||
await get_coordinator(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Coronavirus from a config entry."""
|
||||
if isinstance(entry.data["country"], int):
|
||||
hass.config_entries.async_update_entry(
|
||||
entry, data={**entry.data, "country": entry.title}
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_migrator(entity_entry: er.RegistryEntry):
|
||||
"""Migrate away from unstable ID."""
|
||||
country, info_type = entity_entry.unique_id.rsplit("-", 1)
|
||||
if not country.isnumeric():
|
||||
return None
|
||||
return {"new_unique_id": f"{entry.title}-{info_type}"}
|
||||
|
||||
await er.async_migrate_entries(hass, entry.entry_id, _async_migrator)
|
||||
|
||||
if not entry.unique_id:
|
||||
hass.config_entries.async_update_entry(entry, unique_id=entry.data["country"])
|
||||
|
||||
coordinator = await get_coordinator(hass)
|
||||
if not coordinator.last_update_success:
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def get_coordinator(
|
||||
hass: HomeAssistant,
|
||||
) -> update_coordinator.DataUpdateCoordinator:
|
||||
"""Get the data update coordinator."""
|
||||
if DOMAIN in hass.data:
|
||||
return hass.data[DOMAIN]
|
||||
|
||||
async def async_get_cases():
|
||||
async with async_timeout.timeout(10):
|
||||
return {
|
||||
case.country: case
|
||||
for case in await coronavirus.get_cases(
|
||||
aiohttp_client.async_get_clientsession(hass)
|
||||
)
|
||||
}
|
||||
|
||||
hass.data[DOMAIN] = update_coordinator.DataUpdateCoordinator(
|
||||
hass,
|
||||
logging.getLogger(__name__),
|
||||
name=DOMAIN,
|
||||
update_method=async_get_cases,
|
||||
update_interval=timedelta(hours=1),
|
||||
)
|
||||
await hass.data[DOMAIN].async_refresh()
|
||||
return hass.data[DOMAIN]
|
|
@ -1,50 +0,0 @@
|
|||
"""Config flow for Coronavirus integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
|
||||
from . import get_coordinator
|
||||
from .const import DOMAIN, OPTION_WORLDWIDE
|
||||
|
||||
|
||||
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Coronavirus."""

    VERSION = 1

    # Country picker choices, built lazily on the first form render.
    _options: dict[str, Any] | None = None

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Let the user pick a country and create the config entry."""
        errors: dict[str, str] = {}

        if self._options is None:
            # First visit: populate the picker from live coordinator data.
            coordinator = await get_coordinator(self.hass)
            if not coordinator.last_update_success or coordinator.data is None:
                return self.async_abort(reason="cannot_connect")

            options: dict[str, Any] = {OPTION_WORLDWIDE: "Worldwide"}
            ordered = sorted(coordinator.data.values(), key=lambda c: c.country)
            options.update({case.country: case.country for case in ordered})
            self._options = options

        if user_input is None:
            return self.async_show_form(
                step_id="user",
                data_schema=vol.Schema(
                    {vol.Required("country"): vol.In(self._options)}
                ),
                errors=errors,
            )

        # One entry per country: abort when this country is already set up.
        await self.async_set_unique_id(user_input["country"])
        self._abort_if_unique_id_configured()
        return self.async_create_entry(
            title=self._options[user_input["country"]], data=user_input
        )
|
|
@ -1,6 +0,0 @@
|
|||
"""Constants for the Coronavirus integration."""
from coronavirus import DEFAULT_SOURCE

# Integration domain; also the key under which the coordinator is cached
# in hass.data.
DOMAIN = "coronavirus"
# Sentinel country option representing the aggregated worldwide totals.
OPTION_WORLDWIDE = "__worldwide"
# Attribution shown on entities, naming the upstream data source.
ATTRIBUTION = f"Data provided by {DEFAULT_SOURCE.NAME}"
|
|
@ -1,10 +0,0 @@
|
|||
{
|
||||
"domain": "coronavirus",
|
||||
"name": "Coronavirus (COVID-19)",
|
||||
"codeowners": ["@home-assistant/core"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/coronavirus",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["coronavirus"],
|
||||
"requirements": ["coronavirus==1.1.1"]
|
||||
}
|
|
@ -1,73 +0,0 @@
|
|||
"""Sensor platform for the Corona virus."""
|
||||
from homeassistant.components.sensor import SensorEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import get_coordinator
|
||||
from .const import ATTRIBUTION, OPTION_WORLDWIDE
|
||||
|
||||
SENSORS = {
|
||||
"confirmed": "mdi:emoticon-neutral-outline",
|
||||
"current": "mdi:emoticon-sad-outline",
|
||||
"recovered": "mdi:emoticon-happy-outline",
|
||||
"deaths": "mdi:emoticon-cry-outline",
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Create one sensor per tracked metric for the configured country."""
    coordinator = await get_coordinator(hass)
    country = config_entry.data["country"]

    entities = [
        CoronavirusSensor(coordinator, country, info_type) for info_type in SENSORS
    ]
    async_add_entities(entities)
|
||||
|
||||
|
||||
class CoronavirusSensor(CoordinatorEntity, SensorEntity):
    """Sensor exposing one coronavirus metric for a single country."""

    _attr_attribution = ATTRIBUTION
    _attr_native_unit_of_measurement = "people"

    def __init__(self, coordinator, country, info_type):
        """Initialize coronavirus sensor."""
        super().__init__(coordinator)
        self.country = country
        self.info_type = info_type
        self._attr_icon = SENSORS[info_type]
        self._attr_unique_id = f"{country}-{info_type}"
        if country == OPTION_WORLDWIDE:
            label = "Worldwide"
        else:
            label = coordinator.data[country].country
        self._attr_name = f"{label} Coronavirus {info_type}"

    @property
    def available(self) -> bool:
        """Return if sensor is available."""
        if not self.coordinator.last_update_success:
            return False
        # Worldwide aggregates over all countries and is always available.
        return (
            self.country == OPTION_WORLDWIDE or self.country in self.coordinator.data
        )

    @property
    def native_value(self):
        """State of the sensor."""
        if self.country != OPTION_WORLDWIDE:
            return getattr(self.coordinator.data[self.country], self.info_type)

        # Worldwide: sum the metric across every country, skipping countries
        # that do not report this metric.
        return sum(
            value
            for case in self.coordinator.data.values()
            if (value := getattr(case, self.info_type)) is not None
        )
|
|
@ -1,14 +0,0 @@
|
|||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"title": "Pick a country to monitor",
|
||||
"data": { "country": "Country" }
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -106,7 +106,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||
|
||||
storage_collection = CounterStorageCollection(
|
||||
Store(hass, STORAGE_VERSION, STORAGE_KEY),
|
||||
logging.getLogger(f"{__name__}.storage_collection"),
|
||||
id_manager,
|
||||
)
|
||||
collection.sync_entity_lifecycle(
|
||||
|
@ -140,7 +139,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||
return True
|
||||
|
||||
|
||||
class CounterStorageCollection(collection.StorageCollection):
|
||||
class CounterStorageCollection(collection.DictStorageCollection):
|
||||
"""Input storage based collection."""
|
||||
|
||||
CREATE_UPDATE_SCHEMA = vol.Schema(STORAGE_FIELDS)
|
||||
|
@ -154,10 +153,10 @@ class CounterStorageCollection(collection.StorageCollection):
|
|||
"""Suggest an ID based on the config."""
|
||||
return info[CONF_NAME]
|
||||
|
||||
async def _update_data(self, data: dict, update_data: dict) -> dict:
|
||||
async def _update_data(self, item: dict, update_data: dict) -> dict:
|
||||
"""Return a new updated data object."""
|
||||
update_data = self.CREATE_UPDATE_SCHEMA(update_data)
|
||||
return {CONF_ID: data[CONF_ID]} | update_data
|
||||
return {CONF_ID: item[CONF_ID]} | update_data
|
||||
|
||||
|
||||
class Counter(collection.CollectionEntity, RestoreEntity):
|
||||
|
|
|
@ -20,7 +20,6 @@ _RESOURCE = "http://apilayer.net/api/live"
|
|||
DEFAULT_BASE = "USD"
|
||||
DEFAULT_NAME = "CurrencyLayer Sensor"
|
||||
|
||||
ICON = "mdi:currency"
|
||||
|
||||
SCAN_INTERVAL = timedelta(hours=4)
|
||||
|
||||
|
@ -60,6 +59,7 @@ class CurrencylayerSensor(SensorEntity):
|
|||
"""Implementing the Currencylayer sensor."""
|
||||
|
||||
_attr_attribution = "Data provided by currencylayer.com"
|
||||
_attr_icon = "mdi:currency"
|
||||
|
||||
def __init__(self, rest, base, quote):
|
||||
"""Initialize the sensor."""
|
||||
|
@ -78,11 +78,6 @@ class CurrencylayerSensor(SensorEntity):
|
|||
"""Return the name of the sensor."""
|
||||
return self._base
|
||||
|
||||
@property
|
||||
def icon(self):
|
||||
"""Return the icon to use in the frontend, if any."""
|
||||
return ICON
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state of the sensor."""
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
"""The darksky component."""
|
|
@ -1,9 +0,0 @@
|
|||
{
|
||||
"domain": "darksky",
|
||||
"name": "Dark Sky",
|
||||
"codeowners": ["@fabaff"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/darksky",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["forecastio"],
|
||||
"requirements": ["python-forecastio==1.4.0"]
|
||||
}
|
|
@ -1,927 +0,0 @@
|
|||
"""Support for Dark Sky weather service."""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Literal, NamedTuple
|
||||
|
||||
import forecastio
|
||||
from requests.exceptions import ConnectionError as ConnectError, HTTPError, Timeout
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
PLATFORM_SCHEMA,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_API_KEY,
|
||||
CONF_LATITUDE,
|
||||
CONF_LONGITUDE,
|
||||
CONF_MONITORED_CONDITIONS,
|
||||
CONF_NAME,
|
||||
CONF_SCAN_INTERVAL,
|
||||
DEGREE,
|
||||
PERCENTAGE,
|
||||
UV_INDEX,
|
||||
UnitOfLength,
|
||||
UnitOfPrecipitationDepth,
|
||||
UnitOfPressure,
|
||||
UnitOfSpeed,
|
||||
UnitOfTemperature,
|
||||
UnitOfVolumetricFlux,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import Throttle
|
||||
from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_FORECAST = "forecast"
|
||||
CONF_HOURLY_FORECAST = "hourly_forecast"
|
||||
CONF_LANGUAGE = "language"
|
||||
CONF_UNITS = "units"
|
||||
|
||||
DEFAULT_LANGUAGE = "en"
|
||||
DEFAULT_NAME = "Dark Sky"
|
||||
SCAN_INTERVAL = timedelta(seconds=300)
|
||||
|
||||
DEPRECATED_SENSOR_TYPES = {
|
||||
"apparent_temperature_max",
|
||||
"apparent_temperature_min",
|
||||
"temperature_max",
|
||||
"temperature_min",
|
||||
}
|
||||
|
||||
MAP_UNIT_SYSTEM: dict[
|
||||
Literal["si", "us", "ca", "uk", "uk2"],
|
||||
Literal["si_unit", "us_unit", "ca_unit", "uk_unit", "uk2_unit"],
|
||||
] = {
|
||||
"si": "si_unit",
|
||||
"us": "us_unit",
|
||||
"ca": "ca_unit",
|
||||
"uk": "uk_unit",
|
||||
"uk2": "uk2_unit",
|
||||
}
|
||||
|
||||
|
||||
@dataclass
class DarkskySensorEntityDescription(SensorEntityDescription):
    """Describes Darksky sensor entity.

    Extends the base sensor description with a native unit per Dark Sky
    unit system and the forecast modes the sensor can be created for.
    """

    # Native unit per Dark Sky unit system; None when the sensor is unitless
    # (e.g. text summaries and icons).
    si_unit: str | None = None
    us_unit: str | None = None
    ca_unit: str | None = None
    uk_unit: str | None = None
    uk2_unit: str | None = None
    # Forecast modes ("currently", "hourly", "daily", ...) this sensor
    # supports; an empty list still yields a current-conditions sensor
    # (see setup_platform's "not description.forecast_mode" branch).
    forecast_mode: list[str] = field(default_factory=list)
|
||||
|
||||
|
||||
SENSOR_TYPES: dict[str, DarkskySensorEntityDescription] = {
|
||||
"summary": DarkskySensorEntityDescription(
|
||||
key="summary",
|
||||
name="Summary",
|
||||
forecast_mode=["currently", "hourly", "daily"],
|
||||
),
|
||||
"minutely_summary": DarkskySensorEntityDescription(
|
||||
key="minutely_summary",
|
||||
name="Minutely Summary",
|
||||
forecast_mode=[],
|
||||
),
|
||||
"hourly_summary": DarkskySensorEntityDescription(
|
||||
key="hourly_summary",
|
||||
name="Hourly Summary",
|
||||
forecast_mode=[],
|
||||
),
|
||||
"daily_summary": DarkskySensorEntityDescription(
|
||||
key="daily_summary",
|
||||
name="Daily Summary",
|
||||
forecast_mode=[],
|
||||
),
|
||||
"icon": DarkskySensorEntityDescription(
|
||||
key="icon",
|
||||
name="Icon",
|
||||
forecast_mode=["currently", "hourly", "daily"],
|
||||
),
|
||||
"nearest_storm_distance": DarkskySensorEntityDescription(
|
||||
key="nearest_storm_distance",
|
||||
name="Nearest Storm Distance",
|
||||
si_unit=UnitOfLength.KILOMETERS,
|
||||
us_unit=UnitOfLength.MILES,
|
||||
ca_unit=UnitOfLength.KILOMETERS,
|
||||
uk_unit=UnitOfLength.KILOMETERS,
|
||||
uk2_unit=UnitOfLength.MILES,
|
||||
icon="mdi:weather-lightning",
|
||||
forecast_mode=["currently"],
|
||||
),
|
||||
"nearest_storm_bearing": DarkskySensorEntityDescription(
|
||||
key="nearest_storm_bearing",
|
||||
name="Nearest Storm Bearing",
|
||||
si_unit=DEGREE,
|
||||
us_unit=DEGREE,
|
||||
ca_unit=DEGREE,
|
||||
uk_unit=DEGREE,
|
||||
uk2_unit=DEGREE,
|
||||
icon="mdi:weather-lightning",
|
||||
forecast_mode=["currently"],
|
||||
),
|
||||
"precip_type": DarkskySensorEntityDescription(
|
||||
key="precip_type",
|
||||
name="Precip",
|
||||
icon="mdi:weather-pouring",
|
||||
forecast_mode=["currently", "minutely", "hourly", "daily"],
|
||||
),
|
||||
"precip_intensity": DarkskySensorEntityDescription(
|
||||
key="precip_intensity",
|
||||
name="Precip Intensity",
|
||||
si_unit=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
||||
us_unit=UnitOfVolumetricFlux.INCHES_PER_HOUR,
|
||||
ca_unit=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
||||
uk_unit=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
||||
uk2_unit=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
||||
icon="mdi:weather-rainy",
|
||||
forecast_mode=["currently", "minutely", "hourly", "daily"],
|
||||
),
|
||||
"precip_probability": DarkskySensorEntityDescription(
|
||||
key="precip_probability",
|
||||
name="Precip Probability",
|
||||
si_unit=PERCENTAGE,
|
||||
us_unit=PERCENTAGE,
|
||||
ca_unit=PERCENTAGE,
|
||||
uk_unit=PERCENTAGE,
|
||||
uk2_unit=PERCENTAGE,
|
||||
icon="mdi:water-percent",
|
||||
forecast_mode=["currently", "minutely", "hourly", "daily"],
|
||||
),
|
||||
"precip_accumulation": DarkskySensorEntityDescription(
|
||||
key="precip_accumulation",
|
||||
name="Precip Accumulation",
|
||||
device_class=SensorDeviceClass.PRECIPITATION,
|
||||
si_unit=UnitOfPrecipitationDepth.CENTIMETERS,
|
||||
us_unit=UnitOfPrecipitationDepth.INCHES,
|
||||
ca_unit=UnitOfPrecipitationDepth.CENTIMETERS,
|
||||
uk_unit=UnitOfPrecipitationDepth.CENTIMETERS,
|
||||
uk2_unit=UnitOfPrecipitationDepth.CENTIMETERS,
|
||||
icon="mdi:weather-snowy",
|
||||
forecast_mode=["hourly", "daily"],
|
||||
),
|
||||
"temperature": DarkskySensorEntityDescription(
|
||||
key="temperature",
|
||||
name="Temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
si_unit=UnitOfTemperature.CELSIUS,
|
||||
us_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
ca_unit=UnitOfTemperature.CELSIUS,
|
||||
uk_unit=UnitOfTemperature.CELSIUS,
|
||||
uk2_unit=UnitOfTemperature.CELSIUS,
|
||||
forecast_mode=["currently", "hourly"],
|
||||
),
|
||||
"apparent_temperature": DarkskySensorEntityDescription(
|
||||
key="apparent_temperature",
|
||||
name="Apparent Temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
si_unit=UnitOfTemperature.CELSIUS,
|
||||
us_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
ca_unit=UnitOfTemperature.CELSIUS,
|
||||
uk_unit=UnitOfTemperature.CELSIUS,
|
||||
uk2_unit=UnitOfTemperature.CELSIUS,
|
||||
forecast_mode=["currently", "hourly"],
|
||||
),
|
||||
"dew_point": DarkskySensorEntityDescription(
|
||||
key="dew_point",
|
||||
name="Dew Point",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
si_unit=UnitOfTemperature.CELSIUS,
|
||||
us_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
ca_unit=UnitOfTemperature.CELSIUS,
|
||||
uk_unit=UnitOfTemperature.CELSIUS,
|
||||
uk2_unit=UnitOfTemperature.CELSIUS,
|
||||
forecast_mode=["currently", "hourly", "daily"],
|
||||
),
|
||||
"wind_speed": DarkskySensorEntityDescription(
|
||||
key="wind_speed",
|
||||
name="Wind Speed",
|
||||
device_class=SensorDeviceClass.WIND_SPEED,
|
||||
si_unit=UnitOfSpeed.METERS_PER_SECOND,
|
||||
us_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
ca_unit=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
uk_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
uk2_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
forecast_mode=["currently", "hourly", "daily"],
|
||||
),
|
||||
"wind_bearing": DarkskySensorEntityDescription(
|
||||
key="wind_bearing",
|
||||
name="Wind Bearing",
|
||||
si_unit=DEGREE,
|
||||
us_unit=DEGREE,
|
||||
ca_unit=DEGREE,
|
||||
uk_unit=DEGREE,
|
||||
uk2_unit=DEGREE,
|
||||
icon="mdi:compass",
|
||||
forecast_mode=["currently", "hourly", "daily"],
|
||||
),
|
||||
"wind_gust": DarkskySensorEntityDescription(
|
||||
key="wind_gust",
|
||||
name="Wind Gust",
|
||||
device_class=SensorDeviceClass.WIND_SPEED,
|
||||
si_unit=UnitOfSpeed.METERS_PER_SECOND,
|
||||
us_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
ca_unit=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
uk_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
uk2_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
icon="mdi:weather-windy-variant",
|
||||
forecast_mode=["currently", "hourly", "daily"],
|
||||
),
|
||||
"cloud_cover": DarkskySensorEntityDescription(
|
||||
key="cloud_cover",
|
||||
name="Cloud Coverage",
|
||||
si_unit=PERCENTAGE,
|
||||
us_unit=PERCENTAGE,
|
||||
ca_unit=PERCENTAGE,
|
||||
uk_unit=PERCENTAGE,
|
||||
uk2_unit=PERCENTAGE,
|
||||
icon="mdi:weather-partly-cloudy",
|
||||
forecast_mode=["currently", "hourly", "daily"],
|
||||
),
|
||||
"humidity": DarkskySensorEntityDescription(
|
||||
key="humidity",
|
||||
name="Humidity",
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
si_unit=PERCENTAGE,
|
||||
us_unit=PERCENTAGE,
|
||||
ca_unit=PERCENTAGE,
|
||||
uk_unit=PERCENTAGE,
|
||||
uk2_unit=PERCENTAGE,
|
||||
forecast_mode=["currently", "hourly", "daily"],
|
||||
),
|
||||
"pressure": DarkskySensorEntityDescription(
|
||||
key="pressure",
|
||||
name="Pressure",
|
||||
device_class=SensorDeviceClass.PRESSURE,
|
||||
si_unit=UnitOfPressure.MBAR,
|
||||
us_unit=UnitOfPressure.MBAR,
|
||||
ca_unit=UnitOfPressure.MBAR,
|
||||
uk_unit=UnitOfPressure.MBAR,
|
||||
uk2_unit=UnitOfPressure.MBAR,
|
||||
forecast_mode=["currently", "hourly", "daily"],
|
||||
),
|
||||
"visibility": DarkskySensorEntityDescription(
|
||||
key="visibility",
|
||||
name="Visibility",
|
||||
si_unit=UnitOfLength.KILOMETERS,
|
||||
us_unit=UnitOfLength.MILES,
|
||||
ca_unit=UnitOfLength.KILOMETERS,
|
||||
uk_unit=UnitOfLength.KILOMETERS,
|
||||
uk2_unit=UnitOfLength.MILES,
|
||||
icon="mdi:eye",
|
||||
forecast_mode=["currently", "hourly", "daily"],
|
||||
),
|
||||
"ozone": DarkskySensorEntityDescription(
|
||||
key="ozone",
|
||||
name="Ozone",
|
||||
device_class=SensorDeviceClass.OZONE,
|
||||
si_unit="DU",
|
||||
us_unit="DU",
|
||||
ca_unit="DU",
|
||||
uk_unit="DU",
|
||||
uk2_unit="DU",
|
||||
forecast_mode=["currently", "hourly", "daily"],
|
||||
),
|
||||
"apparent_temperature_max": DarkskySensorEntityDescription(
|
||||
key="apparent_temperature_max",
|
||||
name="Daily High Apparent Temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
si_unit=UnitOfTemperature.CELSIUS,
|
||||
us_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
ca_unit=UnitOfTemperature.CELSIUS,
|
||||
uk_unit=UnitOfTemperature.CELSIUS,
|
||||
uk2_unit=UnitOfTemperature.CELSIUS,
|
||||
forecast_mode=["daily"],
|
||||
),
|
||||
"apparent_temperature_high": DarkskySensorEntityDescription(
|
||||
key="apparent_temperature_high",
|
||||
name="Daytime High Apparent Temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
si_unit=UnitOfTemperature.CELSIUS,
|
||||
us_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
ca_unit=UnitOfTemperature.CELSIUS,
|
||||
uk_unit=UnitOfTemperature.CELSIUS,
|
||||
uk2_unit=UnitOfTemperature.CELSIUS,
|
||||
forecast_mode=["daily"],
|
||||
),
|
||||
"apparent_temperature_min": DarkskySensorEntityDescription(
|
||||
key="apparent_temperature_min",
|
||||
name="Daily Low Apparent Temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
si_unit=UnitOfTemperature.CELSIUS,
|
||||
us_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
ca_unit=UnitOfTemperature.CELSIUS,
|
||||
uk_unit=UnitOfTemperature.CELSIUS,
|
||||
uk2_unit=UnitOfTemperature.CELSIUS,
|
||||
forecast_mode=["daily"],
|
||||
),
|
||||
"apparent_temperature_low": DarkskySensorEntityDescription(
|
||||
key="apparent_temperature_low",
|
||||
name="Overnight Low Apparent Temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
si_unit=UnitOfTemperature.CELSIUS,
|
||||
us_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
ca_unit=UnitOfTemperature.CELSIUS,
|
||||
uk_unit=UnitOfTemperature.CELSIUS,
|
||||
uk2_unit=UnitOfTemperature.CELSIUS,
|
||||
forecast_mode=["daily"],
|
||||
),
|
||||
"temperature_max": DarkskySensorEntityDescription(
|
||||
key="temperature_max",
|
||||
name="Daily High Temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
si_unit=UnitOfTemperature.CELSIUS,
|
||||
us_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
ca_unit=UnitOfTemperature.CELSIUS,
|
||||
uk_unit=UnitOfTemperature.CELSIUS,
|
||||
uk2_unit=UnitOfTemperature.CELSIUS,
|
||||
forecast_mode=["daily"],
|
||||
),
|
||||
"temperature_high": DarkskySensorEntityDescription(
|
||||
key="temperature_high",
|
||||
name="Daytime High Temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
si_unit=UnitOfTemperature.CELSIUS,
|
||||
us_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
ca_unit=UnitOfTemperature.CELSIUS,
|
||||
uk_unit=UnitOfTemperature.CELSIUS,
|
||||
uk2_unit=UnitOfTemperature.CELSIUS,
|
||||
forecast_mode=["daily"],
|
||||
),
|
||||
"temperature_min": DarkskySensorEntityDescription(
|
||||
key="temperature_min",
|
||||
name="Daily Low Temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
si_unit=UnitOfTemperature.CELSIUS,
|
||||
us_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
ca_unit=UnitOfTemperature.CELSIUS,
|
||||
uk_unit=UnitOfTemperature.CELSIUS,
|
||||
uk2_unit=UnitOfTemperature.CELSIUS,
|
||||
forecast_mode=["daily"],
|
||||
),
|
||||
"temperature_low": DarkskySensorEntityDescription(
|
||||
key="temperature_low",
|
||||
name="Overnight Low Temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
si_unit=UnitOfTemperature.CELSIUS,
|
||||
us_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
ca_unit=UnitOfTemperature.CELSIUS,
|
||||
uk_unit=UnitOfTemperature.CELSIUS,
|
||||
uk2_unit=UnitOfTemperature.CELSIUS,
|
||||
forecast_mode=["daily"],
|
||||
),
|
||||
"precip_intensity_max": DarkskySensorEntityDescription(
|
||||
key="precip_intensity_max",
|
||||
name="Daily Max Precip Intensity",
|
||||
si_unit=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
||||
us_unit=UnitOfVolumetricFlux.INCHES_PER_HOUR,
|
||||
ca_unit=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
||||
uk_unit=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
||||
uk2_unit=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
||||
icon="mdi:thermometer",
|
||||
forecast_mode=["daily"],
|
||||
),
|
||||
"uv_index": DarkskySensorEntityDescription(
|
||||
key="uv_index",
|
||||
name="UV Index",
|
||||
si_unit=UV_INDEX,
|
||||
us_unit=UV_INDEX,
|
||||
ca_unit=UV_INDEX,
|
||||
uk_unit=UV_INDEX,
|
||||
uk2_unit=UV_INDEX,
|
||||
icon="mdi:weather-sunny",
|
||||
forecast_mode=["currently", "hourly", "daily"],
|
||||
),
|
||||
"moon_phase": DarkskySensorEntityDescription(
|
||||
key="moon_phase",
|
||||
name="Moon Phase",
|
||||
icon="mdi:weather-night",
|
||||
forecast_mode=["daily"],
|
||||
),
|
||||
"sunrise_time": DarkskySensorEntityDescription(
|
||||
key="sunrise_time",
|
||||
name="Sunrise",
|
||||
icon="mdi:white-balance-sunny",
|
||||
forecast_mode=["daily"],
|
||||
),
|
||||
"sunset_time": DarkskySensorEntityDescription(
|
||||
key="sunset_time",
|
||||
name="Sunset",
|
||||
icon="mdi:weather-night",
|
||||
forecast_mode=["daily"],
|
||||
),
|
||||
"alerts": DarkskySensorEntityDescription(
|
||||
key="alerts",
|
||||
name="Alerts",
|
||||
icon="mdi:alert-circle-outline",
|
||||
forecast_mode=[],
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
class ConditionPicture(NamedTuple):
    """Pairing of a frontend entity picture with its icon for a condition."""

    # URL path of the static weather image served by the frontend.
    entity_picture: str
    # Material Design icon identifier (e.g. "mdi:weather-sunny").
    icon: str
|
||||
|
||||
|
||||
CONDITION_PICTURES: dict[str, ConditionPicture] = {
|
||||
"clear-day": ConditionPicture(
|
||||
entity_picture="/static/images/darksky/weather-sunny.svg",
|
||||
icon="mdi:weather-sunny",
|
||||
),
|
||||
"clear-night": ConditionPicture(
|
||||
entity_picture="/static/images/darksky/weather-night.svg",
|
||||
icon="mdi:weather-night",
|
||||
),
|
||||
"rain": ConditionPicture(
|
||||
entity_picture="/static/images/darksky/weather-pouring.svg",
|
||||
icon="mdi:weather-pouring",
|
||||
),
|
||||
"snow": ConditionPicture(
|
||||
entity_picture="/static/images/darksky/weather-snowy.svg",
|
||||
icon="mdi:weather-snowy",
|
||||
),
|
||||
"sleet": ConditionPicture(
|
||||
entity_picture="/static/images/darksky/weather-hail.svg",
|
||||
icon="mdi:weather-snowy-rainy",
|
||||
),
|
||||
"wind": ConditionPicture(
|
||||
entity_picture="/static/images/darksky/weather-windy.svg",
|
||||
icon="mdi:weather-windy",
|
||||
),
|
||||
"fog": ConditionPicture(
|
||||
entity_picture="/static/images/darksky/weather-fog.svg",
|
||||
icon="mdi:weather-fog",
|
||||
),
|
||||
"cloudy": ConditionPicture(
|
||||
entity_picture="/static/images/darksky/weather-cloudy.svg",
|
||||
icon="mdi:weather-cloudy",
|
||||
),
|
||||
"partly-cloudy-day": ConditionPicture(
|
||||
entity_picture="/static/images/darksky/weather-partlycloudy.svg",
|
||||
icon="mdi:weather-partly-cloudy",
|
||||
),
|
||||
"partly-cloudy-night": ConditionPicture(
|
||||
entity_picture="/static/images/darksky/weather-cloudy.svg",
|
||||
icon="mdi:weather-night-partly-cloudy",
|
||||
),
|
||||
}
|
||||
|
||||
# Language Supported Codes
|
||||
LANGUAGE_CODES = [
|
||||
"ar",
|
||||
"az",
|
||||
"be",
|
||||
"bg",
|
||||
"bn",
|
||||
"bs",
|
||||
"ca",
|
||||
"cs",
|
||||
"da",
|
||||
"de",
|
||||
"el",
|
||||
"en",
|
||||
"ja",
|
||||
"ka",
|
||||
"kn",
|
||||
"ko",
|
||||
"eo",
|
||||
"es",
|
||||
"et",
|
||||
"fi",
|
||||
"fr",
|
||||
"he",
|
||||
"hi",
|
||||
"hr",
|
||||
"hu",
|
||||
"id",
|
||||
"is",
|
||||
"it",
|
||||
"kw",
|
||||
"lv",
|
||||
"ml",
|
||||
"mr",
|
||||
"nb",
|
||||
"nl",
|
||||
"pa",
|
||||
"pl",
|
||||
"pt",
|
||||
"ro",
|
||||
"ru",
|
||||
"sk",
|
||||
"sl",
|
||||
"sr",
|
||||
"sv",
|
||||
"ta",
|
||||
"te",
|
||||
"tet",
|
||||
"tr",
|
||||
"uk",
|
||||
"ur",
|
||||
"x-pig-latin",
|
||||
"zh",
|
||||
"zh-tw",
|
||||
]
|
||||
|
||||
ALLOWED_UNITS = ["auto", "si", "us", "ca", "uk", "uk2"]
|
||||
|
||||
ALERTS_ATTRS = ["time", "description", "expires", "severity", "uri", "regions", "title"]
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_MONITORED_CONDITIONS): vol.All(
|
||||
cv.ensure_list, [vol.In(SENSOR_TYPES)]
|
||||
),
|
||||
vol.Required(CONF_API_KEY): cv.string,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_UNITS): vol.In(ALLOWED_UNITS),
|
||||
vol.Optional(CONF_LANGUAGE, default=DEFAULT_LANGUAGE): vol.In(LANGUAGE_CODES),
|
||||
vol.Inclusive(
|
||||
CONF_LATITUDE, "coordinates", "Latitude and longitude must exist together"
|
||||
): cv.latitude,
|
||||
vol.Inclusive(
|
||||
CONF_LONGITUDE, "coordinates", "Latitude and longitude must exist together"
|
||||
): cv.longitude,
|
||||
vol.Optional(CONF_FORECAST): vol.All(cv.ensure_list, [vol.Range(min=0, max=7)]),
|
||||
vol.Optional(CONF_HOURLY_FORECAST): vol.All(
|
||||
cv.ensure_list, [vol.Range(min=0, max=48)]
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Dark Sky sensor."""
    latitude = config.get(CONF_LATITUDE, hass.config.latitude)
    longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
    language = config.get(CONF_LANGUAGE)
    interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)

    # Explicit config wins; otherwise follow Home Assistant's unit system.
    if CONF_UNITS in config:
        units = config[CONF_UNITS]
    else:
        units = "si" if hass.config.units is METRIC_SYSTEM else "us"

    forecast_data = DarkSkyData(
        api_key=config.get(CONF_API_KEY),
        latitude=latitude,
        longitude=longitude,
        units=units,
        language=language,
        interval=interval,
    )
    forecast_data.update()
    forecast_data.update_currently()

    # If connection failed don't setup platform.
    if forecast_data.data is None:
        return

    name = config.get(CONF_NAME)
    forecast_days = config.get(CONF_FORECAST)
    forecast_hours = config.get(CONF_HOURLY_FORECAST)

    sensors: list[SensorEntity] = []
    for variable in config[CONF_MONITORED_CONDITIONS]:
        if variable in DEPRECATED_SENSOR_TYPES:
            _LOGGER.warning("Monitored condition %s is deprecated", variable)

        description = SENSOR_TYPES[variable]
        modes = description.forecast_mode

        # A current-conditions sensor is created when "currently" is listed
        # or when the sensor declares no forecast modes at all.
        if not modes or "currently" in modes:
            if variable == "alerts":
                sensors.append(DarkSkyAlertSensor(forecast_data, description, name))
            else:
                sensors.append(DarkSkySensor(forecast_data, description, name))

        if forecast_days is not None and "daily" in modes:
            sensors.extend(
                DarkSkySensor(forecast_data, description, name, forecast_day=day)
                for day in forecast_days
            )
        if forecast_hours is not None and "hourly" in modes:
            sensors.extend(
                DarkSkySensor(forecast_data, description, name, forecast_hour=hour)
                for hour in forecast_hours
            )

    add_entities(sensors, True)
|
||||
|
||||
|
||||
class DarkSkySensor(SensorEntity):
|
||||
"""Implementation of a Dark Sky sensor."""
|
||||
|
||||
_attr_attribution = "Powered by Dark Sky"
|
||||
entity_description: DarkskySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
forecast_data,
|
||||
description: DarkskySensorEntityDescription,
|
||||
name,
|
||||
forecast_day=None,
|
||||
forecast_hour=None,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
self.entity_description = description
|
||||
self.forecast_data = forecast_data
|
||||
self.forecast_day = forecast_day
|
||||
self.forecast_hour = forecast_hour
|
||||
self._icon: str | None = None
|
||||
|
||||
if forecast_day is not None:
|
||||
self._attr_name = f"{name} {description.name} {forecast_day}d"
|
||||
elif forecast_hour is not None:
|
||||
self._attr_name = f"{name} {description.name} {forecast_hour}h"
|
||||
else:
|
||||
self._attr_name = f"{name} {description.name}"
|
||||
|
||||
@property
|
||||
def unit_system(self):
|
||||
"""Return the unit system of this entity."""
|
||||
return self.forecast_data.unit_system
|
||||
|
||||
@property
|
||||
def entity_picture(self) -> str | None:
|
||||
"""Return the entity picture to use in the frontend, if any."""
|
||||
if self._icon is None or "summary" not in self.entity_description.key:
|
||||
return None
|
||||
|
||||
if self._icon in CONDITION_PICTURES:
|
||||
return CONDITION_PICTURES[self._icon].entity_picture
|
||||
|
||||
return None
|
||||
|
||||
def update_unit_of_measurement(self) -> None:
|
||||
"""Update units based on unit system."""
|
||||
unit_key = MAP_UNIT_SYSTEM.get(self.unit_system, "si_unit")
|
||||
self._attr_native_unit_of_measurement = getattr(
|
||||
self.entity_description, unit_key
|
||||
)
|
||||
|
||||
@property
|
||||
def icon(self) -> str | None:
|
||||
"""Icon to use in the frontend, if any."""
|
||||
if (
|
||||
"summary" in self.entity_description.key
|
||||
and self._icon in CONDITION_PICTURES
|
||||
):
|
||||
return CONDITION_PICTURES[self._icon].icon
|
||||
|
||||
return self.entity_description.icon
|
||||
|
||||
    def update(self) -> None:
        """Get the latest data from Dark Sky and updates the states."""
        # Call the API for new forecast data. Each sensor will re-trigger this
        # same exact call, but that's fine. We cache results for a short period
        # of time to prevent hitting API limits. Note that Dark Sky will
        # charge users for too many calls in 1 day, so take care when updating.
        self.forecast_data.update()
        self.update_unit_of_measurement()

        # Branch on the sensor key first, then on forecast_hour/forecast_day;
        # the order matters because forecast sensors share non-summary keys.
        sensor_type = self.entity_description.key
        if sensor_type == "minutely_summary":
            # Minute-by-minute text summary plus its condition icon.
            self.forecast_data.update_minutely()
            minutely = self.forecast_data.data_minutely
            self._attr_native_value = getattr(minutely, "summary", "")
            self._icon = getattr(minutely, "icon", "")
        elif sensor_type == "hourly_summary":
            # Hour-by-hour text summary plus its condition icon.
            self.forecast_data.update_hourly()
            hourly = self.forecast_data.data_hourly
            self._attr_native_value = getattr(hourly, "summary", "")
            self._icon = getattr(hourly, "icon", "")
        elif self.forecast_hour is not None:
            # Hourly forecast sensor: index into the hourly data blocks.
            self.forecast_data.update_hourly()
            hourly = self.forecast_data.data_hourly
            if hasattr(hourly, "data"):
                self._attr_native_value = self.get_state(
                    hourly.data[self.forecast_hour]
                )
            else:
                # No hourly payload available (e.g. fetch failed) — report 0.
                self._attr_native_value = 0
        elif sensor_type == "daily_summary":
            # Day-by-day text summary plus its condition icon.
            self.forecast_data.update_daily()
            daily = self.forecast_data.data_daily
            self._attr_native_value = getattr(daily, "summary", "")
            self._icon = getattr(daily, "icon", "")
        elif self.forecast_day is not None:
            # Daily forecast sensor: index into the daily data blocks.
            self.forecast_data.update_daily()
            daily = self.forecast_data.data_daily
            if hasattr(daily, "data"):
                self._attr_native_value = self.get_state(daily.data[self.forecast_day])
            else:
                # No daily payload available — report 0.
                self._attr_native_value = 0
        else:
            # Current-conditions sensor.
            self.forecast_data.update_currently()
            currently = self.forecast_data.data_currently
            self._attr_native_value = self.get_state(currently)
|
||||
|
||||
def get_state(self, data):
|
||||
"""Return a new state based on the type.
|
||||
|
||||
If the sensor type is unknown, the current state is returned.
|
||||
"""
|
||||
sensor_type = self.entity_description.key
|
||||
lookup_type = convert_to_camel(sensor_type)
|
||||
|
||||
if (state := getattr(data, lookup_type, None)) is None:
|
||||
return None
|
||||
|
||||
if "summary" in sensor_type:
|
||||
self._icon = getattr(data, "icon", "")
|
||||
|
||||
# Some state data needs to be rounded to whole values or converted to
|
||||
# percentages
|
||||
if sensor_type in {"precip_probability", "cloud_cover", "humidity"}:
|
||||
return round(state * 100, 1)
|
||||
|
||||
if sensor_type in {
|
||||
"dew_point",
|
||||
"temperature",
|
||||
"apparent_temperature",
|
||||
"temperature_low",
|
||||
"apparent_temperature_low",
|
||||
"temperature_min",
|
||||
"apparent_temperature_min",
|
||||
"temperature_high",
|
||||
"apparent_temperature_high",
|
||||
"temperature_max",
|
||||
"apparent_temperature_max",
|
||||
"precip_accumulation",
|
||||
"pressure",
|
||||
"ozone",
|
||||
"uvIndex",
|
||||
}:
|
||||
return round(state, 1)
|
||||
return state
|
||||
|
||||
|
||||
class DarkSkyAlertSensor(SensorEntity):
    """Implementation of a Dark Sky sensor."""

    entity_description: DarkskySensorEntityDescription
    _attr_native_value: int | None

    def __init__(
        self, forecast_data, description: DarkskySensorEntityDescription, name
    ) -> None:
        """Initialize the sensor."""
        self.entity_description = description
        self.forecast_data = forecast_data
        self._alerts = None

        self._attr_name = f"{name} {description.name}"

    @property
    def icon(self):
        """Icon to use in the frontend, if any."""
        # Filled icon while at least one alert is active, outline otherwise.
        count = self._attr_native_value
        if count is not None and count > 0:
            return "mdi:alert-circle"
        return "mdi:alert-circle-outline"

    @property
    def extra_state_attributes(self):
        """Return the state attributes."""
        return self._alerts

    def update(self) -> None:
        """Get the latest data from Dark Sky and updates the states."""
        # Call the API for new forecast data. Each sensor will re-trigger this
        # same exact call, but that's fine. We cache results for a short period
        # of time to prevent hitting API limits. Note that Dark Sky will
        # charge users for too many calls in 1 day, so take care when updating.
        self.forecast_data.update()
        self.forecast_data.update_alerts()
        self._attr_native_value = self.get_state(self.forecast_data.data_alerts)

    def get_state(self, data):
        """Return a new state based on the type.

        If the sensor type is unknown, the current state is returned.
        """
        if data is None:
            self._alerts = {}
            return data

        # With multiple alerts, suffix each attribute with its alert index.
        needs_suffix = len(data) > 1
        attributes = {}
        for index, alert in enumerate(data):
            for attr in ALERTS_ATTRS:
                dkey = f"{attr}_{index!s}" if needs_suffix else attr
                attributes[dkey] = getattr(alert, attr)
        self._alerts = attributes

        return len(data)
|
||||
|
||||
|
||||
def convert_to_camel(data):
    """Convert snake case (foo_bar_bat) to camel case (fooBarBat).

    This is not pythonic, but needed for certain situations.
    """
    first, *rest = data.split("_")
    return first + "".join(part.title() for part in rest)
|
||||
|
||||
|
||||
class DarkSkyData:
    """Get the latest data from Darksky."""

    def __init__(self, api_key, latitude, longitude, units, language, interval):
        """Initialize the data object.

        interval is the minimum time between refreshes; every public
        update* attribute below is the matching _update* method wrapped
        with Throttle(interval), so repeated sensor polls within the
        interval become no-ops.
        """
        self._api_key = api_key
        self.latitude = latitude
        self.longitude = longitude
        self.units = units
        self.language = language
        # True while in a failed-connection state; lets _update log the
        # failure and the later reconnect exactly once each.
        self._connect_error = False

        # Raw forecast object and the per-section slices of it.
        # All stay None until the corresponding update succeeds.
        self.data = None
        self.unit_system = None
        self.data_currently = None
        self.data_minutely = None
        self.data_hourly = None
        self.data_daily = None
        self.data_alerts = None

        # Apply throttling to methods using configured interval
        self.update = Throttle(interval)(self._update)
        self.update_currently = Throttle(interval)(self._update_currently)
        self.update_minutely = Throttle(interval)(self._update_minutely)
        self.update_hourly = Throttle(interval)(self._update_hourly)
        self.update_daily = Throttle(interval)(self._update_daily)
        self.update_alerts = Throttle(interval)(self._update_alerts)

    def _update(self):
        """Get the latest data from Dark Sky."""
        try:
            self.data = forecastio.load_forecast(
                self._api_key,
                self.latitude,
                self.longitude,
                units=self.units,
                lang=self.language,
            )
            if self._connect_error:
                self._connect_error = False
                _LOGGER.info("Reconnected to Dark Sky")
        except (ConnectError, HTTPError, Timeout, ValueError) as error:
            if not self._connect_error:
                self._connect_error = True
                _LOGGER.error("Unable to connect to Dark Sky: %s", error)
            self.data = None
        # `self.data and ...` keeps unit_system None when the fetch failed.
        self.unit_system = self.data and self.data.json["flags"]["units"]

    def _update_currently(self):
        """Update currently data."""
        self.data_currently = self.data and self.data.currently()

    def _update_minutely(self):
        """Update minutely data."""
        self.data_minutely = self.data and self.data.minutely()

    def _update_hourly(self):
        """Update hourly data."""
        self.data_hourly = self.data and self.data.hourly()

    def _update_daily(self):
        """Update daily data."""
        self.data_daily = self.data and self.data.daily()

    def _update_alerts(self):
        """Update alerts data."""
        self.data_alerts = self.data and self.data.alerts()
|
|
@ -1,281 +0,0 @@
|
|||
"""Support for retrieving meteorological data from Dark Sky."""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
import forecastio
|
||||
from requests.exceptions import ConnectionError as ConnectError, HTTPError, Timeout
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.weather import (
|
||||
ATTR_CONDITION_CLEAR_NIGHT,
|
||||
ATTR_CONDITION_CLOUDY,
|
||||
ATTR_CONDITION_FOG,
|
||||
ATTR_CONDITION_HAIL,
|
||||
ATTR_CONDITION_LIGHTNING,
|
||||
ATTR_CONDITION_PARTLYCLOUDY,
|
||||
ATTR_CONDITION_RAINY,
|
||||
ATTR_CONDITION_SNOWY,
|
||||
ATTR_CONDITION_SNOWY_RAINY,
|
||||
ATTR_CONDITION_SUNNY,
|
||||
ATTR_CONDITION_WINDY,
|
||||
ATTR_FORECAST_CONDITION,
|
||||
ATTR_FORECAST_NATIVE_PRECIPITATION,
|
||||
ATTR_FORECAST_NATIVE_TEMP,
|
||||
ATTR_FORECAST_NATIVE_TEMP_LOW,
|
||||
ATTR_FORECAST_NATIVE_WIND_SPEED,
|
||||
ATTR_FORECAST_TIME,
|
||||
ATTR_FORECAST_WIND_BEARING,
|
||||
PLATFORM_SCHEMA,
|
||||
WeatherEntity,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_API_KEY,
|
||||
CONF_LATITUDE,
|
||||
CONF_LONGITUDE,
|
||||
CONF_MODE,
|
||||
CONF_NAME,
|
||||
UnitOfLength,
|
||||
UnitOfPrecipitationDepth,
|
||||
UnitOfPressure,
|
||||
UnitOfSpeed,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import Throttle
|
||||
from homeassistant.util.dt import utc_from_timestamp
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)

# Attribution string surfaced on the weather entity.
ATTRIBUTION = "Powered by Dark Sky"

# Supported values for the "mode" config option (forecast granularity).
FORECAST_MODE = ["hourly", "daily"]

# Map Dark Sky icon names to Home Assistant weather conditions.
# "tornado" has no Home Assistant equivalent, hence None.
MAP_CONDITION = {
    "clear-day": ATTR_CONDITION_SUNNY,
    "clear-night": ATTR_CONDITION_CLEAR_NIGHT,
    "rain": ATTR_CONDITION_RAINY,
    "snow": ATTR_CONDITION_SNOWY,
    "sleet": ATTR_CONDITION_SNOWY_RAINY,
    "wind": ATTR_CONDITION_WINDY,
    "fog": ATTR_CONDITION_FOG,
    "cloudy": ATTR_CONDITION_CLOUDY,
    "partly-cloudy-day": ATTR_CONDITION_PARTLYCLOUDY,
    "partly-cloudy-night": ATTR_CONDITION_PARTLYCLOUDY,
    "hail": ATTR_CONDITION_HAIL,
    "thunderstorm": ATTR_CONDITION_LIGHTNING,
    "tornado": None,
}

# Former config option; rejected as removed by the schema below.
CONF_UNITS = "units"

DEFAULT_NAME = "Dark Sky"

# Platform schema: "units" is explicitly removed — the platform always
# requests SI data from the API (see setup_platform).
PLATFORM_SCHEMA = vol.All(
    cv.removed(CONF_UNITS),
    PLATFORM_SCHEMA.extend(
        {
            vol.Required(CONF_API_KEY): cv.string,
            vol.Optional(CONF_LATITUDE): cv.latitude,
            vol.Optional(CONF_LONGITUDE): cv.longitude,
            vol.Optional(CONF_MODE, default="hourly"): vol.In(FORECAST_MODE),
            vol.Optional(CONF_UNITS): vol.In(["auto", "si", "us", "ca", "uk", "uk2"]),
            vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        }
    ),
)

# Minimum time between API refreshes (enforced via Throttle).
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=3)
|
||||
|
||||
|
||||
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Dark Sky weather."""
    # Fall back to the Home Assistant location when none is configured.
    lat = config.get(CONF_LATITUDE, hass.config.latitude)
    lon = config.get(CONF_LONGITUDE, hass.config.longitude)

    # Data is always requested in SI units; unit conversion happens in
    # the entity layer.
    data = DarkSkyData(config.get(CONF_API_KEY), lat, lon, "si")
    entity = DarkSkyWeather(config.get(CONF_NAME), data, config.get(CONF_MODE))
    add_entities([entity], True)
|
||||
|
||||
|
||||
class DarkSkyWeather(WeatherEntity):
    """Representation of a weather condition."""

    # Dark Sky is queried with SI units (see setup_platform), so declare
    # the matching native units; Home Assistant converts for display.
    _attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
    _attr_native_pressure_unit = UnitOfPressure.MBAR
    _attr_native_temperature_unit = UnitOfTemperature.CELSIUS
    _attr_native_visibility_unit = UnitOfLength.KILOMETERS
    _attr_native_wind_speed_unit = UnitOfSpeed.METERS_PER_SECOND

    def __init__(self, name, dark_sky, mode):
        """Initialize Dark Sky weather.

        name: entity name.
        dark_sky: shared DarkSkyData helper performing the API calls.
        mode: forecast granularity, "hourly" or "daily".
        """
        self._name = name
        self._dark_sky = dark_sky
        self._mode = mode

        # Latest API payloads; populated by update().
        self._ds_data = None
        self._ds_currently = None
        self._ds_hourly = None
        self._ds_daily = None

    @property
    def available(self) -> bool:
        """Return if weather data is available from Dark Sky."""
        return self._ds_data is not None

    @property
    def attribution(self):
        """Return the attribution."""
        return ATTRIBUTION

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def native_temperature(self):
        """Return the temperature."""
        return self._ds_currently.get("temperature")

    @property
    def humidity(self):
        """Return the humidity as a percentage (API reports a 0..1 ratio)."""
        return round(self._ds_currently.get("humidity") * 100.0, 2)

    @property
    def native_wind_speed(self):
        """Return the wind speed."""
        return self._ds_currently.get("windSpeed")

    @property
    def wind_bearing(self):
        """Return the wind bearing."""
        return self._ds_currently.get("windBearing")

    @property
    def ozone(self):
        """Return the ozone level."""
        return self._ds_currently.get("ozone")

    @property
    def native_pressure(self):
        """Return the pressure."""
        return self._ds_currently.get("pressure")

    @property
    def native_visibility(self):
        """Return the visibility."""
        return self._ds_currently.get("visibility")

    @property
    def condition(self):
        """Return the weather condition."""
        return MAP_CONDITION.get(self._ds_currently.get("icon"))

    @property
    def forecast(self):
        """Return the forecast array for the configured mode."""

        # Per conversation with Joshua Reyes of Dark Sky, to get the total
        # forecasted precipitation, you have to multiply the intensity by
        # the hours of the forecast interval.
        def calc_precipitation(intensity, hours):
            # Bug fix: the previous version left `amount` as None when the
            # API omitted precipIntensity and then evaluated `None > 0`,
            # which raises TypeError on Python 3. Bail out early instead.
            if intensity is None:
                return None
            amount = round(intensity * hours, 1)
            return amount if amount > 0 else None

        if self._mode == "daily":
            return [
                {
                    ATTR_FORECAST_TIME: utc_from_timestamp(
                        entry.d.get("time")
                    ).isoformat(),
                    ATTR_FORECAST_NATIVE_TEMP: entry.d.get("temperatureHigh"),
                    ATTR_FORECAST_NATIVE_TEMP_LOW: entry.d.get("temperatureLow"),
                    ATTR_FORECAST_NATIVE_PRECIPITATION: calc_precipitation(
                        entry.d.get("precipIntensity"), 24
                    ),
                    ATTR_FORECAST_NATIVE_WIND_SPEED: entry.d.get("windSpeed"),
                    ATTR_FORECAST_WIND_BEARING: entry.d.get("windBearing"),
                    ATTR_FORECAST_CONDITION: MAP_CONDITION.get(entry.d.get("icon")),
                }
                for entry in self._ds_daily.data
            ]

        # Hourly mode: one entry per hour of forecast data.
        return [
            {
                ATTR_FORECAST_TIME: utc_from_timestamp(
                    entry.d.get("time")
                ).isoformat(),
                ATTR_FORECAST_NATIVE_TEMP: entry.d.get("temperature"),
                ATTR_FORECAST_NATIVE_PRECIPITATION: calc_precipitation(
                    entry.d.get("precipIntensity"), 1
                ),
                ATTR_FORECAST_CONDITION: MAP_CONDITION.get(entry.d.get("icon")),
            }
            for entry in self._ds_hourly.data
        ]

    def update(self) -> None:
        """Get the latest data from Dark Sky."""
        self._dark_sky.update()

        self._ds_data = self._dark_sky.data
        currently = self._dark_sky.currently
        # `currently.d` is the raw JSON dict of current conditions; use an
        # empty dict when the fetch failed so the properties stay callable.
        self._ds_currently = currently.d if currently else {}
        self._ds_hourly = self._dark_sky.hourly
        self._ds_daily = self._dark_sky.daily
|
||||
|
||||
|
||||
class DarkSkyData:
    """Get the latest data from Dark Sky."""

    def __init__(self, api_key, latitude, longitude, units):
        """Initialize the data object."""
        self._api_key = api_key
        self.latitude = latitude
        self.longitude = longitude
        # Unit system requested from the API (always "si" via setup_platform).
        self.requested_units = units

        # Latest payloads; None until the first successful update().
        self.data = None
        self.currently = None
        self.hourly = None
        self.daily = None
        # True while in a failed-connection state; used so the failure and
        # the later reconnect are each logged once, not on every poll.
        self._connect_error = False

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Get the latest data from Dark Sky."""
        try:
            self.data = forecastio.load_forecast(
                self._api_key, self.latitude, self.longitude, units=self.requested_units
            )
            self.currently = self.data.currently()
            self.hourly = self.data.hourly()
            self.daily = self.data.daily()
            if self._connect_error:
                self._connect_error = False
                _LOGGER.info("Reconnected to Dark Sky")
        except (ConnectError, HTTPError, Timeout, ValueError) as error:
            if not self._connect_error:
                self._connect_error = True
                _LOGGER.error("Unable to connect to Dark Sky. %s", error)
            # Mark data unavailable so the entity reports unavailable.
            self.data = None
|
@ -348,7 +348,7 @@ class ScannerEntity(BaseTrackerEntity):
|
|||
self.mac_address,
|
||||
self.unique_id,
|
||||
)
|
||||
if self.is_connected:
|
||||
if self.is_connected and self.ip_address:
|
||||
_async_connected_device_registered(
|
||||
hass,
|
||||
self.mac_address,
|
||||
|
@ -405,7 +405,7 @@ class ScannerEntity(BaseTrackerEntity):
|
|||
"""Return the device state attributes."""
|
||||
attr: dict[str, StateType] = {}
|
||||
attr.update(super().state_attributes)
|
||||
if self.ip_address is not None:
|
||||
if self.ip_address:
|
||||
attr[ATTR_IP] = self.ip_address
|
||||
if self.mac_address is not None:
|
||||
attr[ATTR_MAC] = self.mac_address
|
||||
|
|
|
@ -25,10 +25,11 @@ from homeassistant.const import (
|
|||
CONF_MAC,
|
||||
CONF_NAME,
|
||||
DEVICE_DEFAULT_NAME,
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
STATE_HOME,
|
||||
STATE_NOT_HOME,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.core import Event, HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import (
|
||||
config_per_platform,
|
||||
|
@ -216,7 +217,7 @@ async def async_setup_integration(hass: HomeAssistant, config: ConfigType) -> No
|
|||
discovery.async_listen_platform(hass, DOMAIN, async_platform_discovered)
|
||||
|
||||
# Clean up stale devices
|
||||
async_track_utc_time_change(
|
||||
cancel_update_stale = async_track_utc_time_change(
|
||||
hass, tracker.async_update_stale, second=range(0, 60, 5)
|
||||
)
|
||||
|
||||
|
@ -235,6 +236,16 @@ async def async_setup_integration(hass: HomeAssistant, config: ConfigType) -> No
|
|||
# restore
|
||||
await tracker.async_setup_tracked_device()
|
||||
|
||||
@callback
|
||||
def _on_hass_stop(_: Event) -> None:
|
||||
"""Cleanup when Home Assistant stops.
|
||||
|
||||
Cancel the async_update_stale schedule.
|
||||
"""
|
||||
cancel_update_stale()
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _on_hass_stop)
|
||||
|
||||
|
||||
@attr.s
|
||||
class DeviceTrackerPlatform:
|
||||
|
@ -356,6 +367,27 @@ async def async_create_platform_type(
|
|||
return DeviceTrackerPlatform(p_type, platform, p_config)
|
||||
|
||||
|
||||
def _load_device_names_and_attributes(
|
||||
scanner: DeviceScanner,
|
||||
device_name_uses_executor: bool,
|
||||
extra_attributes_uses_executor: bool,
|
||||
seen: set[str],
|
||||
found_devices: list[str],
|
||||
) -> tuple[dict[str, str | None], dict[str, dict[str, Any]]]:
|
||||
"""Load device names and attributes in a single executor job."""
|
||||
host_name_by_mac: dict[str, str | None] = {}
|
||||
extra_attributes_by_mac: dict[str, dict[str, Any]] = {}
|
||||
for mac in found_devices:
|
||||
if device_name_uses_executor and mac not in seen:
|
||||
host_name_by_mac[mac] = scanner.get_device_name(mac)
|
||||
if extra_attributes_uses_executor:
|
||||
try:
|
||||
extra_attributes_by_mac[mac] = scanner.get_extra_attributes(mac)
|
||||
except NotImplementedError:
|
||||
extra_attributes_by_mac[mac] = {}
|
||||
return host_name_by_mac, extra_attributes_by_mac
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_scanner_platform(
|
||||
hass: HomeAssistant,
|
||||
|
@ -373,7 +405,7 @@ def async_setup_scanner_platform(
|
|||
scanner.hass = hass
|
||||
|
||||
# Initial scan of each mac we also tell about host name for config
|
||||
seen: Any = set()
|
||||
seen: set[str] = set()
|
||||
|
||||
async def async_device_tracker_scan(now: datetime | None) -> None:
|
||||
"""Handle interval matches."""
|
||||
|
@ -391,15 +423,42 @@ def async_setup_scanner_platform(
|
|||
async with update_lock:
|
||||
found_devices = await scanner.async_scan_devices()
|
||||
|
||||
device_name_uses_executor = (
|
||||
scanner.async_get_device_name.__func__ # type: ignore[attr-defined]
|
||||
is DeviceScanner.async_get_device_name
|
||||
)
|
||||
extra_attributes_uses_executor = (
|
||||
scanner.async_get_extra_attributes.__func__ # type: ignore[attr-defined]
|
||||
is DeviceScanner.async_get_extra_attributes
|
||||
)
|
||||
host_name_by_mac: dict[str, str | None] = {}
|
||||
extra_attributes_by_mac: dict[str, dict[str, Any]] = {}
|
||||
if device_name_uses_executor or extra_attributes_uses_executor:
|
||||
(
|
||||
host_name_by_mac,
|
||||
extra_attributes_by_mac,
|
||||
) = await hass.async_add_executor_job(
|
||||
_load_device_names_and_attributes,
|
||||
scanner,
|
||||
device_name_uses_executor,
|
||||
extra_attributes_uses_executor,
|
||||
seen,
|
||||
found_devices,
|
||||
)
|
||||
|
||||
for mac in found_devices:
|
||||
if mac in seen:
|
||||
host_name = None
|
||||
else:
|
||||
host_name = await scanner.async_get_device_name(mac)
|
||||
host_name = host_name_by_mac.get(
|
||||
mac, await scanner.async_get_device_name(mac)
|
||||
)
|
||||
seen.add(mac)
|
||||
|
||||
try:
|
||||
extra_attributes = await scanner.async_get_extra_attributes(mac)
|
||||
extra_attributes = extra_attributes_by_mac.get(
|
||||
mac, await scanner.async_get_extra_attributes(mac)
|
||||
)
|
||||
except NotImplementedError:
|
||||
extra_attributes = {}
|
||||
|
||||
|
@ -423,14 +482,24 @@ def async_setup_scanner_platform(
|
|||
|
||||
hass.async_create_task(async_see_device(**kwargs))
|
||||
|
||||
async_track_time_interval(
|
||||
cancel_legacy_scan = async_track_time_interval(
|
||||
hass,
|
||||
async_device_tracker_scan,
|
||||
interval,
|
||||
f"device_tracker {platform} legacy scan",
|
||||
name=f"device_tracker {platform} legacy scan",
|
||||
)
|
||||
hass.async_create_task(async_device_tracker_scan(None))
|
||||
|
||||
@callback
|
||||
def _on_hass_stop(_: Event) -> None:
|
||||
"""Cleanup when Home Assistant stops.
|
||||
|
||||
Cancel the legacy scan.
|
||||
"""
|
||||
cancel_legacy_scan()
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _on_hass_stop)
|
||||
|
||||
|
||||
async def get_tracker(hass: HomeAssistant, config: ConfigType) -> DeviceTracker:
|
||||
"""Create a tracker."""
|
||||
|
|
|
@ -77,7 +77,7 @@ SCAN_INTERVAL = timedelta(minutes=60)
|
|||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
@dataclass(slots=True)
|
||||
class DhcpServiceInfo(BaseServiceInfo):
|
||||
"""Prepared info from dhcp entries."""
|
||||
|
||||
|
@ -260,7 +260,10 @@ class NetworkWatcher(WatcherBase):
|
|||
"""Start scanning for new devices on the network."""
|
||||
self._discover_hosts = DiscoverHosts()
|
||||
self._unsub = async_track_time_interval(
|
||||
self.hass, self.async_start_discover, SCAN_INTERVAL, "DHCP network watcher"
|
||||
self.hass,
|
||||
self.async_start_discover,
|
||||
SCAN_INTERVAL,
|
||||
name="DHCP network watcher",
|
||||
)
|
||||
self.async_start_discover()
|
||||
|
||||
|
|
|
@ -7,5 +7,5 @@
|
|||
"iot_class": "local_push",
|
||||
"loggers": ["aiodiscover", "dnspython", "pyroute2", "scapy"],
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["scapy==2.5.0", "aiodiscover==1.4.15"]
|
||||
"requirements": ["scapy==2.5.0", "aiodiscover==1.4.16"]
|
||||
}
|
||||
|
|
|
@ -48,49 +48,49 @@ class DSMRReaderSensorEntityDescription(SensorEntityDescription):
|
|||
SENSORS: tuple[DSMRReaderSensorEntityDescription, ...] = (
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/electricity_delivered_1",
|
||||
name="Low tariff usage",
|
||||
translation_key="low_tariff_usage",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/electricity_returned_1",
|
||||
name="Low tariff returned",
|
||||
translation_key="low_tariff_returned",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/electricity_delivered_2",
|
||||
name="High tariff usage",
|
||||
translation_key="high_tariff_usage",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/electricity_returned_2",
|
||||
name="High tariff returned",
|
||||
translation_key="high_tariff_returned",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/electricity_currently_delivered",
|
||||
name="Current power usage",
|
||||
translation_key="current_power_usage",
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/electricity_currently_returned",
|
||||
name="Current power return",
|
||||
translation_key="current_power_return",
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/phase_currently_delivered_l1",
|
||||
name="Current power usage L1",
|
||||
translation_key="current_power_usage_l1",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
|
@ -98,7 +98,7 @@ SENSORS: tuple[DSMRReaderSensorEntityDescription, ...] = (
|
|||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/phase_currently_delivered_l2",
|
||||
name="Current power usage L2",
|
||||
translation_key="current_power_usage_l2",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
|
@ -106,7 +106,7 @@ SENSORS: tuple[DSMRReaderSensorEntityDescription, ...] = (
|
|||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/phase_currently_delivered_l3",
|
||||
name="Current power usage L3",
|
||||
translation_key="current_power_usage_l3",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
|
@ -114,7 +114,7 @@ SENSORS: tuple[DSMRReaderSensorEntityDescription, ...] = (
|
|||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/phase_currently_returned_l1",
|
||||
name="Current power return L1",
|
||||
translation_key="current_power_return_l1",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
|
@ -122,7 +122,7 @@ SENSORS: tuple[DSMRReaderSensorEntityDescription, ...] = (
|
|||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/phase_currently_returned_l2",
|
||||
name="Current power return L2",
|
||||
translation_key="current_power_return_l2",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
|
@ -130,7 +130,7 @@ SENSORS: tuple[DSMRReaderSensorEntityDescription, ...] = (
|
|||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/phase_currently_returned_l3",
|
||||
name="Current power return L3",
|
||||
translation_key="current_power_return_l3",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
|
@ -138,7 +138,7 @@ SENSORS: tuple[DSMRReaderSensorEntityDescription, ...] = (
|
|||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/extra_device_delivered",
|
||||
name="Gas meter usage",
|
||||
translation_key="gas_meter_usage",
|
||||
entity_registry_enabled_default=False,
|
||||
icon="mdi:fire",
|
||||
native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
|
||||
|
@ -146,7 +146,7 @@ SENSORS: tuple[DSMRReaderSensorEntityDescription, ...] = (
|
|||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/phase_voltage_l1",
|
||||
name="Current voltage L1",
|
||||
translation_key="current_voltage_l1",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
|
@ -154,7 +154,7 @@ SENSORS: tuple[DSMRReaderSensorEntityDescription, ...] = (
|
|||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/phase_voltage_l2",
|
||||
name="Current voltage L2",
|
||||
translation_key="current_voltage_l2",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
|
@ -162,7 +162,7 @@ SENSORS: tuple[DSMRReaderSensorEntityDescription, ...] = (
|
|||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/phase_voltage_l3",
|
||||
name="Current voltage L3",
|
||||
translation_key="current_voltage_l3",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
|
@ -170,7 +170,7 @@ SENSORS: tuple[DSMRReaderSensorEntityDescription, ...] = (
|
|||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/phase_power_current_l1",
|
||||
name="Phase power current L1",
|
||||
translation_key="phase_power_current_l1",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
|
@ -178,7 +178,7 @@ SENSORS: tuple[DSMRReaderSensorEntityDescription, ...] = (
|
|||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/phase_power_current_l2",
|
||||
name="Phase power current L2",
|
||||
translation_key="phase_power_current_l2",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
|
@ -186,7 +186,7 @@ SENSORS: tuple[DSMRReaderSensorEntityDescription, ...] = (
|
|||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/phase_power_current_l3",
|
||||
name="Phase power current L3",
|
||||
translation_key="phase_power_current_l3",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
|
@ -194,384 +194,386 @@ SENSORS: tuple[DSMRReaderSensorEntityDescription, ...] = (
|
|||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/reading/timestamp",
|
||||
name="Telegram timestamp",
|
||||
translation_key="telegram_timestamp",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
state=dt_util.parse_datetime,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/consumption/gas/delivered",
|
||||
name="Gas usage",
|
||||
translation_key="gas_usage",
|
||||
device_class=SensorDeviceClass.GAS,
|
||||
native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/consumption/gas/currently_delivered",
|
||||
name="Current gas usage",
|
||||
translation_key="current_gas_usage",
|
||||
native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/consumption/gas/read_at",
|
||||
name="Gas meter read",
|
||||
translation_key="gas_meter_read",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
state=dt_util.parse_datetime,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/electricity1",
|
||||
name="Low tariff usage (daily)",
|
||||
translation_key="daily_low_tariff_usage",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/electricity2",
|
||||
name="High tariff usage (daily)",
|
||||
translation_key="daily_high_tariff_usage",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/electricity1_returned",
|
||||
name="Low tariff return (daily)",
|
||||
translation_key="daily_low_tariff_return",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/electricity2_returned",
|
||||
name="High tariff return (daily)",
|
||||
translation_key="daily_high_tariff_return",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/electricity_merged",
|
||||
name="Power usage total (daily)",
|
||||
translation_key="daily_power_usage_total",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/electricity_returned_merged",
|
||||
name="Power return total (daily)",
|
||||
translation_key="daily_power_return_total",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/electricity1_cost",
|
||||
name="Low tariff cost (daily)",
|
||||
translation_key="daily_low_tariff_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/electricity2_cost",
|
||||
name="High tariff cost (daily)",
|
||||
translation_key="daily_high_tariff_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/electricity_cost_merged",
|
||||
name="Power total cost (daily)",
|
||||
translation_key="daily_power_total_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/gas",
|
||||
name="Gas usage (daily)",
|
||||
translation_key="daily_gas_usage",
|
||||
icon="mdi:counter",
|
||||
native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/gas_cost",
|
||||
name="Gas cost",
|
||||
translation_key="gas_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/total_cost",
|
||||
name="Total cost",
|
||||
translation_key="total_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/energy_supplier_price_electricity_delivered_1",
|
||||
name="Low tariff delivered price",
|
||||
translation_key="low_tariff_delivered_price",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=PRICE_EUR_KWH,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/energy_supplier_price_electricity_delivered_2",
|
||||
name="High tariff delivered price",
|
||||
translation_key="high_tariff_delivered_price",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=PRICE_EUR_KWH,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/energy_supplier_price_electricity_returned_1",
|
||||
name="Low tariff returned price",
|
||||
translation_key="low_tariff_returned_price",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=PRICE_EUR_KWH,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/energy_supplier_price_electricity_returned_2",
|
||||
name="High tariff returned price",
|
||||
translation_key="high_tariff_returned_price",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=PRICE_EUR_KWH,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/energy_supplier_price_gas",
|
||||
name="Gas price",
|
||||
translation_key="gas_price",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=PRICE_EUR_M3,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/day-consumption/fixed_cost",
|
||||
name="Current day fixed cost",
|
||||
translation_key="current_day_fixed_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/meter-stats/dsmr_version",
|
||||
name="DSMR version",
|
||||
translation_key="dsmr_version",
|
||||
entity_registry_enabled_default=False,
|
||||
icon="mdi:alert-circle",
|
||||
state=dsmr_transform,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/meter-stats/electricity_tariff",
|
||||
name="Electricity tariff",
|
||||
translation_key="electricity_tariff",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=["low", "high"],
|
||||
icon="mdi:flash",
|
||||
state=tariff_transform,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/meter-stats/power_failure_count",
|
||||
name="Power failure count",
|
||||
translation_key="power_failure_count",
|
||||
entity_registry_enabled_default=False,
|
||||
icon="mdi:flash",
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/meter-stats/long_power_failure_count",
|
||||
name="Long power failure count",
|
||||
translation_key="long_power_failure_count",
|
||||
entity_registry_enabled_default=False,
|
||||
icon="mdi:flash",
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/meter-stats/voltage_sag_count_l1",
|
||||
name="Voltage sag L1",
|
||||
translation_key="voltage_sag_l1",
|
||||
entity_registry_enabled_default=False,
|
||||
icon="mdi:flash",
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/meter-stats/voltage_sag_count_l2",
|
||||
name="Voltage sag L2",
|
||||
translation_key="voltage_sag_l2",
|
||||
entity_registry_enabled_default=False,
|
||||
icon="mdi:flash",
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/meter-stats/voltage_sag_count_l3",
|
||||
name="Voltage sag L3",
|
||||
translation_key="voltage_sag_l3",
|
||||
entity_registry_enabled_default=False,
|
||||
icon="mdi:flash",
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/meter-stats/voltage_swell_count_l1",
|
||||
name="Voltage swell L1",
|
||||
translation_key="voltage_swell_l1",
|
||||
entity_registry_enabled_default=False,
|
||||
icon="mdi:flash",
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/meter-stats/voltage_swell_count_l2",
|
||||
name="Voltage swell L2",
|
||||
translation_key="voltage_swell_l2",
|
||||
entity_registry_enabled_default=False,
|
||||
icon="mdi:flash",
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/meter-stats/voltage_swell_count_l3",
|
||||
name="Voltage swell L3",
|
||||
translation_key="voltage_swell_l3",
|
||||
entity_registry_enabled_default=False,
|
||||
icon="mdi:flash",
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/meter-stats/rejected_telegrams",
|
||||
name="Rejected telegrams",
|
||||
translation_key="rejected_telegrams",
|
||||
entity_registry_enabled_default=False,
|
||||
icon="mdi:flash",
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-month/electricity1",
|
||||
name="Current month low tariff usage",
|
||||
translation_key="current_month_low_tariff_usage",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-month/electricity2",
|
||||
name="Current month high tariff usage",
|
||||
translation_key="current_month_high_tariff_usage",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-month/electricity1_returned",
|
||||
name="Current month low tariff returned",
|
||||
translation_key="current_month_low_tariff_returned",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-month/electricity2_returned",
|
||||
name="Current month high tariff returned",
|
||||
translation_key="current_month_high_tariff_returned",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-month/electricity_merged",
|
||||
name="Current month power usage total",
|
||||
translation_key="current_month_power_usage_total",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-month/electricity_returned_merged",
|
||||
name="Current month power return total",
|
||||
translation_key="current_month_power_return_total",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-month/electricity1_cost",
|
||||
name="Current month low tariff cost",
|
||||
translation_key="current_month_low_tariff_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-month/electricity2_cost",
|
||||
name="Current month high tariff cost",
|
||||
translation_key="current_month_high_tariff_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-month/electricity_cost_merged",
|
||||
name="Current month power total cost",
|
||||
translation_key="current_month_power_total_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-month/gas",
|
||||
name="Current month gas usage",
|
||||
translation_key="current_month_gas_usage",
|
||||
icon="mdi:counter",
|
||||
native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-month/gas_cost",
|
||||
name="Current month gas cost",
|
||||
translation_key="current_month_gas_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-month/fixed_cost",
|
||||
name="Current month fixed cost",
|
||||
translation_key="current_month_fixed_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-month/total_cost",
|
||||
name="Current month total cost",
|
||||
translation_key="current_month_total_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-year/electricity1",
|
||||
name="Current year low tariff usage",
|
||||
translation_key="current_year_low_tariff_usage",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-year/electricity2",
|
||||
name="Current year high tariff usage",
|
||||
translation_key="current_year_high_tariff_usage",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-year/electricity1_returned",
|
||||
name="Current year low tariff returned",
|
||||
translation_key="current_year_low_tariff_returned",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-year/electricity2_returned",
|
||||
name="Current year high tariff returned",
|
||||
translation_key="current_year_high_tariff_returned",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-year/electricity_merged",
|
||||
name="Current year power usage total",
|
||||
translation_key="current_year_power_usage_total",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-year/electricity_returned_merged",
|
||||
name="Current year power returned total",
|
||||
translation_key="current_year_power_returned_total",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-year/electricity1_cost",
|
||||
name="Current year low tariff cost",
|
||||
translation_key="current_year_low_tariff_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-year/electricity2_cost",
|
||||
name="Current year high tariff cost",
|
||||
translation_key="current_year_high_tariff_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-year/electricity_cost_merged",
|
||||
name="Current year power total cost",
|
||||
translation_key="current_year_power_total_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-year/gas",
|
||||
name="Current year gas usage",
|
||||
translation_key="current_year_gas_usage",
|
||||
icon="mdi:counter",
|
||||
native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-year/gas_cost",
|
||||
name="Current year gas cost",
|
||||
translation_key="current_year_gas_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-year/fixed_cost",
|
||||
name="Current year fixed cost",
|
||||
translation_key="current_year_fixed_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/current-year/total_cost",
|
||||
name="Current year total cost",
|
||||
translation_key="current_year_total_cost",
|
||||
icon="mdi:currency-eur",
|
||||
native_unit_of_measurement=CURRENCY_EURO,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/consumption/quarter-hour-peak-electricity/average_delivered",
|
||||
name="Previous quarter-hour peak usage",
|
||||
translation_key="previous_quarter_hour_peak_usage",
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/consumption/quarter-hour-peak-electricity/read_at_start",
|
||||
name="Quarter-hour peak start time",
|
||||
translation_key="quarter_hour_peak_start_time",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
state=dt_util.parse_datetime,
|
||||
),
|
||||
DSMRReaderSensorEntityDescription(
|
||||
key="dsmr/consumption/quarter-hour-peak-electricity/read_at_end",
|
||||
name="Quarter-hour peak end time",
|
||||
translation_key="quarter_hour_peak_end_time",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
state=dt_util.parse_datetime,
|
||||
|
|
|
@ -23,6 +23,7 @@ async def async_setup_entry(
|
|||
class DSMRSensor(SensorEntity):
|
||||
"""Representation of a DSMR sensor that is updated via MQTT."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
entity_description: DSMRReaderSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
|
|
|
@ -8,5 +8,256 @@
|
|||
"description": "Make sure to configure the 'split topic' data sources in DSMR Reader."
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"low_tariff_usage": {
|
||||
"name": "Low tariff usage"
|
||||
},
|
||||
"low_tariff_returned": {
|
||||
"name": "Low tariff returned"
|
||||
},
|
||||
"high_tariff_usage": {
|
||||
"name": "High tariff usage"
|
||||
},
|
||||
"high_tariff_returned": {
|
||||
"name": "High tariff returned"
|
||||
},
|
||||
"current_power_usage": {
|
||||
"name": "Current power usage"
|
||||
},
|
||||
"current_power_return": {
|
||||
"name": "Current power return"
|
||||
},
|
||||
"current_power_usage_l1": {
|
||||
"name": "Current power usage L1"
|
||||
},
|
||||
"current_power_usage_l2": {
|
||||
"name": "Current power usage L2"
|
||||
},
|
||||
"current_power_usage_l3": {
|
||||
"name": "Current power usage L3"
|
||||
},
|
||||
"current_power_return_l1": {
|
||||
"name": "Current power return L1"
|
||||
},
|
||||
"current_power_return_l2": {
|
||||
"name": "Current power return L2"
|
||||
},
|
||||
"current_power_return_l3": {
|
||||
"name": "Current power return L3"
|
||||
},
|
||||
"gas_meter_usage": {
|
||||
"name": "Gas meter usage"
|
||||
},
|
||||
"current_voltage_l1": {
|
||||
"name": "Current voltage L1"
|
||||
},
|
||||
"current_voltage_l2": {
|
||||
"name": "Current voltage L2"
|
||||
},
|
||||
"current_voltage_l3": {
|
||||
"name": "Current voltage L3"
|
||||
},
|
||||
"phase_power_current_l1": {
|
||||
"name": "Phase power current L1"
|
||||
},
|
||||
"phase_power_current_l2": {
|
||||
"name": "Phase power current L2"
|
||||
},
|
||||
"phase_power_current_l3": {
|
||||
"name": "Phase power current L3"
|
||||
},
|
||||
"telegram_timestamp": {
|
||||
"name": "Telegram timestamp"
|
||||
},
|
||||
"gas_usage": {
|
||||
"name": "Gas usage"
|
||||
},
|
||||
"current_gas_usage": {
|
||||
"name": "Current gas usage"
|
||||
},
|
||||
"gas_meter_read": {
|
||||
"name": "Gas meter read"
|
||||
},
|
||||
"daily_low_tariff_usage": {
|
||||
"name": "Low tariff usage (daily)"
|
||||
},
|
||||
"daily_high_tariff_usage": {
|
||||
"name": "High tariff usage (daily)"
|
||||
},
|
||||
"daily_low_tariff_return": {
|
||||
"name": "Low tariff return (daily)"
|
||||
},
|
||||
"daily_high_tariff_return": {
|
||||
"name": "High tariff return (daily)"
|
||||
},
|
||||
"daily_power_usage_total": {
|
||||
"name": "Power usage total (daily)"
|
||||
},
|
||||
"daily_power_return_total": {
|
||||
"name": "Power return total (daily)"
|
||||
},
|
||||
"daily_low_tariff_cost": {
|
||||
"name": "Low tariff cost (daily)"
|
||||
},
|
||||
"daily_high_tariff_cost": {
|
||||
"name": "High tariff cost (daily)"
|
||||
},
|
||||
"daily_power_total_cost": {
|
||||
"name": "Power total cost (daily)"
|
||||
},
|
||||
"daily_gas_usage": {
|
||||
"name": "Gas usage (daily)"
|
||||
},
|
||||
"gas_cost": {
|
||||
"name": "Gas cost"
|
||||
},
|
||||
"total_cost": {
|
||||
"name": "Total cost"
|
||||
},
|
||||
"low_tariff_delivered_price": {
|
||||
"name": "Low tariff delivered price"
|
||||
},
|
||||
"high_tariff_delivered_price": {
|
||||
"name": "High tariff delivered price"
|
||||
},
|
||||
"low_tariff_returned_price": {
|
||||
"name": "Low tariff returned price"
|
||||
},
|
||||
"high_tariff_returned_price": {
|
||||
"name": "High tariff returned price"
|
||||
},
|
||||
"gas_price": {
|
||||
"name": "Gas Price"
|
||||
},
|
||||
"current_day_fixed_cost": {
|
||||
"name": "Current day fixed cost"
|
||||
},
|
||||
"dsmr_version": {
|
||||
"name": "DSMR version"
|
||||
},
|
||||
"electricity_tariff": {
|
||||
"name": "Electricity tariff",
|
||||
"state": {
|
||||
"low": "Low",
|
||||
"high": "High"
|
||||
}
|
||||
},
|
||||
"power_failure_count": {
|
||||
"name": "Power failure count"
|
||||
},
|
||||
"long_power_failure_count": {
|
||||
"name": "Long power failure count"
|
||||
},
|
||||
"voltage_sag_l1": {
|
||||
"name": "Voltage sag L1"
|
||||
},
|
||||
"voltage_sag_l2": {
|
||||
"name": "Voltage sag L2"
|
||||
},
|
||||
"voltage_sag_l3": {
|
||||
"name": "Voltage sag L3"
|
||||
},
|
||||
"voltage_swell_l1": {
|
||||
"name": "Voltage swell L1"
|
||||
},
|
||||
"voltage_swell_l2": {
|
||||
"name": "Voltage swell L2"
|
||||
},
|
||||
"voltage_swell_l3": {
|
||||
"name": "Voltage swell L3"
|
||||
},
|
||||
"rejected_telegrams": {
|
||||
"name": "Rejected telegrams"
|
||||
},
|
||||
"current_month_low_tariff_usage": {
|
||||
"name": "Current month low tariff usage"
|
||||
},
|
||||
"current_month_high_tariff_usage": {
|
||||
"name": "Current month high tariff usage"
|
||||
},
|
||||
"current_month_low_tariff_returned": {
|
||||
"name": "Current month low tariff returned"
|
||||
},
|
||||
"current_month_high_tariff_returned": {
|
||||
"name": "Current month high tariff returned"
|
||||
},
|
||||
"current_month_power_usage_total": {
|
||||
"name": "Current month power usage total"
|
||||
},
|
||||
"current_month_power_return_total": {
|
||||
"name": "Current month power return total"
|
||||
},
|
||||
"current_month_low_tariff_cost": {
|
||||
"name": "Current month low tariff cost"
|
||||
},
|
||||
"current_month_high_tariff_cost": {
|
||||
"name": "Current month high tariff cost"
|
||||
},
|
||||
"current_month_power_total_cost": {
|
||||
"name": "Current month power total cost"
|
||||
},
|
||||
"current_month_gas_usage": {
|
||||
"name": "Current month gas usage"
|
||||
},
|
||||
"current_month_gas_cost": {
|
||||
"name": "Current month gas cost"
|
||||
},
|
||||
"current_month_fixed_cost": {
|
||||
"name": "Current month fixed cost"
|
||||
},
|
||||
"current_month_total_cost": {
|
||||
"name": "Current month total cost"
|
||||
},
|
||||
"current_year_low_tariff_usage": {
|
||||
"name": "Current year low tariff usage"
|
||||
},
|
||||
"current_year_high_tariff_usage": {
|
||||
"name": "Current year high tariff usage"
|
||||
},
|
||||
"current_year_low_tariff_returned": {
|
||||
"name": "Current year low tariff returned"
|
||||
},
|
||||
"current_year_high_tariff_returned": {
|
||||
"name": "Current year high tariff returned"
|
||||
},
|
||||
"current_year_power_usage_total": {
|
||||
"name": "Current year power usage total"
|
||||
},
|
||||
"current_year_power_returned_total": {
|
||||
"name": "Current year power returned total"
|
||||
},
|
||||
"current_year_low_tariff_cost": {
|
||||
"name": "Current year low tariff cost"
|
||||
},
|
||||
"current_year_high_tariff_cost": {
|
||||
"name": "Current year high tariff cost"
|
||||
},
|
||||
"current_year_power_total_cost": {
|
||||
"name": "Current year power total cost"
|
||||
},
|
||||
"current_year_gas_usage": {
|
||||
"name": "Current year gas usage"
|
||||
},
|
||||
"current_year_gas_cost": {
|
||||
"name": "Current year gas cost"
|
||||
},
|
||||
"current_year_fixed_cost": {
|
||||
"name": "Current year fixed cost"
|
||||
},
|
||||
"current_year_total_cost": {
|
||||
"name": "Current year total cost"
|
||||
},
|
||||
"previous_quarter_hour_peak_usage": {
|
||||
"name": "Previous quarter-hour peak usage"
|
||||
},
|
||||
"quarter_hour_peak_start_time": {
|
||||
"name": "Quarter-hour peak start time"
|
||||
},
|
||||
"quarter_hour_peak_end_time": {
|
||||
"name": "Quarter-hour peak end time"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -32,7 +32,7 @@ CONF_STOP_ID = "stopid"
|
|||
CONF_ROUTE = "route"
|
||||
|
||||
DEFAULT_NAME = "Next Bus"
|
||||
ICON = "mdi:bus"
|
||||
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
TIME_STR_FORMAT = "%H:%M"
|
||||
|
@ -77,6 +77,7 @@ class DublinPublicTransportSensor(SensorEntity):
|
|||
"""Implementation of an Dublin public transport sensor."""
|
||||
|
||||
_attr_attribution = "Data provided by data.dublinked.ie"
|
||||
_attr_icon = "mdi:bus"
|
||||
|
||||
def __init__(self, data, stop, route, name):
|
||||
"""Initialize the sensor."""
|
||||
|
@ -118,11 +119,6 @@ class DublinPublicTransportSensor(SensorEntity):
|
|||
"""Return the unit this state is expressed in."""
|
||||
return UnitOfTime.MINUTES
|
||||
|
||||
@property
|
||||
def icon(self):
|
||||
"""Icon to use in the frontend, if any."""
|
||||
return ICON
|
||||
|
||||
def update(self) -> None:
|
||||
"""Get the latest data from opendata.ch and update the states."""
|
||||
self.data.update()
|
||||
|
|
33
homeassistant/components/dwd_weather_warnings/const.py
Normal file
33
homeassistant/components/dwd_weather_warnings/const.py
Normal file
|
@ -0,0 +1,33 @@
|
|||
"""Constants for the dwd_weather_warnings integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Final
|
||||
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
|
||||
CONF_REGION_NAME: Final = "region_name"
|
||||
|
||||
ATTR_REGION_NAME: Final = "region_name"
|
||||
ATTR_REGION_ID: Final = "region_id"
|
||||
ATTR_LAST_UPDATE: Final = "last_update"
|
||||
ATTR_WARNING_COUNT: Final = "warning_count"
|
||||
|
||||
API_ATTR_WARNING_NAME: Final = "event"
|
||||
API_ATTR_WARNING_TYPE: Final = "event_code"
|
||||
API_ATTR_WARNING_LEVEL: Final = "level"
|
||||
API_ATTR_WARNING_HEADLINE: Final = "headline"
|
||||
API_ATTR_WARNING_DESCRIPTION: Final = "description"
|
||||
API_ATTR_WARNING_INSTRUCTION: Final = "instruction"
|
||||
API_ATTR_WARNING_START: Final = "start_time"
|
||||
API_ATTR_WARNING_END: Final = "end_time"
|
||||
API_ATTR_WARNING_PARAMETERS: Final = "parameters"
|
||||
API_ATTR_WARNING_COLOR: Final = "color"
|
||||
|
||||
CURRENT_WARNING_SENSOR: Final = "current_warning_level"
|
||||
ADVANCE_WARNING_SENSOR: Final = "advance_warning_level"
|
||||
|
||||
DEFAULT_NAME: Final = "DWD-Weather-Warnings"
|
||||
DEFAULT_SCAN_INTERVAL: Final = timedelta(minutes=15)
|
|
@ -1,9 +1,9 @@
|
|||
{
|
||||
"domain": "dwd_weather_warnings",
|
||||
"name": "Deutscher Wetterdienst (DWD) Weather Warnings",
|
||||
"codeowners": ["@runningman84", "@stephan192", "@Hummel95"],
|
||||
"codeowners": ["@runningman84", "@stephan192", "@Hummel95", "@andarotajo"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/dwd_weather_warnings",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["dwdwfsapi"],
|
||||
"requirements": ["dwdwfsapi==1.0.5"]
|
||||
"requirements": ["dwdwfsapi==1.0.6"]
|
||||
}
|
||||
|
|
|
@ -10,9 +10,6 @@ Wetterwarnungen (Stufe 1)
|
|||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from dwdwfsapi import DwdWeatherWarningsAPI
|
||||
import voluptuous as vol
|
||||
|
||||
|
@ -28,33 +25,28 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
|||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_REGION_NAME = "region_name"
|
||||
ATTR_REGION_ID = "region_id"
|
||||
ATTR_LAST_UPDATE = "last_update"
|
||||
ATTR_WARNING_COUNT = "warning_count"
|
||||
|
||||
API_ATTR_WARNING_NAME = "event"
|
||||
API_ATTR_WARNING_TYPE = "event_code"
|
||||
API_ATTR_WARNING_LEVEL = "level"
|
||||
API_ATTR_WARNING_HEADLINE = "headline"
|
||||
API_ATTR_WARNING_DESCRIPTION = "description"
|
||||
API_ATTR_WARNING_INSTRUCTION = "instruction"
|
||||
API_ATTR_WARNING_START = "start_time"
|
||||
API_ATTR_WARNING_END = "end_time"
|
||||
API_ATTR_WARNING_PARAMETERS = "parameters"
|
||||
API_ATTR_WARNING_COLOR = "color"
|
||||
|
||||
DEFAULT_NAME = "DWD-Weather-Warnings"
|
||||
|
||||
CONF_REGION_NAME = "region_name"
|
||||
|
||||
CURRENT_WARNING_SENSOR = "current_warning_level"
|
||||
ADVANCE_WARNING_SENSOR = "advance_warning_level"
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=15)
|
||||
|
||||
from .const import (
|
||||
ADVANCE_WARNING_SENSOR,
|
||||
API_ATTR_WARNING_COLOR,
|
||||
API_ATTR_WARNING_DESCRIPTION,
|
||||
API_ATTR_WARNING_END,
|
||||
API_ATTR_WARNING_HEADLINE,
|
||||
API_ATTR_WARNING_INSTRUCTION,
|
||||
API_ATTR_WARNING_LEVEL,
|
||||
API_ATTR_WARNING_NAME,
|
||||
API_ATTR_WARNING_PARAMETERS,
|
||||
API_ATTR_WARNING_START,
|
||||
API_ATTR_WARNING_TYPE,
|
||||
ATTR_LAST_UPDATE,
|
||||
ATTR_REGION_ID,
|
||||
ATTR_REGION_NAME,
|
||||
ATTR_WARNING_COUNT,
|
||||
CONF_REGION_NAME,
|
||||
CURRENT_WARNING_SENSOR,
|
||||
DEFAULT_NAME,
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
LOGGER,
|
||||
)
|
||||
|
||||
SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
SensorEntityDescription(
|
||||
|
@ -169,7 +161,7 @@ class DwdWeatherWarningsSensor(SensorEntity):
|
|||
|
||||
def update(self) -> None:
|
||||
"""Get the latest data from the DWD-Weather-Warnings API."""
|
||||
_LOGGER.debug(
|
||||
LOGGER.debug(
|
||||
"Update requested for %s (%s) by %s",
|
||||
self._api.api.warncell_name,
|
||||
self._api.api.warncell_id,
|
||||
|
@ -185,8 +177,8 @@ class WrappedDwDWWAPI:
|
|||
"""Initialize a DWD-Weather-Warnings wrapper."""
|
||||
self.api = api
|
||||
|
||||
@Throttle(SCAN_INTERVAL)
|
||||
@Throttle(DEFAULT_SCAN_INTERVAL)
|
||||
def update(self):
|
||||
"""Get the latest data from the DWD-Weather-Warnings API."""
|
||||
self.api.update()
|
||||
_LOGGER.debug("Update performed")
|
||||
LOGGER.debug("Update performed")
|
||||
|
|
|
@ -290,7 +290,7 @@ async def async_setup_platform(
|
|||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_yaml",
|
||||
breaks_in_ha_version="2023.2.0",
|
||||
breaks_in_ha_version="2023.6.0",
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
|
|
|
@ -6,5 +6,5 @@
|
|||
"documentation": "https://www.home-assistant.io/integrations/environment_canada",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["env_canada"],
|
||||
"requirements": ["env_canada==0.5.29"]
|
||||
"requirements": ["env_canada==0.5.31"]
|
||||
}
|
||||
|
|
|
@ -5,12 +5,11 @@ from collections.abc import Callable
|
|||
import functools
|
||||
import logging
|
||||
import math
|
||||
from typing import Any, Generic, NamedTuple, TypeVar, cast, overload
|
||||
from typing import Any, Generic, NamedTuple, TypeVar, cast
|
||||
|
||||
from aioesphomeapi import (
|
||||
APIClient,
|
||||
APIConnectionError,
|
||||
APIIntEnum,
|
||||
APIVersion,
|
||||
DeviceInfo as EsphomeDeviceInfo,
|
||||
EntityCategory as EsphomeEntityCategory,
|
||||
|
@ -64,6 +63,7 @@ from .domain_data import DomainData
|
|||
|
||||
# Import config flow so that it's added to the registry
|
||||
from .entry_data import RuntimeEntryData
|
||||
from .enum_mapper import EsphomeEnumMapper
|
||||
|
||||
CONF_DEVICE_NAME = "device_name"
|
||||
CONF_NOISE_PSK = "noise_psk"
|
||||
|
@ -345,11 +345,19 @@ async def async_setup_entry( # noqa: C901
|
|||
disconnect_cb()
|
||||
entry_data.disconnect_callbacks = []
|
||||
entry_data.available = False
|
||||
# Clear out the states so that we will always dispatch
|
||||
# Mark state as stale so that we will always dispatch
|
||||
# the next state update of that type when the device reconnects
|
||||
for state_keys in entry_data.state.values():
|
||||
state_keys.clear()
|
||||
entry_data.async_update_device_state(hass)
|
||||
entry_data.stale_state = {
|
||||
(type(entity_state), key)
|
||||
for state_dict in entry_data.state.values()
|
||||
for key, entity_state in state_dict.items()
|
||||
}
|
||||
if not hass.is_stopping:
|
||||
# Avoid marking every esphome entity as unavailable on shutdown
|
||||
# since it generates a lot of state changed events and database
|
||||
# writes when we already know we're shutting down and the state
|
||||
# will be cleared anyway.
|
||||
entry_data.async_update_device_state(hass)
|
||||
|
||||
async def on_connect_error(err: Exception) -> None:
|
||||
"""Start reauth flow if appropriate connect error type."""
|
||||
|
@ -682,41 +690,6 @@ def esphome_state_property(
|
|||
return _wrapper
|
||||
|
||||
|
||||
_EnumT = TypeVar("_EnumT", bound=APIIntEnum)
|
||||
_ValT = TypeVar("_ValT")
|
||||
|
||||
|
||||
class EsphomeEnumMapper(Generic[_EnumT, _ValT]):
|
||||
"""Helper class to convert between hass and esphome enum values."""
|
||||
|
||||
def __init__(self, mapping: dict[_EnumT, _ValT]) -> None:
|
||||
"""Construct a EsphomeEnumMapper."""
|
||||
# Add none mapping
|
||||
augmented_mapping: dict[
|
||||
_EnumT | None, _ValT | None
|
||||
] = mapping # type: ignore[assignment]
|
||||
augmented_mapping[None] = None
|
||||
|
||||
self._mapping = augmented_mapping
|
||||
self._inverse: dict[_ValT, _EnumT] = {v: k for k, v in mapping.items()}
|
||||
|
||||
@overload
|
||||
def from_esphome(self, value: _EnumT) -> _ValT:
|
||||
...
|
||||
|
||||
@overload
|
||||
def from_esphome(self, value: _EnumT | None) -> _ValT | None:
|
||||
...
|
||||
|
||||
def from_esphome(self, value: _EnumT | None) -> _ValT | None:
|
||||
"""Convert from an esphome int representation to a hass string."""
|
||||
return self._mapping[value]
|
||||
|
||||
def from_hass(self, value: _ValT) -> _EnumT:
|
||||
"""Convert from a hass string to a esphome int representation."""
|
||||
return self._inverse[value]
|
||||
|
||||
|
||||
ICON_SCHEMA = vol.Schema(cv.icon)
|
||||
|
||||
|
||||
|
|
|
@ -54,12 +54,8 @@ from homeassistant.const import (
|
|||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import (
|
||||
EsphomeEntity,
|
||||
EsphomeEnumMapper,
|
||||
esphome_state_property,
|
||||
platform_async_setup_entry,
|
||||
)
|
||||
from . import EsphomeEntity, esphome_state_property, platform_async_setup_entry
|
||||
from .enum_mapper import EsphomeEnumMapper
|
||||
|
||||
FAN_QUIET = "quiet"
|
||||
|
||||
|
|
|
@ -70,6 +70,10 @@ class RuntimeEntryData:
|
|||
client: APIClient
|
||||
store: Store
|
||||
state: dict[type[EntityState], dict[int, EntityState]] = field(default_factory=dict)
|
||||
# When the disconnect callback is called, we mark all states
|
||||
# as stale so we will always dispatch a state update when the
|
||||
# device reconnects. This is the same format as state_subscriptions.
|
||||
stale_state: set[tuple[type[EntityState], int]] = field(default_factory=set)
|
||||
info: dict[str, dict[int, EntityInfo]] = field(default_factory=dict)
|
||||
|
||||
# A second list of EntityInfo objects
|
||||
|
@ -206,9 +210,11 @@ class RuntimeEntryData:
|
|||
"""Distribute an update of state information to the target."""
|
||||
key = state.key
|
||||
state_type = type(state)
|
||||
stale_state = self.stale_state
|
||||
current_state_by_type = self.state[state_type]
|
||||
current_state = current_state_by_type.get(key, _SENTINEL)
|
||||
if current_state == state:
|
||||
subscription_key = (state_type, key)
|
||||
if current_state == state and subscription_key not in stale_state:
|
||||
_LOGGER.debug(
|
||||
"%s: ignoring duplicate update with and key %s: %s",
|
||||
self.name,
|
||||
|
@ -222,8 +228,8 @@ class RuntimeEntryData:
|
|||
key,
|
||||
state,
|
||||
)
|
||||
stale_state.discard(subscription_key)
|
||||
current_state_by_type[key] = state
|
||||
subscription_key = (state_type, key)
|
||||
if subscription_key in self.state_subscriptions:
|
||||
self.state_subscriptions[subscription_key]()
|
||||
|
||||
|
|
39
homeassistant/components/esphome/enum_mapper.py
Normal file
39
homeassistant/components/esphome/enum_mapper.py
Normal file
|
@ -0,0 +1,39 @@
|
|||
"""Helper class to convert between Home Assistant and ESPHome enum values."""
|
||||
|
||||
from typing import Generic, TypeVar, overload
|
||||
|
||||
from aioesphomeapi import APIIntEnum
|
||||
|
||||
_EnumT = TypeVar("_EnumT", bound=APIIntEnum)
|
||||
_ValT = TypeVar("_ValT")
|
||||
|
||||
|
||||
class EsphomeEnumMapper(Generic[_EnumT, _ValT]):
|
||||
"""Helper class to convert between hass and esphome enum values."""
|
||||
|
||||
def __init__(self, mapping: dict[_EnumT, _ValT]) -> None:
|
||||
"""Construct a EsphomeEnumMapper."""
|
||||
# Add none mapping
|
||||
augmented_mapping: dict[
|
||||
_EnumT | None, _ValT | None
|
||||
] = mapping # type: ignore[assignment]
|
||||
augmented_mapping[None] = None
|
||||
|
||||
self._mapping = augmented_mapping
|
||||
self._inverse: dict[_ValT, _EnumT] = {v: k for k, v in mapping.items()}
|
||||
|
||||
@overload
|
||||
def from_esphome(self, value: _EnumT) -> _ValT:
|
||||
...
|
||||
|
||||
@overload
|
||||
def from_esphome(self, value: _EnumT | None) -> _ValT | None:
|
||||
...
|
||||
|
||||
def from_esphome(self, value: _EnumT | None) -> _ValT | None:
|
||||
"""Convert from an esphome int representation to a hass string."""
|
||||
return self._mapping[value]
|
||||
|
||||
def from_hass(self, value: _ValT) -> _EnumT:
|
||||
"""Convert from a hass string to a esphome int representation."""
|
||||
return self._inverse[value]
|
|
@ -22,12 +22,8 @@ from homeassistant.util.percentage import (
|
|||
ranged_value_to_percentage,
|
||||
)
|
||||
|
||||
from . import (
|
||||
EsphomeEntity,
|
||||
EsphomeEnumMapper,
|
||||
esphome_state_property,
|
||||
platform_async_setup_entry,
|
||||
)
|
||||
from . import EsphomeEntity, esphome_state_property, platform_async_setup_entry
|
||||
from .enum_mapper import EsphomeEnumMapper
|
||||
|
||||
ORDERED_NAMED_FAN_SPEEDS = [FanSpeed.LOW, FanSpeed.MEDIUM, FanSpeed.HIGH]
|
||||
|
||||
|
|
|
@ -14,6 +14,6 @@
|
|||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aioesphomeapi", "noiseprotocol"],
|
||||
"requirements": ["aioesphomeapi==13.6.0", "esphome-dashboard-api==1.2.3"],
|
||||
"requirements": ["aioesphomeapi==13.6.1", "esphome-dashboard-api==1.2.3"],
|
||||
"zeroconf": ["_esphomelib._tcp.local."]
|
||||
}
|
||||
|
|
|
@ -24,12 +24,8 @@ from homeassistant.config_entries import ConfigEntry
|
|||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import (
|
||||
EsphomeEntity,
|
||||
EsphomeEnumMapper,
|
||||
esphome_state_property,
|
||||
platform_async_setup_entry,
|
||||
)
|
||||
from . import EsphomeEntity, esphome_state_property, platform_async_setup_entry
|
||||
from .enum_mapper import EsphomeEnumMapper
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue