Use `get_ha_sensor_data` method to update glances sensors (#83983)

* Use `get_ha_sensor_data` method to update sensor state
* update tests
* Use `get_ha_sensor_data` to validate connection
* Update test_sensor.py

Co-authored-by: Erik Montnemery <erik@montnemery.com>
parent a616ac2b60
commit 8cbe394028
8 changed files with 224 additions and 189 deletions
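For orientation before the diff: `get_ha_sensor_data()` returns a plain dict keyed by Glances sensor type, and labelled types such as `fs`, `sensors` and `raid` nest one more level (label → values), as the `HA_SENSOR_DATA` fixture further down illustrates. A minimal sketch of the lookup the rewritten `native_value` performs follows; the `data` literal and the `lookup` helper are illustrative only, not part of the integration or the `glances_api` library:

```python
# Sketch only: `data` mirrors the HA_SENSOR_DATA fixture from this PR, and
# `lookup` is a hypothetical helper mirroring the new native_value logic.
data = {
    "fs": {"/ssl": {"disk_use": 30.7, "disk_use_percent": 6.7, "disk_free": 426.5}},
    "mem": {"memory_use_percent": 27.6, "memory_use": 1047.1, "memory_free": 2745.0},
}


def lookup(data: dict, sensor_type: str, label: str, key: str):
    """Labelled types (fs, sensors, raid) nest one level deeper than the rest."""
    value = data[sensor_type]
    if isinstance(value.get(label), dict):
        return value[label][key]
    return value[key]


print(lookup(data, "fs", "/ssl", "disk_use"))         # 30.7
print(lookup(data, "mem", "", "memory_use_percent"))  # 27.6
```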
@@ -45,7 +45,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
     """Validate the user input allows us to connect."""
     api = get_api(hass, data)
     try:
-        await api.get_data("all")
+        await api.get_ha_sensor_data()
     except GlancesApiError as err:
         raise CannotConnect from err
@@ -36,7 +36,6 @@ class GlancesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
     async def _async_update_data(self) -> dict[str, Any]:
         """Get the latest data from the Glances REST API."""
         try:
-            await self.api.get_data("all")
+            return await self.api.get_ha_sensor_data()
         except exceptions.GlancesApiError as err:
             raise UpdateFailed from err
-        return self.api.data
@@ -15,7 +15,6 @@ from homeassistant.const import (
     CONF_NAME,
     PERCENTAGE,
     REVOLUTIONS_PER_MINUTE,
-    STATE_UNAVAILABLE,
     Platform,
     UnitOfInformation,
     UnitOfTemperature,
@@ -45,8 +44,8 @@ class GlancesSensorEntityDescription(
     """Describe Glances sensor entity."""


-SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
-    GlancesSensorEntityDescription(
+SENSOR_TYPES = {
+    ("fs", "disk_use_percent"): GlancesSensorEntityDescription(
         key="disk_use_percent",
         type="fs",
         name_suffix="used percent",
@@ -54,7 +53,7 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         icon="mdi:harddisk",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("fs", "disk_use"): GlancesSensorEntityDescription(
         key="disk_use",
         type="fs",
         name_suffix="used",
@@ -63,7 +62,7 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         icon="mdi:harddisk",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("fs", "disk_free"): GlancesSensorEntityDescription(
         key="disk_free",
         type="fs",
         name_suffix="free",
@@ -72,7 +71,7 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         icon="mdi:harddisk",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("mem", "memory_use_percent"): GlancesSensorEntityDescription(
         key="memory_use_percent",
         type="mem",
         name_suffix="RAM used percent",
@@ -80,7 +79,7 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         icon="mdi:memory",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("mem", "memory_use"): GlancesSensorEntityDescription(
         key="memory_use",
         type="mem",
         name_suffix="RAM used",
@@ -89,7 +88,7 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         icon="mdi:memory",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("mem", "memory_free"): GlancesSensorEntityDescription(
         key="memory_free",
         type="mem",
         name_suffix="RAM free",
@@ -98,7 +97,7 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         icon="mdi:memory",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("memswap", "swap_use_percent"): GlancesSensorEntityDescription(
         key="swap_use_percent",
         type="memswap",
         name_suffix="Swap used percent",
@@ -106,7 +105,7 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         icon="mdi:memory",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("memswap", "swap_use"): GlancesSensorEntityDescription(
         key="swap_use",
         type="memswap",
         name_suffix="Swap used",
@@ -115,7 +114,7 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         icon="mdi:memory",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("memswap", "swap_free"): GlancesSensorEntityDescription(
         key="swap_free",
         type="memswap",
         name_suffix="Swap free",
@@ -124,42 +123,42 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         icon="mdi:memory",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("load", "processor_load"): GlancesSensorEntityDescription(
         key="processor_load",
         type="load",
         name_suffix="CPU load",
         icon=CPU_ICON,
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("processcount", "process_running"): GlancesSensorEntityDescription(
         key="process_running",
         type="processcount",
         name_suffix="Running",
         icon=CPU_ICON,
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("processcount", "process_total"): GlancesSensorEntityDescription(
         key="process_total",
         type="processcount",
         name_suffix="Total",
         icon=CPU_ICON,
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("processcount", "process_thread"): GlancesSensorEntityDescription(
         key="process_thread",
         type="processcount",
         name_suffix="Thread",
         icon=CPU_ICON,
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("processcount", "process_sleeping"): GlancesSensorEntityDescription(
         key="process_sleeping",
         type="processcount",
         name_suffix="Sleeping",
         icon=CPU_ICON,
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("cpu", "cpu_use_percent"): GlancesSensorEntityDescription(
         key="cpu_use_percent",
         type="cpu",
         name_suffix="CPU used",
@@ -167,7 +166,7 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         icon=CPU_ICON,
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("sensors", "temperature_core"): GlancesSensorEntityDescription(
         key="temperature_core",
         type="sensors",
         name_suffix="Temperature",
@@ -175,7 +174,7 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         device_class=SensorDeviceClass.TEMPERATURE,
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("sensors", "temperature_hdd"): GlancesSensorEntityDescription(
         key="temperature_hdd",
         type="sensors",
         name_suffix="Temperature",
@@ -183,7 +182,7 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         device_class=SensorDeviceClass.TEMPERATURE,
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("sensors", "fan_speed"): GlancesSensorEntityDescription(
         key="fan_speed",
         type="sensors",
         name_suffix="Fan speed",
@@ -191,7 +190,7 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         icon="mdi:fan",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("sensors", "battery"): GlancesSensorEntityDescription(
         key="battery",
         type="sensors",
         name_suffix="Charge",
@@ -200,14 +199,14 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         icon="mdi:battery",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("docker", "docker_active"): GlancesSensorEntityDescription(
         key="docker_active",
         type="docker",
         name_suffix="Containers active",
         icon="mdi:docker",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("docker", "docker_cpu_use"): GlancesSensorEntityDescription(
         key="docker_cpu_use",
         type="docker",
         name_suffix="Containers CPU used",
@@ -215,7 +214,7 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         icon="mdi:docker",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("docker", "docker_memory_use"): GlancesSensorEntityDescription(
         key="docker_memory_use",
         type="docker",
         name_suffix="Containers RAM used",
@@ -224,21 +223,21 @@ SENSOR_TYPES: tuple[GlancesSensorEntityDescription, ...] = (
         icon="mdi:docker",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("raid", "used"): GlancesSensorEntityDescription(
         key="used",
         type="raid",
         name_suffix="Raid used",
         icon="mdi:harddisk",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    GlancesSensorEntityDescription(
+    ("raid", "available"): GlancesSensorEntityDescription(
         key="available",
         type="raid",
         name_suffix="Raid available",
         icon="mdi:harddisk",
         state_class=SensorStateClass.MEASUREMENT,
     ),
-)
+}


 async def async_setup_entry(
@@ -266,64 +265,40 @@ async def async_setup_entry(
                 entity_id, new_unique_id=f"{config_entry.entry_id}-{new_key}"
             )

-    for description in SENSOR_TYPES:
-        if description.type == "fs":
-            # fs will provide a list of disks attached
-            for disk in coordinator.data[description.type]:
-                _migrate_old_unique_ids(
-                    hass,
-                    f"{coordinator.host}-{name} {disk['mnt_point']} {description.name_suffix}",
-                    f"{disk['mnt_point']}-{description.key}",
-                )
-                entities.append(
-                    GlancesSensor(
-                        coordinator,
-                        name,
-                        disk["mnt_point"],
-                        description,
-                    )
-                )
-        elif description.type == "sensors":
-            # sensors will provide temp for different devices
-            for sensor in coordinator.data[description.type]:
-                if sensor["type"] == description.key:
+    for sensor_type, sensors in coordinator.data.items():
+        if sensor_type in ["fs", "sensors", "raid"]:
+            for sensor_label, params in sensors.items():
+                for param in params:
+                    sensor_description = SENSOR_TYPES[(sensor_type, param)]
                     _migrate_old_unique_ids(
                         hass,
-                        f"{coordinator.host}-{name} {sensor['label']} {description.name_suffix}",
-                        f"{sensor['label']}-{description.key}",
+                        f"{coordinator.host}-{name} {sensor_label} {sensor_description.name_suffix}",
+                        f"{sensor_label}-{sensor_description.key}",
                     )
                     entities.append(
                         GlancesSensor(
                             coordinator,
                             name,
-                            sensor["label"],
-                            description,
+                            sensor_label,
+                            sensor_description,
                         )
                     )
-        elif description.type == "raid":
-            for raid_device in coordinator.data[description.type]:
+        else:
+            for sensor in sensors:
+                sensor_description = SENSOR_TYPES[(sensor_type, sensor)]
                 _migrate_old_unique_ids(
                     hass,
-                    f"{coordinator.host}-{name} {raid_device} {description.name_suffix}",
-                    f"{raid_device}-{description.key}",
+                    f"{coordinator.host}-{name} {sensor_description.name_suffix}",
+                    f"-{sensor_description.key}",
                 )
                 entities.append(
-                    GlancesSensor(coordinator, name, raid_device, description)
+                    GlancesSensor(
+                        coordinator,
+                        name,
+                        "",
+                        sensor_description,
+                    )
                 )
-        elif coordinator.data[description.type]:
-            _migrate_old_unique_ids(
-                hass,
-                f"{coordinator.host}-{name} {description.name_suffix}",
-                f"-{description.key}",
-            )
-            entities.append(
-                GlancesSensor(
-                    coordinator,
-                    name,
-                    "",
-                    description,
-                )
-            )

     async_add_entities(entities)
@@ -354,114 +329,10 @@ class GlancesSensor(CoordinatorEntity[GlancesDataUpdateCoordinator], SensorEntity):
         self._attr_unique_id = f"{coordinator.config_entry.entry_id}-{sensor_name_prefix}-{description.key}"

     @property
-    def native_value(self) -> StateType:  # noqa: C901
+    def native_value(self) -> StateType:
         """Return the state of the resources."""
-        if (value := self.coordinator.data) is None:
-            return None
-        state: StateType = None
-        if self.entity_description.type == "fs":
-            for var in value["fs"]:
-                if var["mnt_point"] == self._sensor_name_prefix:
-                    disk = var
-                    break
-            if self.entity_description.key == "disk_free":
-                try:
-                    state = round(disk["free"] / 1024**3, 1)
-                except KeyError:
-                    state = round(
-                        (disk["size"] - disk["used"]) / 1024**3,
-                        1,
-                    )
-            elif self.entity_description.key == "disk_use":
-                state = round(disk["used"] / 1024**3, 1)
-            elif self.entity_description.key == "disk_use_percent":
-                state = disk["percent"]
-        elif self.entity_description.key == "battery":
-            for sensor in value["sensors"]:
-                if (
-                    sensor["type"] == "battery"
-                    and sensor["label"] == self._sensor_name_prefix
-                ):
-                    state = sensor["value"]
-        elif self.entity_description.key == "fan_speed":
-            for sensor in value["sensors"]:
-                if (
-                    sensor["type"] == "fan_speed"
-                    and sensor["label"] == self._sensor_name_prefix
-                ):
-                    state = sensor["value"]
-        elif self.entity_description.key == "temperature_core":
-            for sensor in value["sensors"]:
-                if (
-                    sensor["type"] == "temperature_core"
-                    and sensor["label"] == self._sensor_name_prefix
-                ):
-                    state = sensor["value"]
-        elif self.entity_description.key == "temperature_hdd":
-            for sensor in value["sensors"]:
-                if (
-                    sensor["type"] == "temperature_hdd"
-                    and sensor["label"] == self._sensor_name_prefix
-                ):
-                    state = sensor["value"]
-        elif self.entity_description.key == "memory_use_percent":
-            state = value["mem"]["percent"]
-        elif self.entity_description.key == "memory_use":
-            state = round(value["mem"]["used"] / 1024**2, 1)
-        elif self.entity_description.key == "memory_free":
-            state = round(value["mem"]["free"] / 1024**2, 1)
-        elif self.entity_description.key == "swap_use_percent":
-            state = value["memswap"]["percent"]
-        elif self.entity_description.key == "swap_use":
-            state = round(value["memswap"]["used"] / 1024**3, 1)
-        elif self.entity_description.key == "swap_free":
-            state = round(value["memswap"]["free"] / 1024**3, 1)
-        elif self.entity_description.key == "processor_load":
-            # Windows systems don't provide load details
-            try:
-                state = value["load"]["min15"]
-            except KeyError:
-                state = value["cpu"]["total"]
-        elif self.entity_description.key == "process_running":
-            state = value["processcount"]["running"]
-        elif self.entity_description.key == "process_total":
-            state = value["processcount"]["total"]
-        elif self.entity_description.key == "process_thread":
-            state = value["processcount"]["thread"]
-        elif self.entity_description.key == "process_sleeping":
-            state = value["processcount"]["sleeping"]
-        elif self.entity_description.key == "cpu_use_percent":
-            state = value["quicklook"]["cpu"]
-        elif self.entity_description.key == "docker_active":
-            count = 0
-            try:
-                for container in value["docker"]["containers"]:
-                    if container["Status"] == "running" or "Up" in container["Status"]:
-                        count += 1
-                state = count
-            except KeyError:
-                state = count
-        elif self.entity_description.key == "docker_cpu_use":
-            cpu_use = 0.0
-            try:
-                for container in value["docker"]["containers"]:
-                    if container["Status"] == "running" or "Up" in container["Status"]:
-                        cpu_use += container["cpu"]["total"]
-                state = round(cpu_use, 1)
-            except KeyError:
-                state = STATE_UNAVAILABLE
-        elif self.entity_description.key == "docker_memory_use":
-            mem_use = 0.0
-            try:
-                for container in value["docker"]["containers"]:
-                    if container["Status"] == "running" or "Up" in container["Status"]:
-                        mem_use += container["memory"]["usage"]
-                state = round(mem_use / 1024**2, 1)
-            except KeyError:
-                state = STATE_UNAVAILABLE
-        elif self.entity_description.type == "raid":
-            for raid_device, raid in value["raid"].items():
-                if raid_device == self._sensor_name_prefix:
-                    state = raid[self.entity_description.key]
+        value = self.coordinator.data[self.entity_description.type]

-        return state
+        if isinstance(value.get(self._sensor_name_prefix), dict):
+            return value[self._sensor_name_prefix][self.entity_description.key]
+        return value[self.entity_description.key]
@@ -1,6 +1,8 @@
 """Tests for Glances."""

-MOCK_USER_INPUT = {
+from typing import Any
+
+MOCK_USER_INPUT: dict[str, Any] = {
     "host": "0.0.0.0",
     "username": "username",
     "password": "password",
@@ -30,6 +32,85 @@ MOCK_DATA = {
             "key": "disk_name",
         },
     ],
+    "docker": {
+        "containers": [
+            {
+                "key": "name",
+                "name": "container1",
+                "Status": "running",
+                "cpu": {"total": 50.94973493230174},
+                "cpu_percent": 50.94973493230174,
+                "memory": {
+                    "usage": 1120321536,
+                    "limit": 3976318976,
+                    "rss": 480641024,
+                    "cache": 580915200,
+                    "max_usage": 1309597696,
+                },
+                "memory_usage": 539406336,
+            },
+            {
+                "key": "name",
+                "name": "container2",
+                "Status": "running",
+                "cpu": {"total": 26.23567931034483},
+                "cpu_percent": 26.23567931034483,
+                "memory": {
+                    "usage": 85139456,
+                    "limit": 3976318976,
+                    "rss": 33677312,
+                    "cache": 35012608,
+                    "max_usage": 87650304,
+                },
+                "memory_usage": 50126848,
+            },
+        ]
+    },
+    "fs": [
+        {
+            "device_name": "/dev/sda8",
+            "fs_type": "ext4",
+            "mnt_point": "/ssl",
+            "size": 511320748032,
+            "used": 32910458880,
+            "free": 457917374464,
+            "percent": 6.7,
+            "key": "mnt_point",
+        },
+        {
+            "device_name": "/dev/sda8",
+            "fs_type": "ext4",
+            "mnt_point": "/media",
+            "size": 511320748032,
+            "used": 32910458880,
+            "free": 457917374464,
+            "percent": 6.7,
+            "key": "mnt_point",
+        },
+    ],
+    "mem": {
+        "total": 3976318976,
+        "available": 2878337024,
+        "percent": 27.6,
+        "used": 1097981952,
+        "free": 2878337024,
+        "active": 567971840,
+        "inactive": 1679704064,
+        "buffers": 149807104,
+        "cached": 1334816768,
+        "shared": 1499136,
+    },
+    "sensors": [
+        {
+            "label": "cpu_thermal 1",
+            "value": 59,
+            "warning": None,
+            "critical": None,
+            "unit": "C",
+            "type": "temperature_core",
+            "key": "label",
+        }
+    ],
     "system": {
         "os_name": "Linux",
         "hostname": "fedora-35",
@@ -40,3 +121,17 @@ MOCK_DATA = {
     },
     "uptime": "3 days, 10:25:20",
 }
+
+HA_SENSOR_DATA: dict[str, Any] = {
+    "fs": {
+        "/ssl": {"disk_use": 30.7, "disk_use_percent": 6.7, "disk_free": 426.5},
+        "/media": {"disk_use": 30.7, "disk_use_percent": 6.7, "disk_free": 426.5},
+    },
+    "sensors": {"cpu_thermal 1": {"temperature_core": 59}},
+    "mem": {
+        "memory_use_percent": 27.6,
+        "memory_use": 1047.1,
+        "memory_free": 2745.0,
+    },
+    "docker": {"docker_active": 2, "docker_cpu_use": 77.2, "docker_memory_use": 1149.6},
+}
@@ -3,13 +3,14 @@ from unittest.mock import AsyncMock, patch

 import pytest

-from . import MOCK_DATA
+from . import HA_SENSOR_DATA


 @pytest.fixture(autouse=True)
 def mock_api():
     """Mock glances api."""
     with patch("homeassistant.components.glances.Glances") as mock_api:
-        mock_api.return_value.get_data = AsyncMock(return_value=None)
-        mock_api.return_value.data.return_value = MOCK_DATA
+        mock_api.return_value.get_ha_sensor_data = AsyncMock(
+            return_value=HA_SENSOR_DATA
+        )
         yield mock_api
@@ -42,7 +42,7 @@ async def test_form(hass: HomeAssistant) -> None:
 async def test_form_cannot_connect(hass: HomeAssistant, mock_api: MagicMock) -> None:
     """Test to return error if we cannot connect."""

-    mock_api.return_value.get_data.side_effect = GlancesApiConnectionError
+    mock_api.return_value.get_ha_sensor_data.side_effect = GlancesApiConnectionError
     result = await hass.config_entries.flow.async_init(
         glances.DOMAIN, context={"source": config_entries.SOURCE_USER}
     )
@@ -29,7 +29,7 @@ async def test_conn_error(hass: HomeAssistant, mock_api: MagicMock) -> None:
     entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT)
     entry.add_to_hass(hass)

-    mock_api.return_value.get_data.side_effect = GlancesApiConnectionError
+    mock_api.return_value.get_ha_sensor_data.side_effect = GlancesApiConnectionError
     await hass.config_entries.async_setup(entry.entry_id)
     assert entry.state is ConfigEntryState.SETUP_RETRY
tests/components/glances/test_sensor.py (new file, 69 lines)
@@ -0,0 +1,69 @@
+"""Tests for glances sensors."""
+import pytest
+
+from homeassistant.components.glances.const import DOMAIN
+from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+
+from . import HA_SENSOR_DATA, MOCK_USER_INPUT
+
+from tests.common import MockConfigEntry
+
+
+async def test_sensor_states(hass: HomeAssistant) -> None:
+    """Test sensor states are correctly collected from library."""
+
+    entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT)
+    entry.add_to_hass(hass)
+
+    assert await hass.config_entries.async_setup(entry.entry_id)
+
+    if state := hass.states.get("sensor.0_0_0_0_ssl_disk_use"):
+        assert state.state == HA_SENSOR_DATA["fs"]["/ssl"]["disk_use"]
+
+    if state := hass.states.get("sensor.0_0_0_0_cpu_thermal_1"):
+        assert state.state == HA_SENSOR_DATA["sensors"]["cpu_thermal 1"]
+
+
+@pytest.mark.parametrize(
+    ("object_id", "old_unique_id", "new_unique_id"),
+    [
+        (
+            "glances_ssl_used_percent",
+            "0.0.0.0-Glances /ssl used percent",
+            "/ssl-disk_use_percent",
+        ),
+        (
+            "glances_cpu_thermal_1_temperature",
+            "0.0.0.0-Glances cpu_thermal 1 Temperature",
+            "cpu_thermal 1-temperature_core",
+        ),
+    ],
+)
+async def test_migrate_unique_id(
+    hass: HomeAssistant, object_id: str, old_unique_id: str, new_unique_id: str
+):
+    """Test unique id migration."""
+    old_config_data = {**MOCK_USER_INPUT, "name": "Glances"}
+    entry = MockConfigEntry(domain=DOMAIN, data=old_config_data)
+    entry.add_to_hass(hass)
+
+    ent_reg = er.async_get(hass)
+
+    entity: er.RegistryEntry = ent_reg.async_get_or_create(
+        suggested_object_id=object_id,
+        disabled_by=None,
+        domain=SENSOR_DOMAIN,
+        platform=DOMAIN,
+        unique_id=old_unique_id,
+        config_entry=entry,
+    )
+    assert entity.unique_id == old_unique_id
+
+    assert await hass.config_entries.async_setup(entry.entry_id)
+    await hass.async_block_till_done()
+
+    entity_migrated = ent_reg.async_get(entity.entity_id)
+    assert entity_migrated
+    assert entity_migrated.unique_id == f"{entry.entry_id}-{new_unique_id}"