Remove warnings from compensation (#63691)

parent 2df8ab865f
commit 5f9a351889

2 changed files with 8 additions and 26 deletions
homeassistant/components/compensation/__init__.py
@@ -1,6 +1,5 @@
 """The Compensation integration."""
 import logging
-import warnings
 
 import numpy as np
 import voluptuous as vol
@@ -84,22 +83,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         # try to get valid coefficients for a polynomial
         coefficients = None
         with np.errstate(all="raise"):
-            with warnings.catch_warnings(record=True) as all_warnings:
-                warnings.simplefilter("always")
-                try:
-                    coefficients = np.polyfit(x_values, y_values, degree)
-                except FloatingPointError as error:
-                    _LOGGER.error(
-                        "Setup of %s encountered an error, %s",
-                        compensation,
-                        error,
-                    )
-                for warning in all_warnings:
-                    _LOGGER.warning(
-                        "Setup of %s encountered a warning, %s",
-                        compensation,
-                        str(warning.message).lower(),
-                    )
+            try:
+                coefficients = np.polyfit(x_values, y_values, degree)
+            except FloatingPointError as error:
+                _LOGGER.error(
+                    "Setup of %s encountered an error, %s",
+                    compensation,
+                    error,
+                )
 
         if coefficients is not None:
             data = {
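The retained error path leans entirely on np.errstate(all="raise"), which promotes numpy floating-point anomalies (invalid value, divide by zero, overflow, underflow) into FloatingPointError exceptions, so the plain try/except suffices on its own. A RankWarning from a poorly conditioned fit, by contrast, is an ordinary Python warning that errstate does not touch, which is why dropping the catch_warnings block drops the "encountered a warning" log lines and nothing else. A minimal standalone sketch of that mechanism, with illustrative sample data that is not part of the commit:

    import numpy as np

    # Duplicate x values of 0.0 put an all-zero column into polyfit's
    # Vandermonde matrix, so its internal column scaling computes 0/0.
    x_values, y_values, degree = [0.0, 0.0], [1.0, 1.0], 1

    with np.errstate(all="raise"):
        try:
            coefficients = np.polyfit(x_values, y_values, degree)
        except FloatingPointError as error:
            # Older numpy phrases this "invalid value encountered in
            # true_divide"; newer releases drop the "true_" prefix.
            print(f"polyfit failed: {error}")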
tests/components/compensation/test_init.py
@@ -151,13 +151,6 @@ async def test_numpy_errors(hass, caplog):
         "compensation": {
             "test": {
                 "source": "sensor.uncompensated",
-                "data_points": [
-                    [1.0, 1.0],
-                    [1.0, 1.0],
-                ],
-            },
-            "test2": {
-                "source": "sensor.uncompensated2",
                 "data_points": [
                     [0.0, 1.0],
                     [0.0, 1.0],
@@ -170,8 +163,6 @@ async def test_numpy_errors(hass, caplog):
     await hass.async_start()
     await hass.async_block_till_done()
 
-    assert "polyfit may be poorly conditioned" in caplog.text
-
     assert "invalid value encountered in true_divide" in caplog.text
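The two old data sets failed in different ways, which is what the trimmed test reflects: duplicated [1.0, 1.0] points give a rank-deficient but finite fit, so numpy only emits a RankWarning ("Polyfit may be poorly conditioned", previously lowercased into the log by the removed block), while duplicated [0.0, 1.0] points still raise FloatingPointError and keep the remaining assertion alive. A standalone sketch contrasting the two failure modes (not part of the test file; exact message wording varies across numpy versions):

    import warnings

    import numpy as np

    with np.errstate(all="raise"):
        # Rank deficiency is reported as a Python-level RankWarning,
        # which errstate does not convert into an exception.
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")
            np.polyfit([1.0, 1.0], [1.0, 1.0], 1)
        print(caught[0].message)  # "Polyfit may be poorly conditioned"

        # An all-zero Vandermonde column is a genuine floating-point
        # error, raised as an exception under errstate(all="raise").
        try:
            np.polyfit([0.0, 0.0], [1.0, 1.0], 1)
        except FloatingPointError as error:
            print(error)  # e.g. "invalid value encountered in divide"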