Upgrade ruff to 0.0.285 (#98647)
parent f318063a77
commit 3094991236

39 changed files with 109 additions and 119 deletions
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.0.280
+    rev: v0.0.285
    hooks:
      - id: ruff
        args:
@@ -100,7 +100,7 @@ class AmbiclimateFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
         try:
             token_info = await oauth.get_access_token(code)
         except ambiclimate.AmbiclimateOauthError:
-            _LOGGER.error("Failed to get access token", exc_info=True)
+            _LOGGER.exception("Failed to get access token")
             return None

         store = Store(self.hass, STORAGE_VERSION, STORAGE_KEY)
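The bulk of this upgrade is one mechanical change: ruff 0.0.285 flags `_LOGGER.error(..., exc_info=True)` inside `except` blocks (this looks like its TRY400 rule), because `logging.exception()` already logs at ERROR level with the traceback attached. A minimal sketch of the before/after pattern:

```python
import logging

_LOGGER = logging.getLogger(__name__)

try:
    raise ValueError("boom")
except ValueError:
    # Before: the traceback is attached by hand
    _LOGGER.error("Failed to get access token", exc_info=True)
    # After: exception() implies ERROR level plus exc_info
    _LOGGER.exception("Failed to get access token")
```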
@@ -349,11 +349,10 @@ class HaScanner(BaseHaScanner):
         try:
             await self._async_start()
         except ScannerStartError as ex:
-            _LOGGER.error(
+            _LOGGER.exception(
                 "%s: Failed to restart Bluetooth scanner: %s",
                 self.name,
                 ex,
-                exc_info=True,
             )

     async def _async_reset_adapter(self) -> None:
@@ -246,8 +246,8 @@ async def async_get_still_stream(
             await response.write(
                 bytes(
                     "--frameboundary\r\n"
-                    "Content-Type: {}\r\n"
-                    "Content-Length: {}\r\n\r\n".format(content_type, len(img_bytes)),
+                    f"Content-Type: {content_type}\r\n"
+                    f"Content-Length: {len(img_bytes)}\r\n\r\n",
                     "utf-8",
                 )
                 + img_bytes
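A second recurring rewrite turns `str.format()` calls with literal templates into f-strings (pyupgrade's UP032, as shipped in ruff). A sketch with placeholder values:

```python
content_type = "image/jpeg"  # placeholder for illustration
img_bytes = b"\xff\xd8"      # placeholder payload

# Before: the values sit far from their {} slots
old = "Content-Type: {}\r\nContent-Length: {}\r\n\r\n".format(
    content_type, len(img_bytes)
)

# After: each expression is inlined where it renders
new = f"Content-Type: {content_type}\r\nContent-Length: {len(img_bytes)}\r\n\r\n"
assert old == new
```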
@@ -196,11 +196,10 @@ def async_log_errors(
            )
        except DenonAvrError as err:
            available = False
-            _LOGGER.error(
+            _LOGGER.exception(
                "Error %s occurred in method %s for Denon AVR receiver",
                err,
                func.__name__,
-                exc_info=True,
            )
        finally:
            if available and not self.available:
@@ -427,9 +427,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
             _LOGGER.error("Error talking to the dashboard: %s", err)
             return False
         except json.JSONDecodeError as err:
-            _LOGGER.error(
-                "Error parsing response from dashboard: %s", err, exc_info=True
-            )
+            _LOGGER.exception("Error parsing response from dashboard: %s", err)
             return False

         self._noise_psk = noise_psk
@@ -312,7 +312,7 @@ class RuntimeEntryData:
            and subscription_key not in stale_state
            and state_type is not CameraState
            and not (
-                state_type is SensorState  # pylint: disable=unidiomatic-typecheck
+                state_type is SensorState  # noqa: E721
                and (platform_info := self.info.get(SensorInfo))
                and (entity_info := platform_info.get(state.key))
                and (cast(SensorInfo, entity_info)).force_update
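These `# pylint: disable=unidiomatic-typecheck` comments become `# noqa: E721` because the check moves from pylint to ruff (see the pyproject.toml hunk below, which disables the pylint twin). E721 wants `isinstance()`, but these call sites compare exact types on purpose, so they keep a suppression, now in ruff's syntax. A sketch of the distinction:

```python
class Base: ...
class Child(Base): ...

obj = Child()

# The idiom E721 prefers: isinstance() also matches subclasses
assert isinstance(obj, Base)

# An exact-type check deliberately excludes subclasses; the intent
# is marked for ruff rather than "fixed"
assert type(obj) is not Base  # noqa: E721
```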
@@ -126,7 +126,7 @@ class FreeboxHomeEntity(Entity):
         )
         if not node:
             _LOGGER.warning(
-                "The Freebox Home device has no node for: " + ep_type + "/" + name
+                "The Freebox Home device has no node for: %s/%s", ep_type, name
             )
             return None
         return node.get("value")
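String concatenation inside a logging call is rewritten to deferred %-style arguments (this matches flake8-logging-format's G003, which ruff implements), so the message is only built if the record is actually emitted. Sketch:

```python
import logging

_LOGGER = logging.getLogger(__name__)
ep_type, name = "signal", "battery"  # placeholder values

# Before: the concatenation runs even when WARNING is filtered out
_LOGGER.warning("The Freebox Home device has no node for: " + ep_type + "/" + name)

# After: interpolation is deferred to the logging framework
_LOGGER.warning("The Freebox Home device has no node for: %s/%s", ep_type, name)
```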
@@ -785,27 +785,20 @@ class AvmWrapper(FritzBoxTools):
            )
            return result
        except FritzSecurityError:
-            _LOGGER.error(
-                (
-                    "Authorization Error: Please check the provided credentials and"
-                    " verify that you can log into the web interface"
-                ),
-                exc_info=True,
-            )
+            _LOGGER.exception(
+                "Authorization Error: Please check the provided credentials and"
+                " verify that you can log into the web interface"
+            )
        except FRITZ_EXCEPTIONS:
-            _LOGGER.error(
+            _LOGGER.exception(
                "Service/Action Error: cannot execute service %s with action %s",
                service_name,
                action_name,
-                exc_info=True,
            )
        except FritzConnectionException:
-            _LOGGER.error(
-                (
-                    "Connection Error: Please check the device is properly configured"
-                    " for remote login"
-                ),
-                exc_info=True,
-            )
+            _LOGGER.exception(
+                "Connection Error: Please check the device is properly configured"
+                " for remote login"
+            )
            return {}

@@ -65,7 +65,7 @@ class LazyEventPartialState:
         self.context_parent_id_bin: bytes | None = self.row.context_parent_id_bin
         # We need to explicitly check for the row is EventAsRow as the unhappy path
         # to fetch row.data for Row is very expensive
-        if type(row) is EventAsRow:  # pylint: disable=unidiomatic-typecheck
+        if type(row) is EventAsRow:  # noqa: E721
             # If its an EventAsRow we can avoid the whole
             # json decode process as we already have the data
             self.data = row.data
@@ -361,7 +361,7 @@ class EnsureJobAfterCooldown:
         except asyncio.CancelledError:
             pass
         except Exception:  # pylint: disable=broad-except
-            _LOGGER.exception("Error cleaning up task", exc_info=True)
+            _LOGGER.exception("Error cleaning up task")


 class MQTT:
@@ -294,13 +294,12 @@ class EntityTopicState:
            try:
                entity.async_write_ha_state()
            except Exception:  # pylint: disable=broad-except
-                _LOGGER.error(
+                _LOGGER.exception(
                    "Exception raised when updating state of %s, topic: "
                    "'%s' with payload: %s",
                    entity.entity_id,
                    msg.topic,
                    msg.payload,
-                    exc_info=True,
                )

    @callback
@@ -18,7 +18,7 @@ def ulid_to_bytes_or_none(ulid: str | None) -> bytes | None:
     try:
         return ulid_to_bytes(ulid)
     except ValueError as ex:
-        _LOGGER.error("Error converting ulid %s to bytes: %s", ulid, ex, exc_info=True)
+        _LOGGER.exception("Error converting ulid %s to bytes: %s", ulid, ex)
         return None

@@ -29,9 +29,7 @@ def bytes_to_ulid_or_none(_bytes: bytes | None) -> str | None:
     try:
         return bytes_to_ulid(_bytes)
     except ValueError as ex:
-        _LOGGER.error(
-            "Error converting bytes %s to ulid: %s", _bytes, ex, exc_info=True
-        )
+        _LOGGER.exception("Error converting bytes %s to ulid: %s", _bytes, ex)
         return None

@@ -132,7 +132,7 @@ def session_scope(
         need_rollback = True
         session.commit()
     except Exception as err:  # pylint: disable=broad-except
-        _LOGGER.error("Error executing query: %s", err, exc_info=True)
+        _LOGGER.exception("Error executing query: %s", err)
         if need_rollback:
             session.rollback()
         if not exception_filter or not exception_filter(err):
@@ -459,9 +459,8 @@ class LightTemplate(TemplateEntity, LightEntity):
             )
             self._brightness = None
         except ValueError:
-            _LOGGER.error(
-                "Template must supply an integer brightness from 0-255, or 'None'",
-                exc_info=True,
-            )
+            _LOGGER.exception(
+                "Template must supply an integer brightness from 0-255, or 'None'"
+            )
             self._brightness = None

@@ -559,12 +558,9 @@ class LightTemplate(TemplateEntity, LightEntity):
             )
             self._temperature = None
         except ValueError:
-            _LOGGER.error(
-                (
-                    "Template must supply an integer temperature within the range for"
-                    " this light, or 'None'"
-                ),
-                exc_info=True,
-            )
+            _LOGGER.exception(
+                "Template must supply an integer temperature within the range for"
+                " this light, or 'None'"
+            )
             self._temperature = None

@@ -620,12 +616,9 @@ class LightTemplate(TemplateEntity, LightEntity):
                 return
             self._max_mireds = int(render)
         except ValueError:
-            _LOGGER.error(
-                (
-                    "Template must supply an integer temperature within the range for"
-                    " this light, or 'None'"
-                ),
-                exc_info=True,
-            )
+            _LOGGER.exception(
+                "Template must supply an integer temperature within the range for"
+                " this light, or 'None'"
+            )
             self._max_mireds = None

@@ -638,12 +631,9 @@ class LightTemplate(TemplateEntity, LightEntity):
                 return
             self._min_mireds = int(render)
         except ValueError:
-            _LOGGER.error(
-                (
-                    "Template must supply an integer temperature within the range for"
-                    " this light, or 'None'"
-                ),
-                exc_info=True,
-            )
+            _LOGGER.exception(
+                "Template must supply an integer temperature within the range for"
+                " this light, or 'None'"
+            )
             self._min_mireds = None

@@ -122,10 +122,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     try:
         await hass.async_add_executor_job(manager.authenticate)
     except upcloud_api.UpCloudAPIError:
-        _LOGGER.error("Authentication failed", exc_info=True)
+        _LOGGER.exception("Authentication failed")
         return False
     except requests.exceptions.RequestException as err:
-        _LOGGER.error("Failed to connect", exc_info=True)
+        _LOGGER.exception("Failed to connect")
         raise ConfigEntryNotReady from err

     if entry.options.get(CONF_SCAN_INTERVAL):
@@ -164,12 +164,12 @@ class ActiveConnection:
         if (
             # Not using isinstance as we don't care about children
             # as these are always coming from JSON
-            type(msg) is not dict  # pylint: disable=unidiomatic-typecheck
+            type(msg) is not dict  # noqa: E721
             or (
                 not (cur_id := msg.get("id"))
-                or type(cur_id) is not int  # pylint: disable=unidiomatic-typecheck
+                or type(cur_id) is not int  # noqa: E721
                 or not (type_ := msg.get("type"))
-                or type(type_) is not str  # pylint: disable=unidiomatic-typecheck
+                or type(type_) is not str  # noqa: E721
             )
         ):
             self.logger.error("Received invalid command: %s", msg)
@@ -115,8 +115,8 @@ class WorldTidesInfoSensor(SensorEntity):
         start = int(time.time())
         resource = (
             "https://www.worldtides.info/api?extremes&length=86400"
-            "&key={}&lat={}&lon={}&start={}"
-        ).format(self._key, self._lat, self._lon, start)
+            f"&key={self._key}&lat={self._lat}&lon={self._lon}&start={start}"
+        )

         try:
             self.data = requests.get(resource, timeout=10).json()
@@ -17,7 +17,7 @@ async def _async_has_devices(hass: HomeAssistant) -> bool:
         devices = await pyzerproc.discover()
         return len(devices) > 0
     except pyzerproc.ZerprocException:
-        _LOGGER.error("Unable to discover nearby Zerproc devices", exc_info=True)
+        _LOGGER.exception("Unable to discover nearby Zerproc devices")
         return False

@@ -586,7 +586,7 @@ def string(value: Any) -> str:
         raise vol.Invalid("string value is None")

     # This is expected to be the most common case, so check it first.
-    if type(value) is str:  # pylint: disable=unidiomatic-typecheck
+    if type(value) is str:  # noqa: E721
         return value

     if isinstance(value, template_helper.ResultWrapper):
@@ -597,7 +597,7 @@ async def async_get_all_descriptions(
     ints_or_excs = await async_get_integrations(hass, missing)
     integrations: list[Integration] = []
     for domain, int_or_exc in ints_or_excs.items():
-        if type(int_or_exc) is Integration:  # pylint: disable=unidiomatic-typecheck
+        if type(int_or_exc) is Integration:  # noqa: E721
            integrations.append(int_or_exc)
            continue
        if TYPE_CHECKING:
@@ -42,7 +42,7 @@ def json_loads_array(__obj: bytes | bytearray | memoryview | str) -> JsonArrayType:
     """Parse JSON data and ensure result is a list."""
     value: JsonValueType = json_loads(__obj)
     # Avoid isinstance overhead as we are not interested in list subclasses
-    if type(value) is list:  # pylint: disable=unidiomatic-typecheck
+    if type(value) is list:  # noqa: E721
         return value
     raise ValueError(f"Expected JSON to be parsed as a list got {type(value)}")

@@ -51,7 +51,7 @@ def json_loads_object(__obj: bytes | bytearray | memoryview | str) -> JsonObjectType:
     """Parse JSON data and ensure result is a dictionary."""
     value: JsonValueType = json_loads(__obj)
     # Avoid isinstance overhead as we are not interested in dict subclasses
-    if type(value) is dict:  # pylint: disable=unidiomatic-typecheck
+    if type(value) is dict:  # noqa: E721
         return value
     raise ValueError(f"Expected JSON to be parsed as a dict got {type(value)}")

@@ -89,7 +89,7 @@ def load_json_array(
         default = []
     value: JsonValueType = load_json(filename, default=default)
     # Avoid isinstance overhead as we are not interested in list subclasses
-    if type(value) is list:  # pylint: disable=unidiomatic-typecheck
+    if type(value) is list:  # noqa: E721
         return value
     _LOGGER.exception(
         "Expected JSON to be parsed as a list got %s in: %s", {type(value)}, filename
@@ -108,7 +108,7 @@ def load_json_object(
         default = {}
     value: JsonValueType = load_json(filename, default=default)
     # Avoid isinstance overhead as we are not interested in dict subclasses
-    if type(value) is dict:  # pylint: disable=unidiomatic-typecheck
+    if type(value) is dict:  # noqa: E721
         return value
     _LOGGER.exception(
         "Expected JSON to be parsed as a dict got %s in: %s", {type(value)}, filename
@@ -266,6 +266,7 @@ disable = [
     "missing-module-docstring", # D100
     "multiple-imports", #E401
     "singleton-comparison", # E711, E712
+    "subprocess-run-check", # PLW1510
     "superfluous-parens", # UP034
     "ungrouped-imports", # I001
     "unidiomatic-typecheck", # E721
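The new disable entry hands `subprocess-run-check` over to ruff's PLW1510, which requires every `subprocess.run()` call to pass `check=` explicitly; the script changes below add either `check=True` (fail loudly) or `check=False` (return code handled, or deliberately ignored). Sketch:

```python
import subprocess

# Flagged by PLW1510: a non-zero exit status would pass silently
subprocess.run(["git", "status"])

# Explicit: raise CalledProcessError on failure
subprocess.run(["git", "status"], check=True)

# Explicit: tolerate failure because the return code is inspected
dirty = subprocess.run(["git", "diff", "--quiet"], check=False).returncode == 1
```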
@@ -2,5 +2,5 @@

 black==23.7.0
 codespell==2.2.2
-ruff==0.0.280
+ruff==0.0.285
 yamllint==1.32.0
@@ -397,4 +397,5 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
         ["pre-commit", "run", "--hook-stage", "manual", "prettier", "--files"]
         + manifests_resorted,
         stdout=subprocess.DEVNULL,
+        check=True,
     )
@@ -77,11 +77,13 @@ def main():
     pipe_null = {} if args.develop else {"stdout": subprocess.DEVNULL}

     print("Running hassfest to pick up new information.")
-    subprocess.run(["python", "-m", "script.hassfest"], **pipe_null)
+    subprocess.run(["python", "-m", "script.hassfest"], **pipe_null, check=True)
     print()

     print("Running gen_requirements_all to pick up new information.")
-    subprocess.run(["python", "-m", "script.gen_requirements_all"], **pipe_null)
+    subprocess.run(
+        ["python", "-m", "script.gen_requirements_all"], **pipe_null, check=True
+    )
     print()

     print("Running script/translations_develop to pick up new translation strings.")
@@ -95,13 +97,16 @@ def main():
             info.domain,
         ],
         **pipe_null,
+        check=True,
     )
     print()

     if args.develop:
         print("Running tests")
         print(f"$ pytest -vvv tests/components/{info.domain}")
-        subprocess.run(["pytest", "-vvv", f"tests/components/{info.domain}"])
+        subprocess.run(
+            ["pytest", "-vvv", f"tests/components/{info.domain}"], check=True
+        )
         print()

     docs.print_relevant_docs(args.template, info)
@@ -44,7 +44,8 @@ def run_download_docker():
            "json",
            "--unzip-to",
            "/opt/dest",
-        ]
+        ],
+        check=False,
    )
    print()

@@ -42,6 +42,7 @@ def run_upload_docker():
            "--convert-placeholders=false",
            "--replace-modified",
        ],
+        check=False,
    )
    print()

@@ -48,7 +48,9 @@ def get_current_branch():
     """Get current branch."""
     return (
         subprocess.run(
-            ["git", "rev-parse", "--abbrev-ref", "HEAD"], stdout=subprocess.PIPE
+            ["git", "rev-parse", "--abbrev-ref", "HEAD"],
+            stdout=subprocess.PIPE,
+            check=True,
         )
         .stdout.decode()
         .strip()
@@ -161,7 +161,10 @@ def main():
     )
     arguments = parser.parse_args()

-    if arguments.commit and subprocess.run(["git", "diff", "--quiet"]).returncode == 1:
+    if (
+        arguments.commit
+        and subprocess.run(["git", "diff", "--quiet"], check=False).returncode == 1
+    ):
         print("Cannot use --commit because git is dirty.")
         return

@@ -177,7 +180,7 @@ def main():
     if not arguments.commit:
         return

-    subprocess.run(["git", "commit", "-nam", f"Bumped version to {bumped}"])
+    subprocess.run(["git", "commit", "-nam", f"Bumped version to {bumped}"], check=True)


 def test_bump_version():
@@ -100,7 +100,7 @@ class FakeSchedule:

     async def fire_time(self, trigger_time: datetime.datetime) -> None:
         """Fire an alarm and wait."""
-        _LOGGER.debug(f"Firing alarm @ {dt_util.as_local(trigger_time)}")
+        _LOGGER.debug("Firing alarm @ %s", dt_util.as_local(trigger_time))
         self.freezer.move_to(trigger_time)
         async_fire_time_changed(self.hass, trigger_time)
         await self.hass.async_block_till_done()
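f-strings inside logging calls get the same treatment as concatenation (ruff's G004): the f-string is evaluated before `debug()` even checks whether DEBUG is enabled. Sketch:

```python
import logging

_LOGGER = logging.getLogger(__name__)
trigger_time = "2023-08-22 10:00:00"  # placeholder value

# Before: the f-string is built unconditionally
_LOGGER.debug(f"Firing alarm @ {trigger_time}")

# After: formatting is deferred until the record is emitted
_LOGGER.debug("Firing alarm @ %s", trigger_time)
```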
@@ -314,7 +314,7 @@ async def test_discover_lights(hass: HomeAssistant, hue_client) -> None:
     await hass.async_block_till_done()

     result_json = await async_get_lights(hue_client)
-    assert "1" not in result_json.keys()
+    assert "1" not in result_json
     devices = {val["uniqueid"] for val in result_json.values()}
     assert "00:2f:d2:31:ce:c5:55:cc-ee" not in devices  # light.ceiling_lights
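Membership tests against `.keys()` are simplified (flake8-simplify's SIM118, as implemented by ruff): `in` on a dict already tests its keys, without materializing a keys view. Sketch:

```python
result_json = {"2": {"uniqueid": "aa:bb"}, "3": {"uniqueid": "cc:dd"}}

# Before: redundant .keys() call
assert "1" not in result_json.keys()

# After: identical semantics, one hash lookup
assert "1" not in result_json
```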
@@ -505,7 +505,7 @@ async def test_config_wrong_struct_sensor(
            },
            [0x0102],
            False,
-            str(int(0x0102)),
+            str(0x0102),
        ),
        (
            {
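The long run of test hunks below all drop the same no-op: a hex literal such as `0x0102` is already an `int`, so wrapping it in `int()` adds nothing.

```python
# 0x0102 is the int 258; int(0x0102) is a redundant cast
assert int(0x0102) == 0x0102 == 258

# Hence the rewrites in the test expectations:
assert str(int(0x0102)) == str(0x0102) == "258"
```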
@@ -514,7 +514,7 @@ async def test_config_wrong_struct_sensor(
            },
            [0x0201],
            False,
-            str(int(0x0102)),
+            str(0x0102),
        ),
        (
            {
@@ -523,7 +523,7 @@ async def test_config_wrong_struct_sensor(
            },
            [0x0102, 0x0304],
            False,
-            str(int(0x02010403)),
+            str(0x02010403),
        ),
        (
            {
@@ -532,7 +532,7 @@ async def test_config_wrong_struct_sensor(
            },
            [0x0102, 0x0304],
            False,
-            str(int(0x03040102)),
+            str(0x03040102),
        ),
        (
            {
@@ -541,25 +541,25 @@ async def test_config_wrong_struct_sensor(
            },
            [0x0102, 0x0304],
            False,
-            str(int(0x04030201)),
+            str(0x04030201),
        ),
        (
            {
                CONF_DATA_TYPE: DataType.INT32,
-                CONF_MAX_VALUE: int(0x02010400),
+                CONF_MAX_VALUE: 0x02010400,
            },
            [0x0201, 0x0403],
            False,
-            str(int(0x02010400)),
+            str(0x02010400),
        ),
        (
            {
                CONF_DATA_TYPE: DataType.INT32,
-                CONF_MIN_VALUE: int(0x02010404),
+                CONF_MIN_VALUE: 0x02010404,
            },
            [0x0201, 0x0403],
            False,
-            str(int(0x02010404)),
+            str(0x02010404),
        ),
        (
            {
@@ -573,20 +573,20 @@ async def test_config_wrong_struct_sensor(
        (
            {
                CONF_DATA_TYPE: DataType.INT32,
-                CONF_ZERO_SUPPRESS: int(0x00000001),
+                CONF_ZERO_SUPPRESS: 0x00000001,
            },
            [0x0000, 0x0002],
            False,
-            str(int(0x00000002)),
+            str(0x00000002),
        ),
        (
            {
                CONF_DATA_TYPE: DataType.INT32,
-                CONF_ZERO_SUPPRESS: int(0x00000002),
+                CONF_ZERO_SUPPRESS: 0x00000002,
            },
            [0x0000, 0x0002],
            False,
-            str(int(0)),
+            str(0),
        ),
        (
            {
@@ -727,7 +727,7 @@ async def test_slave_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None:
            },
            [0x0102],
            False,
-            [str(int(0x0201))],
+            [str(0x0201)],
        ),
        (
            {
@@ -738,7 +738,7 @@ async def test_slave_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None:
            },
            [0x0102, 0x0304],
            False,
-            [str(int(0x03040102))],
+            [str(0x03040102)],
        ),
        (
            {
@@ -749,7 +749,7 @@ async def test_slave_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None:
            },
            [0x0102, 0x0304, 0x0506, 0x0708],
            False,
-            [str(int(0x0708050603040102))],
+            [str(0x0708050603040102)],
        ),
        (
            {
@@ -760,7 +760,7 @@ async def test_slave_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None:
            },
            [0x0102, 0x0304],
            False,
-            [str(int(0x0201)), str(int(0x0403))],
+            [str(0x0201), str(0x0403)],
        ),
        (
            {
@@ -771,7 +771,7 @@ async def test_slave_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None:
            },
            [0x0102, 0x0304, 0x0506, 0x0708],
            False,
-            [str(int(0x03040102)), str(int(0x07080506))],
+            [str(0x03040102), str(0x07080506)],
        ),
        (
            {
@@ -782,7 +782,7 @@ async def test_slave_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None:
            },
            [0x0102, 0x0304, 0x0506, 0x0708, 0x0901, 0x0902, 0x0903, 0x0904],
            False,
-            [str(int(0x0708050603040102)), str(int(0x0904090309020901))],
+            [str(0x0708050603040102), str(0x0904090309020901)],
        ),
        (
            {
@@ -793,7 +793,7 @@ async def test_slave_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None:
            },
            [0x0102, 0x0304, 0x0506, 0x0708],
            False,
-            [str(int(0x0201)), str(int(0x0403)), str(int(0x0605)), str(int(0x0807))],
+            [str(0x0201), str(0x0403), str(0x0605), str(0x0807)],
        ),
        (
            {
@@ -814,10 +814,10 @@ async def test_slave_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None:
            ],
            False,
            [
-                str(int(0x03040102)),
-                str(int(0x07080506)),
-                str(int(0x0B0C090A)),
-                str(int(0x0F000D0E)),
+                str(0x03040102),
+                str(0x07080506),
+                str(0x0B0C090A),
+                str(0x0F000D0E),
            ],
        ),
        (
@@ -847,10 +847,10 @@ async def test_slave_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None:
            ],
            False,
            [
-                str(int(0x0604060306020601)),
-                str(int(0x0704070307020701)),
-                str(int(0x0804080308020801)),
-                str(int(0x0904090309020901)),
+                str(0x0604060306020601),
+                str(0x0704070307020701),
+                str(0x0804080308020801),
+                str(0x0904090309020901),
            ],
        ),
    ],
@@ -506,7 +506,7 @@ async def test_restore_number_save_state(
     assert state["entity_id"] == entity0.entity_id
     extra_data = hass_storage[RESTORE_STATE_KEY]["data"][0]["extra_data"]
     assert extra_data == RESTORE_DATA
-    assert type(extra_data["native_value"]) == float
+    assert isinstance(extra_data["native_value"], float)


 @pytest.mark.parametrize(
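Where an exact-type comparison was not intentional, the fix goes the other way: the `==` type check becomes the idiomatic `isinstance()` that E721 asks for. Sketch:

```python
native_value = 1.5

# Before: flagged by E721
assert type(native_value) == float

# After: idiomatic, and tolerant of float subclasses
assert isinstance(native_value, float)
```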
@@ -157,7 +157,7 @@ async def test_openalpr_process_image(
     ]
     assert len(event_data) == 1
     assert event_data[0]["plate"] == "H786P0J"
-    assert event_data[0]["confidence"] == float(90.436699)
+    assert event_data[0]["confidence"] == 90.436699
     assert event_data[0]["entity_id"] == "image_processing.test_local"

@@ -56,8 +56,8 @@ MOCK_CONFIG_ADDITIONAL = {
     CONF_CODE: MOCK_CODE,
 }
 MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]}
-MOCK_UDP_PORT = int(987)
-MOCK_TCP_PORT = int(997)
+MOCK_UDP_PORT = 987
+MOCK_TCP_PORT = 997

 MOCK_AUTO = {"Config Mode": "Auto Discover"}
 MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST}
@@ -181,7 +181,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) -> None:
     await hass.services.async_call(domain, service, data, blocking=True)
     if payload is None:
         mocked_method.assert_called_once()
-    elif type(payload) == list:
+    elif isinstance(payload, list):
         mocked_method.assert_called_once_with(*payload)
     else:
         mocked_method.assert_called_once_with(**payload)
@@ -303,7 +303,7 @@ async def test_and_condition_shorthand(hass: HomeAssistant) -> None:
     test = await condition.async_from_config(hass, config)

     assert config["alias"] == "And Condition Shorthand"
-    assert "and" not in config.keys()
+    assert "and" not in config

     hass.states.async_set("sensor.temperature", 120)
     assert not test(hass)
@@ -345,7 +345,7 @@ async def test_and_condition_list_shorthand(hass: HomeAssistant) -> None:
     test = await condition.async_from_config(hass, config)

     assert config["alias"] == "And Condition List Shorthand"
-    assert "and" not in config.keys()
+    assert "and" not in config

     hass.states.async_set("sensor.temperature", 120)
     assert not test(hass)
@@ -577,7 +577,7 @@ async def test_or_condition_shorthand(hass: HomeAssistant) -> None:
     test = await condition.async_from_config(hass, config)

     assert config["alias"] == "Or Condition Shorthand"
-    assert "or" not in config.keys()
+    assert "or" not in config

     hass.states.async_set("sensor.temperature", 120)
     assert not test(hass)
@@ -809,7 +809,7 @@ async def test_not_condition_shorthand(hass: HomeAssistant) -> None:
     test = await condition.async_from_config(hass, config)

     assert config["alias"] == "Not Condition Shorthand"
-    assert "not" not in config.keys()
+    assert "not" not in config

     hass.states.async_set("sensor.temperature", 101)
     assert test(hass)
@@ -61,7 +61,7 @@ class MockUpdateEntity(MockEntity, UpdateEntity):

         if version is not None:
             self._values["installed_version"] = version
-            _LOGGER.info(f"Installed update with version: {version}")
+            _LOGGER.info("Installed update with version: %s", version)
         else:
             self._values["installed_version"] = self.latest_version
             _LOGGER.info("Installed latest update")