prefer total_seconds over seconds (#49505)
This commit is contained in:
parent 020d456889
commit a90d3a051f
31 changed files with 65 additions and 46 deletions
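Why the change matters: timedelta.seconds is only the seconds field of the normalized timedelta (0-86399), so it silently drops whole days and truncates sub-second parts, while timedelta.total_seconds() returns the complete duration as a float. A minimal standard-library sketch (illustrative only, not code from this commit):

from datetime import timedelta

interval = timedelta(days=1, minutes=5)

# .seconds is just the seconds component of the normalized value,
# so the whole day is lost: 5 * 60 == 300
print(interval.seconds)          # 300

# .total_seconds() converts the entire duration, including days and
# microseconds, to a float: 86400 + 300 == 86700.0
print(interval.total_seconds())  # 86700.0

# Sub-second intervals are the other trap: .seconds truncates to 0.
short = timedelta(milliseconds=250)
print(short.seconds)             # 0
print(short.total_seconds())     # 0.25

For the scan intervals, cooldowns, and reconnect delays touched below the two values are usually identical, but total_seconds() stays correct for any timedelta.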
@@ -82,7 +82,7 @@ class AquaLogicProcessor(threading.Thread):
  return

  _LOGGER.error("Connection to %s:%d lost", self._host, self._port)
- time.sleep(RECONNECT_INTERVAL.seconds)
+ time.sleep(RECONNECT_INTERVAL.total_seconds())

  @property
  def panel(self):
@@ -52,7 +52,7 @@ class ActivityStream(AugustSubscriberMixin):
  return Debouncer(
  self._hass,
  _LOGGER,
- cooldown=ACTIVITY_UPDATE_INTERVAL.seconds,
+ cooldown=ACTIVITY_UPDATE_INTERVAL.total_seconds(),
  immediate=True,
  function=_async_update_house_id,
  )
@@ -121,7 +121,9 @@ class ActivityStream(AugustSubscriberMixin):
  # we catch the case where the lock operator is
  # not updated or the lock failed
  self._schedule_updates[house_id] = async_call_later(
- self._hass, ACTIVITY_UPDATE_INTERVAL.seconds + 1, _update_house_activities
+ self._hass,
+ ACTIVITY_UPDATE_INTERVAL.total_seconds() + 1,
+ _update_house_activities,
  )

  async def _async_update_house_id(self, house_id):
@@ -21,8 +21,10 @@ from .entity import AugustEntityMixin
  _LOGGER = logging.getLogger(__name__)

- TIME_TO_DECLARE_DETECTION = timedelta(seconds=ACTIVITY_UPDATE_INTERVAL.seconds)
- TIME_TO_RECHECK_DETECTION = timedelta(seconds=ACTIVITY_UPDATE_INTERVAL.seconds * 3)
+ TIME_TO_DECLARE_DETECTION = timedelta(seconds=ACTIVITY_UPDATE_INTERVAL.total_seconds())
+ TIME_TO_RECHECK_DETECTION = timedelta(
+ seconds=ACTIVITY_UPDATE_INTERVAL.total_seconds() * 3
+ )

  def _retrieve_online_state(data, detail):
@@ -257,7 +259,7 @@ class AugustDoorbellBinarySensor(AugustEntityMixin, BinarySensorEntity):
  self.async_write_ha_state()

  self._check_for_off_update_listener = async_call_later(
- self.hass, TIME_TO_RECHECK_DETECTION.seconds, _scheduled_update
+ self.hass, TIME_TO_RECHECK_DETECTION.total_seconds(), _scheduled_update
  )

  def _cancel_any_pending_updates(self):
@@ -387,7 +387,7 @@ class BroadlinkRemote(RemoteEntity, RestoreEntity):
  raise TimeoutError(
  "No infrared code received within "
- f"{LEARNING_TIMEOUT.seconds} seconds"
+ f"{LEARNING_TIMEOUT.total_seconds()} seconds"
  )

  finally:
@@ -425,7 +425,7 @@ class BroadlinkRemote(RemoteEntity, RestoreEntity):
  )
  raise TimeoutError(
  "No radiofrequency found within "
- f"{LEARNING_TIMEOUT.seconds} seconds"
+ f"{LEARNING_TIMEOUT.total_seconds()} seconds"
  )

  finally:
@@ -460,7 +460,7 @@ class BroadlinkRemote(RemoteEntity, RestoreEntity):
  raise TimeoutError(
  "No radiofrequency code received within "
- f"{LEARNING_TIMEOUT.seconds} seconds"
+ f"{LEARNING_TIMEOUT.total_seconds()} seconds"
  )

  finally:
@@ -141,7 +141,7 @@ async def activate_automation(
  SERVICE_TURN_ON,
  {
  ATTR_ENTITY_ID: light_id,
- ATTR_TRANSITION: LIGHT_TRANSITION_TIME.seconds,
+ ATTR_TRANSITION: LIGHT_TRANSITION_TIME.total_seconds(),
  ATTR_PROFILE: light_profile,
  },
  )
@@ -53,7 +53,7 @@ class GdacsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
  self._abort_if_unique_id_configured()

  scan_interval = user_input.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
- user_input[CONF_SCAN_INTERVAL] = scan_interval.seconds
+ user_input[CONF_SCAN_INTERVAL] = scan_interval.total_seconds()

  categories = user_input.get(CONF_CATEGORIES, [])
  user_input[CONF_CATEGORIES] = categories
@@ -66,7 +66,7 @@ class GeonetnzQuakesFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
  self._abort_if_unique_id_configured()

  scan_interval = user_input.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
- user_input[CONF_SCAN_INTERVAL] = scan_interval.seconds
+ user_input[CONF_SCAN_INTERVAL] = scan_interval.total_seconds()

  minimum_magnitude = user_input.get(
  CONF_MINIMUM_MAGNITUDE, DEFAULT_MINIMUM_MAGNITUDE
@@ -65,6 +65,6 @@ class GeonetnzVolcanoFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
  user_input[CONF_UNIT_SYSTEM] = CONF_UNIT_SYSTEM_METRIC

  scan_interval = user_input.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
- user_input[CONF_SCAN_INTERVAL] = scan_interval.seconds
+ user_input[CONF_SCAN_INTERVAL] = scan_interval.total_seconds()

  return self.async_create_entry(title=identifier, data=user_input)
@@ -527,7 +527,7 @@ class GTFSDepartureSensor(SensorEntity):
  name: Any | None,
  origin: Any,
  destination: Any,
- offset: cv.time_period,
+ offset: datetime.timedelta,
  include_tomorrow: bool,
  ) -> None:
  """Initialize the sensor."""
@@ -699,7 +699,7 @@ class GTFSDepartureSensor(SensorEntity):
  del self._attributes[ATTR_LAST]

  # Add contextual information
- self._attributes[ATTR_OFFSET] = self._offset.seconds / 60
+ self._attributes[ATTR_OFFSET] = self._offset.total_seconds() / 60

  if self._state is None:
  self._attributes[ATTR_INFO] = (
@@ -9,7 +9,7 @@ ROUTER = "router"
  UNDO_UPDATE_LISTENER = "undo_update_listener"
  DEFAULT_TELNET_PORT = 23
  DEFAULT_SCAN_INTERVAL = 120
- DEFAULT_CONSIDER_HOME = _DEFAULT_CONSIDER_HOME.seconds
+ DEFAULT_CONSIDER_HOME = _DEFAULT_CONSIDER_HOME.total_seconds()
  DEFAULT_INTERFACE = "Home"

  CONF_CONSIDER_HOME = "consider_home"
@@ -92,6 +92,6 @@ class LuftDatenFlowHandler(config_entries.ConfigFlow):
  )

  scan_interval = user_input.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
- user_input.update({CONF_SCAN_INTERVAL: scan_interval.seconds})
+ user_input.update({CONF_SCAN_INTERVAL: scan_interval.total_seconds()})

  return self.async_create_entry(title=str(sensor_id), data=user_input)
@@ -78,7 +78,9 @@ class MikrotikFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
  async def async_step_import(self, import_config):
  """Import Miktortik from config."""

- import_config[CONF_DETECTION_TIME] = import_config[CONF_DETECTION_TIME].seconds
+ import_config[CONF_DETECTION_TIME] = import_config[
+ CONF_DETECTION_TIME
+ ].total_seconds()
  return await self.async_step_user(user_input=import_config)
@@ -120,7 +120,7 @@ class MQTTRoomSensor(SensorEntity):
  if (
  device.get(ATTR_ROOM) == self._state
  or device.get(ATTR_DISTANCE) < self._distance
- or timediff.seconds >= self._timeout
+ or timediff.total_seconds() >= self._timeout
  ):
  update_state(**device)
@@ -145,11 +145,11 @@ class MyChevyHub(threading.Thread):
  _LOGGER.info("Starting mychevy loop")
  self.update()
  self.hass.helpers.dispatcher.dispatcher_send(UPDATE_TOPIC)
- time.sleep(MIN_TIME_BETWEEN_UPDATES.seconds)
+ time.sleep(MIN_TIME_BETWEEN_UPDATES.total_seconds())
  except Exception:  # pylint: disable=broad-except
  _LOGGER.exception(
  "Error updating mychevy data. "
  "This probably means the OnStar link is down again"
  )
  self.hass.helpers.dispatcher.dispatcher_send(ERROR_TOPIC)
- time.sleep(ERROR_SLEEP_TIME.seconds)
+ time.sleep(ERROR_SLEEP_TIME.total_seconds())
@@ -69,7 +69,9 @@ class NZBGetConfigFlow(ConfigFlow, domain=DOMAIN):
  ) -> dict[str, Any]:
  """Handle a flow initiated by configuration file."""
  if CONF_SCAN_INTERVAL in user_input:
- user_input[CONF_SCAN_INTERVAL] = user_input[CONF_SCAN_INTERVAL].seconds
+ user_input[CONF_SCAN_INTERVAL] = user_input[
+ CONF_SCAN_INTERVAL
+ ].total_seconds()

  return await self.async_step_user(user_input)
@@ -418,7 +418,9 @@ class RachioZone(RachioSwitch):
  CONF_MANUAL_RUN_MINS, DEFAULT_MANUAL_RUN_MINS
  )
  )
- self._controller.rachio.zone.start(self.zone_id, manual_run_time.seconds)
+ self._controller.rachio.zone.start(
+ self.zone_id, manual_run_time.total_seconds()
+ )
  _LOGGER.debug(
  "Watering %s on %s for %s",
  self.name,
@@ -114,21 +114,27 @@ SERVICE_SET_PIN_SCHEMA = SERVICE_BASE_SCHEMA.extend(
  SERVICE_SET_SYSTEM_PROPERTIES_SCHEMA = SERVICE_BASE_SCHEMA.extend(
  {
  vol.Optional(ATTR_ALARM_DURATION): vol.All(
- cv.time_period, lambda value: value.seconds, vol.Range(min=30, max=480)
+ cv.time_period,
+ lambda value: value.total_seconds(),
+ vol.Range(min=30, max=480),
  ),
  vol.Optional(ATTR_ALARM_VOLUME): vol.All(vol.Coerce(int), vol.In(VOLUMES)),
  vol.Optional(ATTR_CHIME_VOLUME): vol.All(vol.Coerce(int), vol.In(VOLUMES)),
  vol.Optional(ATTR_ENTRY_DELAY_AWAY): vol.All(
- cv.time_period, lambda value: value.seconds, vol.Range(min=30, max=255)
+ cv.time_period,
+ lambda value: value.total_seconds(),
+ vol.Range(min=30, max=255),
  ),
  vol.Optional(ATTR_ENTRY_DELAY_HOME): vol.All(
- cv.time_period, lambda value: value.seconds, vol.Range(max=255)
+ cv.time_period, lambda value: value.total_seconds(), vol.Range(max=255)
  ),
  vol.Optional(ATTR_EXIT_DELAY_AWAY): vol.All(
- cv.time_period, lambda value: value.seconds, vol.Range(min=45, max=255)
+ cv.time_period,
+ lambda value: value.total_seconds(),
+ vol.Range(min=45, max=255),
  ),
  vol.Optional(ATTR_EXIT_DELAY_HOME): vol.All(
- cv.time_period, lambda value: value.seconds, vol.Range(max=255)
+ cv.time_period, lambda value: value.total_seconds(), vol.Range(max=255)
  ),
  vol.Optional(ATTR_LIGHT): cv.boolean,
  vol.Optional(ATTR_VOICE_PROMPT_VOLUME): vol.All(
@@ -233,6 +233,6 @@ def resolve_slot_values(slot):
  minutes=slot["value"]["minutes"],
  seconds=slot["value"]["seconds"],
  )
- value = delta.seconds
+ value = delta.total_seconds()

  return value
@@ -50,7 +50,7 @@ class SpeedTestFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
  return self.async_abort(reason="wrong_server_id")

  import_config[CONF_SCAN_INTERVAL] = int(
- import_config[CONF_SCAN_INTERVAL].seconds / 60
+ import_config[CONF_SCAN_INTERVAL].total_seconds() / 60
  )
  import_config.pop(CONF_MONITORED_CONDITIONS)
@@ -431,7 +431,7 @@ def _update(
  state = round(
  (counter - data.value)
  / 1000 ** 2
- / (now - (data.update_time or now)).seconds,
+ / (now - (data.update_time or now)).total_seconds(),
  3,
  )
  else:
@@ -87,7 +87,7 @@ class FlowHandler(config_entries.ConfigFlow):
  title=host,
  data={
  CONF_HOST: host,
- KEY_SCAN_INTERVAL: self._scan_interval.seconds,
+ KEY_SCAN_INTERVAL: self._scan_interval.total_seconds(),
  KEY_SESSION: session,
  },
  )
@@ -152,7 +152,7 @@ class FlowHandler(config_entries.ConfigFlow):
  title=host,
  data={
  CONF_HOST: host,
- KEY_SCAN_INTERVAL: self._scan_interval.seconds,
+ KEY_SCAN_INTERVAL: self._scan_interval.total_seconds(),
  KEY_SESSION: next(iter(conf.values())),
  },
  )
@@ -130,7 +130,7 @@ async def async_attach_trigger(
  trigger_variables["for"] = period

- delay_cancel = async_call_later(hass, period.seconds, call_action)
+ delay_cancel = async_call_later(hass, period.total_seconds(), call_action)

  info = async_track_template_result(
  hass,
@@ -86,7 +86,9 @@ class TransmissionFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
  async def async_step_import(self, import_config):
  """Import from Transmission client config."""
- import_config[CONF_SCAN_INTERVAL] = import_config[CONF_SCAN_INTERVAL].seconds
+ import_config[CONF_SCAN_INTERVAL] = import_config[
+ CONF_SCAN_INTERVAL
+ ].total_seconds()
  return await self.async_step_user(user_input=import_config)
@@ -279,5 +279,5 @@ def _delta_mins(hhmm_time_str):
  if hhmm_datetime < now:
  hhmm_datetime += timedelta(days=1)

- delta_mins = (hhmm_datetime - now).seconds // 60
+ delta_mins = (hhmm_datetime - now).total_seconds() // 60
  return delta_mins
@@ -187,12 +187,13 @@ async def async_setup_entry(hass: HomeAssistantType, config_entry: ConfigEntry)
  )
  if migrated_scan_interval and (
  not config_entry.options.get(CONF_SCAN_INTERVAL)
- or config_entry.options[CONF_SCAN_INTERVAL] == DEFAULT_SCAN_INTERVAL.seconds
+ or config_entry.options[CONF_SCAN_INTERVAL]
+ == DEFAULT_SCAN_INTERVAL.total_seconds()
  ):
  update_interval = migrated_scan_interval
  hass.config_entries.async_update_entry(
  config_entry,
- options={CONF_SCAN_INTERVAL: update_interval.seconds},
+ options={CONF_SCAN_INTERVAL: update_interval.total_seconds()},
  )
  elif config_entry.options.get(CONF_SCAN_INTERVAL):
  update_interval = timedelta(seconds=config_entry.options[CONF_SCAN_INTERVAL])
@@ -104,7 +104,7 @@ class UpCloudOptionsFlow(config_entries.OptionsFlow):
  vol.Optional(
  CONF_SCAN_INTERVAL,
  default=self.config_entry.options.get(CONF_SCAN_INTERVAL)
- or DEFAULT_SCAN_INTERVAL.seconds,
+ or DEFAULT_SCAN_INTERVAL.total_seconds(),
  ): vol.All(vol.Coerce(int), vol.Range(min=30)),
  }
  )
@@ -31,4 +31,4 @@ CONFIG_ENTRY_SCAN_INTERVAL = "scan_interval"
  CONFIG_ENTRY_ST = "st"
  CONFIG_ENTRY_UDN = "udn"
  CONFIG_ENTRY_HOSTNAME = "hostname"
- DEFAULT_SCAN_INTERVAL = timedelta(seconds=30).seconds
+ DEFAULT_SCAN_INTERVAL = timedelta(seconds=30).total_seconds()
@@ -232,10 +232,10 @@ class DerivedUpnpSensor(UpnpSensor):
  if self._sensor_type["unit"] == DATA_BYTES:
  delta_value /= KIBIBYTE
  delta_time = current_timestamp - self._last_timestamp
- if delta_time.seconds == 0:
+ if delta_time.total_seconds() == 0:
  # Prevent division by 0.
  return None
- derived = delta_value / delta_time.seconds
+ derived = delta_value / delta_time.total_seconds()

  # Store current values for future use.
  self._last_value = current_value
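The DerivedUpnpSensor hunk above is the clearest case where the distinction changes behaviour: a rate is computed by dividing by the elapsed time, and with .seconds any interval shorter than one second (or an exact multiple of a day) reads as zero. A small illustrative helper with a hypothetical name bytes_per_second, mirroring the guard above but not taken from the integration's code:

from __future__ import annotations

from datetime import timedelta


def bytes_per_second(delta_bytes: float, delta_time: timedelta) -> float | None:
    """Illustrative rate helper mirroring the division guard above."""
    seconds = delta_time.total_seconds()
    if seconds == 0:
        # Prevent division by zero for a truly zero-length interval.
        return None
    return delta_bytes / seconds


# With .seconds this half-second sample would look like a zero-length
# interval and be dropped; total_seconds() keeps it at 0.5.
print(bytes_per_second(1024, timedelta(milliseconds=500)))  # 2048.0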
@@ -98,7 +98,7 @@ class WaterFurnaceData(threading.Thread):
  # sleep first before the reconnect attempt
  _LOGGER.debug("Sleeping for fail # %s", self._fails)
- time.sleep(self._fails * ERROR_INTERVAL.seconds)
+ time.sleep(self._fails * ERROR_INTERVAL.total_seconds())

  try:
  self.client.login()
@@ -149,4 +149,4 @@ class WaterFurnaceData(threading.Thread):
  else:
  self.hass.helpers.dispatcher.dispatcher_send(UPDATE_TOPIC)
- time.sleep(SCAN_INTERVAL.seconds)
+ time.sleep(SCAN_INTERVAL.total_seconds())
@@ -94,7 +94,7 @@ class WemoEntity(Entity):
  try:
  async with async_timeout.timeout(
- self.platform.scan_interval.seconds - 0.1
+ self.platform.scan_interval.total_seconds() - 0.1
  ) as timeout:
  await asyncio.shield(self._async_locked_update(True, timeout))
  except asyncio.TimeoutError:
@@ -319,7 +319,7 @@ class YeelightScanner:
  if len(self._callbacks) == 0:
  self._async_stop_scan()

- await asyncio.sleep(SCAN_INTERVAL.seconds)
+ await asyncio.sleep(SCAN_INTERVAL.total_seconds())
  self._scan_task = self._hass.loop.create_task(self._async_scan())

  @callback