"""The Tile component."""
from datetime import timedelta
from functools import partial

from pytile import async_login
from pytile.errors import InvalidAuthError, SessionExpiredError, TileError

from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, entity_registry
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util.async_ import gather_with_concurrency

from .const import DATA_COORDINATOR, DATA_TILE, DOMAIN, LOGGER

PLATFORMS = ["device_tracker"]

DEVICE_TYPES = ["PHONE", "TILE"]

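# Cap on how many coordinator refresh tasks run in parallel during setup, and the
# polling interval used for each Tile's DataUpdateCoordinator.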
DEFAULT_INIT_TASK_LIMIT = 2
DEFAULT_UPDATE_INTERVAL = timedelta(minutes=2)

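# Options-flow key controlling whether inactive Tiles are exposed as trackers.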
CONF_SHOW_INACTIVE = "show_inactive"
|
|
|
|
|
|
|
|
|
|
|
|
async def async_setup(hass, config):
|
|
|
|
"""Set up the Tile component."""
|
2021-01-27 04:06:09 -07:00
|
|
|
hass.data[DOMAIN] = {DATA_COORDINATOR: {}, DATA_TILE: {}}
|
2020-06-04 10:07:27 -06:00
|
|
|
return True


async def async_setup_entry(hass, entry):
    """Set up Tile as config entry."""
    hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id] = {}
    hass.data[DOMAIN][DATA_TILE][entry.entry_id] = {}

    # The existence of shared Tiles across multiple accounts requires a unique ID
    # change:
    #
    #   Old: tile_{uuid}
    #   New: {username}_{uuid}
    #
    # Find any entities with the old format and update them:
    ent_reg = entity_registry.async_get(hass)
    for entity in [
        e
        for e in ent_reg.entities.values()
        if e.config_entry_id == entry.entry_id
        and not e.unique_id.startswith(entry.data[CONF_USERNAME])
    ]:
        new_unique_id = f"{entry.data[CONF_USERNAME]}_".join(
            entity.unique_id.split(f"{DOMAIN}_")
        )
        LOGGER.debug(
            "Migrating entity %s from old unique ID '%s' to new unique ID '%s'",
            entity.entity_id,
            entity.unique_id,
            new_unique_id,
        )
        ent_reg.async_update_entity(entity.entity_id, new_unique_id=new_unique_id)

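    # Reuse Home Assistant's shared aiohttp client session for Tile cloud API calls.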
    websession = aiohttp_client.async_get_clientsession(hass)

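    # Log in to the Tile cloud and fetch this account's Tiles. Invalid credentials
    # abort setup outright; any other API error lets Home Assistant retry later.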
    try:
        client = await async_login(
            entry.data[CONF_USERNAME],
            entry.data[CONF_PASSWORD],
            session=websession,
        )
        hass.data[DOMAIN][DATA_TILE][entry.entry_id] = await client.async_get_tiles()
    except InvalidAuthError:
        LOGGER.error("Invalid credentials provided")
        return False
    except TileError as err:
        raise ConfigEntryNotReady("Error during integration setup") from err

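    # Defined inside async_setup_entry so the update callback can reuse the
    # authenticated `client` from the enclosing scope to re-initialize an
    # expired session.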
    async def async_update_tile(tile):
        """Update the Tile."""
        try:
            return await tile.async_update()
        except SessionExpiredError:
            LOGGER.info("Tile session expired; creating a new one")
            await client.async_init()
        except TileError as err:
            raise UpdateFailed(f"Error while retrieving data: {err}") from err

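    # Create one DataUpdateCoordinator per Tile and queue its first refresh.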
    coordinator_init_tasks = []
    for tile_uuid, tile in hass.data[DOMAIN][DATA_TILE][entry.entry_id].items():
        coordinator = hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id][
            tile_uuid
        ] = DataUpdateCoordinator(
            hass,
            LOGGER,
            name=tile.name,
            update_interval=DEFAULT_UPDATE_INTERVAL,
            update_method=partial(async_update_tile, tile),
        )
        coordinator_init_tasks.append(coordinator.async_refresh())

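    # Run the initial refreshes, at most DEFAULT_INIT_TASK_LIMIT at a time.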
    await gather_with_concurrency(DEFAULT_INIT_TASK_LIMIT, *coordinator_init_tasks)

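    # Forward setup to each platform in PLATFORMS (currently just device_tracker).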
    hass.config_entries.async_setup_platforms(entry, PLATFORMS)

    return True


async def async_unload_entry(hass, entry):
    """Unload a Tile config entry."""
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    if unload_ok:
        hass.data[DOMAIN][DATA_COORDINATOR].pop(entry.entry_id)
        hass.data[DOMAIN][DATA_TILE].pop(entry.entry_id)

    return unload_ok