Cleanup extra dict from hass data in Elgato (#59587)
commit 2841e402b9 (parent 8249959eac)
3 changed files with 4 additions and 8 deletions
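In short, the value stored per config entry in hass.data changes from a one-key dict to the Elgato client object itself. A minimal, standalone sketch of just that storage pattern (plain dicts and placeholder values stand in for hass.data, the config entry, and the client; none of this is code from the commit):

# Illustration only: `data` stands in for hass.data, `entry_id` for a config
# entry id, and `elgato` for the Elgato client instance.
data: dict = {}
DOMAIN = "elgato"
DATA_ELGATO_CLIENT = "elgato_client"
entry_id = "abc123"
elgato = object()

# Before this commit: the client was wrapped in a one-key dict.
data.setdefault(DOMAIN, {})
data[DOMAIN][entry_id] = {DATA_ELGATO_CLIENT: elgato}
client = data[DOMAIN][entry_id][DATA_ELGATO_CLIENT]

# After this commit: the client is stored directly under the entry id.
data.setdefault(DOMAIN, {})[entry_id] = elgato
client = data[DOMAIN][entry_id]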
homeassistant/components/elgato/__init__.py
@@ -10,7 +10,7 @@ from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 
-from .const import DATA_ELGATO_CLIENT, DOMAIN
+from .const import DOMAIN
 
 PLATFORMS = [LIGHT_DOMAIN]
 
@@ -31,8 +31,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         logging.getLogger(__name__).debug("Unable to connect: %s", exception)
         raise ConfigEntryNotReady from exception
 
-    hass.data.setdefault(DOMAIN, {})
-    hass.data[DOMAIN][entry.entry_id] = {DATA_ELGATO_CLIENT: elgato}
+    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = elgato
     hass.config_entries.async_setup_platforms(entry, PLATFORMS)
 
     return True
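Under the new layout, tearing down an entry only needs to remove the client itself. A hedged sketch of what a matching async_unload_entry could look like in this module; the commit does not touch that hook, so its exact shape here is an assumption:

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload an Elgato config entry (sketch; not part of this commit)."""
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    if unload_ok:
        # The stored value is now the Elgato client itself, so a single pop
        # removes everything kept for this entry.
        hass.data[DOMAIN].pop(entry.entry_id)
    return unload_ok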
homeassistant/components/elgato/const.py
@@ -3,9 +3,6 @@
 # Integration domain
 DOMAIN = "elgato"
 
-# Home Assistant data keys
-DATA_ELGATO_CLIENT = "elgato_client"
-
 # Attributes
 ATTR_ON = "on"
 
homeassistant/components/elgato/light.py
@@ -23,7 +23,7 @@ from homeassistant.helpers.entity_platform import (
     async_get_current_platform,
 )
 
-from .const import DATA_ELGATO_CLIENT, DOMAIN, SERVICE_IDENTIFY
+from .const import DOMAIN, SERVICE_IDENTIFY
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -37,7 +37,7 @@ async def async_setup_entry(
     async_add_entities: AddEntitiesCallback,
 ) -> None:
     """Set up Elgato Light based on a config entry."""
-    elgato: Elgato = hass.data[DOMAIN][entry.entry_id][DATA_ELGATO_CLIENT]
+    elgato: Elgato = hass.data[DOMAIN][entry.entry_id]
     info = await elgato.info()
     settings = await elgato.settings()
     async_add_entities([ElgatoLight(elgato, info, settings)], True)
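Any other place in the integration that previously unwrapped the dict now reads the client the same way as light.py above. If a single self-documenting lookup were wanted, a tiny typed helper would do; this helper is purely hypothetical and not part of the commit:

def get_elgato_client(hass: HomeAssistant, entry: ConfigEntry) -> Elgato:
    """Return the Elgato client stored for a config entry (hypothetical helper)."""
    # After this commit the stored value is the client itself, not a dict.
    client: Elgato = hass.data[DOMAIN][entry.entry_id]
    return client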