2018-02-28 22:31:38 -05:00
|
|
|
"""Translation string lookup helpers."""
|
2020-04-16 08:38:54 -07:00
|
|
|
import asyncio
|
2018-02-28 22:31:38 -05:00
|
|
|
import logging
|
2020-04-18 17:13:13 -07:00
|
|
|
from typing import Any, Dict, Optional, Set
|
2018-02-28 22:31:38 -05:00
|
|
|
|
2020-04-16 08:38:54 -07:00
|
|
|
from homeassistant.core import callback
|
2019-07-09 01:19:37 +02:00
|
|
|
from homeassistant.loader import (
|
2020-04-16 08:38:54 -07:00
|
|
|
Integration,
|
2019-12-09 16:42:10 +01:00
|
|
|
async_get_config_flows,
|
2019-07-31 12:25:30 -07:00
|
|
|
async_get_integration,
|
|
|
|
bind_hass,
|
|
|
|
)
|
2018-02-28 22:31:38 -05:00
|
|
|
from homeassistant.util.json import load_json
|
2019-12-09 16:42:10 +01:00
|
|
|
|
2018-10-28 21:12:52 +02:00
|
|
|
from .typing import HomeAssistantType
|
2018-02-28 22:31:38 -05:00
|
|
|
|
|
|
|
_LOGGER = logging.getLogger(__name__)

# hass.data key for the asyncio.Lock that serializes translation loading.
TRANSLATION_LOAD_LOCK = "translation_load_lock"
# hass.data key for the per-language translation cache.
TRANSLATION_STRING_CACHE = "translation_string_cache"

# Logged via _LOGGER.warning when an integration still ships the deprecated
# '.translations' directory; %s is the integration domain.  The original
# message was truncated mid-sentence ("move/rename this ") — it now names
# the directory it refers to.
MOVED_TRANSLATIONS_DIRECTORY_MSG = (
    "%s: the '.translations' directory has been moved, the new name is 'translations', "
    "starting with Home Assistant 0.111 your translations will no longer "
    "load if you do not move/rename this directory"
)
|
|
|
|
|
2018-02-28 22:31:38 -05:00
|
|
|
|
2018-10-28 21:12:52 +02:00
|
|
|
def recursive_flatten(prefix: Any, data: Dict) -> Dict[str, Any]:
    """Return a flattened representation of dict data.

    Nested mapping keys are joined with '.' onto *prefix*, e.g.
    {"a": {"b": 1}} with prefix "" becomes {"a.b": 1}.
    """
    flattened: Dict[str, Any] = {}
    for key, value in data.items():
        dotted_key = f"{prefix}{key}"
        # Leaf value: store it under the accumulated dotted key.
        if not isinstance(value, dict):
            flattened[dotted_key] = value
            continue
        # Nested mapping: recurse with the extended dotted prefix.
        flattened.update(recursive_flatten(f"{dotted_key}.", value))
    return flattened
|
|
|
|
|
|
|
|
|
2018-10-28 21:12:52 +02:00
|
|
|
def flatten(data: Dict) -> Dict[str, Any]:
    """Return a flattened representation of dict data.

    Convenience wrapper around recursive_flatten with no key prefix.
    """
    # Delegate to the recursive helper, starting from an empty prefix.
    empty_prefix = ""
    return recursive_flatten(empty_prefix, data)
|
2018-02-28 22:31:38 -05:00
|
|
|
|
|
|
|
|
2020-04-16 08:38:54 -07:00
|
|
|
@callback
def component_translation_path(
    component: str, language: str, integration: Integration
) -> Optional[str]:
    """Return the translation json file location for a component.

    For component:
     - components/hue/translations/nl.json

    For platform:
     - components/hue/translations/light.nl.json

    If component is just a single file, will return None.
    """
    name_parts = component.split(".")
    domain = name_parts[-1]

    # Single-file integrations (e.g. custom_components/my_component.py)
    # have no package directory and therefore cannot ship translations.
    if integration.file_path.name != domain:
        return None

    # A platform such as "light.hue" embeds the platform name ("light")
    # in the translation filename; a plain component just uses the language.
    filename = (
        f"{name_parts[0]}.{language}.json"
        if len(name_parts) == 2
        else f"{language}.json"
    )

    legacy_dir = integration.file_path / ".translations"
    current_dir = integration.file_path / "translations"

    # Fall back to the deprecated '.translations' directory — with a
    # warning — only while the new 'translations' directory is absent.
    if legacy_dir.is_dir() and not current_dir.is_dir():
        _LOGGER.warning(MOVED_TRANSLATIONS_DIRECTORY_MSG, domain)
        return str(legacy_dir / filename)

    return str(current_dir / filename)
|
2018-02-28 22:31:38 -05:00
|
|
|
|
|
|
|
|
2019-07-31 12:25:30 -07:00
|
|
|
def load_translations_files(
    translation_files: Dict[str, str]
) -> Dict[str, Dict[str, Any]]:
    """Load and parse translation.json files.

    translation_files maps component name -> json file path; the result
    maps component name -> parsed translation dict.
    """
    parsed: Dict[str, Dict[str, Any]] = {}
    for component, json_file in translation_files.items():
        translations = load_json(json_file)
        # Translation files are expected to parse to a JSON object (dict).
        assert isinstance(translations, dict)
        parsed[component] = translations
    return parsed
|
|
|
|
|
|
|
|
|
2019-07-31 12:25:30 -07:00
|
|
|
def build_resources(
    translation_cache: Dict[str, Dict[str, Any]], components: Set[str], category: str,
) -> Dict[str, Dict[str, Any]]:
    """Build the resources response for the given components.

    Groups the cached translations for *category* by domain and returns
    them wrapped as {"component": {domain: {category: translations}}}.
    """
    resources: Dict[str, Dict[str, Any]] = {}

    for component in components:
        # "light.hue" belongs to the "light" domain; a bare component
        # name is its own domain.
        domain = component.partition(".")[0]
        bucket = resources.setdefault(domain, {})

        # Integrations are able to provide translations for their entities under
        # other integrations if they don't have an existing device class. This is
        # done with a custom device class prefixed by their domain and two
        # underscores, shipped in platform-specific files in the integration
        # folder (e.g. `strings.sensor.json`).  Those cache entries are keyed by
        # the platform component and get merged into the target domain here.
        value = translation_cache[component].get(category)
        if value is None:
            continue

        existing = bucket.get(category)
        if existing is None:
            # First contribution for this domain/category: store it directly.
            bucket[category] = value
        elif isinstance(existing, list):
            # Already collecting multiple contributions: append.
            existing.append(value)
        else:
            # Second contribution: promote to a list for merging below.
            bucket[category] = [existing, value]

    # Collapse every list of contributions into one merged dict.
    for domain, bucket in list(resources.items()):
        entries = bucket.get(category)
        if not isinstance(entries, list):
            continue

        combined: Dict[str, Any] = {}
        for entry in entries:
            if not isinstance(entry, dict):
                _LOGGER.error(
                    "An integration providing translations for %s provided invalid data: %s",
                    domain,
                    entry,
                )
                continue
            combined.update(entry)

        bucket[category] = combined

    return {"component": resources}
|
2020-04-16 08:38:54 -07:00
|
|
|
|
2020-04-18 17:13:13 -07:00
|
|
|
|
|
|
|
async def async_get_component_cache(
    hass: HomeAssistantType, language: str, components: Set[str]
) -> Dict[str, Any]:
    """Return translation cache that includes all specified components.

    Loads translations for any component missing from the per-language
    cache in hass.data and returns the (shared, mutable) cache dict for
    *language*.  Components with no translation file are cached as {}.
    """
    # Get cache for this language
    cache: Dict[str, Dict[str, Any]] = hass.data.setdefault(
        TRANSLATION_STRING_CACHE, {}
    )
    translation_cache: Dict[str, Any] = cache.setdefault(language, {})

    # Calculate the missing components and platforms
    missing_loaded = components - set(translation_cache)

    if not missing_loaded:
        # Everything requested is already cached.
        return translation_cache

    # A platform like "light.hue" is provided by the "hue" integration,
    # so the LAST part of the dotted name is the domain to resolve.
    missing_domains = list({loaded.split(".")[-1] for loaded in missing_loaded})
    # Resolve all missing integrations concurrently; zip relies on gather
    # preserving the order of missing_domains.
    missing_integrations = dict(
        zip(
            missing_domains,
            await asyncio.gather(
                *[async_get_integration(hass, domain) for domain in missing_domains]
            ),
        )
    )

    # Determine paths of missing components/platforms
    missing_files = {}
    for loaded in missing_loaded:
        parts = loaded.split(".")
        domain = parts[-1]
        integration = missing_integrations[domain]

        path = component_translation_path(loaded, language, integration)
        # No translation available
        if path is None:
            translation_cache[loaded] = {}
        else:
            missing_files[loaded] = path

    # Load missing files
    if missing_files:
        # load_translations_files does blocking file I/O; async_add_job
        # presumably runs it off the event loop and returns an awaitable
        # (asserted non-None below) — confirm against hass.async_add_job.
        load_translations_job = hass.async_add_job(
            load_translations_files, missing_files
        )
        assert load_translations_job is not None
        loaded_translations = await load_translations_job

        # Translations that miss "title" will get integration put in.
        for loaded, translations in loaded_translations.items():
            if "." in loaded:
                # Dotted names are platforms; only top-level components
                # get the integration-name title fallback.
                continue

            if "title" not in translations:
                translations["title"] = missing_integrations[loaded].name

        # Update cache
        translation_cache.update(loaded_translations)

    return translation_cache
|
2018-02-28 22:31:38 -05:00
|
|
|
|
|
|
|
|
|
|
|
@bind_hass
async def async_get_translations(
    hass: HomeAssistantType,
    language: str,
    category: str,
    integration: Optional[str] = None,
    config_flow: Optional[bool] = None,
) -> Dict[str, Any]:
    """Return all backend translations.

    If integration specified, load it for that one.
    Otherwise default to loaded integrations combined with config flow
    integrations if config_flow is true.

    Returns a flat mapping of dotted translation keys to strings; when
    *language* is not English, English values fill in any missing keys.
    """
    # Determine the set of components to load translations for.
    if integration is not None:
        components = {integration}
    elif config_flow:
        components = hass.config.components | await async_get_config_flows(hass)
    else:
        components = set(hass.config.components)

    # Lazily create the lock that serializes translation loading.
    lock = hass.data.get(TRANSLATION_LOAD_LOCK)
    if lock is None:
        lock = hass.data[TRANSLATION_LOAD_LOCK] = asyncio.Lock()

    tasks = [async_get_component_cache(hass, language, components)]

    # Fetch the English resources, as a fallback for missing keys
    if language != "en":
        tasks.append(async_get_component_cache(hass, "en", components))

    async with lock:
        results = await asyncio.gather(*tasks)

    resources = flatten(build_resources(results[0], components, category))

    if language != "en":
        # Overlay the requested language on top of the English base so
        # any untranslated key falls back to its English string.
        base_resources = flatten(build_resources(results[1], components, category))
        resources = {**base_resources, **resources}

    return resources
|