Add foundation for state translations (#34443)

This commit is contained in:
Paulus Schoutsen 2020-04-19 20:35:49 -07:00 committed by GitHub
parent 75e5f085d3
commit 4720a7a891
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
13 changed files with 225 additions and 89 deletions

View file

@@ -1,6 +0,0 @@
{
"state": {
"day": "Day",
"night": "Night"
}
}

View file

@@ -1,12 +1,14 @@
{
"state": {
"first_quarter": "First quarter",
"full_moon": "Full moon",
"last_quarter": "Last quarter",
"new_moon": "New moon",
"waning_crescent": "Waning crescent",
"waning_gibbous": "Waning gibbous",
"waxing_crescent": "Waxing crescent",
"waxing_gibbous": "Waxing gibbous"
"moon__phase": {
"first_quarter": "First quarter",
"full_moon": "Full moon",
"last_quarter": "Last quarter",
"new_moon": "New moon",
"waning_crescent": "Waning crescent",
"waning_gibbous": "Waning gibbous",
"waxing_crescent": "Waxing crescent",
"waxing_gibbous": "Waxing gibbous"
}
}
}

View file

@@ -56,9 +56,14 @@ class MoonSensor(Entity):
@property
def name(self):
"""Return the name of the device."""
"""Return the name of the entity."""
return self._name
@property
def device_class(self):
"""Return the device class of the entity."""
return "moon__phase"
@property
def state(self):
"""Return the state of the device."""

View file

@@ -1,12 +1,14 @@
{
"state": {
"new_moon": "New moon",
"waxing_crescent": "Waxing crescent",
"first_quarter": "First quarter",
"waxing_gibbous": "Waxing gibbous",
"full_moon": "Full moon",
"waning_gibbous": "Waning gibbous",
"last_quarter": "Last quarter",
"waning_crescent": "Waning crescent"
"moon__phase": {
"new_moon": "New moon",
"waxing_crescent": "Waxing crescent",
"first_quarter": "First quarter",
"waxing_gibbous": "Waxing gibbous",
"full_moon": "Full moon",
"waning_gibbous": "Waning gibbous",
"last_quarter": "Last quarter",
"waning_crescent": "Waning crescent"
}
}
}

View file

@@ -117,7 +117,7 @@ class UserOnboardingView(_BaseOnboardingView):
# Create default areas using the users supplied language.
translations = await hass.helpers.translation.async_get_translations(
data["language"], integration=DOMAIN
data["language"], "area", DOMAIN
)
area_registry = await hass.helpers.area_registry.async_get_registry()

View file

@@ -1,8 +1,10 @@
{
"state": {
"autumn": "Autumn",
"spring": "Spring",
"summer": "Summer",
"winter": "Winter"
"season__season": {
"autumn": "Autumn",
"spring": "Spring",
"summer": "Summer",
"winter": "Winter"
}
}
}

View file

@@ -131,6 +131,11 @@ class Season(Entity):
"""Return the current season."""
return self.season
@property
def device_class(self):
"""Return the device class."""
return "season__season"
@property
def icon(self):
"""Icon to use in the frontend, if any."""

View file

@@ -1,8 +1,10 @@
{
"state": {
"spring": "Spring",
"summer": "Summer",
"autumn": "Autumn",
"winter": "Winter"
"season__season": {
"spring": "Spring",
"summer": "Summer",
"autumn": "Autumn",
"winter": "Winter"
}
}
}

View file

@@ -81,9 +81,7 @@ def load_translations_files(
def build_resources(
translation_cache: Dict[str, Dict[str, Any]],
components: Set[str],
category: Optional[str],
translation_cache: Dict[str, Dict[str, Any]], components: Set[str], category: str,
) -> Dict[str, Dict[str, Any]]:
"""Build the resources response for the given components."""
# Build response
@@ -96,24 +94,50 @@
domain_resources = resources.setdefault(domain, {})
# Add the translations for this component to the domain resources.
# Since clients cannot determine which platform an entity belongs to,
# all translations for a domain will be returned together.
if category is None:
domain_resources.update(translation_cache[component])
continue
# Integrations are able to provide translations for their entities under other
# integrations if they don't have an existing device class. This is done by
# using a custom device class prefixed with their domain and two underscores.
# These files are in platform specific files in the integration folder with
# names like `strings.sensor.json`.
# We are going to merge the translations for the custom device classes into
# the translations of sensor.
new_value = translation_cache[component].get(category)
if new_value is None:
continue
if isinstance(new_value, dict):
domain_resources.setdefault(category, {}).update(new_value)
else:
cur_value = domain_resources.get(category)
# If not exists, set value.
if cur_value is None:
domain_resources[category] = new_value
# If exists, and a list, append
elif isinstance(cur_value, list):
cur_value.append(new_value)
# If exists, and a dict make it a list with 2 entries.
else:
domain_resources[category] = [cur_value, new_value]
# Merge all the lists
for domain, domain_resources in list(resources.items()):
if not isinstance(domain_resources.get(category), list):
continue
merged = {}
for entry in domain_resources[category]:
if isinstance(entry, dict):
merged.update(entry)
else:
_LOGGER.error(
"An integration providing translations for %s provided invalid data: %s",
domain,
entry,
)
domain_resources[category] = merged
return {"component": resources}
@@ -183,7 +207,7 @@ async def async_get_component_cache(
async def async_get_translations(
hass: HomeAssistantType,
language: str,
category: Optional[str] = None,
category: str,
integration: Optional[str] = None,
config_flow: Optional[bool] = None,
) -> Dict[str, Any]:

View file

@@ -7,6 +7,9 @@ from typing import Dict
import voluptuous as vol
from voluptuous.humanize import humanize_error
import homeassistant.helpers.config_validation as cv
from homeassistant.util import slugify
from .model import Config, Integration
_LOGGER = logging.getLogger(__name__)
@@ -88,7 +91,9 @@ def gen_strings_schema(config: Config, integration: Integration):
vol.Optional("trigger_type"): {str: str},
vol.Optional("trigger_subtype"): {str: str},
},
vol.Optional("state"): {str: str},
vol.Optional("state"): cv.schema_with_slug_keys(
cv.schema_with_slug_keys(str)
),
}
)
@@ -109,6 +114,33 @@ def gen_auth_schema(config: Config, integration: Integration):
)
def gen_platform_strings_schema(config: Config, integration: Integration):
"""Generate platform strings schema like strings.sensor.json."""
def device_class_validator(value):
"""Key validator."""
if not value.startswith(f"{integration.domain}__"):
raise vol.Invalid(
f"Device class need to start with '{integration.domain}__'. Key {value} is invalid"
)
slug_friendly = value.replace("__", "_", 1)
slugged = slugify(slug_friendly)
if slug_friendly != slugged:
raise vol.Invalid(f"invalid device class {value}")
return value
return vol.Schema(
{
vol.Optional("state"): cv.schema_with_slug_keys(
cv.schema_with_slug_keys(str), slug_validator=device_class_validator
)
}
)
ONBOARDING_SCHEMA = vol.Schema({vol.Required("area"): {str: str}})
@@ -116,24 +148,35 @@ def validate_translation_file(config: Config, integration: Integration):
"""Validate translation files for integration."""
strings_file = integration.path / "strings.json"
if not strings_file.is_file():
return
if strings_file.is_file():
strings = json.loads(strings_file.read_text())
strings = json.loads(strings_file.read_text())
if integration.domain == "auth":
schema = gen_auth_schema(config, integration)
elif integration.domain == "onboarding":
schema = ONBOARDING_SCHEMA
else:
schema = gen_strings_schema(config, integration)
if integration.domain == "auth":
schema = gen_auth_schema(config, integration)
elif integration.domain == "onboarding":
schema = ONBOARDING_SCHEMA
else:
schema = gen_strings_schema(config, integration)
try:
schema(strings)
except vol.Invalid as err:
integration.add_error(
"translations", f"Invalid strings.json: {humanize_error(strings, err)}"
)
try:
schema(strings)
except vol.Invalid as err:
integration.add_error(
"translations", f"Invalid strings.json: {humanize_error(strings, err)}"
)
for path in integration.path.glob("strings.*.json"):
strings = json.loads(path.read_text())
schema = gen_platform_strings_schema(config, integration)
try:
schema(strings)
except vol.Invalid as err:
msg = f"Invalid {path.name}: {humanize_error(strings, err)}"
if config.specific_integrations:
integration.add_warning("translations", msg)
else:
integration.add_error("translations", msg)
def validate(integrations: Dict[str, Integration], config: Config):

View file

@@ -3,11 +3,10 @@ from pprint import pprint
import requests
from .const import CORE_PROJECT_ID
from .util import get_lokalise_token
def get_api(project_id=CORE_PROJECT_ID, debug=False) -> "Lokalise":
def get_api(project_id, debug=False) -> "Lokalise":
"""Get Lokalise API."""
return Lokalise(project_id, get_lokalise_token(), debug)

View file

@@ -11,29 +11,38 @@ def create_lookup(results):
return {key["key_name"]["web"]: key for key in results}
def rename_keys(to_migrate):
def rename_keys(project_id, to_migrate):
"""Rename keys.
to_migrate is Dict[from_key] = to_key.
"""
updates = []
lokalise = get_api()
lokalise = get_api(project_id)
from_key_data = lokalise.keys_list({"filter_keys": ",".join(to_migrate)})
if len(from_key_data) != len(to_migrate):
print(
f"Lookin up keys in Lokalise returns {len(from_key_data)} results, expected {len(to_migrate)}"
)
return
from_key_lookup = create_lookup(from_key_data)
print("Gathering IDs")
for from_key, to_key in to_migrate.items():
key_data = lokalise.keys_list({"filter_keys": from_key})
if len(key_data) != 1:
print(
f"Lookin up {from_key} key in Lokalise returns {len(key_data)} results, expected 1"
)
continue
updates.append({"key_id": key_data[0]["key_id"], "key_name": to_key})
updates.append(
{"key_id": from_key_lookup[from_key]["key_id"], "key_name": to_key}
)
pprint(updates)
print()
while input("Type YES to confirm: ") != "YES":
pass
return
print()
print("Updating keys")
pprint(lokalise.keys_bulk_update(updates).json())
@@ -123,7 +132,7 @@ def find_and_rename_keys():
to_key = f"component::{integration.name}::title"
to_migrate[from_key] = to_key
rename_keys(to_migrate)
rename_keys(CORE_PROJECT_ID, to_migrate)
def find_different_languages():
@@ -163,6 +172,22 @@ def interactive_update():
def run():
"""Migrate translations."""
interactive_update()
rename_keys(
CORE_PROJECT_ID,
{
"component::moon::platform::sensor::state::new_moon": "component::moon::platform::sensor::state::moon__phase::new_moon",
"component::moon::platform::sensor::state::waxing_crescent": "component::moon::platform::sensor::state::moon__phase::waxing_crescent",
"component::moon::platform::sensor::state::first_quarter": "component::moon::platform::sensor::state::moon__phase::first_quarter",
"component::moon::platform::sensor::state::waxing_gibbous": "component::moon::platform::sensor::state::moon__phase::waxing_gibbous",
"component::moon::platform::sensor::state::full_moon": "component::moon::platform::sensor::state::moon__phase::full_moon",
"component::moon::platform::sensor::state::waning_gibbous": "component::moon::platform::sensor::state::moon__phase::waning_gibbous",
"component::moon::platform::sensor::state::last_quarter": "component::moon::platform::sensor::state::moon__phase::last_quarter",
"component::moon::platform::sensor::state::waning_crescent": "component::moon::platform::sensor::state::moon__phase::waning_crescent",
"component::season::platform::sensor::state::spring": "component::season::platform::sensor::state::season__season__::spring",
"component::season::platform::sensor::state::summer": "component::season::platform::sensor::state::season__season__::summer",
"component::season::platform::sensor::state::autumn": "component::season::platform::sensor::state::season__season__::autumn",
"component::season::platform::sensor::state::winter": "component::season::platform::sensor::state::season__season__::winter",
},
)
return 0

View file

@@ -11,8 +11,6 @@ import homeassistant.helpers.translation as translation
from homeassistant.loader import async_get_integration
from homeassistant.setup import async_setup_component
from tests.common import mock_coro
@pytest.fixture
def mock_config_flows():
@@ -111,14 +109,13 @@ def test_load_translations_files(hass):
async def test_get_translations(hass, mock_config_flows):
"""Test the get translations helper."""
translations = await translation.async_get_translations(hass, "en")
translations = await translation.async_get_translations(hass, "en", "state")
assert translations == {}
assert await async_setup_component(hass, "switch", {"switch": {"platform": "test"}})
translations = await translation.async_get_translations(hass, "en")
translations = await translation.async_get_translations(hass, "en", "state")
assert translations["component.switch.something"] == "else"
assert translations["component.switch.state.string1"] == "Value 1"
assert translations["component.switch.state.string2"] == "Value 2"
@@ -128,12 +125,14 @@ async def test_get_translations(hass, mock_config_flows):
assert translations["component.switch.state.string2"] == "German Value 2"
# Test a partial translation
translations = await translation.async_get_translations(hass, "es")
translations = await translation.async_get_translations(hass, "es", "state")
assert translations["component.switch.state.string1"] == "Spanish Value 1"
assert translations["component.switch.state.string2"] == "Value 2"
# Test that an untranslated language falls back to English.
translations = await translation.async_get_translations(hass, "invalid-language")
translations = await translation.async_get_translations(
hass, "invalid-language", "state"
)
assert translations["component.switch.state.string1"] == "Value 1"
assert translations["component.switch.state.string2"] == "Value 2"
@@ -145,7 +144,7 @@ async def test_get_translations_loads_config_flows(hass, mock_config_flows):
integration.name = "Component 1"
with patch.object(
translation, "component_translation_path", return_value=mock_coro("bla.json")
translation, "component_translation_path", return_value="bla.json"
), patch.object(
translation,
"load_translations_files",
@@ -155,11 +154,10 @@ async def test_get_translations_loads_config_flows(hass, mock_config_flows):
return_value=integration,
):
translations = await translation.async_get_translations(
hass, "en", config_flow=True
hass, "en", "hello", config_flow=True
)
assert translations == {
"component.component1.title": "Component 1",
"component.component1.hello": "world",
}
@@ -179,17 +177,16 @@ async def test_get_translations_while_loading_components(hass):
return {"component1": {"hello": "world"}}
with patch.object(
translation, "component_translation_path", return_value=mock_coro("bla.json")
translation, "component_translation_path", return_value="bla.json"
), patch.object(
translation, "load_translations_files", side_effect=mock_load_translation_files,
), patch(
"homeassistant.helpers.translation.async_get_integration",
return_value=integration,
):
translations = await translation.async_get_translations(hass, "en")
translations = await translation.async_get_translations(hass, "en", "hello")
assert translations == {
"component.component1.title": "Component 1",
"component.component1.hello": "world",
}
@@ -206,3 +203,39 @@ async def test_get_translation_categories(hass):
hass, "en", "device_automation", None, True
)
assert "component.light.device_automation.action_type.turn_on" in translations
async def test_translation_merging(hass, caplog):
"""Test we merge translations of two integrations."""
hass.config.components.add("sensor.moon")
hass.config.components.add("sensor.season")
hass.config.components.add("sensor")
translations = await translation.async_get_translations(hass, "en", "state")
assert "component.sensor.state.moon__phase.first_quarter" in translations
assert "component.sensor.state.season__season.summer" in translations
# Merge in some bad translation data
integration = Mock(file_path=pathlib.Path(__file__))
hass.config.components.add("sensor.bad_translations")
with patch.object(
translation, "component_translation_path", return_value="bla.json"
), patch.object(
translation,
"load_translations_files",
return_value={"sensor.bad_translations": {"state": "bad data"}},
), patch(
"homeassistant.helpers.translation.async_get_integration",
return_value=integration,
):
translations = await translation.async_get_translations(hass, "en", "state")
assert "component.sensor.state.moon__phase.first_quarter" in translations
assert "component.sensor.state.season__season.summer" in translations
assert (
"An integration providing translations for sensor provided invalid data: bad data"
in caplog.text
)