Migrate translations_develop script (#33933)
* Migrate translations_develop script
* Fix lint
parent 294a2d2460
commit 8e6e8dfbe0
6 changed files with 76 additions and 68 deletions
@@ -84,7 +84,15 @@ def main():
     print("Running script/translations_develop to pick up new translation strings.")
     subprocess.run(
-        ["script/translations_develop", "--integration", info.domain], **pipe_null
+        [
+            "python",
+            "-m",
+            "script.translations",
+            "develop",
+            "--integration",
+            info.domain,
+        ],
+        **pipe_null,
     )
     print()
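For orientation, a minimal sketch of what this caller (apparently the scaffold helper, given info.domain and pipe_null) now runs, assuming the Home Assistant repository root as the working directory; the "hue" domain is a hypothetical example:

# Sketch only: the helper shells out to the translations package as a module
# instead of the old standalone script/translations_develop script.
# "hue" is a hypothetical integration domain; run from the repo root.
import subprocess

subprocess.run(
    ["python", "-m", "script.translations", "develop", "--integration", "hue"],
    check=False,
)
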
@@ -1,20 +1,15 @@
 """Validate manifests."""
 import argparse
+import importlib
 from pathlib import Path
 import sys
 
-from . import clean, download, error, upload
+from . import error, util
 
 
 def get_arguments() -> argparse.Namespace:
     """Get parsed passed in arguments."""
-    parser = argparse.ArgumentParser(description="Home Assistant Scaffolder")
-    parser.add_argument("action", type=str, choices=["download", "clean", "upload"])
-    parser.add_argument("--debug", action="store_true", help="Enable log output")
-
-    arguments = parser.parse_args()
-
-    return arguments
+    return util.get_base_arg_parser().parse_known_args()[0]
 
 
 def main():
@@ -25,12 +20,8 @@ def main():
 
     args = get_arguments()
 
-    if args.action == "download":
-        download.run(args)
-    elif args.action == "upload":
-        upload.run(args)
-    elif args.action == "clean":
-        clean.run()
+    module = importlib.import_module(f".{args.action}", "script.translations")
+    module.run()
 
     return 0
 
@@ -42,3 +33,7 @@ if __name__ == "__main__":
         print()
         print(f"Fatal Error: {err.reason}")
         sys.exit(err.exit_code)
+    except (KeyboardInterrupt, EOFError):
+        print()
+        print("Aborted!")
+        sys.exit(2)
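The package entry point (the relative imports suggest script/translations/__main__.py) now picks the submodule dynamically instead of hard-coding an if/elif chain. A standalone sketch of that importlib dispatch, with the package name taken from the diff:

# Minimal sketch of the dispatch shown above. Each action submodule
# (download, clean, upload, develop) is expected to expose a run() callable.
import importlib


def dispatch(action: str) -> None:
    """Import script.translations.<action> and call its run() entry point."""
    module = importlib.import_module(f".{action}", "script.translations")
    module.run()

Because get_arguments() uses parse_known_args(), the entry point only consumes the action and --debug; flags it does not define (such as --integration) are left on the command line for the selected submodule, which re-parses the full argv with its own extended parser.
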
script/translations_develop → script/translations/develop.py (56 changes, Executable file → Normal file)

@@ -1,19 +1,18 @@
-#!/usr/bin/env python
-
-# Compile the current translation strings files for testing
-
+"""Compile the current translation strings files for testing."""
 import argparse
 import json
-import os
 from pathlib import Path
 from shutil import rmtree
-import subprocess
 import sys
 
+from . import download, upload
+from .const import INTEGRATIONS_DIR
+from .util import get_base_arg_parser
+
 
 def valid_integration(integration):
     """Test if it's a valid integration."""
-    if not Path(f"homeassistant/components/{integration}").exists():
+    if not (INTEGRATIONS_DIR / integration).is_dir():
         raise argparse.ArgumentTypeError(
             f"The integration {integration} does not exist."
         )
@@ -23,22 +22,15 @@ def valid_integration(integration):
 
 def get_arguments() -> argparse.Namespace:
     """Get parsed passed in arguments."""
-    parser = argparse.ArgumentParser(description="Develop Translations")
+    parser = get_base_arg_parser()
     parser.add_argument(
         "--integration", type=valid_integration, help="Integration to process."
     )
-
-    arguments = parser.parse_args()
-
-    return arguments
+    return parser.parse_args()
 
 
-def main():
+def run():
     """Run the script."""
-    if not os.path.isfile("requirements_all.txt"):
-        print("Run this from HA root dir")
-        return
-
     args = get_arguments()
     if args.integration:
         integration = args.integration
@@ -53,29 +45,19 @@ def main():
     print()
     integration = input("Integration to process: ")
 
-    download_dir = Path("build/translations-download")
-
-    if download_dir.is_dir():
-        rmtree(str(download_dir))
-
-    download_dir.mkdir(parents=True)
-
-    subprocess.run("script/translations_upload_merge.py")
-
-    raw_data = json.loads(Path("build/translations-upload.json").read_text())
-
-    if integration not in raw_data["component"]:
+    translations = upload.generate_upload_data()
+
+    if integration not in translations["component"]:
         print("Integration has no strings.json")
         sys.exit(1)
 
-    Path("build/translations-download/en.json").write_text(
-        json.dumps({"component": {integration: raw_data["component"][integration]}})
+    if download.DOWNLOAD_DIR.is_dir():
+        rmtree(str(download.DOWNLOAD_DIR))
+
+    download.DOWNLOAD_DIR.mkdir(parents=True)
+
+    (download.DOWNLOAD_DIR / "en.json").write_text(
+        json.dumps({"component": {integration: translations["component"][integration]}})
     )
 
-    subprocess.run(
-        ["script/translations_download_split.py", "--integration", "{integration}"]
-    )
-
-
-if __name__ == "__main__":
-    main()
+    download.write_integration_translations()
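develop.py now builds its single-integration en.json from the in-repo strings (via upload.generate_upload_data()) instead of the old merge/split helper scripts. A rough, self-contained sketch of that filtering step, with stand-in data and a literal path in place of download.DOWNLOAD_DIR:

# Stand-in data; the real flow gets `translations` from
# upload.generate_upload_data() and writes into download.DOWNLOAD_DIR.
import json
from pathlib import Path

translations = {"component": {"hue": {"title": "Philips Hue"}}}  # hypothetical
integration = "hue"

out_dir = Path("build/translations-download")
out_dir.mkdir(parents=True, exist_ok=True)
(out_dir / "en.json").write_text(
    json.dumps({"component": {integration: translations["component"][integration]}})
)
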
@@ -13,7 +13,7 @@ from .error import ExitApp
 from .util import get_lokalise_token
 
 FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json")
-LOCAL_DIR = pathlib.Path("build/translations-download").absolute()
+DOWNLOAD_DIR = pathlib.Path("build/translations-download").absolute()
 
 
 def run_download_docker():
@@ -24,7 +24,7 @@ def run_download_docker():
         "docker",
         "run",
         "-v",
-        f"{LOCAL_DIR}:/opt/dest/locale",
+        f"{DOWNLOAD_DIR}:/opt/dest/locale",
         "--rm",
         f"lokalise/lokalise-cli@sha256:{DOCKER_IMAGE}",
         # Lokalise command
@@ -133,14 +133,19 @@ def save_language_translations(lang, translations):
         save_json(path, platform_translations)
 
 
-def run(args):
-    """Run the script."""
-    LOCAL_DIR.mkdir(parents=True, exist_ok=True)
-
-    run_download_docker()
-
+def write_integration_translations():
+    """Write integration translations."""
     paths = glob.iglob("build/translations-download/*.json")
     for path in paths:
        lang = get_language(path)
        translations = load_json(path)
        save_language_translations(lang, translations)
+
+
+def run():
+    """Run the script."""
+    DOWNLOAD_DIR.mkdir(parents=True, exist_ok=True)
+
+    run_download_docker()
+
+    write_integration_translations()
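In the download module (referenced elsewhere as script.translations.download), splitting the old run(args) into write_integration_translations() plus run() lets the develop flow reuse the local file processing without triggering the Lokalise Docker download. A self-contained sketch of that per-file loop, with the module's helpers stubbed out:

# The real module provides get_language, load_json and
# save_language_translations; they are stubbed here for illustration.
import glob
import json
from pathlib import Path


def get_language(path: str) -> str:
    """Stand-in: the real helper derives the language code from the file name."""
    return Path(path).stem


def save_language_translations(lang: str, translations: dict) -> None:
    """Stand-in: the real helper writes the per-integration translation files."""
    component_count = len(translations.get("component", {}))
    print(f"would save {lang}: {component_count} components")


for path in glob.iglob("build/translations-download/*.json"):
    lang = get_language(path)
    translations = json.loads(Path(path).read_text())
    save_language_translations(lang, translations)
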
@@ -49,13 +49,8 @@ def run_upload_docker():
         raise ExitApp("Failed to download translations")
 
 
-def run(args):
-    """Run the script."""
-    if get_current_branch() != "dev" and os.environ.get("AZURE_BRANCH") != "dev":
-        raise ExitApp(
-            "Please only run the translations upload script from a clean checkout of dev."
-        )
-
+def generate_upload_data():
+    """Generate the data for uploading."""
     translations = {"component": {}}
 
     for path in INTEGRATIONS_DIR.glob(f"*{os.sep}strings*.json"):
@@ -71,7 +66,19 @@ def run(args):
 
         parent.update(json.loads(path.read_text()))
 
+    return translations
+
+
+def run():
+    """Run the script."""
+    if get_current_branch() != "dev" and os.environ.get("AZURE_BRANCH") != "dev":
+        raise ExitApp(
+            "Please only run the translations upload script from a clean checkout of dev."
+        )
+
+    translations = generate_upload_data()
+
     LOCAL_FILE.parent.mkdir(parents=True, exist_ok=True)
     LOCAL_FILE.write_text(json.dumps(translations, indent=4, sort_keys=True))
 
-    # run_upload_docker()
+    run_upload_docker()
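Besides activating the previously commented-out run_upload_docker() call, the upload module now exposes generate_upload_data() so develop.py can reuse it. A rough sketch of what that function assembles, simplified to plain strings.json files (the real code globs INTEGRATIONS_DIR and also merges platform-specific strings.<platform>.json files into their parent):

# Simplified sketch: collect each integration's strings.json into one
# upload payload keyed by domain. Paths are relative to the repo root.
import json
from pathlib import Path

components = Path("homeassistant/components")
translations = {"component": {}}

for path in components.glob("*/strings.json"):
    domain = path.parent.name
    translations["component"][domain] = json.loads(path.read_text())
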
@@ -1,4 +1,5 @@
 """Translation utils."""
+import argparse
 import os
 import pathlib
 import subprocess
@@ -6,6 +7,16 @@ import subprocess
 from .error import ExitApp
 
 
+def get_base_arg_parser():
+    """Get a base argument parser."""
+    parser = argparse.ArgumentParser(description="Home Assistant Translations")
+    parser.add_argument(
+        "action", type=str, choices=["download", "clean", "upload", "develop"]
+    )
+    parser.add_argument("--debug", action="store_true", help="Enable log output")
+    return parser
+
+
 def get_lokalise_token():
     """Get lokalise token."""
     token = os.environ.get("LOKALISE_TOKEN")
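The new shared parser in util.py is what makes the two-stage parsing between the entry point and the subcommands work. A small sketch of that interaction; the argv value is a hypothetical command line and --integration is simplified to a plain string here:

# Sketch of how the base parser cooperates with per-script options.
import argparse

parser = argparse.ArgumentParser(description="Home Assistant Translations")
parser.add_argument(
    "action", type=str, choices=["download", "clean", "upload", "develop"]
)
parser.add_argument("--debug", action="store_true", help="Enable log output")

argv = ["develop", "--integration", "hue"]  # hypothetical command line

# __main__.py only needs the action, so unknown flags are tolerated:
base_args, _unknown = parser.parse_known_args(argv)

# develop.py extends the same base parser and parses everything:
parser.add_argument("--integration", type=str, help="Integration to process.")
full_args = parser.parse_args(argv)

print(base_args.action, full_args.integration)  # -> develop hue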