Enable Ruff PTH for the script directory (#124441)
* Enable Ruff PTH for the script directory
* Address review comments
* Fix translations script
* Update script/hassfest/config_flow.py

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
parent 7752789c3a
commit 1db68327f9

18 changed files with 125 additions and 163 deletions
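
Ruff's PTH rules come from flake8-pathlib and flag os.path and open() calls that have pathlib equivalents. A minimal sketch of the kind of rewrite this commit applies throughout (illustrative only; the "demo" component name is made up):

import os.path
from pathlib import Path

# PTH118 flags os.path.join(); the pathlib spelling joins with the / operator.
old_style = os.path.join("homeassistant", "components", "demo")
new_style = Path("homeassistant") / "components" / "demo"
assert Path(old_style) == new_style
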

--- a/pyproject.toml
+++ b/pyproject.toml
@@ -734,6 +734,7 @@ select = [
     "PIE", # flake8-pie
     "PL", # pylint
     "PT", # flake8-pytest-style
+    "PTH", # flake8-pathlib
     "PYI", # flake8-pyi
     "RET", # flake8-return
     "RSE", # flake8-raise
@@ -905,5 +906,9 @@ split-on-trailing-comma = false
 "homeassistant/scripts/*" = ["T201"]
 "script/*" = ["T20"]
 
+# Temporary
+"homeassistant/**" = ["PTH"]
+"tests/**" = ["PTH"]
+
 [tool.ruff.lint.mccabe]
 max-complexity = 25
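
The new per-file-ignores entries keep PTH enabled repo-wide while temporarily exempting the homeassistant/** and tests/** trees, so in practice the rules now bite in script/ and the other non-exempted directories. A conceptual sketch of how glob-based ignores behave (an illustration of the idea, not Ruff's actual implementation):

from fnmatch import fnmatch

def pth_checked(path: str) -> bool:
    # Files matching an ignore pattern skip the PTH rules entirely.
    ignored = ("homeassistant/*", "tests/*")
    return not any(fnmatch(path, pattern) for pattern in ignored)

assert pth_checked("script/version_bump.py")
assert not pth_checked("homeassistant/core.py")
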

--- a/script/gen_requirements_all.py
+++ b/script/gen_requirements_all.py
@@ -6,7 +6,6 @@ from __future__ import annotations
 import difflib
 import importlib
 from operator import itemgetter
-import os
 from pathlib import Path
 import pkgutil
 import re
@@ -82,8 +81,8 @@ URL_PIN = (
 )
 
 
-CONSTRAINT_PATH = os.path.join(
-    os.path.dirname(__file__), "../homeassistant/package_constraints.txt"
+CONSTRAINT_PATH = (
+    Path(__file__).parent.parent / "homeassistant" / "package_constraints.txt"
 )
 CONSTRAINT_BASE = """
 # Constrain pycryptodome to avoid vulnerability
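
Path(__file__).parent.parent walks from script/gen_requirements_all.py up to the repository root, so the constraints file is addressed without a literal ".." segment. A quick equivalence check (the /repo prefix is a hypothetical example path):

import os.path
from pathlib import Path

file = "/repo/script/gen_requirements_all.py"  # hypothetical __file__
old = os.path.join(os.path.dirname(file), "../homeassistant/package_constraints.txt")
new = Path(file).parent.parent / "homeassistant" / "package_constraints.txt"
assert os.path.normpath(old) == str(new)
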
@@ -256,8 +255,7 @@ def explore_module(package: str, explore_children: bool) -> list[str]:
 
 def core_requirements() -> list[str]:
     """Gather core requirements out of pyproject.toml."""
-    with open("pyproject.toml", "rb") as fp:
-        data = tomllib.load(fp)
+    data = tomllib.loads(Path("pyproject.toml").read_text())
     dependencies: list[str] = data["project"]["dependencies"]
     return dependencies
 
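
One subtlety here: tomllib.load() requires a file object opened in binary mode (hence the old open(..., "rb")), while Path.read_text() returns a str, so the parser entry point changes to tomllib.loads(). A minimal demonstration:

import tomllib

# loads() parses a string; load() would need a binary file object.
assert tomllib.loads('requires-python = ">=3.12"') == {"requires-python": ">=3.12"}
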
@@ -528,7 +526,7 @@ def diff_file(filename: str, content: str) -> list[str]:
 
 def main(validate: bool, ci: bool) -> int:
     """Run the script."""
-    if not os.path.isfile("requirements_all.txt"):
+    if not Path("requirements_all.txt").is_file():
         print("Run this from HA root dir")
         return 1
 
@@ -590,7 +588,7 @@ def main(validate: bool, ci: bool) -> int:
 def _get_hassfest_config() -> Config:
     """Get hassfest config."""
     return Config(
-        root=Path(".").absolute(),
+        root=Path().absolute(),
         specific_integrations=None,
         action="validate",
         requirements=True,
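
Path() with no argument already means the current directory, which is what Ruff's PTH201 rule suggests instead of spelling out Path("."):

from pathlib import Path

# PTH201: do not pass "." explicitly; the no-argument form is equivalent.
assert Path() == Path(".")
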

--- a/script/hassfest/__main__.py
+++ b/script/hassfest/__main__.py
@@ -4,7 +4,7 @@ from __future__ import annotations
 
 import argparse
 from operator import attrgetter
-import pathlib
+from pathlib import Path
 import sys
 from time import monotonic
 
@@ -63,9 +63,9 @@ ALL_PLUGIN_NAMES = [
 ]
 
 
-def valid_integration_path(integration_path: pathlib.Path | str) -> pathlib.Path:
+def valid_integration_path(integration_path: Path | str) -> Path:
     """Test if it's a valid integration."""
-    path = pathlib.Path(integration_path)
+    path = Path(integration_path)
     if not path.is_dir():
         raise argparse.ArgumentTypeError(f"{integration_path} is not a directory.")
 
@@ -109,8 +109,8 @@ def get_config() -> Config:
     )
     parser.add_argument(
         "--core-integrations-path",
-        type=pathlib.Path,
-        default=pathlib.Path("homeassistant/components"),
+        type=Path,
+        default=Path("homeassistant/components"),
         help="Path to core integrations",
     )
     parsed = parser.parse_args()
@@ -123,14 +123,11 @@ def get_config() -> Config:
             "Generate is not allowed when limiting to specific integrations"
         )
 
-    if (
-        not parsed.integration_path
-        and not pathlib.Path("requirements_all.txt").is_file()
-    ):
+    if not parsed.integration_path and not Path("requirements_all.txt").is_file():
         raise RuntimeError("Run from Home Assistant root")
 
     return Config(
-        root=pathlib.Path(".").absolute(),
+        root=Path().absolute(),
         specific_integrations=parsed.integration_path,
         action=parsed.action,
         requirements=parsed.requirements,

--- a/script/hassfest/bluetooth.py
+++ b/script/hassfest/bluetooth.py
@@ -34,19 +34,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
     if config.specific_integrations:
         return
 
-    with open(str(bluetooth_path)) as fp:
-        current = fp.read()
-        if current != content:
-            config.add_error(
-                "bluetooth",
-                "File bluetooth.py is not up to date. Run python3 -m script.hassfest",
-                fixable=True,
-            )
-        return
+    if bluetooth_path.read_text() != content:
+        config.add_error(
+            "bluetooth",
+            "File bluetooth.py is not up to date. Run python3 -m script.hassfest",
+            fixable=True,
+        )
 
 
 def generate(integrations: dict[str, Integration], config: Config) -> None:
     """Generate bluetooth file."""
     bluetooth_path = config.root / "homeassistant/generated/bluetooth.py"
-    with open(str(bluetooth_path), "w") as fp:
-        fp.write(f"{config.cache['bluetooth']}")
+    bluetooth_path.write_text(f"{config.cache['bluetooth']}")
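
This validate/generate pair is the template for the hassfest generators that follow (codeowners, config_flow, dhcp, mqtt, ssdp, usb, zeroconf): render the expected content, compare it against the file on disk with read_text(), and report a fixable error on mismatch. A self-contained sketch of the pattern (file name and content are made up):

from pathlib import Path
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    generated = Path(tmp) / "bluetooth.py"
    expected = "MATCHERS = []\n"  # hypothetical rendered content
    generated.write_text(expected)  # what generate() does
    assert generated.read_text() == expected  # what validate() checks
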

--- a/script/hassfest/codeowners.py
+++ b/script/hassfest/codeowners.py
@@ -98,18 +98,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
     if config.specific_integrations:
         return
 
-    with open(str(codeowners_path)) as fp:
-        if fp.read().strip() != content:
-            config.add_error(
-                "codeowners",
-                "File CODEOWNERS is not up to date. Run python3 -m script.hassfest",
-                fixable=True,
-            )
-        return
+    if codeowners_path.read_text() != content + "\n":
+        config.add_error(
+            "codeowners",
+            "File CODEOWNERS is not up to date. Run python3 -m script.hassfest",
+            fixable=True,
+        )
 
 
 def generate(integrations: dict[str, Integration], config: Config) -> None:
     """Generate CODEOWNERS."""
     codeowners_path = config.root / "CODEOWNERS"
-    with open(str(codeowners_path), "w") as fp:
-        fp.write(f"{config.cache['codeowners']}\n")
+    codeowners_path.write_text(f"{config.cache['codeowners']}\n")
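
Note that the comparison also became stricter here: the old check stripped whitespace before comparing, while the new one requires the exact content plus a trailing newline, so whitespace-only drift is now flagged too:

content = "# CODEOWNERS"
on_disk = "# CODEOWNERS"  # file missing its trailing newline
assert on_disk.strip() == content  # old check: passes
assert on_disk != content + "\n"  # new check: reports the file as stale
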

--- a/script/hassfest/config_flow.py
+++ b/script/hassfest/config_flow.py
@@ -3,7 +3,6 @@
 from __future__ import annotations
 
 import json
-import pathlib
 from typing import Any
 
 from .brand import validate as validate_brands
@@ -216,36 +215,31 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
     if config.specific_integrations:
         return
 
-    brands = Brand.load_dir(pathlib.Path(config.root / "homeassistant/brands"), config)
+    brands = Brand.load_dir(config.root / "homeassistant/brands", config)
     validate_brands(brands, integrations, config)
 
-    with open(str(config_flow_path)) as fp:
-        if fp.read() != content:
-            config.add_error(
-                "config_flow",
-                "File config_flows.py is not up to date. "
-                "Run python3 -m script.hassfest",
-                fixable=True,
-            )
+    if config_flow_path.read_text() != content:
+        config.add_error(
+            "config_flow",
+            "File config_flows.py is not up to date. Run python3 -m script.hassfest",
+            fixable=True,
+        )
 
     config.cache["integrations"] = content = _generate_integrations(
         brands, integrations, config
     )
-    with open(str(integrations_path)) as fp:
-        if fp.read() != content + "\n":
-            config.add_error(
-                "config_flow",
-                "File integrations.json is not up to date. "
-                "Run python3 -m script.hassfest",
-                fixable=True,
-            )
+    if integrations_path.read_text() != content + "\n":
+        config.add_error(
+            "config_flow",
+            "File integrations.json is not up to date. "
+            "Run python3 -m script.hassfest",
+            fixable=True,
+        )
 
 
 def generate(integrations: dict[str, Integration], config: Config) -> None:
     """Generate config flow file."""
     config_flow_path = config.root / "homeassistant/generated/config_flows.py"
     integrations_path = config.root / "homeassistant/generated/integrations.json"
-    with open(str(config_flow_path), "w") as fp:
-        fp.write(f"{config.cache['config_flow']}")
-    with open(str(integrations_path), "w") as fp:
-        fp.write(f"{config.cache['integrations']}\n")
+    config_flow_path.write_text(f"{config.cache['config_flow']}")
+    integrations_path.write_text(f"{config.cache['integrations']}\n")

--- a/script/hassfest/dhcp.py
+++ b/script/hassfest/dhcp.py
@@ -32,19 +32,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
     if config.specific_integrations:
         return
 
-    with open(str(dhcp_path)) as fp:
-        current = fp.read()
-        if current != content:
-            config.add_error(
-                "dhcp",
-                "File dhcp.py is not up to date. Run python3 -m script.hassfest",
-                fixable=True,
-            )
-        return
+    if dhcp_path.read_text() != content:
+        config.add_error(
+            "dhcp",
+            "File dhcp.py is not up to date. Run python3 -m script.hassfest",
+            fixable=True,
+        )
 
 
 def generate(integrations: dict[str, Integration], config: Config) -> None:
     """Generate dhcp file."""
     dhcp_path = config.root / "homeassistant/generated/dhcp.py"
-    with open(str(dhcp_path), "w") as fp:
-        fp.write(f"{config.cache['dhcp']}")
+    dhcp_path.write_text(f"{config.cache['dhcp']}")

--- a/script/hassfest/docker.py
+++ b/script/hassfest/docker.py
@@ -103,9 +103,9 @@ LABEL "com.github.actions.color"="gray-dark"
 """
 
 
-def _get_package_versions(file: str, packages: set[str]) -> dict[str, str]:
+def _get_package_versions(file: Path, packages: set[str]) -> dict[str, str]:
     package_versions: dict[str, str] = {}
-    with open(file, encoding="UTF-8") as fp:
+    with file.open(encoding="UTF-8") as fp:
         for _, line in enumerate(fp):
             if package_versions.keys() == packages:
                 return package_versions
@@ -173,10 +173,10 @@ def _generate_files(config: Config) -> list[File]:
     ) * 1000
 
     package_versions = _get_package_versions(
-        "requirements_test.txt", {"pipdeptree", "tqdm", "uv"}
+        Path("requirements_test.txt"), {"pipdeptree", "tqdm", "uv"}
     )
     package_versions |= _get_package_versions(
-        "requirements_test_pre_commit.txt", {"ruff"}
+        Path("requirements_test_pre_commit.txt"), {"ruff"}
     )
 
     return [

--- a/script/hassfest/metadata.py
+++ b/script/hassfest/metadata.py
@@ -10,8 +10,7 @@ from .model import Config, Integration
 def validate(integrations: dict[str, Integration], config: Config) -> None:
     """Validate project metadata keys."""
     metadata_path = config.root / "pyproject.toml"
-    with open(metadata_path, "rb") as fp:
-        data = tomllib.load(fp)
+    data = tomllib.loads(metadata_path.read_text())
 
     try:
         if data["project"]["version"] != __version__:

--- a/script/hassfest/mqtt.py
+++ b/script/hassfest/mqtt.py
@@ -33,17 +33,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
     if config.specific_integrations:
         return
 
-    with open(str(mqtt_path)) as fp:
-        if fp.read() != content:
-            config.add_error(
-                "mqtt",
-                "File mqtt.py is not up to date. Run python3 -m script.hassfest",
-                fixable=True,
-            )
+    if mqtt_path.read_text() != content:
+        config.add_error(
+            "mqtt",
+            "File mqtt.py is not up to date. Run python3 -m script.hassfest",
+            fixable=True,
+        )
 
 
 def generate(integrations: dict[str, Integration], config: Config) -> None:
     """Generate MQTT file."""
     mqtt_path = config.root / "homeassistant/generated/mqtt.py"
-    with open(str(mqtt_path), "w") as fp:
-        fp.write(f"{config.cache['mqtt']}")
+    mqtt_path.write_text(f"{config.cache['mqtt']}")

--- a/script/hassfest/ssdp.py
+++ b/script/hassfest/ssdp.py
@@ -33,17 +33,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
     if config.specific_integrations:
         return
 
-    with open(str(ssdp_path)) as fp:
-        if fp.read() != content:
-            config.add_error(
-                "ssdp",
-                "File ssdp.py is not up to date. Run python3 -m script.hassfest",
-                fixable=True,
-            )
+    if ssdp_path.read_text() != content:
+        config.add_error(
+            "ssdp",
+            "File ssdp.py is not up to date. Run python3 -m script.hassfest",
+            fixable=True,
+        )
 
 
 def generate(integrations: dict[str, Integration], config: Config) -> None:
     """Generate ssdp file."""
     ssdp_path = config.root / "homeassistant/generated/ssdp.py"
-    with open(str(ssdp_path), "w") as fp:
-        fp.write(f"{config.cache['ssdp']}")
+    ssdp_path.write_text(f"{config.cache['ssdp']}")

--- a/script/hassfest/usb.py
+++ b/script/hassfest/usb.py
@@ -35,19 +35,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
     if config.specific_integrations:
         return
 
-    with open(str(usb_path)) as fp:
-        current = fp.read()
-        if current != content:
-            config.add_error(
-                "usb",
-                "File usb.py is not up to date. Run python3 -m script.hassfest",
-                fixable=True,
-            )
-        return
+    if usb_path.read_text() != content:
+        config.add_error(
+            "usb",
+            "File usb.py is not up to date. Run python3 -m script.hassfest",
+            fixable=True,
+        )
 
 
 def generate(integrations: dict[str, Integration], config: Config) -> None:
     """Generate usb file."""
     usb_path = config.root / "homeassistant/generated/usb.py"
-    with open(str(usb_path), "w") as fp:
-        fp.write(f"{config.cache['usb']}")
+    usb_path.write_text(f"{config.cache['usb']}")

--- a/script/hassfest/zeroconf.py
+++ b/script/hassfest/zeroconf.py
@@ -90,19 +90,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
     if config.specific_integrations:
         return
 
-    with open(str(zeroconf_path)) as fp:
-        current = fp.read()
-        if current != content:
-            config.add_error(
-                "zeroconf",
-                "File zeroconf.py is not up to date. Run python3 -m script.hassfest",
-                fixable=True,
-            )
-        return
+    if zeroconf_path.read_text() != content:
+        config.add_error(
+            "zeroconf",
+            "File zeroconf.py is not up to date. Run python3 -m script.hassfest",
+            fixable=True,
+        )
 
 
 def generate(integrations: dict[str, Integration], config: Config) -> None:
     """Generate zeroconf file."""
     zeroconf_path = config.root / "homeassistant/generated/zeroconf.py"
-    with open(str(zeroconf_path), "w") as fp:
-        fp.write(f"{config.cache['zeroconf']}")
+    zeroconf_path.write_text(f"{config.cache['zeroconf']}")

--- a/script/inspect_schemas.py
+++ b/script/inspect_schemas.py
@@ -2,7 +2,7 @@
 """Inspect all component SCHEMAS."""
 
 import importlib
-import os
+from pathlib import Path
 import pkgutil
 
 from homeassistant.config import _identify_config_schema
@@ -20,7 +20,7 @@ def explore_module(package):
 
 def main():
     """Run the script."""
-    if not os.path.isfile("requirements_all.txt"):
+    if not Path("requirements_all.txt").is_file():
         print("Run this from HA root dir")
         return
 

--- a/script/lint_and_test.py
+++ b/script/lint_and_test.py
@@ -9,6 +9,7 @@ from collections import namedtuple
 from contextlib import suppress
 import itertools
 import os
+from pathlib import Path
 import re
 import shlex
 import sys
@@ -63,7 +64,7 @@ async def async_exec(*args, display=False):
     """Execute, return code & log."""
     argsp = []
     for arg in args:
-        if os.path.isfile(arg):
+        if Path(arg).is_file():
             argsp.append(f"\\\n {shlex.quote(arg)}")
         else:
             argsp.append(shlex.quote(arg))
@@ -132,7 +133,7 @@ async def ruff(files):
 
 async def lint(files):
     """Perform lint."""
-    files = [file for file in files if os.path.isfile(file)]
+    files = [file for file in files if Path(file).is_file()]
     res = sorted(
         itertools.chain(
             *await asyncio.gather(
@@ -164,7 +165,7 @@ async def lint(files):
 async def main():
     """Run the main loop."""
     # Ensure we are in the homeassistant root
-    os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
+    os.chdir(Path(__file__).parent.parent)
 
     files = await git()
     if not files:
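
os.chdir() accepts any os.PathLike object (PEP 519), so the Path can be passed directly. One nuance: the old code called os.path.realpath(), which resolves symlinks, while Path(__file__).parent.parent does not (Path.resolve() would restore that). The two-hop equivalence, using a hypothetical path:

import os.path
from pathlib import Path

file = "/repo/script/lint_and_test.py"  # hypothetical __file__
assert os.path.dirname(os.path.dirname(file)) == str(Path(file).parent.parent)
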
@@ -194,7 +195,7 @@ async def main():
             gen_req = True  # requirements script for components
         # Find test files...
         if fname.startswith("tests/"):
-            if "/test_" in fname and os.path.isfile(fname):
+            if "/test_" in fname and Path(fname).is_file():
                 # All test helpers should be excluded
                 test_files.add(fname)
             else:
@@ -207,7 +208,7 @@ async def main():
             else:
                 parts[-1] = f"test_{parts[-1]}"
                 fname = "/".join(parts)
-                if os.path.isfile(fname):
+                if Path(fname).is_file():
                     test_files.add(fname)
 
     if gen_req:

--- a/script/split_tests.py
+++ b/script/split_tests.py
@@ -66,7 +66,7 @@ class BucketHolder:
 
     def create_ouput_file(self) -> None:
         """Create output file."""
-        with open("pytest_buckets.txt", "w") as file:
+        with Path("pytest_buckets.txt").open("w") as file:
             for idx, bucket in enumerate(self._buckets):
                 print(f"Bucket {idx+1} has {bucket.total_tests} tests")
                 file.write(bucket.get_paths_line())

--- a/script/translations/download.py
+++ b/script/translations/download.py
@@ -4,8 +4,7 @@
 from __future__ import annotations
 
 import json
-import os
-import pathlib
+from pathlib import Path
 import re
 import subprocess
 
@@ -14,7 +13,7 @@ from .error import ExitApp
 from .util import get_lokalise_token, load_json_from_path
 
 FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json")
-DOWNLOAD_DIR = pathlib.Path("build/translations-download").absolute()
+DOWNLOAD_DIR = Path("build/translations-download").absolute()
 
 
 def run_download_docker():
|
||||||
raise ExitApp("Failed to download translations")
|
raise ExitApp("Failed to download translations")
|
||||||
|
|
||||||
|
|
||||||
def save_json(filename: str, data: list | dict):
|
def save_json(filename: Path, data: list | dict) -> None:
|
||||||
"""Save JSON data to a file.
|
"""Save JSON data to a file."""
|
||||||
|
filename.write_text(json.dumps(data, sort_keys=True, indent=4), encoding="utf-8")
|
||||||
Returns True on success.
|
|
||||||
"""
|
|
||||||
data = json.dumps(data, sort_keys=True, indent=4)
|
|
||||||
with open(filename, "w", encoding="utf-8") as fdesc:
|
|
||||||
fdesc.write(data)
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def get_component_path(lang, component):
|
def get_component_path(lang, component) -> Path | None:
|
||||||
"""Get the component translation path."""
|
"""Get the component translation path."""
|
||||||
if os.path.isdir(os.path.join("homeassistant", "components", component)):
|
if (Path("homeassistant") / "components" / component).is_dir():
|
||||||
return os.path.join(
|
return (
|
||||||
"homeassistant", "components", component, "translations", f"{lang}.json"
|
Path("homeassistant")
|
||||||
|
/ "components"
|
||||||
|
/ component
|
||||||
|
/ "translations"
|
||||||
|
/ f"{lang}.json"
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def get_platform_path(lang, component, platform):
|
def get_platform_path(lang, component, platform) -> Path:
|
||||||
"""Get the platform translation path."""
|
"""Get the platform translation path."""
|
||||||
return os.path.join(
|
return (
|
||||||
"homeassistant",
|
Path("homeassistant")
|
||||||
"components",
|
/ "components"
|
||||||
component,
|
/ component
|
||||||
"translations",
|
/ "translations"
|
||||||
f"{platform}.{lang}.json",
|
/ f"{platform}.{lang}.json"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
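
save_json() collapses to a single write_text() call, and the path helpers now return Path objects so callers can use .parent and .is_dir() directly. A round-trip sketch of the new save_json() behavior (temporary directory and payload are made up):

import json
from pathlib import Path
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    target = Path(tmp) / "en.json"
    target.write_text(json.dumps({"title": "Demo"}, sort_keys=True, indent=4), encoding="utf-8")
    assert json.loads(target.read_text(encoding="utf-8")) == {"title": "Demo"}
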
@@ -107,7 +103,7 @@ def save_language_translations(lang, translations):
                 f"Skipping {lang} for {component}, as the integration doesn't seem to exist."
             )
             continue
-        os.makedirs(os.path.dirname(path), exist_ok=True)
+        path.parent.mkdir(parents=True, exist_ok=True)
         save_json(path, base_translations)
 
         if "platform" not in component_translations:
@@ -117,7 +113,7 @@ def save_language_translations(lang, translations):
             "platform"
         ].items():
             path = get_platform_path(lang, component, platform)
-            os.makedirs(os.path.dirname(path), exist_ok=True)
+            path.parent.mkdir(parents=True, exist_ok=True)
             save_json(path, platform_translations)
 
 
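
path.parent.mkdir(parents=True, exist_ok=True) is the pathlib spelling of os.makedirs(os.path.dirname(path), exist_ok=True), and it is equally idempotent:

from pathlib import Path
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    path = Path(tmp) / "translations" / "en.json"
    path.parent.mkdir(parents=True, exist_ok=True)
    path.parent.mkdir(parents=True, exist_ok=True)  # no error on a second run
    assert path.parent.is_dir()
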

--- a/script/version_bump.py
+++ b/script/version_bump.py
@@ -2,6 +2,7 @@
 """Helper script to bump the current version."""
 
 import argparse
+from pathlib import Path
 import re
 import subprocess
 
@@ -110,8 +111,7 @@ def bump_version(
 
 def write_version(version):
     """Update Home Assistant constant file with new version."""
-    with open("homeassistant/const.py") as fil:
-        content = fil.read()
+    content = Path("homeassistant/const.py").read_text()
 
     major, minor, patch = str(version).split(".", 2)
 
@@ -125,25 +125,21 @@ def write_version(version):
         "PATCH_VERSION: Final = .*\n", f'PATCH_VERSION: Final = "{patch}"\n', content
     )
 
-    with open("homeassistant/const.py", "w") as fil:
-        fil.write(content)
+    Path("homeassistant/const.py").write_text(content)
 
 
 def write_version_metadata(version: Version) -> None:
     """Update pyproject.toml file with new version."""
-    with open("pyproject.toml", encoding="utf8") as fp:
-        content = fp.read()
+    content = Path("pyproject.toml").read_text(encoding="utf8")
 
     content = re.sub(r"(version\W+=\W).+\n", f'\\g<1>"{version}"\n', content, count=1)
 
-    with open("pyproject.toml", "w", encoding="utf8") as fp:
-        fp.write(content)
+    Path("pyproject.toml").write_text(content, encoding="utf8")
 
 
 def write_ci_workflow(version: Version) -> None:
     """Update ci workflow with new version."""
-    with open(".github/workflows/ci.yaml") as fp:
-        content = fp.read()
+    content = Path(".github/workflows/ci.yaml").read_text()
 
     short_version = ".".join(str(version).split(".", maxsplit=2)[:2])
     content = re.sub(
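
Each writer in this file now follows the same read_text() / re.sub() / write_text() round trip. A self-contained version of the write_version_metadata() flow against a throwaway pyproject.toml (the version numbers are made up):

from pathlib import Path
import re
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    pyproject = Path(tmp) / "pyproject.toml"
    pyproject.write_text('version = "2024.9.0"\n', encoding="utf8")
    content = pyproject.read_text(encoding="utf8")
    content = re.sub(r"(version\W+=\W).+\n", '\\g<1>"2024.10.0"\n', content, count=1)
    pyproject.write_text(content, encoding="utf8")
    assert pyproject.read_text(encoding="utf8") == 'version = "2024.10.0"\n'
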
@@ -153,8 +149,7 @@ def write_ci_workflow(version: Version) -> None:
         count=1,
     )
 
-    with open(".github/workflows/ci.yaml", "w") as fp:
-        fp.write(content)
+    Path(".github/workflows/ci.yaml").write_text(content)
 
 
 def main() -> None:
|
Loading…
Add table
Reference in a new issue