diff --git a/.core_files.yaml b/.core_files.yaml index 6fd3a74df92..e211b8ca5ec 100644 --- a/.core_files.yaml +++ b/.core_files.yaml @@ -79,7 +79,6 @@ components: &components - homeassistant/components/group/** - homeassistant/components/hassio/** - homeassistant/components/homeassistant/** - - homeassistant/components/homeassistant_hardware/** - homeassistant/components/http/** - homeassistant/components/image/** - homeassistant/components/input_boolean/** diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index cc100c48fd8..7c08df39000 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -10,7 +10,7 @@ on: env: BUILD_TYPE: core - DEFAULT_PYTHON: "3.13" + DEFAULT_PYTHON: "3.12" PIP_TIMEOUT: 60 UV_HTTP_TIMEOUT: 60 UV_SYSTEM_PYTHON: "true" diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index fa05f6082a2..cae9795d715 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -42,7 +42,7 @@ env: MYPY_CACHE_VERSION: 9 HA_SHORT_VERSION: "2024.12" DEFAULT_PYTHON: "3.12" - ALL_PYTHON_VERSIONS: "['3.12', '3.13']" + ALL_PYTHON_VERSIONS: "['3.12']" # 10.3 is the oldest supported version # - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022) # 10.6 is the current long-term-support @@ -622,13 +622,13 @@ jobs: steps: - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.3.0 with: - python-version: ${{ matrix.python-version }} + python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - - name: Restore full Python ${{ matrix.python-version }} virtual environment + - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv uses: actions/cache/restore@v4.1.2 with: @@ -819,7 +819,11 @@ jobs: needs: - info - base - name: Split tests for full run + strategy: + fail-fast: false + matrix: + python-version: ${{ fromJson(needs.info.outputs.python_versions) }} + name: Split tests for full run Python ${{ matrix.python-version }} steps: - name: Install additional OS dependencies run: | @@ -832,11 +836,11 @@ jobs: libgammu-dev - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - - name: Set up Python ${{ env.DEFAULT_PYTHON }} + - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5.3.0 with: - python-version: ${{ env.DEFAULT_PYTHON }} + python-version: ${{ matrix.python-version }} check-latest: true - name: Restore base Python virtual environment id: cache-venv @@ -854,7 +858,7 @@ jobs: - name: Upload pytest_buckets uses: actions/upload-artifact@v4.4.3 with: - name: pytest_buckets + name: pytest_buckets-${{ matrix.python-version }} path: pytest_buckets.txt overwrite: true @@ -919,7 +923,7 @@ jobs: - name: Download pytest_buckets uses: actions/download-artifact@v4.1.8 with: - name: pytest_buckets + name: pytest_buckets-${{ matrix.python-version }} - name: Compile English translations run: | . 
venv/bin/activate diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 48e37717232..176e010c5b9 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.27.3 + uses: github/codeql-action/init@v3.27.0 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.27.3 + uses: github/codeql-action/analyze@v3.27.0 with: category: "/language:python" diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index b9f54bba081..0c8df57d5a2 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -112,7 +112,7 @@ jobs: strategy: fail-fast: false matrix: - abi: ["cp312", "cp313"] + abi: ["cp312"] arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository @@ -135,14 +135,14 @@ jobs: sed -i "/uv/d" requirements_diff.txt - name: Build wheels - uses: home-assistant/wheels@2024.11.0 + uses: home-assistant/wheels@2024.07.1 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev" + apk: "libffi-dev;openssl-dev;yaml-dev;nasm" skip-binary: aiohttp;multidict;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" @@ -156,7 +156,7 @@ jobs: strategy: fail-fast: false matrix: - abi: ["cp312", "cp313"] + abi: ["cp312"] arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository @@ -198,7 +198,6 @@ jobs: split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt - name: Create requirements for cython<3 - if: matrix.abi == 'cp312' run: | # Some dependencies still require 'cython<3' # and don't yet use isolated build environments. 
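Reviewer note: the ci.yaml hunks above give the "Split tests for full run" job a per-interpreter matrix and key the uploaded/downloaded artifact name to it (pytest_buckets-${{ matrix.python-version }}), so each Python version gets its own test split. As a hedged illustration of what a bucket file like pytest_buckets.txt encodes, the sketch below partitions test directories into a fixed number of roughly equal groups; the function names, weighting heuristic, and bucket count are assumptions for illustration only, not the repository's actual split tooling.

```python
"""Illustrative sketch only: not Home Assistant's real test-splitting script.

Shows the general idea behind a per-interpreter pytest_buckets.txt: partition
test directories into a fixed number of roughly equal buckets so parallel CI
jobs can each run one bucket.
"""
from pathlib import Path

BUCKET_COUNT = 10  # assumption for the example; the real workflow decides this


def count_tests(test_dir: Path) -> int:
    """Weight a directory by the number of test modules it contains."""
    return sum(1 for _ in test_dir.rglob("test_*.py"))


def split_into_buckets(test_dirs: list[Path], buckets: int) -> list[list[Path]]:
    """Greedy partition: assign the heaviest directory to the lightest bucket."""
    groups: list[list[Path]] = [[] for _ in range(buckets)]
    weights = [0] * buckets
    for test_dir in sorted(test_dirs, key=count_tests, reverse=True):
        lightest = weights.index(min(weights))
        groups[lightest].append(test_dir)
        weights[lightest] += count_tests(test_dir)
    return groups


if __name__ == "__main__":
    component_tests = [p for p in Path("tests/components").iterdir() if p.is_dir()]
    with Path("pytest_buckets.txt").open("w") as handle:
        for group in split_into_buckets(component_tests, BUCKET_COUNT):
            # One whitespace-separated bucket per line, consumed by one CI job each.
            handle.write(" ".join(str(p) for p in group) + "\n")
```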
@@ -209,8 +208,7 @@ jobs: cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt - name: Build wheels (old cython) - uses: home-assistant/wheels@2024.11.0 - if: matrix.abi == 'cp312' + uses: home-assistant/wheels@2024.07.1 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 @@ -225,43 +223,43 @@ jobs: pip: "'cython<3'" - name: Build wheels (part 1) - uses: home-assistant/wheels@2024.11.0 + uses: home-assistant/wheels@2024.07.1 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev" - skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl + apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtaa" - name: Build wheels (part 2) - uses: home-assistant/wheels@2024.11.0 + uses: home-assistant/wheels@2024.07.1 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev" - skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl + apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtab" - name: Build wheels (part 3) - uses: home-assistant/wheels@2024.11.0 + uses: home-assistant/wheels@2024.07.1 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: 
"bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev" - skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl + apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtac" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 56fbabe8087..f89dadda43d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.3 + rev: v0.7.2 hooks: - id: ruff args: @@ -90,7 +90,7 @@ repos: pass_filenames: false language: script types: [text] - files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$ + files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml)$ - id: hassfest-mypy-config name: hassfest-mypy-config entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config diff --git a/.strict-typing b/.strict-typing index b0fd74bce54..6a6918543ad 100644 --- a/.strict-typing +++ b/.strict-typing @@ -330,7 +330,6 @@ homeassistant.components.mysensors.* homeassistant.components.myuplink.* homeassistant.components.nam.* homeassistant.components.nanoleaf.* -homeassistant.components.nasweb.* homeassistant.components.neato.* homeassistant.components.nest.* homeassistant.components.netatmo.* @@ -340,7 +339,6 @@ homeassistant.components.nfandroidtv.* homeassistant.components.nightscout.* homeassistant.components.nissan_leaf.* homeassistant.components.no_ip.* -homeassistant.components.nordpool.* homeassistant.components.notify.* homeassistant.components.notion.* homeassistant.components.number.* diff --git a/CODEOWNERS b/CODEOWNERS index e204463695e..d039097fc82 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -40,8 +40,6 @@ build.json @home-assistant/supervisor # Integrations /homeassistant/components/abode/ @shred86 /tests/components/abode/ @shred86 -/homeassistant/components/acaia/ @zweckj -/tests/components/acaia/ @zweckj /homeassistant/components/accuweather/ @bieniu /tests/components/accuweather/ @bieniu /homeassistant/components/acmeda/ @atmurray @@ -972,8 +970,6 @@ build.json @home-assistant/supervisor /tests/components/nam/ @bieniu /homeassistant/components/nanoleaf/ @milanmeu @joostlek /tests/components/nanoleaf/ @milanmeu @joostlek -/homeassistant/components/nasweb/ @nasWebio -/tests/components/nasweb/ @nasWebio /homeassistant/components/neato/ @Santobert /tests/components/neato/ @Santobert /homeassistant/components/nederlandse_spoorwegen/ @YarmoM @@ -1014,8 +1010,6 @@ build.json @home-assistant/supervisor /homeassistant/components/noaa_tides/ @jdelaney72 /homeassistant/components/nobo_hub/ 
@echoromeo @oyvindwe /tests/components/nobo_hub/ @echoromeo @oyvindwe -/homeassistant/components/nordpool/ @gjohansson-ST -/tests/components/nordpool/ @gjohansson-ST /homeassistant/components/notify/ @home-assistant/core /tests/components/notify/ @home-assistant/core /homeassistant/components/notify_events/ @matrozov @papajojo @@ -1346,8 +1340,6 @@ build.json @home-assistant/supervisor /tests/components/siren/ @home-assistant/core @raman325 /homeassistant/components/sisyphus/ @jkeljo /homeassistant/components/sky_hub/ @rogerselwyn -/homeassistant/components/sky_remote/ @dunnmj @saty9 -/tests/components/sky_remote/ @dunnmj @saty9 /homeassistant/components/skybell/ @tkdrob /tests/components/skybell/ @tkdrob /homeassistant/components/slack/ @tkdrob @fletcherau @@ -1489,8 +1481,8 @@ build.json @home-assistant/supervisor /tests/components/tedee/ @patrickhilker @zweckj /homeassistant/components/tellduslive/ @fredrike /tests/components/tellduslive/ @fredrike -/homeassistant/components/template/ @PhracturedBlue @home-assistant/core -/tests/components/template/ @PhracturedBlue @home-assistant/core +/homeassistant/components/template/ @PhracturedBlue @tetienne @home-assistant/core +/tests/components/template/ @PhracturedBlue @tetienne @home-assistant/core /homeassistant/components/tesla_fleet/ @Bre77 /tests/components/tesla_fleet/ @Bre77 /homeassistant/components/tesla_wall_connector/ @einarhauks diff --git a/Dockerfile b/Dockerfile index 15574192093..b6d571f308e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,7 @@ ENV \ ARG QEMU_CPU # Install uv -RUN pip3 install uv==0.5.0 +RUN pip3 install uv==0.4.28 WORKDIR /usr/src @@ -55,7 +55,7 @@ RUN \ "armv7") go2rtc_suffix='arm' ;; \ *) go2rtc_suffix=${BUILD_ARCH} ;; \ esac \ - && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.7/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \ + && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.6/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \ && chmod +x /bin/go2rtc \ # Verify go2rtc can be executed && go2rtc --version diff --git a/Dockerfile.dev b/Dockerfile.dev index 48f582a1581..d05c6df425c 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -35,9 +35,6 @@ RUN \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* -# Add go2rtc binary -COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc - # Install uv RUN pip3 install uv diff --git a/build.yaml b/build.yaml index a8755bbbf5c..13618740ab8 100644 --- a/build.yaml +++ b/build.yaml @@ -1,10 +1,10 @@ image: ghcr.io/home-assistant/{arch}-homeassistant build_from: - aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0 - armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0 - armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0 - amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0 - i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0 + aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.06.1 + armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.06.1 + armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.06.1 + amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.06.1 + i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.06.1 codenotary: signer: notary@home-assistant.io base_image: notary@home-assistant.io diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index 1034223051c..dcfb6685627 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -515,7 +515,7 @@ async def 
async_from_config_dict( issue_registry.async_create_issue( hass, core.DOMAIN, - f"python_version_{required_python_version}", + "python_version", is_fixable=False, severity=issue_registry.IssueSeverity.WARNING, breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE, diff --git a/homeassistant/brands/sky.json b/homeassistant/brands/sky.json deleted file mode 100644 index 3ab0cbbe5bd..00000000000 --- a/homeassistant/brands/sky.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "domain": "sky", - "name": "Sky", - "integrations": ["sky_hub", "sky_remote"] -} diff --git a/homeassistant/components/acaia/__init__.py b/homeassistant/components/acaia/__init__.py deleted file mode 100644 index dfdb4cb935d..00000000000 --- a/homeassistant/components/acaia/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Initialize the Acaia component.""" - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant - -from .coordinator import AcaiaConfigEntry, AcaiaCoordinator - -PLATFORMS = [ - Platform.BUTTON, -] - - -async def async_setup_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool: - """Set up acaia as config entry.""" - - coordinator = AcaiaCoordinator(hass, entry) - await coordinator.async_config_entry_first_refresh() - - entry.runtime_data = coordinator - - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - - return True - - -async def async_unload_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool: - """Unload a config entry.""" - - return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/acaia/button.py b/homeassistant/components/acaia/button.py deleted file mode 100644 index 50671eecbba..00000000000 --- a/homeassistant/components/acaia/button.py +++ /dev/null @@ -1,61 +0,0 @@ -"""Button entities for Acaia scales.""" - -from collections.abc import Callable, Coroutine -from dataclasses import dataclass -from typing import Any - -from aioacaia.acaiascale import AcaiaScale - -from homeassistant.components.button import ButtonEntity, ButtonEntityDescription -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .coordinator import AcaiaConfigEntry -from .entity import AcaiaEntity - - -@dataclass(kw_only=True, frozen=True) -class AcaiaButtonEntityDescription(ButtonEntityDescription): - """Description for acaia button entities.""" - - press_fn: Callable[[AcaiaScale], Coroutine[Any, Any, None]] - - -BUTTONS: tuple[AcaiaButtonEntityDescription, ...] 
= ( - AcaiaButtonEntityDescription( - key="tare", - translation_key="tare", - press_fn=lambda scale: scale.tare(), - ), - AcaiaButtonEntityDescription( - key="reset_timer", - translation_key="reset_timer", - press_fn=lambda scale: scale.reset_timer(), - ), - AcaiaButtonEntityDescription( - key="start_stop", - translation_key="start_stop", - press_fn=lambda scale: scale.start_stop_timer(), - ), -) - - -async def async_setup_entry( - hass: HomeAssistant, - entry: AcaiaConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up button entities and services.""" - - coordinator = entry.runtime_data - async_add_entities(AcaiaButton(coordinator, description) for description in BUTTONS) - - -class AcaiaButton(AcaiaEntity, ButtonEntity): - """Representation of an Acaia button.""" - - entity_description: AcaiaButtonEntityDescription - - async def async_press(self) -> None: - """Handle the button press.""" - await self.entity_description.press_fn(self._scale) diff --git a/homeassistant/components/acaia/config_flow.py b/homeassistant/components/acaia/config_flow.py deleted file mode 100644 index 36727059c8a..00000000000 --- a/homeassistant/components/acaia/config_flow.py +++ /dev/null @@ -1,149 +0,0 @@ -"""Config flow for Acaia integration.""" - -import logging -from typing import Any - -from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, AcaiaUnknownDevice -from aioacaia.helpers import is_new_scale -import voluptuous as vol - -from homeassistant.components.bluetooth import ( - BluetoothServiceInfoBleak, - async_discovered_service_info, -) -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_ADDRESS, CONF_NAME -from homeassistant.helpers.device_registry import format_mac -from homeassistant.helpers.selector import ( - SelectOptionDict, - SelectSelector, - SelectSelectorConfig, - SelectSelectorMode, -) - -from .const import CONF_IS_NEW_STYLE_SCALE, DOMAIN - -_LOGGER = logging.getLogger(__name__) - - -class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN): - """Handle a config flow for acaia.""" - - def __init__(self) -> None: - """Initialize the config flow.""" - self._discovered: dict[str, Any] = {} - self._discovered_devices: dict[str, str] = {} - - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle a flow initialized by the user.""" - - errors: dict[str, str] = {} - - if user_input is not None: - mac = format_mac(user_input[CONF_ADDRESS]) - try: - is_new_style_scale = await is_new_scale(mac) - except AcaiaDeviceNotFound: - errors["base"] = "device_not_found" - except AcaiaError: - _LOGGER.exception("Error occurred while connecting to the scale") - errors["base"] = "unknown" - except AcaiaUnknownDevice: - return self.async_abort(reason="unsupported_device") - else: - await self.async_set_unique_id(mac) - self._abort_if_unique_id_configured() - - if not errors: - return self.async_create_entry( - title=self._discovered_devices[user_input[CONF_ADDRESS]], - data={ - CONF_ADDRESS: mac, - CONF_IS_NEW_STYLE_SCALE: is_new_style_scale, - }, - ) - - for device in async_discovered_service_info(self.hass): - self._discovered_devices[device.address] = device.name - - if not self._discovered_devices: - return self.async_abort(reason="no_devices_found") - - options = [ - SelectOptionDict( - value=device_mac, - label=f"{device_name} ({device_mac})", - ) - for device_mac, device_name in self._discovered_devices.items() - ] - - return self.async_show_form( - step_id="user", 
- data_schema=vol.Schema( - { - vol.Required(CONF_ADDRESS): SelectSelector( - SelectSelectorConfig( - options=options, - mode=SelectSelectorMode.DROPDOWN, - ) - ) - } - ), - errors=errors, - ) - - async def async_step_bluetooth( - self, discovery_info: BluetoothServiceInfoBleak - ) -> ConfigFlowResult: - """Handle a discovered Bluetooth device.""" - - self._discovered[CONF_ADDRESS] = mac = format_mac(discovery_info.address) - self._discovered[CONF_NAME] = discovery_info.name - - await self.async_set_unique_id(mac) - self._abort_if_unique_id_configured() - - try: - self._discovered[CONF_IS_NEW_STYLE_SCALE] = await is_new_scale( - discovery_info.address - ) - except AcaiaDeviceNotFound: - _LOGGER.debug("Device not found during discovery") - return self.async_abort(reason="device_not_found") - except AcaiaError: - _LOGGER.debug( - "Error occurred while connecting to the scale during discovery", - exc_info=True, - ) - return self.async_abort(reason="unknown") - except AcaiaUnknownDevice: - _LOGGER.debug("Unsupported device during discovery") - return self.async_abort(reason="unsupported_device") - - return await self.async_step_bluetooth_confirm() - - async def async_step_bluetooth_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle confirmation of Bluetooth discovery.""" - - if user_input is not None: - return self.async_create_entry( - title=self._discovered[CONF_NAME], - data={ - CONF_ADDRESS: self._discovered[CONF_ADDRESS], - CONF_IS_NEW_STYLE_SCALE: self._discovered[CONF_IS_NEW_STYLE_SCALE], - }, - ) - - self.context["title_placeholders"] = placeholders = { - CONF_NAME: self._discovered[CONF_NAME] - } - - self._set_confirm_only() - return self.async_show_form( - step_id="bluetooth_confirm", - description_placeholders=placeholders, - ) diff --git a/homeassistant/components/acaia/const.py b/homeassistant/components/acaia/const.py deleted file mode 100644 index c603578763d..00000000000 --- a/homeassistant/components/acaia/const.py +++ /dev/null @@ -1,4 +0,0 @@ -"""Constants for component.""" - -DOMAIN = "acaia" -CONF_IS_NEW_STYLE_SCALE = "is_new_style_scale" diff --git a/homeassistant/components/acaia/coordinator.py b/homeassistant/components/acaia/coordinator.py deleted file mode 100644 index bd915b42408..00000000000 --- a/homeassistant/components/acaia/coordinator.py +++ /dev/null @@ -1,86 +0,0 @@ -"""Coordinator for Acaia integration.""" - -from __future__ import annotations - -from datetime import timedelta -import logging - -from aioacaia.acaiascale import AcaiaScale -from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ADDRESS -from homeassistant.core import HomeAssistant -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator - -from .const import CONF_IS_NEW_STYLE_SCALE - -SCAN_INTERVAL = timedelta(seconds=15) - -_LOGGER = logging.getLogger(__name__) - -type AcaiaConfigEntry = ConfigEntry[AcaiaCoordinator] - - -class AcaiaCoordinator(DataUpdateCoordinator[None]): - """Class to handle fetching data from the scale.""" - - config_entry: AcaiaConfigEntry - - def __init__(self, hass: HomeAssistant, entry: AcaiaConfigEntry) -> None: - """Initialize coordinator.""" - super().__init__( - hass, - _LOGGER, - name="acaia coordinator", - update_interval=SCAN_INTERVAL, - config_entry=entry, - ) - - self._scale = AcaiaScale( - address_or_ble_device=entry.data[CONF_ADDRESS], - name=entry.title, - 
is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE], - notify_callback=self.async_update_listeners, - ) - - @property - def scale(self) -> AcaiaScale: - """Return the scale object.""" - return self._scale - - async def _async_update_data(self) -> None: - """Fetch data.""" - - # scale is already connected, return - if self._scale.connected: - return - - # scale is not connected, try to connect - try: - await self._scale.connect(setup_tasks=False) - except (AcaiaDeviceNotFound, AcaiaError, TimeoutError) as ex: - _LOGGER.debug( - "Could not connect to scale: %s, Error: %s", - self.config_entry.data[CONF_ADDRESS], - ex, - ) - self._scale.device_disconnected_handler(notify=False) - return - - # connected, set up background tasks - if not self._scale.heartbeat_task or self._scale.heartbeat_task.done(): - self._scale.heartbeat_task = self.config_entry.async_create_background_task( - hass=self.hass, - target=self._scale.send_heartbeats(), - name="acaia_heartbeat_task", - ) - - if not self._scale.process_queue_task or self._scale.process_queue_task.done(): - self._scale.process_queue_task = ( - self.config_entry.async_create_background_task( - hass=self.hass, - target=self._scale.process_queue(), - name="acaia_process_queue_task", - ) - ) diff --git a/homeassistant/components/acaia/entity.py b/homeassistant/components/acaia/entity.py deleted file mode 100644 index 8a2108d2687..00000000000 --- a/homeassistant/components/acaia/entity.py +++ /dev/null @@ -1,40 +0,0 @@ -"""Base class for Acaia entities.""" - -from dataclasses import dataclass - -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import EntityDescription -from homeassistant.helpers.update_coordinator import CoordinatorEntity - -from .const import DOMAIN -from .coordinator import AcaiaCoordinator - - -@dataclass -class AcaiaEntity(CoordinatorEntity[AcaiaCoordinator]): - """Common elements for all entities.""" - - _attr_has_entity_name = True - - def __init__( - self, - coordinator: AcaiaCoordinator, - entity_description: EntityDescription, - ) -> None: - """Initialize the entity.""" - super().__init__(coordinator) - self.entity_description = entity_description - self._scale = coordinator.scale - self._attr_unique_id = f"{self._scale.mac}_{entity_description.key}" - - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, self._scale.mac)}, - manufacturer="Acaia", - model=self._scale.model, - suggested_area="Kitchen", - ) - - @property - def available(self) -> bool: - """Returns whether entity is available.""" - return super().available and self._scale.connected diff --git a/homeassistant/components/acaia/icons.json b/homeassistant/components/acaia/icons.json deleted file mode 100644 index aeab07ee912..00000000000 --- a/homeassistant/components/acaia/icons.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "entity": { - "button": { - "tare": { - "default": "mdi:scale-balance" - }, - "reset_timer": { - "default": "mdi:timer-refresh" - }, - "start_stop": { - "default": "mdi:timer-play" - } - } - } -} diff --git a/homeassistant/components/acaia/manifest.json b/homeassistant/components/acaia/manifest.json deleted file mode 100644 index c907a70a38e..00000000000 --- a/homeassistant/components/acaia/manifest.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "domain": "acaia", - "name": "Acaia", - "bluetooth": [ - { - "manufacturer_id": 16962 - }, - { - "local_name": "ACAIA*" - }, - { - "local_name": "PYXIS-*" - }, - { - "local_name": "LUNAR-*" - }, - { - "local_name": "PROCHBT001" - } - ], - "codeowners": 
["@zweckj"], - "config_flow": true, - "dependencies": ["bluetooth_adapters"], - "documentation": "https://www.home-assistant.io/integrations/acaia", - "integration_type": "device", - "iot_class": "local_push", - "loggers": ["aioacaia"], - "requirements": ["aioacaia==0.1.6"] -} diff --git a/homeassistant/components/acaia/strings.json b/homeassistant/components/acaia/strings.json deleted file mode 100644 index f6a1aeb66fd..00000000000 --- a/homeassistant/components/acaia/strings.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "config": { - "flow_title": "{name}", - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", - "unsupported_device": "This device is not supported." - }, - "error": { - "device_not_found": "Device could not be found.", - "unknown": "[%key:common::config_flow::error::unknown%]" - }, - "step": { - "bluetooth_confirm": { - "description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]" - }, - "user": { - "description": "[%key:component::bluetooth::config::step::user::description%]", - "data": { - "address": "[%key:common::config_flow::data::device%]" - } - } - } - }, - "entity": { - "button": { - "tare": { - "name": "Tare" - }, - "reset_timer": { - "name": "Reset timer" - }, - "start_stop": { - "name": "Start/stop timer" - } - } - } -} diff --git a/homeassistant/components/agent_dvr/manifest.json b/homeassistant/components/agent_dvr/manifest.json index 4ec14296363..9a6c528c336 100644 --- a/homeassistant/components/agent_dvr/manifest.json +++ b/homeassistant/components/agent_dvr/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/agent_dvr", "iot_class": "local_polling", "loggers": ["agent"], - "requirements": ["agent-py==0.0.24"] + "requirements": ["agent-py==0.0.23"] } diff --git a/homeassistant/components/airzone/manifest.json b/homeassistant/components/airzone/manifest.json index 6bf374087a6..10fb20bb2ce 100644 --- a/homeassistant/components/airzone/manifest.json +++ b/homeassistant/components/airzone/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone", "iot_class": "local_polling", "loggers": ["aioairzone"], - "requirements": ["aioairzone==0.9.6"] + "requirements": ["aioairzone==0.9.5"] } diff --git a/homeassistant/components/alarm_control_panel/__init__.py b/homeassistant/components/alarm_control_panel/__init__.py index a9e433a3650..2946fc64941 100644 --- a/homeassistant/components/alarm_control_panel/__init__.py +++ b/homeassistant/components/alarm_control_panel/__init__.py @@ -6,7 +6,7 @@ import asyncio from datetime import timedelta from functools import partial import logging -from typing import TYPE_CHECKING, Any, Final, final +from typing import Any, Final, final from propcache import cached_property import voluptuous as vol @@ -221,15 +221,9 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A @property def state(self) -> str | None: """Return the current state.""" - if (alarm_state := self.alarm_state) is not None: - return alarm_state - if self._attr_state is not None: - # Backwards compatibility for integrations that set state directly - # Should be removed in 2025.11 - if TYPE_CHECKING: - assert isinstance(self._attr_state, str) - return self._attr_state - return None + if (alarm_state := self.alarm_state) is None: + return None + return alarm_state @cached_property def alarm_state(self) -> 
AlarmControlPanelState | None: diff --git a/homeassistant/components/backup/__init__.py b/homeassistant/components/backup/__init__.py index 907fda4c7f8..200cb4a3f65 100644 --- a/homeassistant/components/backup/__init__.py +++ b/homeassistant/components/backup/__init__.py @@ -32,9 +32,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_handle_create_service(call: ServiceCall) -> None: """Service handler for creating backups.""" - await backup_manager.async_create_backup(on_progress=None) - if backup_task := backup_manager.backup_task: - await backup_task + await backup_manager.async_create_backup() hass.services.async_register(DOMAIN, "create", async_handle_create_service) diff --git a/homeassistant/components/backup/http.py b/homeassistant/components/backup/http.py index 42693035bd3..4cc4e61c9e4 100644 --- a/homeassistant/components/backup/http.py +++ b/homeassistant/components/backup/http.py @@ -2,26 +2,23 @@ from __future__ import annotations -import asyncio from http import HTTPStatus -from typing import cast -from aiohttp import BodyPartReader from aiohttp.hdrs import CONTENT_DISPOSITION from aiohttp.web import FileResponse, Request, Response -from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin +from homeassistant.components.http import KEY_HASS, HomeAssistantView from homeassistant.core import HomeAssistant, callback from homeassistant.util import slugify -from .const import DATA_MANAGER +from .const import DOMAIN +from .manager import BaseBackupManager @callback def async_register_http_views(hass: HomeAssistant) -> None: """Register the http views.""" hass.http.register_view(DownloadBackupView) - hass.http.register_view(UploadBackupView) class DownloadBackupView(HomeAssistantView): @@ -39,7 +36,7 @@ class DownloadBackupView(HomeAssistantView): if not request["hass_user"].is_admin: return Response(status=HTTPStatus.UNAUTHORIZED) - manager = request.app[KEY_HASS].data[DATA_MANAGER] + manager: BaseBackupManager = request.app[KEY_HASS].data[DOMAIN] backup = await manager.async_get_backup(slug=slug) if backup is None or not backup.path.exists(): @@ -51,29 +48,3 @@ class DownloadBackupView(HomeAssistantView): CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar" }, ) - - -class UploadBackupView(HomeAssistantView): - """Generate backup view.""" - - url = "/api/backup/upload" - name = "api:backup:upload" - - @require_admin - async def post(self, request: Request) -> Response: - """Upload a backup file.""" - manager = request.app[KEY_HASS].data[DATA_MANAGER] - reader = await request.multipart() - contents = cast(BodyPartReader, await reader.next()) - - try: - await manager.async_receive_backup(contents=contents) - except OSError as err: - return Response( - body=f"Can't write backup file {err}", - status=HTTPStatus.INTERNAL_SERVER_ERROR, - ) - except asyncio.CancelledError: - return Response(status=HTTPStatus.INTERNAL_SERVER_ERROR) - - return Response(status=HTTPStatus.CREATED) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index ddc0a1eac3f..b3cb69861b9 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -4,21 +4,16 @@ from __future__ import annotations import abc import asyncio -from collections.abc import Callable from dataclasses import asdict, dataclass import hashlib import io import json from pathlib import Path -from queue import SimpleQueue -import shutil import tarfile from tarfile 
import TarError -from tempfile import TemporaryDirectory import time from typing import Any, Protocol, cast -import aiohttp from securetar import SecureTarFile, atomic_contents_add from homeassistant.backup_restore import RESTORE_BACKUP_FILE @@ -35,13 +30,6 @@ from .const import DOMAIN, EXCLUDE_FROM_BACKUP, LOGGER BUF_SIZE = 2**20 * 4 # 4MB -@dataclass(slots=True) -class NewBackup: - """New backup class.""" - - slug: str - - @dataclass(slots=True) class Backup: """Backup class.""" @@ -57,15 +45,6 @@ class Backup: return {**asdict(self), "path": self.path.as_posix()} -@dataclass(slots=True) -class BackupProgress: - """Backup progress class.""" - - done: bool - stage: str | None - success: bool | None - - class BackupPlatformProtocol(Protocol): """Define the format that backup platforms can have.""" @@ -82,7 +61,7 @@ class BaseBackupManager(abc.ABC): def __init__(self, hass: HomeAssistant) -> None: """Initialize the backup manager.""" self.hass = hass - self.backup_task: asyncio.Task | None = None + self.backing_up = False self.backups: dict[str, Backup] = {} self.loaded_platforms = False self.platforms: dict[str, BackupPlatformProtocol] = {} @@ -147,15 +126,10 @@ class BaseBackupManager(abc.ABC): @abc.abstractmethod async def async_restore_backup(self, slug: str, **kwargs: Any) -> None: - """Restore a backup.""" + """Restpre a backup.""" @abc.abstractmethod - async def async_create_backup( - self, - *, - on_progress: Callable[[BackupProgress], None] | None, - **kwargs: Any, - ) -> NewBackup: + async def async_create_backup(self, **kwargs: Any) -> Backup: """Generate a backup.""" @abc.abstractmethod @@ -173,15 +147,6 @@ class BaseBackupManager(abc.ABC): async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None: """Remove a backup.""" - @abc.abstractmethod - async def async_receive_backup( - self, - *, - contents: aiohttp.BodyPartReader, - **kwargs: Any, - ) -> None: - """Receive and store a backup file from upload.""" - class BackupManager(BaseBackupManager): """Backup manager for the Backup integration.""" @@ -257,93 +222,17 @@ class BackupManager(BaseBackupManager): LOGGER.debug("Removed backup located at %s", backup.path) self.backups.pop(slug) - async def async_receive_backup( - self, - *, - contents: aiohttp.BodyPartReader, - **kwargs: Any, - ) -> None: - """Receive and store a backup file from upload.""" - queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = ( - SimpleQueue() - ) - temp_dir_handler = await self.hass.async_add_executor_job(TemporaryDirectory) - target_temp_file = Path( - temp_dir_handler.name, contents.filename or "backup.tar" - ) - - def _sync_queue_consumer() -> None: - with target_temp_file.open("wb") as file_handle: - while True: - if (_chunk_future := queue.get()) is None: - break - _chunk, _future = _chunk_future - if _future is not None: - self.hass.loop.call_soon_threadsafe(_future.set_result, None) - file_handle.write(_chunk) - - fut: asyncio.Future[None] | None = None - try: - fut = self.hass.async_add_executor_job(_sync_queue_consumer) - megabytes_sending = 0 - while chunk := await contents.read_chunk(BUF_SIZE): - megabytes_sending += 1 - if megabytes_sending % 5 != 0: - queue.put_nowait((chunk, None)) - continue - - chunk_future = self.hass.loop.create_future() - queue.put_nowait((chunk, chunk_future)) - await asyncio.wait( - (fut, chunk_future), - return_when=asyncio.FIRST_COMPLETED, - ) - if fut.done(): - # The executor job failed - break - - queue.put_nowait(None) # terminate queue consumer - finally: - if fut is not None: 
- await fut - - def _move_and_cleanup() -> None: - shutil.move(target_temp_file, self.backup_dir / target_temp_file.name) - temp_dir_handler.cleanup() - - await self.hass.async_add_executor_job(_move_and_cleanup) - await self.load_backups() - - async def async_create_backup( - self, - *, - on_progress: Callable[[BackupProgress], None] | None, - **kwargs: Any, - ) -> NewBackup: + async def async_create_backup(self, **kwargs: Any) -> Backup: """Generate a backup.""" - if self.backup_task: + if self.backing_up: raise HomeAssistantError("Backup already in progress") - backup_name = f"Core {HAVERSION}" - date_str = dt_util.now().isoformat() - slug = _generate_slug(date_str, backup_name) - self.backup_task = self.hass.async_create_task( - self._async_create_backup(backup_name, date_str, slug, on_progress), - name="backup_manager_create_backup", - eager_start=False, # To ensure the task is not started before we return - ) - return NewBackup(slug=slug) - async def _async_create_backup( - self, - backup_name: str, - date_str: str, - slug: str, - on_progress: Callable[[BackupProgress], None] | None, - ) -> Backup: - """Generate a backup.""" - success = False try: + self.backing_up = True await self.async_pre_backup_actions() + backup_name = f"Core {HAVERSION}" + date_str = dt_util.now().isoformat() + slug = _generate_slug(date_str, backup_name) backup_data = { "slug": slug, @@ -370,12 +259,9 @@ class BackupManager(BaseBackupManager): if self.loaded_backups: self.backups[slug] = backup LOGGER.debug("Generated new backup with slug %s", slug) - success = True return backup finally: - if on_progress: - on_progress(BackupProgress(done=True, stage=None, success=success)) - self.backup_task = None + self.backing_up = False await self.async_post_backup_actions() def _mkdir_and_generate_backup_contents( diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index a7c61b7c66c..3ac8a7ace3e 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -8,7 +8,6 @@ from homeassistant.components import websocket_api from homeassistant.core import HomeAssistant, callback from .const import DATA_MANAGER, LOGGER -from .manager import BackupProgress @callback @@ -41,7 +40,7 @@ async def handle_info( msg["id"], { "backups": list(backups.values()), - "backing_up": manager.backup_task is not None, + "backing_up": manager.backing_up, }, ) @@ -114,11 +113,7 @@ async def handle_create( msg: dict[str, Any], ) -> None: """Generate a backup.""" - - def on_progress(progress: BackupProgress) -> None: - connection.send_message(websocket_api.event_message(msg["id"], progress)) - - backup = await hass.data[DATA_MANAGER].async_create_backup(on_progress=on_progress) + backup = await hass.data[DATA_MANAGER].async_create_backup() connection.send_result(msg["id"], backup) @@ -132,6 +127,7 @@ async def handle_backup_start( ) -> None: """Backup start notification.""" manager = hass.data[DATA_MANAGER] + manager.backing_up = True LOGGER.debug("Backup start notification") try: @@ -153,6 +149,7 @@ async def handle_backup_end( ) -> None: """Backup end notification.""" manager = hass.data[DATA_MANAGER] + manager.backing_up = False LOGGER.debug("Backup end notification") try: diff --git a/homeassistant/components/bang_olufsen/const.py b/homeassistant/components/bang_olufsen/const.py index 209311d3e8a..1e06f153cdb 100644 --- a/homeassistant/components/bang_olufsen/const.py +++ b/homeassistant/components/bang_olufsen/const.py @@ -17,9 +17,62 @@ 
from homeassistant.components.media_player import ( class BangOlufsenSource: """Class used for associating device source ids with friendly names. May not include all sources.""" - LINE_IN: Final[Source] = Source(name="Line-In", id="lineIn") - SPDIF: Final[Source] = Source(name="Optical", id="spdif") - URI_STREAMER: Final[Source] = Source(name="Audio Streamer", id="uriStreamer") + URI_STREAMER: Final[Source] = Source( + name="Audio Streamer", + id="uriStreamer", + is_seekable=False, + is_enabled=True, + is_playable=True, + ) + BLUETOOTH: Final[Source] = Source( + name="Bluetooth", + id="bluetooth", + is_seekable=False, + is_enabled=True, + is_playable=True, + ) + CHROMECAST: Final[Source] = Source( + name="Chromecast built-in", + id="chromeCast", + is_seekable=False, + is_enabled=True, + is_playable=True, + ) + LINE_IN: Final[Source] = Source( + name="Line-In", + id="lineIn", + is_seekable=False, + is_enabled=True, + is_playable=True, + ) + SPDIF: Final[Source] = Source( + name="Optical", + id="spdif", + is_seekable=False, + is_enabled=True, + is_playable=True, + ) + NET_RADIO: Final[Source] = Source( + name="B&O Radio", + id="netRadio", + is_seekable=False, + is_enabled=True, + is_playable=True, + ) + DEEZER: Final[Source] = Source( + name="Deezer", + id="deezer", + is_seekable=True, + is_enabled=True, + is_playable=True, + ) + TIDAL: Final[Source] = Source( + name="Tidal", + id="tidal", + is_seekable=True, + is_enabled=True, + is_playable=True, + ) BANG_OLUFSEN_STATES: dict[str, MediaPlayerState] = { diff --git a/homeassistant/components/bang_olufsen/icons.json b/homeassistant/components/bang_olufsen/icons.json deleted file mode 100644 index fec0bf20937..00000000000 --- a/homeassistant/components/bang_olufsen/icons.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "services": { - "beolink_join": { "service": "mdi:location-enter" }, - "beolink_expand": { "service": "mdi:location-enter" }, - "beolink_unexpand": { "service": "mdi:location-exit" }, - "beolink_leave": { "service": "mdi:close-circle-outline" }, - "beolink_allstandby": { "service": "mdi:close-circle-multiple-outline" } - } -} diff --git a/homeassistant/components/bang_olufsen/media_player.py b/homeassistant/components/bang_olufsen/media_player.py index 56aa66d32e8..e8108ee2cf7 100644 --- a/homeassistant/components/bang_olufsen/media_player.py +++ b/homeassistant/components/bang_olufsen/media_player.py @@ -11,7 +11,7 @@ from typing import TYPE_CHECKING, Any, cast from aiohttp import ClientConnectorError from mozart_api import __version__ as MOZART_API_VERSION -from mozart_api.exceptions import ApiException, NotFoundException +from mozart_api.exceptions import ApiException from mozart_api.models import ( Action, Art, @@ -38,7 +38,6 @@ from mozart_api.models import ( VolumeState, ) from mozart_api.mozart_client import MozartClient, get_highest_resolution_artwork -import voluptuous as vol from homeassistant.components import media_source from homeassistant.components.media_player import ( @@ -56,17 +55,10 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MODEL, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import ( - config_validation as cv, - device_registry as dr, - entity_registry as er, -) +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import 
async_dispatcher_connect -from homeassistant.helpers.entity_platform import ( - AddEntitiesCallback, - async_get_current_platform, -) +from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.dt import utcnow from . import BangOlufsenConfigEntry @@ -124,58 +116,6 @@ async def async_setup_entry( ] ) - # Register actions. - platform = async_get_current_platform() - - jid_regex = vol.Match( - r"(^\d{4})[.](\d{7})[.](\d{8})(@products\.bang-olufsen\.com)$" - ) - - platform.async_register_entity_service( - name="beolink_join", - schema={vol.Optional("beolink_jid"): jid_regex}, - func="async_beolink_join", - ) - - platform.async_register_entity_service( - name="beolink_expand", - schema={ - vol.Exclusive("all_discovered", "devices", ""): cv.boolean, - vol.Exclusive( - "beolink_jids", - "devices", - "Define either specific Beolink JIDs or all discovered", - ): vol.All( - cv.ensure_list, - [jid_regex], - ), - }, - func="async_beolink_expand", - ) - - platform.async_register_entity_service( - name="beolink_unexpand", - schema={ - vol.Required("beolink_jids"): vol.All( - cv.ensure_list, - [jid_regex], - ), - }, - func="async_beolink_unexpand", - ) - - platform.async_register_entity_service( - name="beolink_leave", - schema=None, - func="async_beolink_leave", - ) - - platform.async_register_entity_service( - name="beolink_allstandby", - schema=None, - func="async_beolink_allstandby", - ) - class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): """Representation of a media player.""" @@ -216,8 +156,6 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): # Beolink compatible sources self._beolink_sources: dict[str, bool] = {} self._remote_leader: BeolinkLeader | None = None - # Extra state attributes for showing Beolink: peer(s), listener(s), leader and self - self._beolink_attributes: dict[str, dict[str, dict[str, str]]] = {} async def async_added_to_hass(self) -> None: """Turn on the dispatchers.""" @@ -227,7 +165,6 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): CONNECTION_STATUS: self._async_update_connection_state, WebsocketNotification.ACTIVE_LISTENING_MODE: self._async_update_sound_modes, WebsocketNotification.BEOLINK: self._async_update_beolink, - WebsocketNotification.CONFIGURATION: self._async_update_name_and_beolink, WebsocketNotification.PLAYBACK_ERROR: self._async_update_playback_error, WebsocketNotification.PLAYBACK_METADATA: self._async_update_playback_metadata_and_beolink, WebsocketNotification.PLAYBACK_PROGRESS: self._async_update_playback_progress, @@ -293,9 +230,6 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): await self._async_update_sound_modes() - # Update beolink attributes and device name. - await self._async_update_name_and_beolink() - async def async_update(self) -> None: """Update queue settings.""" # The WebSocket event listener is the main handler for connection state. 
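Reviewer note: the media_player.py hunks above drop the custom Beolink entity services (beolink_join, beolink_expand, and so on) that were registered through the entity platform. For readers unfamiliar with that mechanism, here is a hedged, minimal sketch of the registration pattern those removed lines used; the service name, schema field, and handler method below are illustrative placeholders, not the Bang & Olufsen implementation.

```python
"""Hedged sketch of the entity-service registration pattern removed above.

async_get_current_platform and async_register_entity_service are the helpers
the dropped hunks called; the service name, schema key, and entity method used
here are placeholders only.
"""
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import (
    AddEntitiesCallback,
    async_get_current_platform,
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the platform and expose a custom per-entity action."""
    platform = async_get_current_platform()

    # The service targets entities of this platform; Home Assistant looks up
    # the coroutine named in ``func`` on each targeted entity when it is called.
    platform.async_register_entity_service(
        name="example_join",
        schema={vol.Optional("peer_id"): cv.string},
        func="async_example_join",
    )
```

Each targeted entity then implements the named coroutine (here async_example_join) to perform the action when the service is invoked.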
@@ -438,44 +372,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): self.async_write_ha_state() - async def _async_update_name_and_beolink(self) -> None: - """Update the device friendly name.""" - beolink_self = await self._client.get_beolink_self() - - # Update device name - device_registry = dr.async_get(self.hass) - assert self.device_entry is not None - - device_registry.async_update_device( - device_id=self.device_entry.id, - name=beolink_self.friendly_name, - ) - - await self._async_update_beolink() - async def _async_update_beolink(self) -> None: """Update the current Beolink leader, listeners, peers and self.""" - self._beolink_attributes = {} - - assert self.device_entry is not None - assert self.device_entry.name is not None - - # Add Beolink self - self._beolink_attributes = { - "beolink": {"self": {self.device_entry.name: self._beolink_jid}} - } - - # Add Beolink peers - peers = await self._client.get_beolink_peers() - - if len(peers) > 0: - self._beolink_attributes["beolink"]["peers"] = {} - for peer in peers: - self._beolink_attributes["beolink"]["peers"][peer.friendly_name] = ( - peer.jid - ) - # Add Beolink listeners / leader self._remote_leader = self._playback_metadata.remote_leader @@ -495,14 +394,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): # Add self group_members.append(self.entity_id) - self._beolink_attributes["beolink"]["leader"] = { - self._remote_leader.friendly_name: self._remote_leader.jid, - } - # If not listener, check if leader. else: beolink_listeners = await self._client.get_beolink_listeners() - beolink_listeners_attribute = {} # Check if the device is a leader. if len(beolink_listeners) > 0: @@ -523,18 +417,6 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): for beolink_listener in beolink_listeners ] ) - # Update Beolink attributes - for beolink_listener in beolink_listeners: - for peer in peers: - if peer.jid == beolink_listener.jid: - # Get the friendly names for the listeners from the peers - beolink_listeners_attribute[peer.friendly_name] = ( - beolink_listener.jid - ) - break - self._beolink_attributes["beolink"]["listeners"] = ( - beolink_listeners_attribute - ) self._attr_group_members = group_members @@ -688,19 +570,38 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): @property def source(self) -> str | None: """Return the current audio source.""" + + # Try to fix some of the source_change chromecast weirdness. + if hasattr(self._playback_metadata, "title"): + # source_change is chromecast but line in is selected. + if self._playback_metadata.title == BangOlufsenSource.LINE_IN.name: + return BangOlufsenSource.LINE_IN.name + + # source_change is chromecast but bluetooth is selected. + if self._playback_metadata.title == BangOlufsenSource.BLUETOOTH.name: + return BangOlufsenSource.BLUETOOTH.name + + # source_change is line in, bluetooth or optical but stale metadata is sent through the WebSocket, + # And the source has not changed. + if self._source_change.id in ( + BangOlufsenSource.BLUETOOTH.id, + BangOlufsenSource.LINE_IN.id, + BangOlufsenSource.SPDIF.id, + ): + return BangOlufsenSource.CHROMECAST.name + + # source_change is chromecast and there is metadata but no artwork. 
Bluetooth does support metadata but not artwork + # So i assume that it is bluetooth and not chromecast + if ( + hasattr(self._playback_metadata, "art") + and self._playback_metadata.art is not None + and len(self._playback_metadata.art) == 0 + and self._source_change.id == BangOlufsenSource.CHROMECAST.id + ): + return BangOlufsenSource.BLUETOOTH.name + return self._source_change.name - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return information that is not returned anywhere else.""" - attributes: dict[str, Any] = {} - - # Add Beolink attributes - if self._beolink_attributes: - attributes.update(self._beolink_attributes) - - return attributes - async def async_turn_off(self) -> None: """Set the device to "networkStandby".""" await self._client.post_standby() @@ -972,30 +873,23 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): # Beolink compatible B&O device. # Repeated presses / calls will cycle between compatible playing devices. if len(group_members) == 0: - await self.async_beolink_join() + await self._async_beolink_join() return # Get JID for each group member jids = [self._get_beolink_jid(group_member) for group_member in group_members] - await self.async_beolink_expand(jids) + await self._async_beolink_expand(jids) async def async_unjoin_player(self) -> None: """Unjoin Beolink session. End session if leader.""" - await self.async_beolink_leave() + await self._async_beolink_leave() - # Custom actions: - async def async_beolink_join(self, beolink_jid: str | None = None) -> None: + async def _async_beolink_join(self) -> None: """Join a Beolink multi-room experience.""" - if beolink_jid is None: - await self._client.join_latest_beolink_experience() - else: - await self._client.join_beolink_peer(jid=beolink_jid) + await self._client.join_latest_beolink_experience() - async def async_beolink_expand( - self, beolink_jids: list[str] | None = None, all_discovered: bool = False - ) -> None: + async def _async_beolink_expand(self, beolink_jids: list[str]) -> None: """Expand a Beolink multi-room experience with a device or devices.""" - # Ensure that the current source is expandable if not self._beolink_sources[cast(str, self._source_change.id)]: raise ServiceValidationError( @@ -1007,37 +901,10 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): }, ) - # Expand to all discovered devices - if all_discovered: - peers = await self._client.get_beolink_peers() - - for peer in peers: - try: - await self._client.post_beolink_expand(jid=peer.jid) - except NotFoundException: - _LOGGER.warning("Unable to expand to %s", peer.jid) - # Try to expand to all defined devices - elif beolink_jids: - for beolink_jid in beolink_jids: - try: - await self._client.post_beolink_expand(jid=beolink_jid) - except NotFoundException: - _LOGGER.warning( - "Unable to expand to %s. 
Is the device available on the network?", - beolink_jid, - ) - - async def async_beolink_unexpand(self, beolink_jids: list[str]) -> None: - """Unexpand a Beolink multi-room experience with a device or devices.""" - # Unexpand all defined devices for beolink_jid in beolink_jids: - await self._client.post_beolink_unexpand(jid=beolink_jid) + await self._client.post_beolink_expand(jid=beolink_jid) - async def async_beolink_leave(self) -> None: + async def _async_beolink_leave(self) -> None: """Leave the current Beolink experience.""" await self._client.post_beolink_leave() - - async def async_beolink_allstandby(self) -> None: - """Set all connected Beolink devices to standby.""" - await self._client.post_beolink_allstandby() diff --git a/homeassistant/components/bang_olufsen/services.yaml b/homeassistant/components/bang_olufsen/services.yaml deleted file mode 100644 index e5d61420dff..00000000000 --- a/homeassistant/components/bang_olufsen/services.yaml +++ /dev/null @@ -1,79 +0,0 @@ -beolink_allstandby: - target: - entity: - integration: bang_olufsen - domain: media_player - device: - integration: bang_olufsen - -beolink_expand: - target: - entity: - integration: bang_olufsen - domain: media_player - device: - integration: bang_olufsen - fields: - all_discovered: - required: false - example: false - selector: - boolean: - jid_options: - collapsed: false - fields: - beolink_jids: - required: false - example: >- - [ - 1111.2222222.33333333@products.bang-olufsen.com, - 4444.5555555.66666666@products.bang-olufsen.com - ] - selector: - object: - -beolink_join: - target: - entity: - integration: bang_olufsen - domain: media_player - device: - integration: bang_olufsen - fields: - jid_options: - collapsed: false - fields: - beolink_jid: - required: false - example: 1111.2222222.33333333@products.bang-olufsen.com - selector: - text: - -beolink_leave: - target: - entity: - integration: bang_olufsen - domain: media_player - device: - integration: bang_olufsen - -beolink_unexpand: - target: - entity: - integration: bang_olufsen - domain: media_player - device: - integration: bang_olufsen - fields: - jid_options: - collapsed: false - fields: - beolink_jids: - required: true - example: >- - [ - 1111.2222222.33333333@products.bang-olufsen.com, - 4444.5555555.66666666@products.bang-olufsen.com - ] - selector: - object: diff --git a/homeassistant/components/bang_olufsen/strings.json b/homeassistant/components/bang_olufsen/strings.json index aef6f953524..3e336f7d2d8 100644 --- a/homeassistant/components/bang_olufsen/strings.json +++ b/homeassistant/components/bang_olufsen/strings.json @@ -1,8 +1,4 @@ { - "common": { - "jid_options_name": "JID options", - "jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity." - }, "config": { "error": { "api_exception": "[%key:common::config_flow::error::cannot_connect%]", @@ -29,68 +25,6 @@ } } }, - "services": { - "beolink_allstandby": { - "name": "Beolink all standby", - "description": "Set all Connected Beolink devices to standby." - }, - "beolink_expand": { - "name": "Beolink expand", - "description": "Expand current Beolink experience.", - "fields": { - "all_discovered": { - "name": "All discovered", - "description": "Expand Beolink experience to all discovered devices." - }, - "beolink_jids": { - "name": "Beolink JIDs", - "description": "Specify which Beolink JIDs will join current Beolink experience." 
- } - }, - "sections": { - "jid_options": { - "name": "[%key:component::bang_olufsen::common::jid_options_name%]", - "description": "[%key:component::bang_olufsen::common::jid_options_description%]" - } - } - }, - "beolink_join": { - "name": "Beolink join", - "description": "Join a Beolink experience.", - "fields": { - "beolink_jid": { - "name": "Beolink JID", - "description": "Manually specify Beolink JID to join." - } - }, - "sections": { - "jid_options": { - "name": "[%key:component::bang_olufsen::common::jid_options_name%]", - "description": "[%key:component::bang_olufsen::common::jid_options_description%]" - } - } - }, - "beolink_leave": { - "name": "Beolink leave", - "description": "Leave a Beolink experience." - }, - "beolink_unexpand": { - "name": "Beolink unexpand", - "description": "Unexpand from current Beolink experience.", - "fields": { - "beolink_jids": { - "name": "Beolink JIDs", - "description": "Specify which Beolink JIDs will leave from current Beolink experience." - } - }, - "sections": { - "jid_options": { - "name": "[%key:component::bang_olufsen::common::jid_options_name%]", - "description": "[%key:component::bang_olufsen::common::jid_options_description%]" - } - } - } - }, "exceptions": { "m3u_invalid_format": { "message": "Media sources with the .m3u extension are not supported." diff --git a/homeassistant/components/bang_olufsen/websocket.py b/homeassistant/components/bang_olufsen/websocket.py index 913f7cb3241..94b84189ccc 100644 --- a/homeassistant/components/bang_olufsen/websocket.py +++ b/homeassistant/components/bang_olufsen/websocket.py @@ -120,11 +120,6 @@ class BangOlufsenWebsocket(BangOlufsenBase): self.hass, f"{self._unique_id}_{WebsocketNotification.BEOLINK}", ) - elif notification_type is WebsocketNotification.CONFIGURATION: - async_dispatcher_send( - self.hass, - f"{self._unique_id}_{WebsocketNotification.CONFIGURATION}", - ) elif notification_type is WebsocketNotification.REMOTE_MENU_CHANGED: async_dispatcher_send( self.hass, diff --git a/homeassistant/components/blink/sensor.py b/homeassistant/components/blink/sensor.py index e0b5989cc80..f20f8188b42 100644 --- a/homeassistant/components/blink/sensor.py +++ b/homeassistant/components/blink/sensor.py @@ -10,11 +10,7 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.const import ( - SIGNAL_STRENGTH_DECIBELS_MILLIWATT, - EntityCategory, - UnitOfTemperature, -) +from homeassistant.const import EntityCategory, UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -36,8 +32,6 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( SensorEntityDescription( key=TYPE_WIFI_STRENGTH, translation_key="wifi_strength", - native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, - device_class=SensorDeviceClass.SIGNAL_STRENGTH, entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, ), diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index 97985a74300..1d46af2cc4b 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -770,7 +770,7 @@ class BluesoundPlayer(MediaPlayerEntity): async def async_set_volume_level(self, volume: float) -> None: """Send volume_up command to media player.""" - volume = int(round(volume * 100)) + volume = int(volume * 100) volume = min(100, volume) volume = max(0, volume) diff --git a/homeassistant/components/bring/icons.json b/homeassistant/components/bring/icons.json index c670ef87700..74c3b2e393b 100644 --- a/homeassistant/components/bring/icons.json +++ b/homeassistant/components/bring/icons.json @@ -16,8 +16,7 @@ "list_access": { "default": "mdi:account-lock", "state": { - "shared": "mdi:account-group", - "invitation": "mdi:account-multiple-plus" + "shared": "mdi:account-group" } } }, diff --git a/homeassistant/components/bring/sensor.py b/homeassistant/components/bring/sensor.py index 746ed397e1b..57ceb099535 100644 --- a/homeassistant/components/bring/sensor.py +++ b/homeassistant/components/bring/sensor.py @@ -79,7 +79,7 @@ SENSOR_DESCRIPTIONS: tuple[BringSensorEntityDescription, ...] = ( translation_key=BringSensor.LIST_ACCESS, value_fn=lambda lst, _: lst["status"].lower(), entity_category=EntityCategory.DIAGNOSTIC, - options=["registered", "shared", "invitation"], + options=["registered", "shared"], device_class=SensorDeviceClass.ENUM, ), ) diff --git a/homeassistant/components/bring/strings.json b/homeassistant/components/bring/strings.json index 9a93881b5d2..61121cdca60 100644 --- a/homeassistant/components/bring/strings.json +++ b/homeassistant/components/bring/strings.json @@ -66,8 +66,7 @@ "name": "List access", "state": { "registered": "Private", - "shared": "Shared", - "invitation": "Invitation pending" + "shared": "Shared" } } } diff --git a/homeassistant/components/cambridge_audio/manifest.json b/homeassistant/components/cambridge_audio/manifest.json index c359ca14a21..edacd17f54d 100644 --- a/homeassistant/components/cambridge_audio/manifest.json +++ b/homeassistant/components/cambridge_audio/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["aiostreammagic"], - "requirements": ["aiostreammagic==2.8.5"], + "requirements": ["aiostreammagic==2.8.4"], "zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."] } diff --git a/homeassistant/components/cambridge_audio/select.py b/homeassistant/components/cambridge_audio/select.py index c99abc853e5..ca6eebdec6b 100644 --- a/homeassistant/components/cambridge_audio/select.py +++ b/homeassistant/components/cambridge_audio/select.py @@ -51,13 +51,8 @@ CONTROL_ENTITIES: tuple[CambridgeAudioSelectEntityDescription, ...] 
= ( CambridgeAudioSelectEntityDescription( key="display_brightness", translation_key="display_brightness", - options=[ - DisplayBrightness.BRIGHT.value, - DisplayBrightness.DIM.value, - DisplayBrightness.OFF.value, - ], + options=[x.value for x in DisplayBrightness], entity_category=EntityCategory.CONFIG, - load_fn=lambda client: client.display.brightness != DisplayBrightness.NONE, value_fn=lambda client: client.display.brightness, set_value_fn=lambda client, value: client.set_display_brightness( DisplayBrightness(value) diff --git a/homeassistant/components/camera/__init__.py b/homeassistant/components/camera/__init__.py index d31d21d424c..b80241846b8 100644 --- a/homeassistant/components/camera/__init__.py +++ b/homeassistant/components/camera/__init__.py @@ -421,12 +421,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: if hass.config.webrtc.ice_servers: return hass.config.webrtc.ice_servers return [ - RTCIceServer( - urls=[ - "stun:stun.home-assistant.io:80", - "stun:stun.home-assistant.io:3478", - ] - ), + RTCIceServer(urls="stun:stun.home-assistant.io:80"), + RTCIceServer(urls="stun:stun.home-assistant.io:3478"), ] async_register_ice_servers(hass, get_ice_servers) @@ -476,8 +472,6 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): _attr_state: None = None # State is determined by is_on _attr_supported_features: CameraEntityFeature = CameraEntityFeature(0) - __supports_stream: CameraEntityFeature | None = None - def __init__(self) -> None: """Initialize a camera.""" self._cache: dict[str, Any] = {} @@ -789,9 +783,6 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): async def async_internal_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" await super().async_internal_added_to_hass() - self.__supports_stream = ( - self.supported_features_compat & CameraEntityFeature.STREAM - ) await self.async_refresh_providers(write_state=False) async def async_refresh_providers(self, *, write_state: bool = True) -> None: @@ -820,9 +811,18 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): async_get_supported_legacy_provider ) - if old_provider != new_provider or old_legacy_provider != new_legacy_provider: + changed = False + if old_provider != new_provider: + changed = True self._webrtc_provider = new_provider + if new_provider: + new_provider.async_provider_added(self) + + if old_legacy_provider != new_legacy_provider: + changed = True self._legacy_webrtc_provider = new_legacy_provider + + if changed: self._invalidate_camera_capabilities_cache() if write_state: self.async_write_ha_state() @@ -901,21 +901,6 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): return CameraCapabilities(frontend_stream_types) - @callback - def async_write_ha_state(self) -> None: - """Write the state to the state machine. - - Schedules async_refresh_providers if support of streams have changed. 
- """ - super().async_write_ha_state() - if self.__supports_stream != ( - supports_stream := self.supported_features_compat - & CameraEntityFeature.STREAM - ): - self.__supports_stream = supports_stream - self._invalidate_camera_capabilities_cache() - self.hass.async_create_task(self.async_refresh_providers()) - class CameraView(HomeAssistantView): """Base CameraView.""" diff --git a/homeassistant/components/camera/webrtc.py b/homeassistant/components/camera/webrtc.py index d627a888169..53993d8e765 100644 --- a/homeassistant/components/camera/webrtc.py +++ b/homeassistant/components/camera/webrtc.py @@ -6,7 +6,7 @@ from abc import ABC, abstractmethod import asyncio from collections.abc import Awaitable, Callable, Iterable from dataclasses import asdict, dataclass, field -from functools import cache, partial, wraps +from functools import cache, partial import logging from typing import TYPE_CHECKING, Any, Protocol @@ -155,6 +155,11 @@ class CameraWebRTCProvider(ABC): """Close the session.""" return ## This is an optional method so we need a default here. + @callback + def async_provider_added(self, camera: Camera) -> None: + """Notify the provider that the provider was added to the given camera.""" + return ## This is an optional method so we need a default here. + class CameraWebRTCLegacyProvider(Protocol): """WebRTC provider.""" @@ -205,49 +210,6 @@ async def _async_refresh_providers(hass: HomeAssistant) -> None: ) -type WsCommandWithCamera = Callable[ - [websocket_api.ActiveConnection, dict[str, Any], Camera], - Awaitable[None], -] - - -def require_webrtc_support( - error_code: str, -) -> Callable[[WsCommandWithCamera], websocket_api.AsyncWebSocketCommandHandler]: - """Validate that the camera supports WebRTC.""" - - def decorate( - func: WsCommandWithCamera, - ) -> websocket_api.AsyncWebSocketCommandHandler: - """Decorate func.""" - - @wraps(func) - async def validate( - hass: HomeAssistant, - connection: websocket_api.ActiveConnection, - msg: dict[str, Any], - ) -> None: - """Validate that the camera supports WebRTC.""" - entity_id = msg["entity_id"] - camera = get_camera_from_entity_id(hass, entity_id) - if camera.frontend_stream_type != StreamType.WEB_RTC: - connection.send_error( - msg["id"], - error_code, - ( - "Camera does not support WebRTC," - f" frontend_stream_type={camera.frontend_stream_type}" - ), - ) - return - - await func(connection, msg, camera) - - return validate - - return decorate - - @websocket_api.websocket_command( { vol.Required("type"): "camera/webrtc/offer", @@ -256,9 +218,8 @@ def require_webrtc_support( } ) @websocket_api.async_response -@require_webrtc_support("webrtc_offer_failed") async def ws_webrtc_offer( - connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera + hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] ) -> None: """Handle the signal path for a WebRTC stream. @@ -270,7 +231,20 @@ async def ws_webrtc_offer( Async friendly. 
""" + entity_id = msg["entity_id"] offer = msg["offer"] + camera = get_camera_from_entity_id(hass, entity_id) + if camera.frontend_stream_type != StreamType.WEB_RTC: + connection.send_error( + msg["id"], + "webrtc_offer_failed", + ( + "Camera does not support WebRTC," + f" frontend_stream_type={camera.frontend_stream_type}" + ), + ) + return + session_id = ulid() connection.subscriptions[msg["id"]] = partial( camera.close_webrtc_session, session_id @@ -309,11 +283,23 @@ async def ws_webrtc_offer( } ) @websocket_api.async_response -@require_webrtc_support("webrtc_get_client_config_failed") async def ws_get_client_config( - connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera + hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] ) -> None: """Handle get WebRTC client config websocket command.""" + entity_id = msg["entity_id"] + camera = get_camera_from_entity_id(hass, entity_id) + if camera.frontend_stream_type != StreamType.WEB_RTC: + connection.send_error( + msg["id"], + "webrtc_get_client_config_failed", + ( + "Camera does not support WebRTC," + f" frontend_stream_type={camera.frontend_stream_type}" + ), + ) + return + config = camera.async_get_webrtc_client_configuration().to_frontend_dict() connection.send_result( msg["id"], @@ -330,11 +316,23 @@ async def ws_get_client_config( } ) @websocket_api.async_response -@require_webrtc_support("webrtc_candidate_failed") async def ws_candidate( - connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera + hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] ) -> None: """Handle WebRTC candidate websocket command.""" + entity_id = msg["entity_id"] + camera = get_camera_from_entity_id(hass, entity_id) + if camera.frontend_stream_type != StreamType.WEB_RTC: + connection.send_error( + msg["id"], + "webrtc_candidate_failed", + ( + "Camera does not support WebRTC," + f" frontend_stream_type={camera.frontend_stream_type}" + ), + ) + return + await camera.async_on_webrtc_candidate( msg["session_id"], RTCIceCandidate(msg["candidate"]) ) diff --git a/homeassistant/components/cloud/http_api.py b/homeassistant/components/cloud/http_api.py index 4f2ad0ddcf7..844f0e9f11d 100644 --- a/homeassistant/components/cloud/http_api.py +++ b/homeassistant/components/cloud/http_api.py @@ -440,16 +440,16 @@ def validate_language_voice(value: tuple[str, str]) -> tuple[str, str]: @websocket_api.websocket_command( { vol.Required("type"): "cloud/update_prefs", - vol.Optional(PREF_ALEXA_REPORT_STATE): bool, - vol.Optional(PREF_ENABLE_ALEXA): bool, - vol.Optional(PREF_ENABLE_CLOUD_ICE_SERVERS): bool, vol.Optional(PREF_ENABLE_GOOGLE): bool, + vol.Optional(PREF_ENABLE_ALEXA): bool, + vol.Optional(PREF_ALEXA_REPORT_STATE): bool, vol.Optional(PREF_GOOGLE_REPORT_STATE): bool, vol.Optional(PREF_GOOGLE_SECURE_DEVICES_PIN): vol.Any(None, str), - vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool, vol.Optional(PREF_TTS_DEFAULT_VOICE): vol.All( vol.Coerce(tuple), validate_language_voice ), + vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool, + vol.Optional(PREF_ENABLE_CLOUD_ICE_SERVERS): bool, } ) @websocket_api.async_response diff --git a/homeassistant/components/cloud/prefs.py b/homeassistant/components/cloud/prefs.py index ae4b2794e1b..a0811393097 100644 --- a/homeassistant/components/cloud/prefs.py +++ b/homeassistant/components/cloud/prefs.py @@ -163,21 +163,21 @@ class CloudPreferences: async def async_update( self, *, - alexa_enabled: bool | UndefinedType = 
UNDEFINED, - alexa_report_state: bool | UndefinedType = UNDEFINED, - alexa_settings_version: int | UndefinedType = UNDEFINED, - cloud_ice_servers_enabled: bool | UndefinedType = UNDEFINED, - cloud_user: str | UndefinedType = UNDEFINED, - cloudhooks: dict[str, dict[str, str | bool]] | UndefinedType = UNDEFINED, - google_connected: bool | UndefinedType = UNDEFINED, google_enabled: bool | UndefinedType = UNDEFINED, - google_report_state: bool | UndefinedType = UNDEFINED, - google_secure_devices_pin: str | None | UndefinedType = UNDEFINED, - google_settings_version: int | UndefinedType = UNDEFINED, - remote_allow_remote_enable: bool | UndefinedType = UNDEFINED, - remote_domain: str | None | UndefinedType = UNDEFINED, + alexa_enabled: bool | UndefinedType = UNDEFINED, remote_enabled: bool | UndefinedType = UNDEFINED, + google_secure_devices_pin: str | None | UndefinedType = UNDEFINED, + cloudhooks: dict[str, dict[str, str | bool]] | UndefinedType = UNDEFINED, + cloud_user: str | UndefinedType = UNDEFINED, + alexa_report_state: bool | UndefinedType = UNDEFINED, + google_report_state: bool | UndefinedType = UNDEFINED, tts_default_voice: tuple[str, str] | UndefinedType = UNDEFINED, + remote_domain: str | None | UndefinedType = UNDEFINED, + alexa_settings_version: int | UndefinedType = UNDEFINED, + google_settings_version: int | UndefinedType = UNDEFINED, + google_connected: bool | UndefinedType = UNDEFINED, + remote_allow_remote_enable: bool | UndefinedType = UNDEFINED, + cloud_ice_servers_enabled: bool | UndefinedType = UNDEFINED, ) -> None: """Update user preferences.""" prefs = {**self._prefs} @@ -186,21 +186,21 @@ class CloudPreferences: { key: value for key, value in ( - (PREF_ALEXA_REPORT_STATE, alexa_report_state), - (PREF_ALEXA_SETTINGS_VERSION, alexa_settings_version), - (PREF_CLOUD_USER, cloud_user), - (PREF_CLOUDHOOKS, cloudhooks), - (PREF_ENABLE_ALEXA, alexa_enabled), - (PREF_ENABLE_CLOUD_ICE_SERVERS, cloud_ice_servers_enabled), (PREF_ENABLE_GOOGLE, google_enabled), + (PREF_ENABLE_ALEXA, alexa_enabled), (PREF_ENABLE_REMOTE, remote_enabled), - (PREF_GOOGLE_CONNECTED, google_connected), - (PREF_GOOGLE_REPORT_STATE, google_report_state), (PREF_GOOGLE_SECURE_DEVICES_PIN, google_secure_devices_pin), + (PREF_CLOUDHOOKS, cloudhooks), + (PREF_CLOUD_USER, cloud_user), + (PREF_ALEXA_REPORT_STATE, alexa_report_state), + (PREF_GOOGLE_REPORT_STATE, google_report_state), + (PREF_ALEXA_SETTINGS_VERSION, alexa_settings_version), (PREF_GOOGLE_SETTINGS_VERSION, google_settings_version), - (PREF_REMOTE_ALLOW_REMOTE_ENABLE, remote_allow_remote_enable), - (PREF_REMOTE_DOMAIN, remote_domain), (PREF_TTS_DEFAULT_VOICE, tts_default_voice), + (PREF_REMOTE_DOMAIN, remote_domain), + (PREF_GOOGLE_CONNECTED, google_connected), + (PREF_REMOTE_ALLOW_REMOTE_ENABLE, remote_allow_remote_enable), + (PREF_ENABLE_CLOUD_ICE_SERVERS, cloud_ice_servers_enabled), ) if value is not UNDEFINED } @@ -242,7 +242,6 @@ class CloudPreferences: PREF_ALEXA_REPORT_STATE: self.alexa_report_state, PREF_CLOUDHOOKS: self.cloudhooks, PREF_ENABLE_ALEXA: self.alexa_enabled, - PREF_ENABLE_CLOUD_ICE_SERVERS: self.cloud_ice_servers_enabled, PREF_ENABLE_GOOGLE: self.google_enabled, PREF_ENABLE_REMOTE: self.remote_enabled, PREF_GOOGLE_DEFAULT_EXPOSE: self.google_default_expose, @@ -250,6 +249,7 @@ class CloudPreferences: PREF_GOOGLE_SECURE_DEVICES_PIN: self.google_secure_devices_pin, PREF_REMOTE_ALLOW_REMOTE_ENABLE: self.remote_allow_remote_enable, PREF_TTS_DEFAULT_VOICE: self.tts_default_voice, + PREF_ENABLE_CLOUD_ICE_SERVERS: 
self.cloud_ice_servers_enabled, } @property diff --git a/homeassistant/components/co2signal/config_flow.py b/homeassistant/components/co2signal/config_flow.py index 0d357cce199..622c09f0d38 100644 --- a/homeassistant/components/co2signal/config_flow.py +++ b/homeassistant/components/co2signal/config_flow.py @@ -168,7 +168,7 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN): ) return self.async_create_entry( - title=get_extra_name(data) or "Electricity Maps", + title=get_extra_name(data) or "CO2 Signal", data=data, ) diff --git a/homeassistant/components/compensation/manifest.json b/homeassistant/components/compensation/manifest.json index 775bde3c859..caae9190bca 100644 --- a/homeassistant/components/compensation/manifest.json +++ b/homeassistant/components/compensation/manifest.json @@ -4,5 +4,5 @@ "codeowners": ["@Petro31"], "documentation": "https://www.home-assistant.io/integrations/compensation", "iot_class": "calculated", - "requirements": ["numpy==2.1.3"] + "requirements": ["numpy==1.26.4"] } diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index 4838d19537a..6b5cef89fd6 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -16,11 +16,11 @@ from hassil.expression import Expression, ListReference, Sequence from hassil.intents import Intents, SlotList, TextSlotList, WildcardSlotList from hassil.recognize import ( MISSING_ENTITY, + MatchEntity, RecognizeResult, + UnmatchedTextEntity, recognize_all, - recognize_best, ) -from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity from hassil.util import merge_dict from home_assistant_intents import ErrorKey, get_intents, get_languages import yaml @@ -294,7 +294,7 @@ class DefaultAgent(ConversationEntity): self.hass, language, DOMAIN, [DOMAIN] ) response_text = translations.get( - f"component.{DOMAIN}.conversation.agent.done", "Done" + f"component.{DOMAIN}.agent.done", "Done" ) response.async_set_speech(response_text) @@ -499,7 +499,6 @@ class DefaultAgent(ConversationEntity): maybe_result: RecognizeResult | None = None best_num_matched_entities = 0 best_num_unmatched_entities = 0 - best_num_unmatched_ranges = 0 for result in recognize_all( user_input.text, lang_intents.intents, @@ -518,14 +517,10 @@ class DefaultAgent(ConversationEntity): num_matched_entities += 1 num_unmatched_entities = 0 - num_unmatched_ranges = 0 for unmatched_entity in result.unmatched_entities_list: if isinstance(unmatched_entity, UnmatchedTextEntity): if unmatched_entity.text != MISSING_ENTITY: num_unmatched_entities += 1 - elif isinstance(unmatched_entity, UnmatchedRangeEntity): - num_unmatched_ranges += 1 - num_unmatched_entities += 1 else: num_unmatched_entities += 1 @@ -537,24 +532,15 @@ class DefaultAgent(ConversationEntity): (num_matched_entities == best_num_matched_entities) and (num_unmatched_entities < best_num_unmatched_entities) ) - or ( - # Prefer unmatched ranges - (num_matched_entities == best_num_matched_entities) - and (num_unmatched_entities == best_num_unmatched_entities) - and (num_unmatched_ranges > best_num_unmatched_ranges) - ) or ( # More literal text matched (num_matched_entities == best_num_matched_entities) and (num_unmatched_entities == best_num_unmatched_entities) - and (num_unmatched_ranges == best_num_unmatched_ranges) and (result.text_chunks_matched > maybe_result.text_chunks_matched) ) or ( # Prefer match failures with entities 
(result.text_chunks_matched == maybe_result.text_chunks_matched) - and (num_unmatched_entities == best_num_unmatched_entities) - and (num_unmatched_ranges == best_num_unmatched_ranges) and ( ("name" in result.entities) or ("name" in result.unmatched_entities) @@ -564,7 +550,6 @@ class DefaultAgent(ConversationEntity): maybe_result = result best_num_matched_entities = num_matched_entities best_num_unmatched_entities = num_unmatched_entities - best_num_unmatched_ranges = num_unmatched_ranges return maybe_result @@ -577,15 +562,76 @@ class DefaultAgent(ConversationEntity): language: str, ) -> RecognizeResult | None: """Search intents for a strict match to user input.""" - return recognize_best( + custom_found = False + name_found = False + best_results: list[RecognizeResult] = [] + best_name_quality: int | None = None + best_text_chunks_matched: int | None = None + for result in recognize_all( user_input.text, lang_intents.intents, slot_lists=slot_lists, intent_context=intent_context, language=language, - best_metadata_key=METADATA_CUSTOM_SENTENCE, - best_slot_name="name", - ) + ): + # Prioritize user intents + is_custom = ( + result.intent_metadata is not None + and result.intent_metadata.get(METADATA_CUSTOM_SENTENCE) + ) + + if custom_found and not is_custom: + continue + + if not custom_found and is_custom: + custom_found = True + # Clear builtin results + name_found = False + best_results = [] + best_name_quality = None + best_text_chunks_matched = None + + # Prioritize results with a "name" slot + name = result.entities.get("name") + is_name = name and not name.is_wildcard + + if name_found and not is_name: + continue + + if not name_found and is_name: + name_found = True + # Clear non-name results + best_results = [] + best_text_chunks_matched = None + + if is_name: + # Prioritize results with a better "name" slot + name_quality = len(cast(MatchEntity, name).value.split()) + if (best_name_quality is None) or (name_quality > best_name_quality): + best_name_quality = name_quality + # Clear worse name results + best_results = [] + best_text_chunks_matched = None + elif name_quality < best_name_quality: + continue + + # Prioritize results with more literal text + # This causes wildcards to match last. + if (best_text_chunks_matched is None) or ( + result.text_chunks_matched > best_text_chunks_matched + ): + best_results = [result] + best_text_chunks_matched = result.text_chunks_matched + elif result.text_chunks_matched == best_text_chunks_matched: + # Accumulate results with the same number of literal text matched. + # We will resolve the ambiguity below. 
+ best_results.append(result) + + if best_results: + # Successful strict match + return best_results[0] + + return None async def _build_speech( self, diff --git a/homeassistant/components/conversation/http.py b/homeassistant/components/conversation/http.py index 5e5800ad6f1..df1ffc7f74f 100644 --- a/homeassistant/components/conversation/http.py +++ b/homeassistant/components/conversation/http.py @@ -6,8 +6,12 @@ from collections.abc import Iterable from typing import Any from aiohttp import web -from hassil.recognize import MISSING_ENTITY, RecognizeResult -from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity +from hassil.recognize import ( + MISSING_ENTITY, + RecognizeResult, + UnmatchedRangeEntity, + UnmatchedTextEntity, +) import voluptuous as vol from homeassistant.components import http, websocket_api diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 1676cdf8254..2c446ac5d70 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==2.0.1", "home-assistant-intents==2024.11.13"] + "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.11.4"] } diff --git a/homeassistant/components/conversation/trigger.py b/homeassistant/components/conversation/trigger.py index a4f64ffbad9..ec7ecc76da0 100644 --- a/homeassistant/components/conversation/trigger.py +++ b/homeassistant/components/conversation/trigger.py @@ -4,8 +4,7 @@ from __future__ import annotations from typing import Any -from hassil.recognize import RecognizeResult -from hassil.util import PUNCTUATION_ALL +from hassil.recognize import PUNCTUATION, RecognizeResult import voluptuous as vol from homeassistant.const import CONF_COMMAND, CONF_PLATFORM @@ -21,7 +20,7 @@ from .const import DATA_DEFAULT_ENTITY, DOMAIN def has_no_punctuation(value: list[str]) -> list[str]: """Validate result does not contain punctuation.""" for sentence in value: - if PUNCTUATION_ALL.search(sentence): + if PUNCTUATION.search(sentence): raise vol.Invalid("sentence should not contain punctuation") return value diff --git a/homeassistant/components/crownstone/config_flow.py b/homeassistant/components/crownstone/config_flow.py index bf6e9204714..4cfbb10a4bd 100644 --- a/homeassistant/components/crownstone/config_flow.py +++ b/homeassistant/components/crownstone/config_flow.py @@ -143,7 +143,7 @@ class CrownstoneConfigFlowHandler(BaseCrownstoneFlowHandler, ConfigFlow, domain= config_entry: ConfigEntry, ) -> CrownstoneOptionsFlowHandler: """Return the Crownstone options.""" - return CrownstoneOptionsFlowHandler(config_entry) + return CrownstoneOptionsFlowHandler() def __init__(self) -> None: """Initialize the flow.""" @@ -210,10 +210,9 @@ class CrownstoneConfigFlowHandler(BaseCrownstoneFlowHandler, ConfigFlow, domain= class CrownstoneOptionsFlowHandler(BaseCrownstoneFlowHandler, OptionsFlow): """Handle Crownstone options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize Crownstone options.""" super().__init__(OPTIONS_FLOW, self.async_create_new_entry) - self.options = config_entry.options.copy() async def async_step_init( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/demo/config_flow.py 
b/homeassistant/components/demo/config_flow.py index 53c1678aa81..2b27689bdaf 100644 --- a/homeassistant/components/demo/config_flow.py +++ b/homeassistant/components/demo/config_flow.py @@ -35,7 +35,7 @@ class DemoConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Set the config entry up from yaml.""" @@ -45,10 +45,6 @@ class DemoConfigFlow(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.options = dict(config_entry.options) - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/doods/manifest.json b/homeassistant/components/doods/manifest.json index 7c85ca63467..fabb2c30190 100644 --- a/homeassistant/components/doods/manifest.json +++ b/homeassistant/components/doods/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/doods", "iot_class": "local_polling", "loggers": ["pydoods"], - "requirements": ["pydoods==1.0.2", "Pillow==11.0.0"] + "requirements": ["pydoods==1.0.2", "Pillow==10.4.0"] } diff --git a/homeassistant/components/ecobee/number.py b/homeassistant/components/ecobee/number.py index ed3744bf11e..ab09407903d 100644 --- a/homeassistant/components/ecobee/number.py +++ b/homeassistant/components/ecobee/number.py @@ -6,14 +6,9 @@ from collections.abc import Awaitable, Callable from dataclasses import dataclass import logging -from homeassistant.components.number import ( - NumberDeviceClass, - NumberEntity, - NumberEntityDescription, - NumberMode, -) +from homeassistant.components.number import NumberEntity, NumberEntityDescription from homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfTemperature, UnitOfTime +from homeassistant.const import UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -59,30 +54,21 @@ async def async_setup_entry( ) -> None: """Set up the ecobee thermostat number entity.""" data: EcobeeData = hass.data[DOMAIN] + _LOGGER.debug("Adding min time ventilators numbers (if present)") - assert data is not None - - entities: list[NumberEntity] = [ - EcobeeVentilatorMinTime(data, index, numbers) - for index, thermostat in enumerate(data.ecobee.thermostats) - if thermostat["settings"]["ventilatorType"] != "none" - for numbers in VENTILATOR_NUMBERS - ] - - _LOGGER.debug("Adding compressor min temp number (if present)") - entities.extend( + async_add_entities( ( - EcobeeCompressorMinTemp(data, index) + EcobeeVentilatorMinTime(data, index, numbers) for index, thermostat in enumerate(data.ecobee.thermostats) - if thermostat["settings"]["hasHeatPump"] - ) + if thermostat["settings"]["ventilatorType"] != "none" + for numbers in VENTILATOR_NUMBERS + ), + True, ) - async_add_entities(entities, True) - class EcobeeVentilatorMinTime(EcobeeBaseEntity, NumberEntity): - """A number class, representing min time for an ecobee thermostat with ventilator attached.""" + """A number class, representing min time for an ecobee thermostat with ventilator attached.""" entity_description: EcobeeNumberEntityDescription @@ -119,53 +105,3 @@ class EcobeeVentilatorMinTime(EcobeeBaseEntity, 
NumberEntity): """Set new ventilator Min On Time value.""" self.entity_description.set_fn(self.data, self.thermostat_index, int(value)) self.update_without_throttle = True - - -class EcobeeCompressorMinTemp(EcobeeBaseEntity, NumberEntity): - """Minimum outdoor temperature at which the compressor will operate. - - This applies more to air source heat pumps than geothermal. This serves as a safety - feature (compressors have a minimum operating temperature) as well as - providing the ability to choose fuel in a dual-fuel system (i.e. choose between - electrical heat pump and fossil auxiliary heat depending on Time of Use, Solar, - etc.). - Note that python-ecobee-api refers to this as Aux Cutover Threshold, but Ecobee - uses Compressor Protection Min Temp. - """ - - _attr_device_class = NumberDeviceClass.TEMPERATURE - _attr_has_entity_name = True - _attr_icon = "mdi:thermometer-off" - _attr_mode = NumberMode.BOX - _attr_native_min_value = -25 - _attr_native_max_value = 66 - _attr_native_step = 5 - _attr_native_unit_of_measurement = UnitOfTemperature.FAHRENHEIT - _attr_translation_key = "compressor_protection_min_temp" - - def __init__( - self, - data: EcobeeData, - thermostat_index: int, - ) -> None: - """Initialize ecobee compressor min temperature.""" - super().__init__(data, thermostat_index) - self._attr_unique_id = f"{self.base_unique_id}_compressor_protection_min_temp" - self.update_without_throttle = False - - async def async_update(self) -> None: - """Get the latest state from the thermostat.""" - if self.update_without_throttle: - await self.data.update(no_throttle=True) - self.update_without_throttle = False - else: - await self.data.update() - - self._attr_native_value = ( - (self.thermostat["settings"]["compressorProtectionMinTemp"]) / 10 - ) - - def set_native_value(self, value: float) -> None: - """Set new compressor minimum temperature.""" - self.data.ecobee.set_aux_cutover_threshold(self.thermostat_index, value) - self.update_without_throttle = True diff --git a/homeassistant/components/ecobee/strings.json b/homeassistant/components/ecobee/strings.json index 8c636bd9b04..18929cb45de 100644 --- a/homeassistant/components/ecobee/strings.json +++ b/homeassistant/components/ecobee/strings.json @@ -33,18 +33,15 @@ }, "number": { "ventilator_min_type_home": { - "name": "Ventilator minimum time home" + "name": "Ventilator min time home" }, "ventilator_min_type_away": { - "name": "Ventilator minimum time away" - }, - "compressor_protection_min_temp": { - "name": "Compressor minimum temperature" + "name": "Ventilator min time away" } }, "switch": { "aux_heat_only": { - "name": "Auxiliary heat only" + "name": "Aux heat only" } } }, diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 0ab9f9a4612..33977b3b0de 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==8.4.1"] + "requirements": ["py-sucks==0.9.10", "deebot-client==8.4.0"] } diff --git a/homeassistant/components/emoncms/__init__.py b/homeassistant/components/emoncms/__init__.py index 0cd686b5b56..98ed6328578 100644 --- a/homeassistant/components/emoncms/__init__.py +++ b/homeassistant/components/emoncms/__init__.py @@ -5,11 +5,8 @@ from pyemoncms import EmoncmsClient from 
homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_URL, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from .const import DOMAIN, EMONCMS_UUID_DOC_URL, LOGGER from .coordinator import EmoncmsCoordinator PLATFORMS: list[Platform] = [Platform.SENSOR] @@ -17,49 +14,6 @@ PLATFORMS: list[Platform] = [Platform.SENSOR] type EmonCMSConfigEntry = ConfigEntry[EmoncmsCoordinator] -def _migrate_unique_id( - hass: HomeAssistant, entry: EmonCMSConfigEntry, emoncms_unique_id: str -) -> None: - """Migrate to emoncms unique id if needed.""" - ent_reg = er.async_get(hass) - entry_entities = ent_reg.entities.get_entries_for_config_entry_id(entry.entry_id) - for entity in entry_entities: - if entity.unique_id.split("-")[0] == entry.entry_id: - feed_id = entity.unique_id.split("-")[-1] - LOGGER.debug(f"moving feed {feed_id} to hardware uuid") - ent_reg.async_update_entity( - entity.entity_id, new_unique_id=f"{emoncms_unique_id}-{feed_id}" - ) - hass.config_entries.async_update_entry( - entry, - unique_id=emoncms_unique_id, - ) - - -async def _check_unique_id_migration( - hass: HomeAssistant, entry: EmonCMSConfigEntry, emoncms_client: EmoncmsClient -) -> None: - """Check if we can migrate to the emoncms uuid.""" - emoncms_unique_id = await emoncms_client.async_get_uuid() - if emoncms_unique_id: - if entry.unique_id != emoncms_unique_id: - _migrate_unique_id(hass, entry, emoncms_unique_id) - else: - async_create_issue( - hass, - DOMAIN, - "migrate database", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="migrate_database", - translation_placeholders={ - "url": entry.data[CONF_URL], - "doc_url": EMONCMS_UUID_DOC_URL, - }, - ) - - async def async_setup_entry(hass: HomeAssistant, entry: EmonCMSConfigEntry) -> bool: """Load a config entry.""" emoncms_client = EmoncmsClient( @@ -67,7 +21,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: EmonCMSConfigEntry) -> b entry.data[CONF_API_KEY], session=async_get_clientsession(hass), ) - await _check_unique_id_migration(hass, entry, emoncms_client) coordinator = EmoncmsCoordinator(hass, emoncms_client) await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator diff --git a/homeassistant/components/emoncms/config_flow.py b/homeassistant/components/emoncms/config_flow.py index e0d4d0d03e9..b294a5cd3d4 100644 --- a/homeassistant/components/emoncms/config_flow.py +++ b/homeassistant/components/emoncms/config_flow.py @@ -14,7 +14,7 @@ from homeassistant.config_entries import ( OptionsFlow, ) from homeassistant.const import CONF_API_KEY, CONF_URL -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.selector import selector from homeassistant.helpers.typing import ConfigType @@ -48,10 +48,13 @@ def sensor_name(url: str) -> str: return f"emoncms@{sensorip}" -async def get_feed_list( - emoncms_client: EmoncmsClient, -) -> dict[str, Any]: +async def get_feed_list(hass: HomeAssistant, url: str, api_key: str) -> dict[str, Any]: """Check connection to emoncms and return feed list if successful.""" + emoncms_client = EmoncmsClient( + url, + api_key, + session=async_get_clientsession(hass), + ) 
return await emoncms_client.async_request("/feed/list.json") @@ -79,25 +82,22 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN): description_placeholders = {} if user_input is not None: - self.url = user_input[CONF_URL] - self.api_key = user_input[CONF_API_KEY] self._async_abort_entries_match( { - CONF_API_KEY: self.api_key, - CONF_URL: self.url, + CONF_API_KEY: user_input[CONF_API_KEY], + CONF_URL: user_input[CONF_URL], } ) - emoncms_client = EmoncmsClient( - self.url, self.api_key, session=async_get_clientsession(self.hass) + result = await get_feed_list( + self.hass, user_input[CONF_URL], user_input[CONF_API_KEY] ) - result = await get_feed_list(emoncms_client) if not result[CONF_SUCCESS]: errors["base"] = "api_error" description_placeholders = {"details": result[CONF_MESSAGE]} else: self.include_only_feeds = user_input.get(CONF_ONLY_INCLUDE_FEEDID) - await self.async_set_unique_id(await emoncms_client.async_get_uuid()) - self._abort_if_unique_id_configured() + self.url = user_input[CONF_URL] + self.api_key = user_input[CONF_API_KEY] options = get_options(result[CONF_MESSAGE]) self.dropdown = { "options": options, @@ -191,12 +191,7 @@ class EmoncmsOptionsFlow(OptionsFlow): self.config_entry.data.get(CONF_ONLY_INCLUDE_FEEDID, []), ) options: list = include_only_feeds - emoncms_client = EmoncmsClient( - self._url, - self._api_key, - session=async_get_clientsession(self.hass), - ) - result = await get_feed_list(emoncms_client) + result = await get_feed_list(self.hass, self._url, self._api_key) if not result[CONF_SUCCESS]: errors["base"] = "api_error" description_placeholders = {"details": result[CONF_MESSAGE]} diff --git a/homeassistant/components/emoncms/const.py b/homeassistant/components/emoncms/const.py index c53f7cc8a9f..256db5726bb 100644 --- a/homeassistant/components/emoncms/const.py +++ b/homeassistant/components/emoncms/const.py @@ -7,10 +7,6 @@ CONF_ONLY_INCLUDE_FEEDID = "include_only_feed_id" CONF_MESSAGE = "message" CONF_SUCCESS = "success" DOMAIN = "emoncms" -EMONCMS_UUID_DOC_URL = ( - "https://docs.openenergymonitor.org/emoncms/update.html" - "#upgrading-to-a-version-producing-a-unique-identifier" -) FEED_ID = "id" FEED_NAME = "name" FEED_TAG = "tag" diff --git a/homeassistant/components/emoncms/sensor.py b/homeassistant/components/emoncms/sensor.py index c696a569135..d8dec12800a 100644 --- a/homeassistant/components/emoncms/sensor.py +++ b/homeassistant/components/emoncms/sensor.py @@ -148,20 +148,20 @@ async def async_setup_entry( return coordinator = entry.runtime_data - # uuid was added in emoncms database 11.5.7 - unique_id = entry.unique_id if entry.unique_id else entry.entry_id elems = coordinator.data if not elems: return + sensors: list[EmonCmsSensor] = [] for idx, elem in enumerate(elems): if include_only_feeds is not None and elem[FEED_ID] not in include_only_feeds: continue + sensors.append( EmonCmsSensor( coordinator, - unique_id, + entry.entry_id, elem["unit"], name, idx, @@ -176,7 +176,7 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity): def __init__( self, coordinator: EmoncmsCoordinator, - unique_id: str, + entry_id: str, unit_of_measurement: str | None, name: str, idx: int, @@ -189,7 +189,7 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity): elem = self.coordinator.data[self.idx] self._attr_name = f"{name} {elem[FEED_NAME]}" self._attr_native_unit_of_measurement = unit_of_measurement - self._attr_unique_id = f"{unique_id}-{elem[FEED_ID]}" + self._attr_unique_id = f"{entry_id}-{elem[FEED_ID]}" if 
unit_of_measurement in ("kWh", "Wh"): self._attr_device_class = SensorDeviceClass.ENERGY self._attr_state_class = SensorStateClass.TOTAL_INCREASING diff --git a/homeassistant/components/emoncms/strings.json b/homeassistant/components/emoncms/strings.json index 0d841f2efb4..e2b7602f6f2 100644 --- a/homeassistant/components/emoncms/strings.json +++ b/homeassistant/components/emoncms/strings.json @@ -19,9 +19,6 @@ "include_only_feed_id": "Choose feeds to include" } } - }, - "abort": { - "already_configured": "This server is already configured" } }, "options": { @@ -44,10 +41,6 @@ "missing_include_only_feed_id": { "title": "No feed synchronized with the {domain} sensor", "description": "Configuring {domain} using YAML is being removed.\n\nPlease add manually the feeds you want to synchronize with the `configure` button of the integration." - }, - "migrate_database": { - "title": "Upgrade your emoncms version", - "description": "Your [emoncms]({url}) does not ship a unique identifier.\n\n Please upgrade to at least version 11.5.7 and migrate your emoncms database.\n\n More info on [emoncms documentation]({doc_url})" } } } diff --git a/homeassistant/components/emulated_kasa/manifest.json b/homeassistant/components/emulated_kasa/manifest.json index d4889c0c5f5..f1a01f9d7aa 100644 --- a/homeassistant/components/emulated_kasa/manifest.json +++ b/homeassistant/components/emulated_kasa/manifest.json @@ -6,5 +6,5 @@ "iot_class": "local_push", "loggers": ["sense_energy"], "quality_scale": "internal", - "requirements": ["sense-energy==0.13.3"] + "requirements": ["sense-energy==0.13.2"] } diff --git a/homeassistant/components/eq3btsmart/__init__.py b/homeassistant/components/eq3btsmart/__init__.py index 84b27161edd..f63e627ea7d 100644 --- a/homeassistant/components/eq3btsmart/__init__.py +++ b/homeassistant/components/eq3btsmart/__init__.py @@ -15,23 +15,17 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.dispatcher import async_dispatcher_send -from .const import SIGNAL_THERMOSTAT_CONNECTED, SIGNAL_THERMOSTAT_DISCONNECTED +from .const import DOMAIN, SIGNAL_THERMOSTAT_CONNECTED, SIGNAL_THERMOSTAT_DISCONNECTED from .models import Eq3Config, Eq3ConfigEntryData PLATFORMS = [ - Platform.BINARY_SENSOR, Platform.CLIMATE, - Platform.NUMBER, - Platform.SWITCH, ] _LOGGER = logging.getLogger(__name__) -type Eq3ConfigEntry = ConfigEntry[Eq3ConfigEntryData] - - -async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Handle config entry setup.""" mac_address: str | None = entry.unique_id @@ -59,11 +53,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool: ble_device=device, ) - entry.runtime_data = Eq3ConfigEntryData( - eq3_config=eq3_config, thermostat=thermostat - ) + eq3_config_entry = Eq3ConfigEntryData(eq3_config=eq3_config, thermostat=thermostat) + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = eq3_config_entry + entry.async_on_unload(entry.add_update_listener(update_listener)) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_create_background_task( hass, _async_run_thermostat(hass, entry), entry.entry_id ) @@ -71,27 +66,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, 
entry: ConfigEntry) -> bool: """Handle config entry unload.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - await entry.runtime_data.thermostat.async_disconnect() + eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN].pop(entry.entry_id) + await eq3_config_entry.thermostat.async_disconnect() return unload_ok -async def update_listener(hass: HomeAssistant, entry: Eq3ConfigEntry) -> None: +async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: """Handle config entry update.""" await hass.config_entries.async_reload(entry.entry_id) -async def _async_run_thermostat(hass: HomeAssistant, entry: Eq3ConfigEntry) -> None: +async def _async_run_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None: """Run the thermostat.""" - thermostat = entry.runtime_data.thermostat - mac_address = entry.runtime_data.eq3_config.mac_address - scan_interval = entry.runtime_data.eq3_config.scan_interval + eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][entry.entry_id] + thermostat = eq3_config_entry.thermostat + mac_address = eq3_config_entry.eq3_config.mac_address + scan_interval = eq3_config_entry.eq3_config.scan_interval await _async_reconnect_thermostat(hass, entry) @@ -120,14 +117,13 @@ async def _async_run_thermostat(hass: HomeAssistant, entry: Eq3ConfigEntry) -> N await asyncio.sleep(scan_interval) -async def _async_reconnect_thermostat( - hass: HomeAssistant, entry: Eq3ConfigEntry -) -> None: +async def _async_reconnect_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None: """Reconnect the thermostat.""" - thermostat = entry.runtime_data.thermostat - mac_address = entry.runtime_data.eq3_config.mac_address - scan_interval = entry.runtime_data.eq3_config.scan_interval + eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][entry.entry_id] + thermostat = eq3_config_entry.thermostat + mac_address = eq3_config_entry.eq3_config.mac_address + scan_interval = eq3_config_entry.eq3_config.scan_interval while True: try: diff --git a/homeassistant/components/eq3btsmart/binary_sensor.py b/homeassistant/components/eq3btsmart/binary_sensor.py deleted file mode 100644 index 27525d47972..00000000000 --- a/homeassistant/components/eq3btsmart/binary_sensor.py +++ /dev/null @@ -1,86 +0,0 @@ -"""Platform for eq3 binary sensor entities.""" - -from collections.abc import Callable -from dataclasses import dataclass -from typing import TYPE_CHECKING - -from eq3btsmart.models import Status - -from homeassistant.components.binary_sensor import ( - BinarySensorDeviceClass, - BinarySensorEntity, - BinarySensorEntityDescription, -) -from homeassistant.const import EntityCategory -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from . 
import Eq3ConfigEntry -from .const import ENTITY_KEY_BATTERY, ENTITY_KEY_DST, ENTITY_KEY_WINDOW -from .entity import Eq3Entity - - -@dataclass(frozen=True, kw_only=True) -class Eq3BinarySensorEntityDescription(BinarySensorEntityDescription): - """Entity description for eq3 binary sensors.""" - - value_func: Callable[[Status], bool] - - -BINARY_SENSOR_ENTITY_DESCRIPTIONS = [ - Eq3BinarySensorEntityDescription( - value_func=lambda status: status.is_low_battery, - key=ENTITY_KEY_BATTERY, - device_class=BinarySensorDeviceClass.BATTERY, - entity_category=EntityCategory.DIAGNOSTIC, - ), - Eq3BinarySensorEntityDescription( - value_func=lambda status: status.is_window_open, - key=ENTITY_KEY_WINDOW, - device_class=BinarySensorDeviceClass.WINDOW, - ), - Eq3BinarySensorEntityDescription( - value_func=lambda status: status.is_dst, - key=ENTITY_KEY_DST, - translation_key=ENTITY_KEY_DST, - entity_category=EntityCategory.DIAGNOSTIC, - ), -] - - -async def async_setup_entry( - hass: HomeAssistant, - entry: Eq3ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the entry.""" - - async_add_entities( - Eq3BinarySensorEntity(entry, entity_description) - for entity_description in BINARY_SENSOR_ENTITY_DESCRIPTIONS - ) - - -class Eq3BinarySensorEntity(Eq3Entity, BinarySensorEntity): - """Base class for eQ-3 binary sensor entities.""" - - entity_description: Eq3BinarySensorEntityDescription - - def __init__( - self, - entry: Eq3ConfigEntry, - entity_description: Eq3BinarySensorEntityDescription, - ) -> None: - """Initialize the entity.""" - - super().__init__(entry, entity_description.key) - self.entity_description = entity_description - - @property - def is_on(self) -> bool: - """Return the state of the binary sensor.""" - - if TYPE_CHECKING: - assert self._thermostat.status is not None - - return self.entity_description.value_func(self._thermostat.status) diff --git a/homeassistant/components/eq3btsmart/climate.py b/homeassistant/components/eq3btsmart/climate.py index ae01d0fc9a7..7b8ccb6c990 100644 --- a/homeassistant/components/eq3btsmart/climate.py +++ b/homeassistant/components/eq3btsmart/climate.py @@ -3,6 +3,7 @@ import logging from typing import Any +from eq3btsmart import Thermostat from eq3btsmart.const import EQ3BT_MAX_TEMP, EQ3BT_OFF_TEMP, Eq3Preset, OperationMode from eq3btsmart.exceptions import Eq3Exception @@ -14,35 +15,45 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, PRECISION_HALVES, UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH +from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import slugify -from . 
import Eq3ConfigEntry from .const import ( + DEVICE_MODEL, + DOMAIN, EQ_TO_HA_HVAC, HA_TO_EQ_HVAC, + MANUFACTURER, + SIGNAL_THERMOSTAT_CONNECTED, + SIGNAL_THERMOSTAT_DISCONNECTED, CurrentTemperatureSelector, Preset, TargetTemperatureSelector, ) from .entity import Eq3Entity +from .models import Eq3Config, Eq3ConfigEntryData _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - entry: Eq3ConfigEntry, + config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Handle config entry setup.""" + eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][config_entry.entry_id] + async_add_entities( - [Eq3Climate(entry)], + [Eq3Climate(eq3_config_entry.eq3_config, eq3_config_entry.thermostat)], ) @@ -69,6 +80,53 @@ class Eq3Climate(Eq3Entity, ClimateEntity): _attr_preset_mode: str | None = None _target_temperature: float | None = None + def __init__(self, eq3_config: Eq3Config, thermostat: Thermostat) -> None: + """Initialize the climate entity.""" + + super().__init__(eq3_config, thermostat) + self._attr_unique_id = dr.format_mac(eq3_config.mac_address) + self._attr_device_info = DeviceInfo( + name=slugify(self._eq3_config.mac_address), + manufacturer=MANUFACTURER, + model=DEVICE_MODEL, + connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)}, + ) + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + + self._thermostat.register_update_callback(self._async_on_updated) + + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"{SIGNAL_THERMOSTAT_DISCONNECTED}_{self._eq3_config.mac_address}", + self._async_on_disconnected, + ) + ) + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"{SIGNAL_THERMOSTAT_CONNECTED}_{self._eq3_config.mac_address}", + self._async_on_connected, + ) + ) + + async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" + + self._thermostat.unregister_update_callback(self._async_on_updated) + + @callback + def _async_on_disconnected(self) -> None: + self._attr_available = False + self.async_write_ha_state() + + @callback + def _async_on_connected(self) -> None: + self._attr_available = True + self.async_write_ha_state() + @callback def _async_on_updated(self) -> None: """Handle updated data from the thermostat.""" @@ -79,15 +137,12 @@ class Eq3Climate(Eq3Entity, ClimateEntity): if self._thermostat.device_data is not None: self._async_on_device_updated() - super()._async_on_updated() + self.async_write_ha_state() @callback def _async_on_status_updated(self) -> None: """Handle updated status from the thermostat.""" - if self._thermostat.status is None: - return - self._target_temperature = self._thermostat.status.target_temperature.value self._attr_hvac_mode = EQ_TO_HA_HVAC[self._thermostat.status.operation_mode] self._attr_current_temperature = self._get_current_temperature() @@ -99,16 +154,13 @@ class Eq3Climate(Eq3Entity, ClimateEntity): def _async_on_device_updated(self) -> None: """Handle updated device data from the thermostat.""" - if self._thermostat.device_data is None: - return - device_registry = dr.async_get(self.hass) if device := device_registry.async_get_device( connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)}, ): device_registry.async_update_device( device.id, - sw_version=str(self._thermostat.device_data.firmware_version), + sw_version=self._thermostat.device_data.firmware_version, serial_number=self._thermostat.device_data.device_serial.value, ) @@ -213,7 
+265,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity): self.async_write_ha_state() try: - await self._thermostat.async_set_temperature(temperature) + await self._thermostat.async_set_temperature(self._target_temperature) except Eq3Exception: _LOGGER.error( "[%s] Failed setting temperature", self._eq3_config.mac_address diff --git a/homeassistant/components/eq3btsmart/const.py b/homeassistant/components/eq3btsmart/const.py index 78292940e60..111c4d0eba4 100644 --- a/homeassistant/components/eq3btsmart/const.py +++ b/homeassistant/components/eq3btsmart/const.py @@ -18,20 +18,9 @@ DOMAIN = "eq3btsmart" MANUFACTURER = "eQ-3 AG" DEVICE_MODEL = "CC-RT-BLE-EQ" -ENTITY_KEY_DST = "dst" -ENTITY_KEY_BATTERY = "battery" -ENTITY_KEY_WINDOW = "window" -ENTITY_KEY_LOCK = "lock" -ENTITY_KEY_BOOST = "boost" -ENTITY_KEY_AWAY = "away" -ENTITY_KEY_COMFORT = "comfort" -ENTITY_KEY_ECO = "eco" -ENTITY_KEY_OFFSET = "offset" -ENTITY_KEY_WINDOW_OPEN_TEMPERATURE = "window_open_temperature" -ENTITY_KEY_WINDOW_OPEN_TIMEOUT = "window_open_timeout" - GET_DEVICE_TIMEOUT = 5 # seconds + EQ_TO_HA_HVAC: dict[OperationMode, HVACMode] = { OperationMode.OFF: HVACMode.OFF, OperationMode.ON: HVACMode.HEAT, @@ -82,5 +71,3 @@ DEFAULT_SCAN_INTERVAL = 10 # seconds SIGNAL_THERMOSTAT_DISCONNECTED = f"{DOMAIN}.thermostat_disconnected" SIGNAL_THERMOSTAT_CONNECTED = f"{DOMAIN}.thermostat_connected" - -EQ3BT_STEP = 0.5 diff --git a/homeassistant/components/eq3btsmart/entity.py b/homeassistant/components/eq3btsmart/entity.py index e68545c08c7..e8c00d4e3cf 100644 --- a/homeassistant/components/eq3btsmart/entity.py +++ b/homeassistant/components/eq3btsmart/entity.py @@ -1,22 +1,10 @@ """Base class for all eQ-3 entities.""" -from homeassistant.core import callback -from homeassistant.helpers.device_registry import ( - CONNECTION_BLUETOOTH, - DeviceInfo, - format_mac, -) -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity import Entity -from homeassistant.util import slugify +from eq3btsmart.thermostat import Thermostat -from . 
import Eq3ConfigEntry -from .const import ( - DEVICE_MODEL, - MANUFACTURER, - SIGNAL_THERMOSTAT_CONNECTED, - SIGNAL_THERMOSTAT_DISCONNECTED, -) +from homeassistant.helpers.entity import Entity + +from .models import Eq3Config class Eq3Entity(Entity): @@ -24,70 +12,8 @@ class Eq3Entity(Entity): _attr_has_entity_name = True - def __init__( - self, - entry: Eq3ConfigEntry, - unique_id_key: str | None = None, - ) -> None: + def __init__(self, eq3_config: Eq3Config, thermostat: Thermostat) -> None: """Initialize the eq3 entity.""" - self._eq3_config = entry.runtime_data.eq3_config - self._thermostat = entry.runtime_data.thermostat - self._attr_device_info = DeviceInfo( - name=slugify(self._eq3_config.mac_address), - manufacturer=MANUFACTURER, - model=DEVICE_MODEL, - connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)}, - ) - suffix = f"_{unique_id_key}" if unique_id_key else "" - self._attr_unique_id = f"{format_mac(self._eq3_config.mac_address)}{suffix}" - - async def async_added_to_hass(self) -> None: - """Run when entity about to be added to hass.""" - - self._thermostat.register_update_callback(self._async_on_updated) - - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SIGNAL_THERMOSTAT_DISCONNECTED}_{self._eq3_config.mac_address}", - self._async_on_disconnected, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SIGNAL_THERMOSTAT_CONNECTED}_{self._eq3_config.mac_address}", - self._async_on_connected, - ) - ) - - async def async_will_remove_from_hass(self) -> None: - """Run when entity will be removed from hass.""" - - self._thermostat.unregister_update_callback(self._async_on_updated) - - def _async_on_updated(self) -> None: - """Handle updated data from the thermostat.""" - - self.async_write_ha_state() - - @callback - def _async_on_disconnected(self) -> None: - """Handle disconnection from the thermostat.""" - - self._attr_available = False - self.async_write_ha_state() - - @callback - def _async_on_connected(self) -> None: - """Handle connection to the thermostat.""" - - self._attr_available = True - self.async_write_ha_state() - - @property - def available(self) -> bool: - """Whether the entity is available.""" - - return self._thermostat.status is not None and self._attr_available + self._eq3_config = eq3_config + self._thermostat = thermostat diff --git a/homeassistant/components/eq3btsmart/icons.json b/homeassistant/components/eq3btsmart/icons.json deleted file mode 100644 index e6eb7532f37..00000000000 --- a/homeassistant/components/eq3btsmart/icons.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "entity": { - "binary_sensor": { - "dst": { - "default": "mdi:sun-clock", - "state": { - "off": "mdi:sun-clock-outline" - } - } - }, - "number": { - "comfort": { - "default": "mdi:sun-thermometer" - }, - "eco": { - "default": "mdi:snowflake-thermometer" - }, - "offset": { - "default": "mdi:thermometer-plus" - }, - "window_open_temperature": { - "default": "mdi:window-open-variant" - }, - "window_open_timeout": { - "default": "mdi:timer-refresh" - } - }, - "switch": { - "away": { - "default": "mdi:home-account", - "state": { - "on": "mdi:home-export" - } - }, - "lock": { - "default": "mdi:lock", - "state": { - "off": "mdi:lock-off" - } - }, - "boost": { - "default": "mdi:fire", - "state": { - "off": "mdi:fire-off" - } - } - } - } -} diff --git a/homeassistant/components/eq3btsmart/manifest.json b/homeassistant/components/eq3btsmart/manifest.json index b30f806bf63..e25c675bf82 100644 --- a/homeassistant/components/eq3btsmart/manifest.json 
+++ b/homeassistant/components/eq3btsmart/manifest.json @@ -23,5 +23,5 @@ "iot_class": "local_polling", "loggers": ["eq3btsmart"], "quality_scale": "silver", - "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==1.1.0"] + "requirements": ["eq3btsmart==1.2.0", "bleak-esphome==1.1.0"] } diff --git a/homeassistant/components/eq3btsmart/models.py b/homeassistant/components/eq3btsmart/models.py index 858465effa8..8ea0955dbdd 100644 --- a/homeassistant/components/eq3btsmart/models.py +++ b/homeassistant/components/eq3btsmart/models.py @@ -2,6 +2,7 @@ from dataclasses import dataclass +from eq3btsmart.const import DEFAULT_AWAY_HOURS, DEFAULT_AWAY_TEMP from eq3btsmart.thermostat import Thermostat from .const import ( @@ -22,6 +23,8 @@ class Eq3Config: target_temp_selector: TargetTemperatureSelector = DEFAULT_TARGET_TEMP_SELECTOR external_temp_sensor: str = "" scan_interval: int = DEFAULT_SCAN_INTERVAL + default_away_hours: float = DEFAULT_AWAY_HOURS + default_away_temperature: float = DEFAULT_AWAY_TEMP @dataclass(slots=True) diff --git a/homeassistant/components/eq3btsmart/number.py b/homeassistant/components/eq3btsmart/number.py deleted file mode 100644 index 2e069180fa3..00000000000 --- a/homeassistant/components/eq3btsmart/number.py +++ /dev/null @@ -1,158 +0,0 @@ -"""Platform for eq3 number entities.""" - -from collections.abc import Awaitable, Callable -from dataclasses import dataclass -from typing import TYPE_CHECKING - -from eq3btsmart import Thermostat -from eq3btsmart.const import ( - EQ3BT_MAX_OFFSET, - EQ3BT_MAX_TEMP, - EQ3BT_MIN_OFFSET, - EQ3BT_MIN_TEMP, -) -from eq3btsmart.models import Presets - -from homeassistant.components.number import ( - NumberDeviceClass, - NumberEntity, - NumberEntityDescription, - NumberMode, -) -from homeassistant.const import EntityCategory, UnitOfTemperature, UnitOfTime -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from . 
import Eq3ConfigEntry -from .const import ( - ENTITY_KEY_COMFORT, - ENTITY_KEY_ECO, - ENTITY_KEY_OFFSET, - ENTITY_KEY_WINDOW_OPEN_TEMPERATURE, - ENTITY_KEY_WINDOW_OPEN_TIMEOUT, - EQ3BT_STEP, -) -from .entity import Eq3Entity - - -@dataclass(frozen=True, kw_only=True) -class Eq3NumberEntityDescription(NumberEntityDescription): - """Entity description for eq3 number entities.""" - - value_func: Callable[[Presets], float] - value_set_func: Callable[ - [Thermostat], - Callable[[float], Awaitable[None]], - ] - mode: NumberMode = NumberMode.BOX - entity_category: EntityCategory | None = EntityCategory.CONFIG - - -NUMBER_ENTITY_DESCRIPTIONS = [ - Eq3NumberEntityDescription( - key=ENTITY_KEY_COMFORT, - value_func=lambda presets: presets.comfort_temperature.value, - value_set_func=lambda thermostat: thermostat.async_configure_comfort_temperature, - translation_key=ENTITY_KEY_COMFORT, - native_min_value=EQ3BT_MIN_TEMP, - native_max_value=EQ3BT_MAX_TEMP, - native_step=EQ3BT_STEP, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=NumberDeviceClass.TEMPERATURE, - ), - Eq3NumberEntityDescription( - key=ENTITY_KEY_ECO, - value_func=lambda presets: presets.eco_temperature.value, - value_set_func=lambda thermostat: thermostat.async_configure_eco_temperature, - translation_key=ENTITY_KEY_ECO, - native_min_value=EQ3BT_MIN_TEMP, - native_max_value=EQ3BT_MAX_TEMP, - native_step=EQ3BT_STEP, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=NumberDeviceClass.TEMPERATURE, - ), - Eq3NumberEntityDescription( - key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE, - value_func=lambda presets: presets.window_open_temperature.value, - value_set_func=lambda thermostat: thermostat.async_configure_window_open_temperature, - translation_key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE, - native_min_value=EQ3BT_MIN_TEMP, - native_max_value=EQ3BT_MAX_TEMP, - native_step=EQ3BT_STEP, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=NumberDeviceClass.TEMPERATURE, - ), - Eq3NumberEntityDescription( - key=ENTITY_KEY_OFFSET, - value_func=lambda presets: presets.offset_temperature.value, - value_set_func=lambda thermostat: thermostat.async_configure_temperature_offset, - translation_key=ENTITY_KEY_OFFSET, - native_min_value=EQ3BT_MIN_OFFSET, - native_max_value=EQ3BT_MAX_OFFSET, - native_step=EQ3BT_STEP, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=NumberDeviceClass.TEMPERATURE, - ), - Eq3NumberEntityDescription( - key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT, - value_set_func=lambda thermostat: thermostat.async_configure_window_open_duration, - value_func=lambda presets: presets.window_open_time.value.total_seconds() / 60, - translation_key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT, - native_min_value=0, - native_max_value=60, - native_step=5, - native_unit_of_measurement=UnitOfTime.MINUTES, - ), -] - - -async def async_setup_entry( - hass: HomeAssistant, - entry: Eq3ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the entry.""" - - async_add_entities( - Eq3NumberEntity(entry, entity_description) - for entity_description in NUMBER_ENTITY_DESCRIPTIONS - ) - - -class Eq3NumberEntity(Eq3Entity, NumberEntity): - """Base class for all eq3 number entities.""" - - entity_description: Eq3NumberEntityDescription - - def __init__( - self, entry: Eq3ConfigEntry, entity_description: Eq3NumberEntityDescription - ) -> None: - """Initialize the entity.""" - - super().__init__(entry, entity_description.key) - self.entity_description = entity_description - - @property - 
def native_value(self) -> float: - """Return the state of the entity.""" - - if TYPE_CHECKING: - assert self._thermostat.status is not None - assert self._thermostat.status.presets is not None - - return self.entity_description.value_func(self._thermostat.status.presets) - - async def async_set_native_value(self, value: float) -> None: - """Set the state of the entity.""" - - await self.entity_description.value_set_func(self._thermostat)(value) - - @property - def available(self) -> bool: - """Return whether the entity is available.""" - - return ( - self._thermostat.status is not None - and self._thermostat.status.presets is not None - and self._attr_available - ) diff --git a/homeassistant/components/eq3btsmart/strings.json b/homeassistant/components/eq3btsmart/strings.json index acfd5082f45..5108baa1bcf 100644 --- a/homeassistant/components/eq3btsmart/strings.json +++ b/homeassistant/components/eq3btsmart/strings.json @@ -18,40 +18,5 @@ "error": { "invalid_mac_address": "Invalid MAC address" } - }, - "entity": { - "binary_sensor": { - "dst": { - "name": "Daylight saving time" - } - }, - "number": { - "comfort": { - "name": "Comfort temperature" - }, - "eco": { - "name": "Eco temperature" - }, - "offset": { - "name": "Offset temperature" - }, - "window_open_temperature": { - "name": "Window open temperature" - }, - "window_open_timeout": { - "name": "Window open timeout" - } - }, - "switch": { - "lock": { - "name": "Lock" - }, - "boost": { - "name": "Boost" - }, - "away": { - "name": "Away" - } - } } } diff --git a/homeassistant/components/eq3btsmart/switch.py b/homeassistant/components/eq3btsmart/switch.py deleted file mode 100644 index 7525d8ca494..00000000000 --- a/homeassistant/components/eq3btsmart/switch.py +++ /dev/null @@ -1,94 +0,0 @@ -"""Platform for eq3 switch entities.""" - -from collections.abc import Awaitable, Callable -from dataclasses import dataclass -from typing import TYPE_CHECKING, Any - -from eq3btsmart import Thermostat -from eq3btsmart.models import Status - -from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from . 
import Eq3ConfigEntry -from .const import ENTITY_KEY_AWAY, ENTITY_KEY_BOOST, ENTITY_KEY_LOCK -from .entity import Eq3Entity - - -@dataclass(frozen=True, kw_only=True) -class Eq3SwitchEntityDescription(SwitchEntityDescription): - """Entity description for eq3 switch entities.""" - - toggle_func: Callable[[Thermostat], Callable[[bool], Awaitable[None]]] - value_func: Callable[[Status], bool] - - -SWITCH_ENTITY_DESCRIPTIONS = [ - Eq3SwitchEntityDescription( - key=ENTITY_KEY_LOCK, - translation_key=ENTITY_KEY_LOCK, - toggle_func=lambda thermostat: thermostat.async_set_locked, - value_func=lambda status: status.is_locked, - ), - Eq3SwitchEntityDescription( - key=ENTITY_KEY_BOOST, - translation_key=ENTITY_KEY_BOOST, - toggle_func=lambda thermostat: thermostat.async_set_boost, - value_func=lambda status: status.is_boost, - ), - Eq3SwitchEntityDescription( - key=ENTITY_KEY_AWAY, - translation_key=ENTITY_KEY_AWAY, - toggle_func=lambda thermostat: thermostat.async_set_away, - value_func=lambda status: status.is_away, - ), -] - - -async def async_setup_entry( - hass: HomeAssistant, - entry: Eq3ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the entry.""" - - async_add_entities( - Eq3SwitchEntity(entry, entity_description) - for entity_description in SWITCH_ENTITY_DESCRIPTIONS - ) - - -class Eq3SwitchEntity(Eq3Entity, SwitchEntity): - """Base class for eq3 switch entities.""" - - entity_description: Eq3SwitchEntityDescription - - def __init__( - self, - entry: Eq3ConfigEntry, - entity_description: Eq3SwitchEntityDescription, - ) -> None: - """Initialize the entity.""" - - super().__init__(entry, entity_description.key) - self.entity_description = entity_description - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn on the switch.""" - - await self.entity_description.toggle_func(self._thermostat)(True) - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn off the switch.""" - - await self.entity_description.toggle_func(self._thermostat)(False) - - @property - def is_on(self) -> bool: - """Return the state of the switch.""" - - if TYPE_CHECKING: - assert self._thermostat.status is not None - - return self.entity_description.value_func(self._thermostat.status) diff --git a/homeassistant/components/esphome/config_flow.py b/homeassistant/components/esphome/config_flow.py index cb892b314cd..99dae2e68ab 100644 --- a/homeassistant/components/esphome/config_flow.py +++ b/homeassistant/components/esphome/config_flow.py @@ -257,9 +257,6 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): self, discovery_info: MqttServiceInfo ) -> ConfigFlowResult: """Handle MQTT discovery.""" - if not discovery_info.payload: - return self.async_abort(reason="mqtt_missing_payload") - device_info = json_loads_object(discovery_info.payload) if "mac" not in device_info: return self.async_abort(reason="mqtt_missing_mac") diff --git a/homeassistant/components/esphome/strings.json b/homeassistant/components/esphome/strings.json index 18a54772e30..ec7e6f674b3 100644 --- a/homeassistant/components/esphome/strings.json +++ b/homeassistant/components/esphome/strings.json @@ -8,8 +8,7 @@ "service_received": "Action received", "mqtt_missing_mac": "Missing MAC address in MQTT properties.", "mqtt_missing_api": "Missing API port in MQTT properties.", - "mqtt_missing_ip": "Missing IP address in MQTT properties.", - "mqtt_missing_payload": "Missing MQTT Payload." + "mqtt_missing_ip": "Missing IP address in MQTT properties." 
}, "error": { "resolve_error": "Can't resolve address of the ESP. If this error persists, please set a static IP address", diff --git a/homeassistant/components/ezviz/update.py b/homeassistant/components/ezviz/update.py index 25a506a0052..05735d152cf 100644 --- a/homeassistant/components/ezviz/update.py +++ b/homeassistant/components/ezviz/update.py @@ -73,9 +73,11 @@ class EzvizUpdateEntity(EzvizEntity, UpdateEntity): return self.data["version"] @property - def in_progress(self) -> bool: + def in_progress(self) -> bool | int | None: """Update installation progress.""" - return bool(self.data["upgrade_in_progress"]) + if self.data["upgrade_in_progress"]: + return self.data["upgrade_percent"] + return False @property def latest_version(self) -> str | None: @@ -91,13 +93,6 @@ class EzvizUpdateEntity(EzvizEntity, UpdateEntity): return self.data["latest_firmware_info"].get("desc") return None - @property - def update_percentage(self) -> int | None: - """Update installation progress.""" - if self.data["upgrade_in_progress"]: - return self.data["upgrade_percent"] - return None - async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: diff --git a/homeassistant/components/ffmpeg/manifest.json b/homeassistant/components/ffmpeg/manifest.json index 085db6791b3..e5f4f8b93a8 100644 --- a/homeassistant/components/ffmpeg/manifest.json +++ b/homeassistant/components/ffmpeg/manifest.json @@ -4,5 +4,5 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/ffmpeg", "integration_type": "system", - "requirements": ["ha-ffmpeg==3.2.2"] + "requirements": ["ha-ffmpeg==3.2.1"] } diff --git a/homeassistant/components/file/__init__.py b/homeassistant/components/file/__init__.py index 7bc206057c8..0c9cfee5f4d 100644 --- a/homeassistant/components/file/__init__.py +++ b/homeassistant/components/file/__init__.py @@ -3,16 +3,88 @@ from copy import deepcopy from typing import Any -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_FILE_PATH, CONF_NAME, CONF_PLATFORM, Platform -from homeassistant.core import HomeAssistant +from homeassistant.components.notify import migrate_notify_issue +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.const import ( + CONF_FILE_PATH, + CONF_NAME, + CONF_PLATFORM, + CONF_SCAN_INTERVAL, + Platform, +) +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import ( + config_validation as cv, + discovery, + issue_registry as ir, +) +from homeassistant.helpers.typing import ConfigType from .const import DOMAIN +from .notify import PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA +from .sensor import PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA + +IMPORT_SCHEMA = { + Platform.SENSOR: SENSOR_PLATFORM_SCHEMA, + Platform.NOTIFY: NOTIFY_PLATFORM_SCHEMA, +} + +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) PLATFORMS = [Platform.NOTIFY, Platform.SENSOR] +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the file integration.""" + + hass.data[DOMAIN] = config + if hass.config_entries.async_entries(DOMAIN): + # We skip import in case we already have config entries + return True + # The use of the legacy notify service was deprecated with HA Core 2024.6.0 + # and will be removed with HA Core 2024.12 + migrate_notify_issue(hass, DOMAIN, "File", "2024.12.0") + # The YAML config was imported with HA Core 2024.6.0 and will be 
removed with + # HA Core 2024.12 + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2024.12.0", + is_fixable=False, + issue_domain=DOMAIN, + learn_more_url="https://www.home-assistant.io/integrations/file/", + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "File", + }, + ) + + # Import the YAML config into separate config entries + platforms_config: dict[Platform, list[ConfigType]] = { + domain: config[domain] for domain in PLATFORMS if domain in config + } + for domain, items in platforms_config.items(): + for item in items: + if item[CONF_PLATFORM] == DOMAIN: + file_config_item = IMPORT_SCHEMA[domain](item) + file_config_item[CONF_PLATFORM] = domain + if CONF_SCAN_INTERVAL in file_config_item: + del file_config_item[CONF_SCAN_INTERVAL] + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=file_config_item, + ) + ) + + return True + + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a file component entry.""" config = {**entry.data, **entry.options} @@ -30,6 +102,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry, [Platform(entry.data[CONF_PLATFORM])] ) entry.async_on_unload(entry.add_update_listener(update_listener)) + if entry.data[CONF_PLATFORM] == Platform.NOTIFY and CONF_NAME in entry.data: + # New notify entities are being setup through the config entry, + # but during the deprecation period we want to keep the legacy notify platform, + # so we forward the setup config through discovery. + # Only the entities from yaml will still be available as legacy service. 
+ hass.async_create_task( + discovery.async_load_platform( + hass, + Platform.NOTIFY, + DOMAIN, + config, + hass.data[DOMAIN], + ) + ) return True diff --git a/homeassistant/components/file/config_flow.py b/homeassistant/components/file/config_flow.py index 992635d05fd..2b8a9bde749 100644 --- a/homeassistant/components/file/config_flow.py +++ b/homeassistant/components/file/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from copy import deepcopy +import os from typing import Any import voluptuous as vol @@ -15,6 +16,7 @@ from homeassistant.config_entries import ( ) from homeassistant.const import ( CONF_FILE_PATH, + CONF_FILENAME, CONF_NAME, CONF_PLATFORM, CONF_UNIT_OF_MEASUREMENT, @@ -130,6 +132,27 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): """Handle file sensor config flow.""" return await self._async_handle_step(Platform.SENSOR.value, user_input) + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: + """Import `file`` config from configuration.yaml.""" + self._async_abort_entries_match(import_data) + platform = import_data[CONF_PLATFORM] + name: str = import_data.get(CONF_NAME, DEFAULT_NAME) + file_name: str + if platform == Platform.NOTIFY: + file_name = import_data.pop(CONF_FILENAME) + file_path: str = os.path.join(self.hass.config.config_dir, file_name) + import_data[CONF_FILE_PATH] = file_path + else: + file_path = import_data[CONF_FILE_PATH] + title = f"{name} [{file_path}]" + data = deepcopy(import_data) + options = {} + for key, value in import_data.items(): + if key not in (CONF_FILE_PATH, CONF_PLATFORM, CONF_NAME): + data.pop(key) + options[key] = value + return self.async_create_entry(title=title, data=data, options=options) + class FileOptionsFlowHandler(OptionsFlow): """Handle File options.""" diff --git a/homeassistant/components/file/notify.py b/homeassistant/components/file/notify.py index 10e3d4a4ac6..9411b7cf1a8 100644 --- a/homeassistant/components/file/notify.py +++ b/homeassistant/components/file/notify.py @@ -2,23 +2,104 @@ from __future__ import annotations +from functools import partial +import logging import os from typing import Any, TextIO +import voluptuous as vol + from homeassistant.components.notify import ( + ATTR_TITLE, ATTR_TITLE_DEFAULT, + PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA, + BaseNotificationService, NotifyEntity, NotifyEntityFeature, + migrate_notify_issue, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_FILE_PATH, CONF_NAME +from homeassistant.const import CONF_FILE_PATH, CONF_FILENAME, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError +import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util from .const import CONF_TIMESTAMP, DEFAULT_NAME, DOMAIN, FILE_ICON +_LOGGER = logging.getLogger(__name__) + +# The legacy platform schema uses a filename, after import +# The full file path is stored in the config entry +PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend( + { + vol.Required(CONF_FILENAME): cv.string, + vol.Optional(CONF_TIMESTAMP, default=False): cv.boolean, + } +) + + +async def async_get_service( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, +) -> FileNotificationService | None: + """Get the file notification service.""" + if discovery_info 
is None: + # We only set up through discovery + return None + file_path: str = discovery_info[CONF_FILE_PATH] + timestamp: bool = discovery_info[CONF_TIMESTAMP] + + return FileNotificationService(file_path, timestamp) + + +class FileNotificationService(BaseNotificationService): + """Implement the notification service for the File service.""" + + def __init__(self, file_path: str, add_timestamp: bool) -> None: + """Initialize the service.""" + self._file_path = file_path + self.add_timestamp = add_timestamp + + async def async_send_message(self, message: str = "", **kwargs: Any) -> None: + """Send a message to a file.""" + # The use of the legacy notify service was deprecated with HA Core 2024.6.0 + # and will be removed with HA Core 2024.12 + migrate_notify_issue( + self.hass, DOMAIN, "File", "2024.12.0", service_name=self._service_name + ) + await self.hass.async_add_executor_job( + partial(self.send_message, message, **kwargs) + ) + + def send_message(self, message: str = "", **kwargs: Any) -> None: + """Send a message to a file.""" + file: TextIO + filepath = self._file_path + try: + with open(filepath, "a", encoding="utf8") as file: + if os.stat(filepath).st_size == 0: + title = ( + f"{kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)} notifications (Log" + f" started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n" + ) + file.write(title) + + if self.add_timestamp: + text = f"{dt_util.utcnow().isoformat()} {message}\n" + else: + text = f"{message}\n" + file.write(text) + except OSError as exc: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="write_access_failed", + translation_placeholders={"filename": filepath, "exc": f"{exc!r}"}, + ) from exc + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/file/sensor.py b/homeassistant/components/file/sensor.py index 879c06e29f3..e37a3df86a6 100644 --- a/homeassistant/components/file/sensor.py +++ b/homeassistant/components/file/sensor.py @@ -6,8 +6,12 @@ import logging import os from file_read_backwards import FileReadBackwards +import voluptuous as vol -from homeassistant.components.sensor import SensorEntity +from homeassistant.components.sensor import ( + PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, + SensorEntity, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_FILE_PATH, @@ -16,13 +20,38 @@ from homeassistant.const import ( CONF_VALUE_TEMPLATE, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.template import Template +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import DEFAULT_NAME, FILE_ICON _LOGGER = logging.getLogger(__name__) +PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( + { + vol.Required(CONF_FILE_PATH): cv.isfile, + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, + vol.Optional(CONF_VALUE_TEMPLATE): cv.string, + vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string, + } +) + + +async def async_setup_platform( + hass: HomeAssistant, + config: ConfigType, + async_add_entities: AddEntitiesCallback, + discovery_info: DiscoveryInfoType | None = None, +) -> None: + """Set up the file sensor from YAML. + + The YAML platform config is automatically + imported to a config entry, this method can be removed + when YAML support is removed. 
+ """ + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/file/strings.json b/homeassistant/components/file/strings.json index 8806c67cd96..60ebf451f78 100644 --- a/homeassistant/components/file/strings.json +++ b/homeassistant/components/file/strings.json @@ -18,7 +18,7 @@ }, "data_description": { "file_path": "The local file path to retrieve the sensor value from", - "value_template": "A template to render the sensors value based on the file content", + "value_template": "A template to render the the sensors value based on the file content", "unit_of_measurement": "Unit of measurement for the sensor" } }, diff --git a/homeassistant/components/fritz/config_flow.py b/homeassistant/components/fritz/config_flow.py index 920ecda1c52..ec9ffdd7554 100644 --- a/homeassistant/components/fritz/config_flow.py +++ b/homeassistant/components/fritz/config_flow.py @@ -57,8 +57,6 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - _host: str - @staticmethod @callback def async_get_options_flow( @@ -69,6 +67,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize FRITZ!Box Tools flow.""" + self._host: str | None = None self._name: str = "" self._password: str = "" self._use_tls: bool = False @@ -113,6 +112,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): async def async_check_configured_entry(self) -> ConfigEntry | None: """Check if entry is configured.""" + assert self._host current_host = await self.hass.async_add_executor_job( socket.gethostbyname, self._host ) @@ -154,17 +154,15 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle a flow initialized by discovery.""" ssdp_location: ParseResult = urlparse(discovery_info.ssdp_location or "") - host = ssdp_location.hostname - if not host or ipaddress.ip_address(host).is_link_local: - return self.async_abort(reason="ignore_ip6_link_local") - - self._host = host + self._host = ssdp_location.hostname self._name = ( discovery_info.upnp.get(ssdp.ATTR_UPNP_FRIENDLY_NAME) or discovery_info.upnp[ssdp.ATTR_UPNP_MODEL_NAME] ) - uuid: str | None + if not self._host or ipaddress.ip_address(self._host).is_link_local: + return self.async_abort(reason="ignore_ip6_link_local") + if uuid := discovery_info.upnp.get(ssdp.ATTR_UPNP_UDN): if uuid.startswith("uuid:"): uuid = uuid[5:] diff --git a/homeassistant/components/fritzbox/config_flow.py b/homeassistant/components/fritzbox/config_flow.py index ffec4a9ea29..76754fc5082 100644 --- a/homeassistant/components/fritzbox/config_flow.py +++ b/homeassistant/components/fritzbox/config_flow.py @@ -43,11 +43,10 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _name: str - def __init__(self) -> None: """Initialize flow.""" self._host: str | None = None + self._name: str | None = None self._password: str | None = None self._username: str | None = None @@ -159,6 +158,7 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): result = await self.async_try_connect() if result == RESULT_SUCCESS: + assert self._name is not None return self._get_entry(self._name) if result != RESULT_INVALID_AUTH: return self.async_abort(reason=result) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 4dc5a2b0ae4..ff399512c8b 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": 
"https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241106.2"] + "requirements": ["home-assistant-frontend==20241105.0"] } diff --git a/homeassistant/components/generic/manifest.json b/homeassistant/components/generic/manifest.json index c1fbc16d9be..b02a8fa2520 100644 --- a/homeassistant/components/generic/manifest.json +++ b/homeassistant/components/generic/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/generic", "integration_type": "device", "iot_class": "local_push", - "requirements": ["av==13.1.0", "Pillow==11.0.0"] + "requirements": ["av==13.1.0", "Pillow==10.4.0"] } diff --git a/homeassistant/components/generic_hygrostat/strings.json b/homeassistant/components/generic_hygrostat/strings.json index 2be3955eff1..a21ab68c628 100644 --- a/homeassistant/components/generic_hygrostat/strings.json +++ b/homeassistant/components/generic_hygrostat/strings.json @@ -4,7 +4,7 @@ "step": { "user": { "title": "Add generic hygrostat", - "description": "Create a humidifier entity that control the humidity via a switch and sensor.", + "description": "Create a entity that control the humidity via a switch and sensor.", "data": { "device_class": "Device class", "dry_tolerance": "Dry tolerance", diff --git a/homeassistant/components/generic_thermostat/strings.json b/homeassistant/components/generic_thermostat/strings.json index 51549dc844e..1ddd41de734 100644 --- a/homeassistant/components/generic_thermostat/strings.json +++ b/homeassistant/components/generic_thermostat/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add generic thermostat", + "title": "Add generic thermostat helper", "description": "Create a climate entity that controls the temperature via a switch and sensor.", "data": { "ac_mode": "Cooling mode", @@ -17,8 +17,8 @@ "data_description": { "ac_mode": "Set the actuator specified to be treated as a cooling device instead of a heating device.", "heater": "Switch entity used to cool or heat depending on A/C mode.", - "target_sensor": "Temperature sensor that reflects the current temperature.", - "min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on.", + "target_sensor": "Temperature sensor that reflect the current temperature.", + "min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on. This option will be ignored if the keep alive option is set.", "cold_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched on. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will start when the sensor equals or goes below 24.5.", "hot_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched off. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will stop when the sensor equals or goes above 25.5." 
} diff --git a/homeassistant/components/geniushub/__init__.py b/homeassistant/components/geniushub/__init__.py index 9ca6ecfcfe0..f3081e50289 100644 --- a/homeassistant/components/geniushub/__init__.py +++ b/homeassistant/components/geniushub/__init__.py @@ -9,6 +9,7 @@ import aiohttp from geniushubclient import GeniusHub import voluptuous as vol +from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ENTITY_ID, @@ -20,12 +21,20 @@ from homeassistant.const import ( CONF_USERNAME, Platform, ) -from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.core import ( + DOMAIN as HOMEASSISTANT_DOMAIN, + HomeAssistant, + ServiceCall, + callback, +) +from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_track_time_interval +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.service import verify_domain_control +from homeassistant.helpers.typing import ConfigType from .const import DOMAIN @@ -36,6 +45,27 @@ SCAN_INTERVAL = timedelta(seconds=60) MAC_ADDRESS_REGEXP = r"^([0-9A-F]{2}:){5}([0-9A-F]{2})$" +CLOUD_API_SCHEMA = vol.Schema( + { + vol.Required(CONF_TOKEN): cv.string, + vol.Required(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP), + } +) + + +LOCAL_API_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): cv.string, + vol.Required(CONF_USERNAME): cv.string, + vol.Required(CONF_PASSWORD): cv.string, + vol.Optional(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP), + } +) + +CONFIG_SCHEMA = vol.Schema( + {DOMAIN: vol.Any(LOCAL_API_SCHEMA, CLOUD_API_SCHEMA)}, extra=vol.ALLOW_EXTRA +) + ATTR_ZONE_MODE = "mode" ATTR_DURATION = "duration" @@ -70,6 +100,56 @@ PLATFORMS = [ ] +async def _async_import(hass: HomeAssistant, base_config: ConfigType) -> None: + """Import a config entry from configuration.yaml.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data=base_config[DOMAIN], + ) + if ( + result["type"] is FlowResultType.CREATE_ENTRY + or result["reason"] == "already_configured" + ): + async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2024.12.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Genius Hub", + }, + ) + return + async_create_issue( + hass, + DOMAIN, + f"deprecated_yaml_import_issue_{result['reason']}", + breaks_in_ha_version="2024.12.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key=f"deprecated_yaml_import_issue_{result['reason']}", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Genius Hub", + }, + ) + + +async def async_setup(hass: HomeAssistant, base_config: ConfigType) -> bool: + """Set up a Genius Hub system.""" + if DOMAIN in base_config: + hass.async_create_task(_async_import(hass, base_config)) + return True + + type GeniusHubConfigEntry = ConfigEntry[GeniusBroker] diff --git a/homeassistant/components/geniushub/config_flow.py b/homeassistant/components/geniushub/config_flow.py index b106f9907bb..601eac6c2f2 100644 --- 
a/homeassistant/components/geniushub/config_flow.py +++ b/homeassistant/components/geniushub/config_flow.py @@ -13,6 +13,7 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME +from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN @@ -122,3 +123,14 @@ class GeniusHubConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="cloud_api", errors=errors, data_schema=CLOUD_API_SCHEMA ) + + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: + """Import the yaml config.""" + if CONF_HOST in import_data: + result = await self.async_step_local_api(import_data) + else: + result = await self.async_step_cloud_api(import_data) + if result["type"] is FlowResultType.FORM: + assert result["errors"] + return self.async_abort(reason=result["errors"]["base"]) + return result diff --git a/homeassistant/components/go2rtc/__init__.py b/homeassistant/components/go2rtc/__init__.py index f1f6e44abc1..c4ec4a81cf3 100644 --- a/homeassistant/components/go2rtc/__init__.py +++ b/homeassistant/components/go2rtc/__init__.py @@ -1,10 +1,10 @@ """The go2rtc component.""" +import asyncio import logging import shutil from aiohttp.client_exceptions import ClientConnectionError, ServerConnectionError -from awesomeversion import AwesomeVersion from go2rtc_client import Go2RtcRestClient from go2rtc_client.exceptions import Go2RtcClientError, Go2RtcVersionError from go2rtc_client.ws import ( @@ -32,24 +32,14 @@ from homeassistant.components.default_config import DOMAIN as DEFAULT_CONFIG_DOM from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import ( - config_validation as cv, - discovery_flow, - issue_registry as ir, -) +from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError +from homeassistant.helpers import config_validation as cv, discovery_flow from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import ConfigType from homeassistant.util.hass_dict import HassKey from homeassistant.util.package import is_docker_env -from .const import ( - CONF_DEBUG_UI, - DEBUG_UI_URL_MESSAGE, - DOMAIN, - HA_MANAGED_URL, - RECOMMENDED_VERSION, -) +from .const import CONF_DEBUG_UI, DEBUG_UI_URL_MESSAGE, DOMAIN, HA_MANAGED_URL from .server import Server _LOGGER = logging.getLogger(__name__) @@ -158,21 +148,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # Validate the server URL try: client = Go2RtcRestClient(async_get_clientsession(hass), url) - version = await client.validate_server_version() - if version < AwesomeVersion(RECOMMENDED_VERSION): - ir.async_create_issue( - hass, - DOMAIN, - "recommended_version", - is_fixable=False, - is_persistent=False, - severity=ir.IssueSeverity.WARNING, - translation_key="recommended_version", - translation_placeholders={ - "recommended_version": RECOMMENDED_VERSION, - "current_version": str(version), - }, - ) + await client.validate_server_version() except Go2RtcClientError as err: if isinstance(err.__cause__, _RETRYABLE_ERRORS): raise ConfigEntryNotReady( @@ -213,6 +189,7 @@ class 
WebRTCProvider(CameraWebRTCProvider): self._session = async_get_clientsession(hass) self._rest_client = Go2RtcRestClient(self._session, url) self._sessions: dict[str, Go2RtcWsClient] = {} + self._current_tasks: dict[str, asyncio.Task[None]] = {} @property def domain(self) -> str: @@ -224,6 +201,60 @@ class WebRTCProvider(CameraWebRTCProvider): """Return if this provider is supports the Camera as source.""" return stream_source.partition(":")[0] in _SUPPORTED_STREAMS + async def _add_stream_if_not_exists_task(self, camera: Camera) -> None: + """Add stream if it does not exist.""" + if not (stream_source := await camera.stream_source()): + raise HomeAssistantError("Camera does not have a stream source") + + streams = await self._rest_client.streams.list() + + if (stream := streams.get(camera.entity_id)) is None or not any( + stream_source == producer.url for producer in stream.producers + ): + await self._rest_client.streams.add( + camera.entity_id, + stream_source, + ) + + stream = await self._rest_client.streams.probe( + camera.entity_id, audio="all" + ) + if any( + "audio, recvonly" in media + for producer in stream.producers + for media in producer.media + ): + # Add ffmpeg audio transcoding only if the camera has audio + await self._rest_client.streams.add( + camera.entity_id, + [stream_source, f"ffmpeg:{camera.entity_id}#audio=opus"], + ) + + @callback + def _async_add_stream_if_not_exists(self, camera: Camera) -> asyncio.Task[None]: + if task := self._current_tasks.get(camera.entity_id): + return task + + self._current_tasks[camera.entity_id] = task = self._hass.async_create_task( + self._add_stream_if_not_exists_task(camera) + ) + + @callback + def done(task: asyncio.Task) -> None: + self._current_tasks.pop(camera.entity_id) + + if not task.cancelled(): + try: + task.result() + except (Go2RtcClientError, HomeAssistantError): + _LOGGER.exception( + "Adding stream failed for %s", + camera.entity_id, + ) + + task.add_done_callback(done) + return task + async def async_handle_async_webrtc_offer( self, camera: Camera, @@ -236,28 +267,14 @@ class WebRTCProvider(CameraWebRTCProvider): self._session, self._url, source=camera.entity_id ) - if not (stream_source := await camera.stream_source()): + try: + await self._async_add_stream_if_not_exists(camera) + except (Go2RtcClientError, HomeAssistantError) as err: send_message( - WebRTCError("go2rtc_webrtc_offer_failed", "Camera has no stream source") + WebRTCError("go2rtc_webrtc_offer_failed", f"Error adding stream: {err}") ) return - streams = await self._rest_client.streams.list() - - if (stream := streams.get(camera.entity_id)) is None or not any( - stream_source == producer.url for producer in stream.producers - ): - await self._rest_client.streams.add( - camera.entity_id, - [ - stream_source, - # We are setting any ffmpeg rtsp related logs to debug - # Connection problems to the camera will be logged by the first stream - # Therefore setting it to debug will not hide any important logs - f"ffmpeg:{camera.entity_id}#audio=opus#query=log_level=debug", - ], - ) - @callback def on_messages(message: ReceiveMessages) -> None: """Handle messages.""" @@ -291,3 +308,8 @@ class WebRTCProvider(CameraWebRTCProvider): """Close the session.""" ws_client = self._sessions.pop(session_id) self._hass.async_create_task(ws_client.close()) + + @callback + def async_provider_added(self, camera: Camera) -> None: + """Notify the provider that the provider was added to the given camera.""" + self._async_add_stream_if_not_exists(camera) diff --git 
a/homeassistant/components/go2rtc/const.py b/homeassistant/components/go2rtc/const.py index 3c1c84c42b5..d33ae3e3897 100644 --- a/homeassistant/components/go2rtc/const.py +++ b/homeassistant/components/go2rtc/const.py @@ -6,4 +6,3 @@ CONF_DEBUG_UI = "debug_ui" DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time." HA_MANAGED_API_PORT = 11984 HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/" -RECOMMENDED_VERSION = "1.9.7" diff --git a/homeassistant/components/go2rtc/manifest.json b/homeassistant/components/go2rtc/manifest.json index 201b7168847..e69140a51db 100644 --- a/homeassistant/components/go2rtc/manifest.json +++ b/homeassistant/components/go2rtc/manifest.json @@ -7,6 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/go2rtc", "integration_type": "system", "iot_class": "local_polling", - "requirements": ["go2rtc-client==0.1.1"], + "requirements": ["go2rtc-client==0.0.1b4"], "single_config_entry": true } diff --git a/homeassistant/components/go2rtc/server.py b/homeassistant/components/go2rtc/server.py index 6699ee4d8a2..ed3b44aadf9 100644 --- a/homeassistant/components/go2rtc/server.py +++ b/homeassistant/components/go2rtc/server.py @@ -24,15 +24,14 @@ _RESPAWN_COOLDOWN = 1 # Default configuration for HA # - Api is listening only on localhost -# - Enable rtsp for localhost only as ffmpeg needs it +# - Disable rtsp listener # - Clear default ice servers -_GO2RTC_CONFIG_FORMAT = r"""# This file is managed by Home Assistant -# Do not edit it manually - +_GO2RTC_CONFIG_FORMAT = r""" api: listen: "{api_ip}:{api_port}" rtsp: + # ffmpeg needs rtsp for opus audio transcoding listen: "127.0.0.1:18554" webrtc: diff --git a/homeassistant/components/go2rtc/strings.json b/homeassistant/components/go2rtc/strings.json deleted file mode 100644 index e350c19af96..00000000000 --- a/homeassistant/components/go2rtc/strings.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "issues": { - "recommended_version": { - "title": "Outdated go2rtc server detected", - "description": "We detected that you are using an outdated go2rtc server version. For the best experience, we recommend updating the go2rtc server to version `{recommended_version}`.\nCurrently you are using version `{current_version}`." 
- } - } -} diff --git a/homeassistant/components/google/strings.json b/homeassistant/components/google/strings.json index 2ea45239a53..c029b46051e 100644 --- a/homeassistant/components/google/strings.json +++ b/homeassistant/components/google/strings.json @@ -87,8 +87,8 @@ } }, "create_event": { - "name": "Create event", - "description": "Adds a new calendar event.", + "name": "Creates event", + "description": "Add a new calendar event.", "fields": { "summary": { "name": "Summary", diff --git a/homeassistant/components/google_assistant/const.py b/homeassistant/components/google_assistant/const.py index 8132ecaae2c..04c85639e07 100644 --- a/homeassistant/components/google_assistant/const.py +++ b/homeassistant/components/google_assistant/const.py @@ -78,7 +78,6 @@ TYPE_AWNING = f"{PREFIX_TYPES}AWNING" TYPE_BLINDS = f"{PREFIX_TYPES}BLINDS" TYPE_CAMERA = f"{PREFIX_TYPES}CAMERA" TYPE_CURTAIN = f"{PREFIX_TYPES}CURTAIN" -TYPE_CARBON_MONOXIDE_DETECTOR = f"{PREFIX_TYPES}CARBON_MONOXIDE_DETECTOR" TYPE_DEHUMIDIFIER = f"{PREFIX_TYPES}DEHUMIDIFIER" TYPE_DOOR = f"{PREFIX_TYPES}DOOR" TYPE_DOORBELL = f"{PREFIX_TYPES}DOORBELL" @@ -94,7 +93,6 @@ TYPE_SCENE = f"{PREFIX_TYPES}SCENE" TYPE_SENSOR = f"{PREFIX_TYPES}SENSOR" TYPE_SETTOP = f"{PREFIX_TYPES}SETTOP" TYPE_SHUTTER = f"{PREFIX_TYPES}SHUTTER" -TYPE_SMOKE_DETECTOR = f"{PREFIX_TYPES}SMOKE_DETECTOR" TYPE_SPEAKER = f"{PREFIX_TYPES}SPEAKER" TYPE_SWITCH = f"{PREFIX_TYPES}SWITCH" TYPE_THERMOSTAT = f"{PREFIX_TYPES}THERMOSTAT" @@ -138,7 +136,6 @@ EVENT_SYNC_RECEIVED = "google_assistant_sync" DOMAIN_TO_GOOGLE_TYPES = { alarm_control_panel.DOMAIN: TYPE_ALARM, - binary_sensor.DOMAIN: TYPE_SENSOR, button.DOMAIN: TYPE_SCENE, camera.DOMAIN: TYPE_CAMERA, climate.DOMAIN: TYPE_THERMOSTAT, @@ -171,14 +168,6 @@ DEVICE_CLASS_TO_GOOGLE_TYPES = { binary_sensor.DOMAIN, binary_sensor.BinarySensorDeviceClass.GARAGE_DOOR, ): TYPE_GARAGE, - ( - binary_sensor.DOMAIN, - binary_sensor.BinarySensorDeviceClass.SMOKE, - ): TYPE_SMOKE_DETECTOR, - ( - binary_sensor.DOMAIN, - binary_sensor.BinarySensorDeviceClass.CO, - ): TYPE_CARBON_MONOXIDE_DETECTOR, (cover.DOMAIN, cover.CoverDeviceClass.AWNING): TYPE_AWNING, (cover.DOMAIN, cover.CoverDeviceClass.CURTAIN): TYPE_CURTAIN, (cover.DOMAIN, cover.CoverDeviceClass.DOOR): TYPE_DOOR, diff --git a/homeassistant/components/google_assistant/trait.py b/homeassistant/components/google_assistant/trait.py index f99f1574038..df56885995a 100644 --- a/homeassistant/components/google_assistant/trait.py +++ b/homeassistant/components/google_assistant/trait.py @@ -2706,21 +2706,6 @@ class SensorStateTrait(_Trait): ), } - binary_sensor_types = { - binary_sensor.BinarySensorDeviceClass.CO: ( - "CarbonMonoxideLevel", - ["carbon monoxide detected", "no carbon monoxide detected", "unknown"], - ), - binary_sensor.BinarySensorDeviceClass.SMOKE: ( - "SmokeLevel", - ["smoke detected", "no smoke detected", "unknown"], - ), - binary_sensor.BinarySensorDeviceClass.MOISTURE: ( - "WaterLeak", - ["leak", "no leak", "unknown"], - ), - } - name = TRAIT_SENSOR_STATE commands: list[str] = [] @@ -2743,37 +2728,24 @@ class SensorStateTrait(_Trait): @classmethod def supported(cls, domain, features, device_class, _): """Test if state is supported.""" - return (domain == sensor.DOMAIN and device_class in cls.sensor_types) or ( - domain == binary_sensor.DOMAIN and device_class in cls.binary_sensor_types - ) + return domain == sensor.DOMAIN and device_class in cls.sensor_types def sync_attributes(self) -> dict[str, Any]: """Return attributes for a sync request.""" device_class = 
self.state.attributes.get(ATTR_DEVICE_CLASS) + data = self.sensor_types.get(device_class) - def create_sensor_state( - name: str, - raw_value_unit: str | None = None, - available_states: list[str] | None = None, - ) -> dict[str, Any]: - sensor_state: dict[str, Any] = { - "name": name, - } - if raw_value_unit: - sensor_state["numericCapabilities"] = {"rawValueUnit": raw_value_unit} - if available_states: - sensor_state["descriptiveCapabilities"] = { - "availableStates": available_states - } - return {"sensorStatesSupported": [sensor_state]} + if device_class is None or data is None: + return {} - if self.state.domain == sensor.DOMAIN: - sensor_data = self.sensor_types.get(device_class) - if device_class is None or sensor_data is None: - return {} - available_states: list[str] | None = None - if device_class == sensor.SensorDeviceClass.AQI: - available_states = [ + sensor_state = { + "name": data[0], + "numericCapabilities": {"rawValueUnit": data[1]}, + } + + if device_class == sensor.SensorDeviceClass.AQI: + sensor_state["descriptiveCapabilities"] = { + "availableStates": [ "healthy", "moderate", "unhealthy for sensitive groups", @@ -2781,53 +2753,30 @@ class SensorStateTrait(_Trait): "very unhealthy", "hazardous", "unknown", - ] - return create_sensor_state(sensor_data[0], sensor_data[1], available_states) - binary_sensor_data = self.binary_sensor_types.get(device_class) - if device_class is None or binary_sensor_data is None: - return {} - return create_sensor_state( - binary_sensor_data[0], available_states=binary_sensor_data[1] - ) + ], + } + + return {"sensorStatesSupported": [sensor_state]} def query_attributes(self) -> dict[str, Any]: """Return the attributes of this trait for this entity.""" device_class = self.state.attributes.get(ATTR_DEVICE_CLASS) + data = self.sensor_types.get(device_class) - def create_sensor_state( - name: str, raw_value: float | None = None, current_state: str | None = None - ) -> dict[str, Any]: - sensor_state: dict[str, Any] = { - "name": name, - "rawValue": raw_value, - } - if current_state: - sensor_state["currentSensorState"] = current_state - return {"currentSensorStateData": [sensor_state]} - - if self.state.domain == sensor.DOMAIN: - sensor_data = self.sensor_types.get(device_class) - if device_class is None or sensor_data is None: - return {} - try: - value = float(self.state.state) - except ValueError: - value = None - if self.state.state == STATE_UNKNOWN: - value = None - current_state: str | None = None - if device_class == sensor.SensorDeviceClass.AQI: - current_state = self._air_quality_description_for_aqi(value) - return create_sensor_state(sensor_data[0], value, current_state) - - binary_sensor_data = self.binary_sensor_types.get(device_class) - if device_class is None or binary_sensor_data is None: + if device_class is None or data is None: return {} - value = { - STATE_ON: 0, - STATE_OFF: 1, - STATE_UNKNOWN: 2, - }[self.state.state] - return create_sensor_state( - binary_sensor_data[0], current_state=binary_sensor_data[1][value] - ) + + try: + value = float(self.state.state) + except ValueError: + value = None + if self.state.state == STATE_UNKNOWN: + value = None + sensor_data = {"name": data[0], "rawValue": value} + + if device_class == sensor.SensorDeviceClass.AQI: + sensor_data["currentSensorState"] = self._air_quality_description_for_aqi( + value + ) + + return {"currentSensorStateData": [sensor_data]} diff --git a/homeassistant/components/google_cloud/config_flow.py b/homeassistant/components/google_cloud/config_flow.py index 
fa6c952022b..8b8fd751df9 100644 --- a/homeassistant/components/google_cloud/config_flow.py +++ b/homeassistant/components/google_cloud/config_flow.py @@ -169,7 +169,7 @@ class GoogleCloudOptionsFlowHandler(OptionsFlow): ) ), **tts_options_schema( - self.config_entry.options, voices, from_config_flow=True + self.options, voices, from_config_flow=True ).schema, vol.Optional( CONF_STT_MODEL, @@ -182,6 +182,6 @@ class GoogleCloudOptionsFlowHandler(OptionsFlow): ), } ), - self.config_entry.options, + self.options, ), ) diff --git a/homeassistant/components/google_cloud/helpers.py b/homeassistant/components/google_cloud/helpers.py index f6e89fae7fa..3c614156132 100644 --- a/homeassistant/components/google_cloud/helpers.py +++ b/homeassistant/components/google_cloud/helpers.py @@ -52,7 +52,7 @@ async def async_tts_voices( def tts_options_schema( - config_options: Mapping[str, Any], + config_options: dict[str, Any], voices: dict[str, list[str]], from_config_flow: bool = False, ) -> vol.Schema: diff --git a/homeassistant/components/habitica/const.py b/homeassistant/components/habitica/const.py index ae98cb13dcb..55322a13e6a 100644 --- a/homeassistant/components/habitica/const.py +++ b/homeassistant/components/habitica/const.py @@ -25,16 +25,7 @@ UNIT_TASKS = "tasks" ATTR_CONFIG_ENTRY = "config_entry" ATTR_SKILL = "skill" ATTR_TASK = "task" -ATTR_DIRECTION = "direction" SERVICE_CAST_SKILL = "cast_skill" -SERVICE_START_QUEST = "start_quest" -SERVICE_ACCEPT_QUEST = "accept_quest" -SERVICE_CANCEL_QUEST = "cancel_quest" -SERVICE_ABORT_QUEST = "abort_quest" -SERVICE_REJECT_QUEST = "reject_quest" -SERVICE_LEAVE_QUEST = "leave_quest" -SERVICE_SCORE_HABIT = "score_habit" -SERVICE_SCORE_REWARD = "score_reward" WARRIOR = "warrior" ROGUE = "rogue" diff --git a/homeassistant/components/habitica/coordinator.py b/homeassistant/components/habitica/coordinator.py index f9ffb1b53bd..cce2c684ba8 100644 --- a/homeassistant/components/habitica/coordinator.py +++ b/homeassistant/components/habitica/coordinator.py @@ -51,17 +51,12 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]): ), ) self.api = habitipy - self.content: dict[str, Any] = {} async def _async_update_data(self) -> HabiticaData: try: user_response = await self.api.user.get() tasks_response = await self.api.tasks.user.get() tasks_response.extend(await self.api.tasks.user.get(type="completedTodos")) - if not self.content: - self.content = await self.api.content.get( - language=user_response["preferences"]["language"] - ) except ClientResponseError as error: if error.status == HTTPStatus.TOO_MANY_REQUESTS: _LOGGER.debug("Rate limit exceeded, will try again later") diff --git a/homeassistant/components/habitica/icons.json b/homeassistant/components/habitica/icons.json index d33b9c60c96..0698b85afe1 100644 --- a/homeassistant/components/habitica/icons.json +++ b/homeassistant/components/habitica/icons.json @@ -126,18 +126,6 @@ }, "rewards": { "default": "mdi:treasure-chest" - }, - "strength": { - "default": "mdi:arm-flex-outline" - }, - "intelligence": { - "default": "mdi:head-snowflake-outline" - }, - "perception": { - "default": "mdi:eye-outline" - }, - "constitution": { - "default": "mdi:run-fast" } }, "switch": { @@ -163,30 +151,6 @@ }, "cast_skill": { "service": "mdi:creation-outline" - }, - "accept_quest": { - "service": "mdi:script-text" - }, - "reject_quest": { - "service": "mdi:script-text" - }, - "leave_quest": { - "service": "mdi:script-text" - }, - "abort_quest": { - "service": "mdi:script-text-key" - }, - "cancel_quest": { - 
"service": "mdi:script-text-key" - }, - "start_quest": { - "service": "mdi:script-text-key" - }, - "score_habit": { - "service": "mdi:counter" - }, - "score_reward": { - "service": "mdi:sack" } } } diff --git a/homeassistant/components/habitica/sensor.py b/homeassistant/components/habitica/sensor.py index 3b2395ecc52..77356f88265 100644 --- a/homeassistant/components/habitica/sensor.py +++ b/homeassistant/components/habitica/sensor.py @@ -27,7 +27,7 @@ from homeassistant.helpers.typing import StateType from .const import DOMAIN, UNIT_TASKS from .entity import HabiticaBase from .types import HabiticaConfigEntry -from .util import entity_used_in, get_attribute_points, get_attributes_total +from .util import entity_used_in _LOGGER = logging.getLogger(__name__) @@ -36,10 +36,7 @@ _LOGGER = logging.getLogger(__name__) class HabitipySensorEntityDescription(SensorEntityDescription): """Habitipy Sensor Description.""" - value_fn: Callable[[dict[str, Any], dict[str, Any]], StateType] - attributes_fn: ( - Callable[[dict[str, Any], dict[str, Any]], dict[str, Any] | None] | None - ) = None + value_fn: Callable[[dict[str, Any]], StateType] @dataclass(kw_only=True, frozen=True) @@ -68,80 +65,76 @@ class HabitipySensorEntity(StrEnum): REWARDS = "rewards" GEMS = "gems" TRINKETS = "trinkets" - STRENGTH = "strength" - INTELLIGENCE = "intelligence" - CONSTITUTION = "constitution" - PERCEPTION = "perception" SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = ( HabitipySensorEntityDescription( key=HabitipySensorEntity.DISPLAY_NAME, translation_key=HabitipySensorEntity.DISPLAY_NAME, - value_fn=lambda user, _: user.get("profile", {}).get("name"), + value_fn=lambda user: user.get("profile", {}).get("name"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.HEALTH, translation_key=HabitipySensorEntity.HEALTH, native_unit_of_measurement="HP", suggested_display_precision=0, - value_fn=lambda user, _: user.get("stats", {}).get("hp"), + value_fn=lambda user: user.get("stats", {}).get("hp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.HEALTH_MAX, translation_key=HabitipySensorEntity.HEALTH_MAX, native_unit_of_measurement="HP", entity_registry_enabled_default=False, - value_fn=lambda user, _: user.get("stats", {}).get("maxHealth"), + value_fn=lambda user: user.get("stats", {}).get("maxHealth"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.MANA, translation_key=HabitipySensorEntity.MANA, native_unit_of_measurement="MP", suggested_display_precision=0, - value_fn=lambda user, _: user.get("stats", {}).get("mp"), + value_fn=lambda user: user.get("stats", {}).get("mp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.MANA_MAX, translation_key=HabitipySensorEntity.MANA_MAX, native_unit_of_measurement="MP", - value_fn=lambda user, _: user.get("stats", {}).get("maxMP"), + value_fn=lambda user: user.get("stats", {}).get("maxMP"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.EXPERIENCE, translation_key=HabitipySensorEntity.EXPERIENCE, native_unit_of_measurement="XP", - value_fn=lambda user, _: user.get("stats", {}).get("exp"), + value_fn=lambda user: user.get("stats", {}).get("exp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.EXPERIENCE_MAX, translation_key=HabitipySensorEntity.EXPERIENCE_MAX, native_unit_of_measurement="XP", - value_fn=lambda user, _: user.get("stats", {}).get("toNextLevel"), + value_fn=lambda user: user.get("stats", {}).get("toNextLevel"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.LEVEL, 
translation_key=HabitipySensorEntity.LEVEL, - value_fn=lambda user, _: user.get("stats", {}).get("lvl"), + value_fn=lambda user: user.get("stats", {}).get("lvl"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.GOLD, translation_key=HabitipySensorEntity.GOLD, native_unit_of_measurement="GP", suggested_display_precision=2, - value_fn=lambda user, _: user.get("stats", {}).get("gp"), + value_fn=lambda user: user.get("stats", {}).get("gp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.CLASS, translation_key=HabitipySensorEntity.CLASS, - value_fn=lambda user, _: user.get("stats", {}).get("class"), + value_fn=lambda user: user.get("stats", {}).get("class"), device_class=SensorDeviceClass.ENUM, options=["warrior", "healer", "wizard", "rogue"], ), HabitipySensorEntityDescription( key=HabitipySensorEntity.GEMS, translation_key=HabitipySensorEntity.GEMS, - value_fn=lambda user, _: user.get("balance", 0) * 4, + value_fn=lambda user: user.get("balance", 0) * 4, suggested_display_precision=0, native_unit_of_measurement="gems", ), @@ -149,7 +142,7 @@ SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = ( key=HabitipySensorEntity.TRINKETS, translation_key=HabitipySensorEntity.TRINKETS, value_fn=( - lambda user, _: user.get("purchased", {}) + lambda user: user.get("purchased", {}) .get("plan", {}) .get("consecutive", {}) .get("trinkets", 0) @@ -157,38 +150,6 @@ SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = ( suggested_display_precision=0, native_unit_of_measurement="⧖", ), - HabitipySensorEntityDescription( - key=HabitipySensorEntity.STRENGTH, - translation_key=HabitipySensorEntity.STRENGTH, - value_fn=lambda user, content: get_attributes_total(user, content, "str"), - attributes_fn=lambda user, content: get_attribute_points(user, content, "str"), - suggested_display_precision=0, - native_unit_of_measurement="STR", - ), - HabitipySensorEntityDescription( - key=HabitipySensorEntity.INTELLIGENCE, - translation_key=HabitipySensorEntity.INTELLIGENCE, - value_fn=lambda user, content: get_attributes_total(user, content, "int"), - attributes_fn=lambda user, content: get_attribute_points(user, content, "int"), - suggested_display_precision=0, - native_unit_of_measurement="INT", - ), - HabitipySensorEntityDescription( - key=HabitipySensorEntity.PERCEPTION, - translation_key=HabitipySensorEntity.PERCEPTION, - value_fn=lambda user, content: get_attributes_total(user, content, "per"), - attributes_fn=lambda user, content: get_attribute_points(user, content, "per"), - suggested_display_precision=0, - native_unit_of_measurement="PER", - ), - HabitipySensorEntityDescription( - key=HabitipySensorEntity.CONSTITUTION, - translation_key=HabitipySensorEntity.CONSTITUTION, - value_fn=lambda user, content: get_attributes_total(user, content, "con"), - attributes_fn=lambda user, content: get_attribute_points(user, content, "con"), - suggested_display_precision=0, - native_unit_of_measurement="CON", - ), ) @@ -282,16 +243,7 @@ class HabitipySensor(HabiticaBase, SensorEntity): def native_value(self) -> StateType: """Return the state of the device.""" - return self.entity_description.value_fn( - self.coordinator.data.user, self.coordinator.content - ) - - @property - def extra_state_attributes(self) -> dict[str, float | None] | None: - """Return entity specific state attributes.""" - if func := self.entity_description.attributes_fn: - return func(self.coordinator.data.user, self.coordinator.content) - return None + return 
self.entity_description.value_fn(self.coordinator.data.user) class HabitipyTaskSensor(HabiticaBase, SensorEntity): diff --git a/homeassistant/components/habitica/services.py b/homeassistant/components/habitica/services.py index a50e5f1e6e3..440e2d4fb23 100644 --- a/homeassistant/components/habitica/services.py +++ b/homeassistant/components/habitica/services.py @@ -19,29 +19,19 @@ from homeassistant.core import ( ) from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.selector import ConfigEntrySelector from .const import ( ATTR_ARGS, ATTR_CONFIG_ENTRY, ATTR_DATA, - ATTR_DIRECTION, ATTR_PATH, ATTR_SKILL, ATTR_TASK, DOMAIN, EVENT_API_CALL_SUCCESS, - SERVICE_ABORT_QUEST, - SERVICE_ACCEPT_QUEST, SERVICE_API_CALL, - SERVICE_CANCEL_QUEST, SERVICE_CAST_SKILL, - SERVICE_LEAVE_QUEST, - SERVICE_REJECT_QUEST, - SERVICE_SCORE_HABIT, - SERVICE_SCORE_REWARD, - SERVICE_START_QUEST, ) from .types import HabiticaConfigEntry @@ -64,19 +54,6 @@ SERVICE_CAST_SKILL_SCHEMA = vol.Schema( } ) -SERVICE_MANAGE_QUEST_SCHEMA = vol.Schema( - { - vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), - } -) -SERVICE_SCORE_TASK_SCHEMA = vol.Schema( - { - vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), - vol.Required(ATTR_TASK): cv.string, - vol.Optional(ATTR_DIRECTION): cv.string, - } -) - def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry: """Return config entry or raise if not found or not loaded.""" @@ -93,23 +70,10 @@ def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry: return entry -def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 +def async_setup_services(hass: HomeAssistant) -> None: """Set up services for Habitica integration.""" async def handle_api_call(call: ServiceCall) -> None: - async_create_issue( - hass, - DOMAIN, - "deprecated_api_call", - breaks_in_ha_version="2025.6.0", - is_fixable=False, - severity=IssueSeverity.WARNING, - translation_key="deprecated_api_call", - ) - _LOGGER.warning( - "Deprecated action called: 'habitica.api_call' is deprecated and will be removed in Home Assistant version 2025.6.0" - ) - name = call.data[ATTR_NAME] path = call.data[ATTR_PATH] entries = hass.config_entries.async_entries(DOMAIN) @@ -196,104 +160,6 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 await coordinator.async_request_refresh() return response - async def manage_quests(call: ServiceCall) -> ServiceResponse: - """Accept, reject, start, leave or cancel quests.""" - entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) - coordinator = entry.runtime_data - - COMMAND_MAP = { - SERVICE_ABORT_QUEST: "abort", - SERVICE_ACCEPT_QUEST: "accept", - SERVICE_CANCEL_QUEST: "cancel", - SERVICE_LEAVE_QUEST: "leave", - SERVICE_REJECT_QUEST: "reject", - SERVICE_START_QUEST: "force-start", - } - try: - return await coordinator.api.groups.party.quests[ - COMMAND_MAP[call.service] - ].post() - except ClientResponseError as e: - if e.status == HTTPStatus.TOO_MANY_REQUESTS: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="setup_rate_limit_exception", - ) from e - if e.status == HTTPStatus.UNAUTHORIZED: - raise ServiceValidationError( - translation_domain=DOMAIN, translation_key="quest_action_unallowed" - ) from e - if e.status == HTTPStatus.NOT_FOUND: - raise ServiceValidationError( - 
translation_domain=DOMAIN, translation_key="quest_not_found" - ) from e - raise HomeAssistantError( - translation_domain=DOMAIN, translation_key="service_call_exception" - ) from e - - for service in ( - SERVICE_ABORT_QUEST, - SERVICE_ACCEPT_QUEST, - SERVICE_CANCEL_QUEST, - SERVICE_LEAVE_QUEST, - SERVICE_REJECT_QUEST, - SERVICE_START_QUEST, - ): - hass.services.async_register( - DOMAIN, - service, - manage_quests, - schema=SERVICE_MANAGE_QUEST_SCHEMA, - supports_response=SupportsResponse.ONLY, - ) - - async def score_task(call: ServiceCall) -> ServiceResponse: - """Score a task action.""" - entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) - coordinator = entry.runtime_data - try: - task_id, task_value = next( - (task["id"], task.get("value")) - for task in coordinator.data.tasks - if call.data[ATTR_TASK] in (task["id"], task.get("alias")) - or call.data[ATTR_TASK] == task["text"] - ) - except StopIteration as e: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="task_not_found", - translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"}, - ) from e - - try: - response: dict[str, Any] = ( - await coordinator.api.tasks[task_id] - .score[call.data.get(ATTR_DIRECTION, "up")] - .post() - ) - except ClientResponseError as e: - if e.status == HTTPStatus.TOO_MANY_REQUESTS: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="setup_rate_limit_exception", - ) from e - if e.status == HTTPStatus.UNAUTHORIZED and task_value is not None: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="not_enough_gold", - translation_placeholders={ - "gold": f"{coordinator.data.user["stats"]["gp"]:.2f} GP", - "cost": f"{task_value} GP", - }, - ) from e - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="service_call_exception", - ) from e - else: - await coordinator.async_request_refresh() - return response - hass.services.async_register( DOMAIN, SERVICE_API_CALL, @@ -308,18 +174,3 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 schema=SERVICE_CAST_SKILL_SCHEMA, supports_response=SupportsResponse.ONLY, ) - - hass.services.async_register( - DOMAIN, - SERVICE_SCORE_HABIT, - score_task, - schema=SERVICE_SCORE_TASK_SCHEMA, - supports_response=SupportsResponse.ONLY, - ) - hass.services.async_register( - DOMAIN, - SERVICE_SCORE_REWARD, - score_task, - schema=SERVICE_SCORE_TASK_SCHEMA, - supports_response=SupportsResponse.ONLY, - ) diff --git a/homeassistant/components/habitica/services.yaml b/homeassistant/components/habitica/services.yaml index b539f6c65bf..546ac8c1c34 100644 --- a/homeassistant/components/habitica/services.yaml +++ b/homeassistant/components/habitica/services.yaml @@ -17,7 +17,7 @@ api_call: object: cast_skill: fields: - config_entry: &config_entry + config_entry: required: true selector: config_entry: @@ -33,42 +33,7 @@ cast_skill: - "fireball" mode: dropdown translation_key: "skill_select" - task: &task + task: required: true selector: text: -accept_quest: - fields: - config_entry: *config_entry -reject_quest: - fields: - config_entry: *config_entry -start_quest: - fields: - config_entry: *config_entry -cancel_quest: - fields: - config_entry: *config_entry -abort_quest: - fields: - config_entry: *config_entry -leave_quest: - fields: - config_entry: *config_entry -score_habit: - fields: - config_entry: *config_entry - task: *task - direction: - required: true - selector: - select: - options: - - value: up - label: "➕" - - value: down - label: "➖" 
-score_reward: - fields: - config_entry: *config_entry - task: *task diff --git a/homeassistant/components/habitica/strings.json b/homeassistant/components/habitica/strings.json index ac1faf5fcef..f7d2f20b8f9 100644 --- a/homeassistant/components/habitica/strings.json +++ b/homeassistant/components/habitica/strings.json @@ -1,8 +1,7 @@ { "common": { "todos": "To-Do's", - "dailies": "Dailies", - "config_entry_name": "Select character" + "dailies": "Dailies" }, "config": { "abort": { @@ -165,86 +164,6 @@ }, "rewards": { "name": "Rewards" - }, - "strength": { - "name": "Strength", - "state_attributes": { - "level": { - "name": "[%key:component::habitica::entity::sensor::level::name%]" - }, - "equipment": { - "name": "Battle gear" - }, - "class": { - "name": "Class equip bonus" - }, - "allocated": { - "name": "Allocated attribute points" - }, - "buffs": { - "name": "Buffs" - } - } - }, - "intelligence": { - "name": "Intelligence", - "state_attributes": { - "level": { - "name": "[%key:component::habitica::entity::sensor::level::name%]" - }, - "equipment": { - "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::equipment::name%]" - }, - "class": { - "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::class::name%]" - }, - "allocated": { - "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::allocated::name%]" - }, - "buffs": { - "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::buffs::name%]" - } - } - }, - "perception": { - "name": "Perception", - "state_attributes": { - "level": { - "name": "[%key:component::habitica::entity::sensor::level::name%]" - }, - "equipment": { - "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::equipment::name%]" - }, - "class": { - "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::class::name%]" - }, - "allocated": { - "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::allocated::name%]" - }, - "buffs": { - "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::buffs::name%]" - } - } - }, - "constitution": { - "name": "Constitution", - "state_attributes": { - "level": { - "name": "[%key:component::habitica::entity::sensor::level::name%]" - }, - "equipment": { - "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::equipment::name%]" - }, - "class": { - "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::class::name%]" - }, - "allocated": { - "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::allocated::name%]" - }, - "buffs": { - "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::buffs::name%]" - } - } } }, "switch": { @@ -301,9 +220,6 @@ "not_enough_mana": { "message": "Unable to cast skill, not enough mana. Your character has {mana}, but the skill costs {cost}." }, - "not_enough_gold": { - "message": "Unable to buy reward, not enough gold. Your character has {gold}, but the reward costs {cost}." - }, "skill_not_found": { "message": "Unable to cast skill, your character does not have the skill or spell {skill}." }, @@ -314,23 +230,13 @@ "message": "The selected character is currently not loaded or disabled in Home Assistant." 
}, "task_not_found": { - "message": "Unable to complete action, could not find the task {task}" - }, - "quest_action_unallowed": { - "message": "Action not allowed, only quest leader or group leader can perform this action" - }, - "quest_not_found": { - "message": "Unable to complete action, quest or group not found" + "message": "Unable to cast skill, could not find the task {task}" } }, "issues": { "deprecated_task_entity": { "title": "The Habitica {task_name} sensor is deprecated", "description": "The Habitica entity `{entity}` is deprecated and will be removed in a future release.\nPlease update your automations and scripts to replace the sensor entity with the newly added todo entity.\nWhen you are done migrating you can disable `{entity}`." - }, - "deprecated_api_call": { - "title": "The Habitica action habitica.api_call is deprecated", - "description": "The Habitica action `habitica.api_call` is deprecated and will be removed in Home Assistant 2025.5.0.\n\nPlease update your automations and scripts to use other Habitica actions and entities." } }, "services": { @@ -357,7 +263,7 @@ "description": "Use a skill or spell from your Habitica character on a specific task to affect its progress or status.", "fields": { "config_entry": { - "name": "[%key:component::habitica::common::config_entry_name%]", + "name": "Select character", "description": "Choose the Habitica character to cast the skill." }, "skill": { @@ -369,98 +275,6 @@ "description": "The name (or task ID) of the task you want to target with the skill or spell." } } - }, - "accept_quest": { - "name": "Accept a quest invitation", - "description": "Accept a pending invitation to a quest.", - "fields": { - "config_entry": { - "name": "[%key:component::habitica::common::config_entry_name%]", - "description": "Choose the Habitica character for which to perform the action." - } - } - }, - "reject_quest": { - "name": "Reject a quest invitation", - "description": "Reject a pending invitation to a quest.", - "fields": { - "config_entry": { - "name": "[%key:component::habitica::common::config_entry_name%]", - "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]" - } - } - }, - "leave_quest": { - "name": "Leave a quest", - "description": "Leave the current quest you are participating in.", - "fields": { - "config_entry": { - "name": "[%key:component::habitica::common::config_entry_name%]", - "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]" - } - } - }, - "abort_quest": { - "name": "Abort an active quest", - "description": "Terminate your party's ongoing quest. All progress will be lost and the quest roll returned to the owner's inventory. Only quest leader or group leader can perform this action.", - "fields": { - "config_entry": { - "name": "[%key:component::habitica::common::config_entry_name%]", - "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]" - } - } - }, - "cancel_quest": { - "name": "Cancel a pending quest", - "description": "Cancel a quest that has not yet startet. All accepted and pending invitations will be canceled and the quest roll returned to the owner's inventory. 
Only quest leader or group leader can perform this action.", - "fields": { - "config_entry": { - "name": "[%key:component::habitica::common::config_entry_name%]", - "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]" - } - } - }, - "start_quest": { - "name": "Force-start a pending quest", - "description": "Begin the quest immediately, bypassing any pending invitations that haven't been accepted or rejected. Only quest leader or group leader can perform this action.", - "fields": { - "config_entry": { - "name": "[%key:component::habitica::common::config_entry_name%]", - "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]" - } - } - }, - "score_habit": { - "name": "Track a habit", - "description": "Increase the positive or negative streak of a habit to track its progress.", - "fields": { - "config_entry": { - "name": "[%key:component::habitica::common::config_entry_name%]", - "description": "Select the Habitica character tracking your habit." - }, - "task": { - "name": "Habit name", - "description": "The name (or task ID) of the Habitica habit." - }, - "direction": { - "name": "Reward or loss", - "description": "Is it positive or negative progress you want to track for your habit." - } - } - }, - "score_reward": { - "name": "Buy a reward", - "description": "Reward yourself and buy one of your custom rewards with gold earned by fulfilling tasks.", - "fields": { - "config_entry": { - "name": "[%key:component::habitica::common::config_entry_name%]", - "description": "Select the Habitica character buying the reward." - }, - "task": { - "name": "Reward name", - "description": "The name (or task ID) of the custom reward." - } - } } }, "selector": { diff --git a/homeassistant/components/habitica/util.py b/homeassistant/components/habitica/util.py index 03acb08baf9..93a7c234a5d 100644 --- a/homeassistant/components/habitica/util.py +++ b/homeassistant/components/habitica/util.py @@ -3,7 +3,6 @@ from __future__ import annotations import datetime -from math import floor from typing import TYPE_CHECKING, Any from dateutil.rrule import ( @@ -140,52 +139,3 @@ def get_recurrence_rule(recurrence: rrule) -> str: """ return str(recurrence).split("RRULE:")[1] - - -def get_attribute_points( - user: dict[str, Any], content: dict[str, Any], attribute: str -) -> dict[str, float]: - """Get modifiers contributing to strength attribute.""" - - gear_set = { - "weapon", - "armor", - "head", - "shield", - "back", - "headAccessory", - "eyewear", - "body", - } - - equipment = sum( - stats[attribute] - for gear in gear_set - if (equipped := user["items"]["gear"]["equipped"].get(gear)) - and (stats := content["gear"]["flat"].get(equipped)) - ) - - class_bonus = sum( - stats[attribute] / 2 - for gear in gear_set - if (equipped := user["items"]["gear"]["equipped"].get(gear)) - and (stats := content["gear"]["flat"].get(equipped)) - and stats["klass"] == user["stats"]["class"] - ) - - return { - "level": min(round(user["stats"]["lvl"] / 2), 50), - "equipment": equipment, - "class": class_bonus, - "allocated": user["stats"][attribute], - "buffs": user["stats"]["buffs"][attribute], - } - - -def get_attributes_total( - user: dict[str, Any], content: dict[str, Any], attribute: str -) -> int: - """Get total attribute points.""" - return floor( - sum(value for value in get_attribute_points(user, content, attribute).values()) - ) diff --git a/homeassistant/components/hassio/const.py b/homeassistant/components/hassio/const.py index 
82ce74832c2..b337017147b 100644 --- a/homeassistant/components/hassio/const.py +++ b/homeassistant/components/hassio/const.py @@ -137,3 +137,17 @@ class SupervisorEntityModel(StrEnum): CORE = "Home Assistant Core" SUPERVIOSR = "Home Assistant Supervisor" HOST = "Home Assistant Host" + + +class SupervisorIssueContext(StrEnum): + """Context for supervisor issues.""" + + ADDON = "addon" + CORE = "core" + DNS_SERVER = "dns_server" + MOUNT = "mount" + OS = "os" + PLUGIN = "plugin" + SUPERVISOR = "supervisor" + STORE = "store" + SYSTEM = "system" diff --git a/homeassistant/components/hassio/handler.py b/homeassistant/components/hassio/handler.py index 58f2aa8c144..f69ee40293b 100644 --- a/homeassistant/components/hassio/handler.py +++ b/homeassistant/components/hassio/handler.py @@ -91,6 +91,15 @@ async def async_create_backup( return await hassio.send_command(command, payload=payload, timeout=None) +@bind_hass +@_api_bool +async def async_apply_suggestion(hass: HomeAssistant, suggestion_uuid: str) -> dict: + """Apply a suggestion from supervisor's resolution center.""" + hassio: HassIO = hass.data[DOMAIN] + command = f"/resolution/suggestion/{suggestion_uuid}" + return await hassio.send_command(command, timeout=None) + + @api_data async def async_get_green_settings(hass: HomeAssistant) -> dict[str, bool]: """Return settings specific to Home Assistant Green.""" @@ -236,6 +245,26 @@ class HassIO: """ return self.send_command("/ingress/panels", method="get") + @api_data + def get_resolution_info(self) -> Coroutine: + """Return data for Supervisor resolution center. + + This method returns a coroutine. + """ + return self.send_command("/resolution/info", method="get") + + @api_data + def get_suggestions_for_issue( + self, issue_id: str + ) -> Coroutine[Any, Any, dict[str, Any]]: + """Return suggestions for issue from Supervisor resolution center. + + This method returns a coroutine. + """ + return self.send_command( + f"/resolution/issue/{issue_id}/suggestions", method="get" + ) + @_api_bool async def update_hass_api( self, http_config: dict[str, Any], refresh_token: RefreshToken @@ -275,6 +304,14 @@ class HassIO: "/supervisor/options", payload={"diagnostics": diagnostics} ) + @_api_bool + def apply_suggestion(self, suggestion_uuid: str) -> Coroutine: + """Apply a suggestion from supervisor's resolution center. + + This method returns a coroutine. 
+ """ + return self.send_command(f"/resolution/suggestion/{suggestion_uuid}") + async def send_command( self, command: str, diff --git a/homeassistant/components/hassio/issues.py b/homeassistant/components/hassio/issues.py index 16697659077..944bc99a6b9 100644 --- a/homeassistant/components/hassio/issues.py +++ b/homeassistant/components/hassio/issues.py @@ -7,10 +7,6 @@ from dataclasses import dataclass, field from datetime import datetime import logging from typing import Any, NotRequired, TypedDict -from uuid import UUID - -from aiohasupervisor import SupervisorError -from aiohasupervisor.models import ContextType, Issue as SupervisorIssue from homeassistant.core import HassJob, HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -24,8 +20,12 @@ from homeassistant.helpers.issue_registry import ( from .const import ( ATTR_DATA, ATTR_HEALTHY, + ATTR_ISSUES, + ATTR_SUGGESTIONS, ATTR_SUPPORTED, + ATTR_UNHEALTHY, ATTR_UNHEALTHY_REASONS, + ATTR_UNSUPPORTED, ATTR_UNSUPPORTED_REASONS, ATTR_UPDATE_KEY, ATTR_WS_EVENT, @@ -45,9 +45,10 @@ from .const import ( PLACEHOLDER_KEY_REFERENCE, REQUEST_REFRESH_DELAY, UPDATE_KEY_SUPERVISOR, + SupervisorIssueContext, ) from .coordinator import get_addons_info -from .handler import HassIO, get_supervisor_client +from .handler import HassIO, HassioAPIError ISSUE_KEY_UNHEALTHY = "unhealthy" ISSUE_KEY_UNSUPPORTED = "unsupported" @@ -119,9 +120,9 @@ class SuggestionDataType(TypedDict): class Suggestion: """Suggestion from Supervisor which resolves an issue.""" - uuid: UUID + uuid: str type: str - context: ContextType + context: SupervisorIssueContext reference: str | None = None @property @@ -133,9 +134,9 @@ class Suggestion: def from_dict(cls, data: SuggestionDataType) -> Suggestion: """Convert from dictionary representation.""" return cls( - uuid=UUID(data["uuid"]), + uuid=data["uuid"], type=data["type"], - context=ContextType(data["context"]), + context=SupervisorIssueContext(data["context"]), reference=data["reference"], ) @@ -154,9 +155,9 @@ class IssueDataType(TypedDict): class Issue: """Issue from Supervisor.""" - uuid: UUID + uuid: str type: str - context: ContextType + context: SupervisorIssueContext reference: str | None = None suggestions: list[Suggestion] = field(default_factory=list, compare=False) @@ -170,9 +171,9 @@ class Issue: """Convert from dictionary representation.""" suggestions: list[SuggestionDataType] = data.get("suggestions", []) return cls( - uuid=UUID(data["uuid"]), + uuid=data["uuid"], type=data["type"], - context=ContextType(data["context"]), + context=SupervisorIssueContext(data["context"]), reference=data["reference"], suggestions=[ Suggestion.from_dict(suggestion) for suggestion in suggestions @@ -189,8 +190,7 @@ class SupervisorIssues: self._client = client self._unsupported_reasons: set[str] = set() self._unhealthy_reasons: set[str] = set() - self._issues: dict[UUID, Issue] = {} - self._supervisor_client = get_supervisor_client(hass) + self._issues: dict[str, Issue] = {} @property def unhealthy_reasons(self) -> set[str]: @@ -283,7 +283,7 @@ class SupervisorIssues: async_create_issue( self._hass, DOMAIN, - issue.uuid.hex, + issue.uuid, is_fixable=bool(issue.suggestions), severity=IssueSeverity.WARNING, translation_key=issue.key, @@ -292,37 +292,19 @@ class SupervisorIssues: self._issues[issue.uuid] = issue - async def add_issue_from_data(self, data: SupervisorIssue) -> None: + async def add_issue_from_data(self, data: IssueDataType) -> None: """Add issue from data to list after getting 
latest suggestions.""" try: - suggestions = ( - await self._supervisor_client.resolution.suggestions_for_issue( - data.uuid - ) - ) - except SupervisorError: + data["suggestions"] = ( + await self._client.get_suggestions_for_issue(data["uuid"]) + )[ATTR_SUGGESTIONS] + except HassioAPIError: _LOGGER.error( "Could not get suggestions for supervisor issue %s, skipping it", - data.uuid.hex, + data["uuid"], ) return - self.add_issue( - Issue( - uuid=data.uuid, - type=str(data.type), - context=data.context, - reference=data.reference, - suggestions=[ - Suggestion( - uuid=suggestion.uuid, - type=str(suggestion.type), - context=suggestion.context, - reference=suggestion.reference, - ) - for suggestion in suggestions - ], - ) - ) + self.add_issue(Issue.from_dict(data)) def remove_issue(self, issue: Issue) -> None: """Remove an issue from the list. Delete a repair if necessary.""" @@ -330,13 +312,13 @@ class SupervisorIssues: return if issue.key in ISSUE_KEYS_FOR_REPAIRS: - async_delete_issue(self._hass, DOMAIN, issue.uuid.hex) + async_delete_issue(self._hass, DOMAIN, issue.uuid) del self._issues[issue.uuid] def get_issue(self, issue_id: str) -> Issue | None: """Get issue from key.""" - return self._issues.get(UUID(issue_id)) + return self._issues.get(issue_id) async def setup(self) -> None: """Create supervisor events listener.""" @@ -349,8 +331,8 @@ class SupervisorIssues: async def _update(self, _: datetime | None = None) -> None: """Update issues from Supervisor resolution center.""" try: - data = await self._supervisor_client.resolution.info() - except SupervisorError as err: + data = await self._client.get_resolution_info() + except HassioAPIError as err: _LOGGER.error("Failed to update supervisor issues: %r", err) async_call_later( self._hass, @@ -358,16 +340,18 @@ class SupervisorIssues: HassJob(self._update, cancel_on_shutdown=True), ) return - self.unhealthy_reasons = set(data.unhealthy) - self.unsupported_reasons = set(data.unsupported) + self.unhealthy_reasons = set(data[ATTR_UNHEALTHY]) + self.unsupported_reasons = set(data[ATTR_UNSUPPORTED]) # Remove any cached issues that weren't returned - for issue_id in set(self._issues) - {issue.uuid for issue in data.issues}: + for issue_id in set(self._issues.keys()) - { + issue["uuid"] for issue in data[ATTR_ISSUES] + }: self.remove_issue(self._issues[issue_id]) # Add/update any issues that came back await asyncio.gather( - *[self.add_issue_from_data(issue) for issue in data.issues] + *[self.add_issue_from_data(issue) for issue in data[ATTR_ISSUES]] ) @callback diff --git a/homeassistant/components/hassio/repairs.py b/homeassistant/components/hassio/repairs.py index 0e8122c08b9..0fcd96ace38 100644 --- a/homeassistant/components/hassio/repairs.py +++ b/homeassistant/components/hassio/repairs.py @@ -6,8 +6,6 @@ from collections.abc import Callable, Coroutine from types import MethodType from typing import Any -from aiohasupervisor import SupervisorError -from aiohasupervisor.models import ContextType import voluptuous as vol from homeassistant.components.repairs import RepairsFlow @@ -22,8 +20,9 @@ from .const import ( PLACEHOLDER_KEY_ADDON, PLACEHOLDER_KEY_COMPONENTS, PLACEHOLDER_KEY_REFERENCE, + SupervisorIssueContext, ) -from .handler import get_supervisor_client +from .handler import async_apply_suggestion from .issues import Issue, Suggestion HELP_URLS = { @@ -52,10 +51,9 @@ class SupervisorIssueRepairFlow(RepairsFlow): _data: dict[str, Any] | None = None _issue: Issue | None = None - def __init__(self, hass: HomeAssistant, issue_id: str) -> 
None: + def __init__(self, issue_id: str) -> None: """Initialize repair flow.""" self._issue_id = issue_id - self._supervisor_client = get_supervisor_client(hass) super().__init__() @property @@ -126,12 +124,9 @@ class SupervisorIssueRepairFlow(RepairsFlow): if not confirmed and suggestion.key in SUGGESTION_CONFIRMATION_REQUIRED: return self._async_form_for_suggestion(suggestion) - try: - await self._supervisor_client.resolution.apply_suggestion(suggestion.uuid) - except SupervisorError: - return self.async_abort(reason="apply_suggestion_fail") - - return self.async_create_entry(data={}) + if await async_apply_suggestion(self.hass, suggestion.uuid): + return self.async_create_entry(data={}) + return self.async_abort(reason="apply_suggestion_fail") @staticmethod def _async_step( @@ -168,9 +163,9 @@ class DockerConfigIssueRepairFlow(SupervisorIssueRepairFlow): if issue.key == self.issue.key or issue.type != self.issue.type: continue - if issue.context == ContextType.CORE: + if issue.context == SupervisorIssueContext.CORE: components.insert(0, "Home Assistant") - elif issue.context == ContextType.ADDON: + elif issue.context == SupervisorIssueContext.ADDON: components.append( next( ( @@ -215,11 +210,11 @@ async def async_create_fix_flow( supervisor_issues = get_issues_info(hass) issue = supervisor_issues and supervisor_issues.get_issue(issue_id) if issue and issue.key == ISSUE_KEY_SYSTEM_DOCKER_CONFIG: - return DockerConfigIssueRepairFlow(hass, issue_id) + return DockerConfigIssueRepairFlow(issue_id) if issue and issue.key in { ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED, ISSUE_KEY_ADDON_BOOT_FAIL, }: - return AddonIssueRepairFlow(hass, issue_id) + return AddonIssueRepairFlow(issue_id) - return SupervisorIssueRepairFlow(hass, issue_id) + return SupervisorIssueRepairFlow(issue_id) diff --git a/homeassistant/components/home_connect/binary_sensor.py b/homeassistant/components/home_connect/binary_sensor.py index 232b581d58b..935aae5cbda 100644 --- a/homeassistant/components/home_connect/binary_sensor.py +++ b/homeassistant/components/home_connect/binary_sensor.py @@ -12,13 +12,8 @@ from homeassistant.components.binary_sensor import ( from homeassistant.components.script import scripts_with_entity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import ( - IssueSeverity, - async_create_issue, - async_delete_issue, -) +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from .api import HomeConnectDevice from .const import ( @@ -193,32 +188,11 @@ class HomeConnectDoorBinarySensor(HomeConnectBinarySensor): async def async_added_to_hass(self) -> None: """Call when entity is added to hass.""" await super().async_added_to_hass() - automations = automations_with_entity(self.hass, self.entity_id) - scripts = scripts_with_entity(self.hass, self.entity_id) - items = automations + scripts + entity_automations = automations_with_entity(self.hass, self.entity_id) + entity_scripts = scripts_with_entity(self.hass, self.entity_id) + items = entity_automations + entity_scripts if not items: return - - entity_reg: er.EntityRegistry = er.async_get(self.hass) - entity_automations = [ - automation_entity - for automation_id in automations - if (automation_entity := entity_reg.async_get(automation_id)) - ] - entity_scripts = [ - script_entity - for script_id in scripts - if 
(script_entity := entity_reg.async_get(script_id)) - ] - - items_list = [ - f"- [{item.original_name}](/config/automation/edit/{item.unique_id})" - for item in entity_automations - ] + [ - f"- [{item.original_name}](/config/script/edit/{item.unique_id})" - for item in entity_scripts - ] - async_create_issue( self.hass, DOMAIN, @@ -229,12 +203,6 @@ class HomeConnectDoorBinarySensor(HomeConnectBinarySensor): translation_key="deprecated_binary_common_door_sensor", translation_placeholders={ "entity": self.entity_id, - "items": "\n".join(items_list), + "items": "\n".join([f"- {item}" for item in items]), }, ) - - async def async_will_remove_from_hass(self) -> None: - """Call when entity will be removed from hass.""" - async_delete_issue( - self.hass, DOMAIN, f"deprecated_binary_common_door_sensor_{self.entity_id}" - ) diff --git a/homeassistant/components/homeassistant_hardware/firmware_config_flow.py b/homeassistant/components/homeassistant_hardware/firmware_config_flow.py index a91fb00c142..37d12d2bd61 100644 --- a/homeassistant/components/homeassistant_hardware/firmware_config_flow.py +++ b/homeassistant/components/homeassistant_hardware/firmware_config_flow.py @@ -24,6 +24,7 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, + OptionsFlowWithConfigEntry, ) from homeassistant.core import callback from homeassistant.data_entry_flow import AbortFlow @@ -495,15 +496,13 @@ class BaseFirmwareConfigFlow(BaseFirmwareInstallFlow, ConfigFlow): return await self.async_step_pick_firmware() -class BaseFirmwareOptionsFlow(BaseFirmwareInstallFlow, OptionsFlow): +class BaseFirmwareOptionsFlow(BaseFirmwareInstallFlow, OptionsFlowWithConfigEntry): """Zigbee and Thread options flow handlers.""" - def __init__(self, config_entry: ConfigEntry, *args: Any, **kwargs: Any) -> None: + def __init__(self, *args: Any, **kwargs: Any) -> None: """Instantiate options flow.""" super().__init__(*args, **kwargs) - self._config_entry = config_entry - self._probed_firmware_type = ApplicationType(self.config_entry.data["firmware"]) # Make `context` a regular dictionary diff --git a/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py b/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py index 2b08031405f..14ae57391ef 100644 --- a/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py +++ b/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py @@ -318,6 +318,7 @@ class OptionsFlowHandler(OptionsFlow, ABC): self.start_task: asyncio.Task | None = None self.stop_task: asyncio.Task | None = None self._zha_migration_mgr: ZhaMultiPANMigrationHelper | None = None + self.config_entry = config_entry self.original_addon_config: dict[str, Any] | None = None self.revert_reason: str | None = None diff --git a/homeassistant/components/homekit/type_security_systems.py b/homeassistant/components/homekit/type_security_systems.py index 8634589cb5f..9f3f183f11f 100644 --- a/homeassistant/components/homekit/type_security_systems.py +++ b/homeassistant/components/homekit/type_security_systems.py @@ -18,8 +18,6 @@ from homeassistant.const import ( SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_DISARM, - STATE_UNAVAILABLE, - STATE_UNKNOWN, ) from homeassistant.core import State, callback @@ -154,12 +152,12 @@ class SecuritySystem(HomeAccessory): @callback def async_update_state(self, new_state: State) -> None: """Update security state after state changed.""" - hass_state: str | AlarmControlPanelState 
= new_state.state - if hass_state in {"None", STATE_UNKNOWN, STATE_UNAVAILABLE}: - # Bail out early for no state, unknown or unavailable + hass_state = None + if new_state and new_state.state == "None": + # Bail out early for no state return - if hass_state is not None: - hass_state = AlarmControlPanelState(hass_state) + if new_state and new_state.state is not None: + hass_state = AlarmControlPanelState(new_state.state) if ( hass_state and (current_state := HASS_TO_HOMEKIT_CURRENT.get(hass_state)) is not None diff --git a/homeassistant/components/hunterdouglas_powerview/number.py b/homeassistant/components/hunterdouglas_powerview/number.py index fb8c9f76d79..f893b04b2d1 100644 --- a/homeassistant/components/hunterdouglas_powerview/number.py +++ b/homeassistant/components/hunterdouglas_powerview/number.py @@ -95,7 +95,7 @@ class PowerViewNumber(ShadeEntity, RestoreNumber): self.entity_description = description self._attr_unique_id = f"{self._attr_unique_id}_{description.key}" - async def async_set_native_value(self, value: float) -> None: + def set_native_value(self, value: float) -> None: """Update the current value.""" self._attr_native_value = value self.entity_description.store_value_fn(self.coordinator, self._shade.id, value) diff --git a/homeassistant/components/husqvarna_automower/coordinator.py b/homeassistant/components/husqvarna_automower/coordinator.py index c19f37a040d..458ff50dac9 100644 --- a/homeassistant/components/husqvarna_automower/coordinator.py +++ b/homeassistant/components/husqvarna_automower/coordinator.py @@ -8,7 +8,6 @@ from aioautomower.exceptions import ( ApiException, AuthException, HusqvarnaWSServerHandshakeError, - TimeoutException, ) from aioautomower.model import MowerAttributes from aioautomower.session import AutomowerSession @@ -23,7 +22,6 @@ from .const import DOMAIN _LOGGER = logging.getLogger(__name__) MAX_WS_RECONNECT_TIME = 600 SCAN_INTERVAL = timedelta(minutes=8) -DEFAULT_RECONNECT_TIME = 2 # Define a default reconnect time class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttributes]]): @@ -42,8 +40,8 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib update_interval=SCAN_INTERVAL, ) self.api = api + self.ws_connected: bool = False - self.reconnect_time = DEFAULT_RECONNECT_TIME async def _async_update_data(self) -> dict[str, MowerAttributes]: """Subscribe for websocket and poll data from the API.""" @@ -68,28 +66,24 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib hass: HomeAssistant, entry: ConfigEntry, automower_client: AutomowerSession, + reconnect_time: int = 2, ) -> None: """Listen with the client.""" try: await automower_client.auth.websocket_connect() - # Reset reconnect time after successful connection - self.reconnect_time = DEFAULT_RECONNECT_TIME + reconnect_time = 2 await automower_client.start_listening() except HusqvarnaWSServerHandshakeError as err: _LOGGER.debug( - "Failed to connect to websocket. Trying to reconnect: %s", - err, - ) - except TimeoutException as err: - _LOGGER.debug( - "Failed to listen to websocket. Trying to reconnect: %s", - err, + "Failed to connect to websocket. 
Trying to reconnect: %s", err ) + if not hass.is_stopping: - await asyncio.sleep(self.reconnect_time) - self.reconnect_time = min(self.reconnect_time * 2, MAX_WS_RECONNECT_TIME) - entry.async_create_background_task( - hass, - self.client_listen(hass, entry, automower_client), - "reconnect_task", + await asyncio.sleep(reconnect_time) + reconnect_time = min(reconnect_time * 2, MAX_WS_RECONNECT_TIME) + await self.client_listen( + hass=hass, + entry=entry, + automower_client=automower_client, + reconnect_time=reconnect_time, ) diff --git a/homeassistant/components/huum/manifest.json b/homeassistant/components/huum/manifest.json index 38562e1a072..7629f529b91 100644 --- a/homeassistant/components/huum/manifest.json +++ b/homeassistant/components/huum/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/huum", "iot_class": "cloud_polling", - "requirements": ["huum==0.7.12"] + "requirements": ["huum==0.7.10"] } diff --git a/homeassistant/components/image_upload/manifest.json b/homeassistant/components/image_upload/manifest.json index bb8c33ba749..963721a0476 100644 --- a/homeassistant/components/image_upload/manifest.json +++ b/homeassistant/components/image_upload/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/image_upload", "integration_type": "system", "quality_scale": "internal", - "requirements": ["Pillow==11.0.0"] + "requirements": ["Pillow==10.4.0"] } diff --git a/homeassistant/components/imap/strings.json b/homeassistant/components/imap/strings.json index 7c4a0d9a973..115d46f3d0e 100644 --- a/homeassistant/components/imap/strings.json +++ b/homeassistant/components/imap/strings.json @@ -104,7 +104,7 @@ "services": { "fetch": { "name": "Fetch message", - "description": "Fetch an email message from the server.", + "description": "Fetch the email message from the server.", "fields": { "entry": { "name": "Entry", diff --git a/homeassistant/components/insteon/strings.json b/homeassistant/components/insteon/strings.json index 4df997ac939..1464a2dbc8f 100644 --- a/homeassistant/components/insteon/strings.json +++ b/homeassistant/components/insteon/strings.json @@ -112,7 +112,7 @@ "services": { "add_all_link": { "name": "Add all link", - "description": "Tells the Insteon Modem (IM) start All-Linking mode. Once the IM is in All-Linking mode, press the link button on the device to complete All-Linking.", + "description": "Tells the Insteom Modem (IM) start All-Linking mode. 
Once the IM is in All-Linking mode, press the link button on the device to complete All-Linking.", "fields": { "group": { "name": "Group", diff --git a/homeassistant/components/iqvia/manifest.json b/homeassistant/components/iqvia/manifest.json index 11c99a7428f..6142fa1349e 100644 --- a/homeassistant/components/iqvia/manifest.json +++ b/homeassistant/components/iqvia/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["pyiqvia"], - "requirements": ["numpy==2.1.3", "pyiqvia==2022.04.0"] + "requirements": ["numpy==1.26.4", "pyiqvia==2022.04.0"] } diff --git a/homeassistant/components/jewish_calendar/__init__.py b/homeassistant/components/jewish_calendar/__init__.py index 823e9bd59be..fd238e8d615 100644 --- a/homeassistant/components/jewish_calendar/__init__.py +++ b/homeassistant/components/jewish_calendar/__init__.py @@ -5,17 +5,26 @@ from __future__ import annotations from functools import partial from hdate import Location +import voluptuous as vol +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( CONF_ELEVATION, CONF_LANGUAGE, CONF_LATITUDE, + CONF_LOCATION, CONF_LONGITUDE, + CONF_NAME, CONF_TIME_ZONE, Platform, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback +import homeassistant.helpers.config_validation as cv +import homeassistant.helpers.entity_registry as er +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from homeassistant.helpers.typing import ConfigType +from .binary_sensor import BINARY_SENSORS from .const import ( CONF_CANDLE_LIGHT_MINUTES, CONF_DIASPORA, @@ -24,15 +33,94 @@ from .const import ( DEFAULT_DIASPORA, DEFAULT_HAVDALAH_OFFSET_MINUTES, DEFAULT_LANGUAGE, + DEFAULT_NAME, + DOMAIN, ) -from .entity import JewishCalendarConfigEntry, JewishCalendarData +from .sensor import INFO_SENSORS, TIME_SENSORS PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR] +CONFIG_SCHEMA = vol.Schema( + { + DOMAIN: vol.All( + cv.deprecated(DOMAIN), + { + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, + vol.Optional(CONF_DIASPORA, default=DEFAULT_DIASPORA): cv.boolean, + vol.Inclusive(CONF_LATITUDE, "coordinates"): cv.latitude, + vol.Inclusive(CONF_LONGITUDE, "coordinates"): cv.longitude, + vol.Optional(CONF_LANGUAGE, default=DEFAULT_LANGUAGE): vol.In( + ["hebrew", "english"] + ), + vol.Optional( + CONF_CANDLE_LIGHT_MINUTES, default=DEFAULT_CANDLE_LIGHT + ): int, + # Default of 0 means use 8.5 degrees / 'three_stars' time. + vol.Optional( + CONF_HAVDALAH_OFFSET_MINUTES, + default=DEFAULT_HAVDALAH_OFFSET_MINUTES, + ): int, + }, + ) + }, + extra=vol.ALLOW_EXTRA, +) -async def async_setup_entry( - hass: HomeAssistant, config_entry: JewishCalendarConfigEntry -) -> bool: + +def get_unique_prefix( + location: Location, + language: str, + candle_lighting_offset: int | None, + havdalah_offset: int | None, +) -> str: + """Create a prefix for unique ids.""" + # location.altitude was unset before 2024.6 when this method + # was used to create the unique id. As such it would always + # use the default altitude of 754. 
+ config_properties = [ + location.latitude, + location.longitude, + location.timezone, + 754, + location.diaspora, + language, + candle_lighting_offset, + havdalah_offset, + ] + prefix = "_".join(map(str, config_properties)) + return f"{prefix}" + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the Jewish Calendar component.""" + if DOMAIN not in config: + return True + + async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + is_fixable=False, + issue_domain=DOMAIN, + breaks_in_ha_version="2024.12.0", + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": DEFAULT_NAME, + }, + ) + + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN] + ) + ) + + return True + + +async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up a configuration entry for Jewish calendar.""" language = config_entry.data.get(CONF_LANGUAGE, DEFAULT_LANGUAGE) diaspora = config_entry.data.get(CONF_DIASPORA, DEFAULT_DIASPORA) @@ -55,19 +143,27 @@ async def async_setup_entry( ) ) - config_entry.runtime_data = JewishCalendarData( - language, - diaspora, - location, - candle_lighting_offset, - havdalah_offset, + hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = { + CONF_LANGUAGE: language, + CONF_DIASPORA: diaspora, + CONF_LOCATION: location, + CONF_CANDLE_LIGHT_MINUTES: candle_lighting_offset, + CONF_HAVDALAH_OFFSET_MINUTES: havdalah_offset, + } + + # Update unique ID to be unrelated to user defined options + old_prefix = get_unique_prefix( + location, language, candle_lighting_offset, havdalah_offset ) + ent_reg = er.async_get(hass) + entries = er.async_entries_for_config_entry(ent_reg, config_entry.entry_id) + if not entries or any(entry.unique_id.startswith(old_prefix) for entry in entries): + async_update_unique_ids(ent_reg, config_entry.entry_id, old_prefix) + await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) - async def update_listener( - hass: HomeAssistant, config_entry: JewishCalendarConfigEntry - ) -> None: + async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: # Trigger update of states for all platforms await hass.config_entries.async_reload(config_entry.entry_id) @@ -75,8 +171,35 @@ async def async_setup_entry( return True -async def async_unload_entry( - hass: HomeAssistant, config_entry: JewishCalendarConfigEntry -) -> bool: +async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Unload a config entry.""" - return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) + unload_ok = await hass.config_entries.async_unload_platforms( + config_entry, PLATFORMS + ) + + if unload_ok: + hass.data[DOMAIN].pop(config_entry.entry_id) + + return unload_ok + + +@callback +def async_update_unique_ids( + ent_reg: er.EntityRegistry, new_prefix: str, old_prefix: str +) -> None: + """Update unique ID to be unrelated to user defined options. 
+ + Introduced with release 2024.6 + """ + platform_descriptions = { + Platform.BINARY_SENSOR: BINARY_SENSORS, + Platform.SENSOR: (*INFO_SENSORS, *TIME_SENSORS), + } + for platform, descriptions in platform_descriptions.items(): + for description in descriptions: + new_unique_id = f"{new_prefix}-{description.key}" + old_unique_id = f"{old_prefix}_{description.key}" + if entity_id := ent_reg.async_get_entity_id( + platform, DOMAIN, old_unique_id + ): + ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) diff --git a/homeassistant/components/jewish_calendar/binary_sensor.py b/homeassistant/components/jewish_calendar/binary_sensor.py index 9fd1371f8a8..060650ee25c 100644 --- a/homeassistant/components/jewish_calendar/binary_sensor.py +++ b/homeassistant/components/jewish_calendar/binary_sensor.py @@ -14,13 +14,15 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers import event from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from .entity import JewishCalendarConfigEntry, JewishCalendarEntity +from .const import DOMAIN +from .entity import JewishCalendarEntity @dataclass(frozen=True) @@ -61,12 +63,14 @@ BINARY_SENSORS: tuple[JewishCalendarBinarySensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: JewishCalendarConfigEntry, + config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Jewish Calendar binary sensors.""" + entry = hass.data[DOMAIN][config_entry.entry_id] + async_add_entities( - JewishCalendarBinarySensor(config_entry, description) + JewishCalendarBinarySensor(config_entry, entry, description) for description in BINARY_SENSORS ) diff --git a/homeassistant/components/jewish_calendar/config_flow.py b/homeassistant/components/jewish_calendar/config_flow.py index a2eadbf57bd..9673fc6cf22 100644 --- a/homeassistant/components/jewish_calendar/config_flow.py +++ b/homeassistant/components/jewish_calendar/config_flow.py @@ -101,10 +101,23 @@ class JewishCalendarConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle the initial step.""" if user_input is not None: + _options = {} + if CONF_CANDLE_LIGHT_MINUTES in user_input: + _options[CONF_CANDLE_LIGHT_MINUTES] = user_input[ + CONF_CANDLE_LIGHT_MINUTES + ] + del user_input[CONF_CANDLE_LIGHT_MINUTES] + if CONF_HAVDALAH_OFFSET_MINUTES in user_input: + _options[CONF_HAVDALAH_OFFSET_MINUTES] = user_input[ + CONF_HAVDALAH_OFFSET_MINUTES + ] + del user_input[CONF_HAVDALAH_OFFSET_MINUTES] if CONF_LOCATION in user_input: user_input[CONF_LATITUDE] = user_input[CONF_LOCATION][CONF_LATITUDE] user_input[CONF_LONGITUDE] = user_input[CONF_LOCATION][CONF_LONGITUDE] - return self.async_create_entry(title=DEFAULT_NAME, data=user_input) + return self.async_create_entry( + title=DEFAULT_NAME, data=user_input, options=_options + ) return self.async_show_form( step_id="user", @@ -113,6 +126,10 @@ class JewishCalendarConfigFlow(ConfigFlow, domain=DOMAIN): ), ) + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: + """Import a config entry from configuration.yaml.""" + return await self.async_step_user(import_data) + async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> 
ConfigFlowResult: diff --git a/homeassistant/components/jewish_calendar/entity.py b/homeassistant/components/jewish_calendar/entity.py index ad5ac8e2137..c11925df954 100644 --- a/homeassistant/components/jewish_calendar/entity.py +++ b/homeassistant/components/jewish_calendar/entity.py @@ -1,27 +1,18 @@ """Entity representing a Jewish Calendar sensor.""" -from dataclasses import dataclass - -from hdate import Location +from typing import Any from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_LANGUAGE, CONF_LOCATION from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity import Entity, EntityDescription -from .const import DOMAIN - -type JewishCalendarConfigEntry = ConfigEntry[JewishCalendarData] - - -@dataclass -class JewishCalendarData: - """Jewish Calendar runtime dataclass.""" - - language: str - diaspora: bool - location: Location - candle_lighting_offset: int - havdalah_offset: int +from .const import ( + CONF_CANDLE_LIGHT_MINUTES, + CONF_DIASPORA, + CONF_HAVDALAH_OFFSET_MINUTES, + DOMAIN, +) class JewishCalendarEntity(Entity): @@ -31,7 +22,8 @@ class JewishCalendarEntity(Entity): def __init__( self, - config_entry: JewishCalendarConfigEntry, + config_entry: ConfigEntry, + data: dict[str, Any], description: EntityDescription, ) -> None: """Initialize a Jewish Calendar entity.""" @@ -40,10 +32,10 @@ class JewishCalendarEntity(Entity): self._attr_device_info = DeviceInfo( entry_type=DeviceEntryType.SERVICE, identifiers={(DOMAIN, config_entry.entry_id)}, + name=config_entry.title, ) - data = config_entry.runtime_data - self._location = data.location - self._hebrew = data.language == "hebrew" - self._candle_lighting_offset = data.candle_lighting_offset - self._havdalah_offset = data.havdalah_offset - self._diaspora = data.diaspora + self._location = data[CONF_LOCATION] + self._hebrew = data[CONF_LANGUAGE] == "hebrew" + self._candle_lighting_offset = data[CONF_CANDLE_LIGHT_MINUTES] + self._havdalah_offset = data[CONF_HAVDALAH_OFFSET_MINUTES] + self._diaspora = data[CONF_DIASPORA] diff --git a/homeassistant/components/jewish_calendar/sensor.py b/homeassistant/components/jewish_calendar/sensor.py index c32647af07c..87b4375b8b2 100644 --- a/homeassistant/components/jewish_calendar/sensor.py +++ b/homeassistant/components/jewish_calendar/sensor.py @@ -14,13 +14,15 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.const import SUN_EVENT_SUNSET, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.sun import get_astral_event_date import homeassistant.util.dt as dt_util -from .entity import JewishCalendarConfigEntry, JewishCalendarEntity +from .const import DOMAIN +from .entity import JewishCalendarEntity _LOGGER = logging.getLogger(__name__) @@ -167,15 +169,17 @@ TIME_SENSORS: tuple[SensorEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - config_entry: JewishCalendarConfigEntry, + config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Jewish calendar sensors .""" + entry = hass.data[DOMAIN][config_entry.entry_id] sensors = [ - JewishCalendarSensor(config_entry, description) for description in INFO_SENSORS + JewishCalendarSensor(config_entry, entry, description) + for description in INFO_SENSORS ] sensors.extend( - JewishCalendarTimeSensor(config_entry, description) + JewishCalendarTimeSensor(config_entry, entry, description) for description in TIME_SENSORS ) @@ -189,11 +193,12 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity): def __init__( self, - config_entry: JewishCalendarConfigEntry, + config_entry: ConfigEntry, + data: dict[str, Any], description: SensorEntityDescription, ) -> None: """Initialize the Jewish calendar sensor.""" - super().__init__(config_entry, description) + super().__init__(config_entry, data, description) self._attrs: dict[str, str] = {} async def async_update(self) -> None: diff --git a/homeassistant/components/kostal_plenticore/sensor.py b/homeassistant/components/kostal_plenticore/sensor.py index 67de34f2fce..fbbfb03fb3e 100644 --- a/homeassistant/components/kostal_plenticore/sensor.py +++ b/homeassistant/components/kostal_plenticore/sensor.py @@ -17,7 +17,6 @@ from homeassistant.components.sensor import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, - EntityCategory, UnitOfElectricCurrent, UnitOfElectricPotential, UnitOfEnergy, @@ -748,15 +747,6 @@ SENSOR_PROCESS_DATA = [ state_class=SensorStateClass.TOTAL_INCREASING, formatter="format_energy", ), - PlenticoreSensorEntityDescription( - module_id="scb:event", - key="Event:ActiveErrorCnt", - name="Active Alarms", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - icon="mdi:alert", - formatter="format_round", - ), PlenticoreSensorEntityDescription( module_id="_virt_", key="pv_P", diff --git a/homeassistant/components/lamarzocco/__init__.py b/homeassistant/components/lamarzocco/__init__.py index da513bc8cff..82a91c0003f 100644 --- a/homeassistant/components/lamarzocco/__init__.py +++ b/homeassistant/components/lamarzocco/__init__.py @@ -2,12 +2,12 @@ import logging +from lmcloud.client_bluetooth import LaMarzoccoBluetoothClient +from lmcloud.client_cloud import LaMarzoccoCloudClient +from lmcloud.client_local import LaMarzoccoLocalClient +from lmcloud.const import BT_MODEL_PREFIXES, FirmwareType +from lmcloud.exceptions import AuthFail, RequestNotSuccessful from packaging import version -from pylamarzocco.client_bluetooth import LaMarzoccoBluetoothClient -from pylamarzocco.client_cloud import LaMarzoccoCloudClient -from pylamarzocco.client_local import LaMarzoccoLocalClient -from pylamarzocco.const import BT_MODEL_PREFIXES, FirmwareType -from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful from homeassistant.components.bluetooth import async_discovered_service_info from homeassistant.config_entries import ConfigEntry diff --git a/homeassistant/components/lamarzocco/binary_sensor.py b/homeassistant/components/lamarzocco/binary_sensor.py index 444e4d0723b..c48453214bd 100644 --- a/homeassistant/components/lamarzocco/binary_sensor.py +++ b/homeassistant/components/lamarzocco/binary_sensor.py @@ -3,7 +3,7 @@ from collections.abc import Callable from dataclasses import dataclass -from pylamarzocco.models import LaMarzoccoMachineConfig +from 
lmcloud.models import LaMarzoccoMachineConfig from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, diff --git a/homeassistant/components/lamarzocco/button.py b/homeassistant/components/lamarzocco/button.py index ae79e21897f..60374a85e1e 100644 --- a/homeassistant/components/lamarzocco/button.py +++ b/homeassistant/components/lamarzocco/button.py @@ -1,11 +1,11 @@ """Button platform for La Marzocco espresso machines.""" -import asyncio from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from pylamarzocco.exceptions import RequestNotSuccessful +from lmcloud.exceptions import RequestNotSuccessful +from lmcloud.lm_machine import LaMarzoccoMachine from homeassistant.components.button import ButtonEntity, ButtonEntityDescription from homeassistant.core import HomeAssistant @@ -13,11 +13,9 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN -from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator +from .coordinator import LaMarzoccoConfigEntry from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription -BACKFLUSH_ENABLED_DURATION = 15 - @dataclass(frozen=True, kw_only=True) class LaMarzoccoButtonEntityDescription( @@ -26,25 +24,14 @@ class LaMarzoccoButtonEntityDescription( ): """Description of a La Marzocco button.""" - press_fn: Callable[[LaMarzoccoUpdateCoordinator], Coroutine[Any, Any, None]] - - -async def async_backflush_and_update(coordinator: LaMarzoccoUpdateCoordinator) -> None: - """Press backflush button.""" - await coordinator.device.start_backflush() - # lib will set state optimistically - coordinator.async_set_updated_data(None) - # backflush is enabled for 15 seconds - # then turns off automatically - await asyncio.sleep(BACKFLUSH_ENABLED_DURATION + 1) - await coordinator.async_request_refresh() + press_fn: Callable[[LaMarzoccoMachine], Coroutine[Any, Any, None]] ENTITIES: tuple[LaMarzoccoButtonEntityDescription, ...] 
= ( LaMarzoccoButtonEntityDescription( key="start_backflush", translation_key="start_backflush", - press_fn=async_backflush_and_update, + press_fn=lambda machine: machine.start_backflush(), ), ) @@ -72,7 +59,7 @@ class LaMarzoccoButtonEntity(LaMarzoccoEntity, ButtonEntity): async def async_press(self) -> None: """Press button.""" try: - await self.entity_description.press_fn(self.coordinator) + await self.entity_description.press_fn(self.coordinator.device) except RequestNotSuccessful as exc: raise HomeAssistantError( translation_domain=DOMAIN, @@ -81,3 +68,4 @@ class LaMarzoccoButtonEntity(LaMarzoccoEntity, ButtonEntity): "key": self.entity_description.key, }, ) from exc + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/lamarzocco/calendar.py b/homeassistant/components/lamarzocco/calendar.py index 0ec9b55a9a1..3d8b2474c94 100644 --- a/homeassistant/components/lamarzocco/calendar.py +++ b/homeassistant/components/lamarzocco/calendar.py @@ -3,7 +3,7 @@ from collections.abc import Iterator from datetime import datetime, timedelta -from pylamarzocco.models import LaMarzoccoWakeUpSleepEntry +from lmcloud.models import LaMarzoccoWakeUpSleepEntry from homeassistant.components.calendar import CalendarEntity, CalendarEvent from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/lamarzocco/config_flow.py b/homeassistant/components/lamarzocco/config_flow.py index 04e705edbdc..4fadd3a9a32 100644 --- a/homeassistant/components/lamarzocco/config_flow.py +++ b/homeassistant/components/lamarzocco/config_flow.py @@ -6,10 +6,10 @@ from collections.abc import Mapping import logging from typing import Any -from pylamarzocco.client_cloud import LaMarzoccoCloudClient -from pylamarzocco.client_local import LaMarzoccoLocalClient -from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful -from pylamarzocco.models import LaMarzoccoDeviceInfo +from lmcloud.client_cloud import LaMarzoccoCloudClient +from lmcloud.client_local import LaMarzoccoLocalClient +from lmcloud.exceptions import AuthFail, RequestNotSuccessful +from lmcloud.models import LaMarzoccoDeviceInfo import voluptuous as vol from homeassistant.components.bluetooth import ( diff --git a/homeassistant/components/lamarzocco/coordinator.py b/homeassistant/components/lamarzocco/coordinator.py index 05fee98c599..e2ff8791a05 100644 --- a/homeassistant/components/lamarzocco/coordinator.py +++ b/homeassistant/components/lamarzocco/coordinator.py @@ -8,11 +8,11 @@ import logging from time import time from typing import Any -from pylamarzocco.client_bluetooth import LaMarzoccoBluetoothClient -from pylamarzocco.client_cloud import LaMarzoccoCloudClient -from pylamarzocco.client_local import LaMarzoccoLocalClient -from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful -from pylamarzocco.lm_machine import LaMarzoccoMachine +from lmcloud.client_bluetooth import LaMarzoccoBluetoothClient +from lmcloud.client_cloud import LaMarzoccoCloudClient +from lmcloud.client_local import LaMarzoccoLocalClient +from lmcloud.exceptions import AuthFail, RequestNotSuccessful +from lmcloud.lm_machine import LaMarzoccoMachine from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MODEL, CONF_NAME, EVENT_HOMEASSISTANT_STOP diff --git a/homeassistant/components/lamarzocco/diagnostics.py b/homeassistant/components/lamarzocco/diagnostics.py index 43ae51ee192..edce6a349aa 100644 --- a/homeassistant/components/lamarzocco/diagnostics.py +++ 
b/homeassistant/components/lamarzocco/diagnostics.py @@ -5,7 +5,7 @@ from __future__ import annotations from dataclasses import asdict from typing import Any, TypedDict -from pylamarzocco.const import FirmwareType +from lmcloud.const import FirmwareType from homeassistant.components.diagnostics import async_redact_data from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/lamarzocco/entity.py b/homeassistant/components/lamarzocco/entity.py index 1ea84302a17..f7e6ff9e2b8 100644 --- a/homeassistant/components/lamarzocco/entity.py +++ b/homeassistant/components/lamarzocco/entity.py @@ -3,8 +3,8 @@ from collections.abc import Callable from dataclasses import dataclass -from pylamarzocco.const import FirmwareType -from pylamarzocco.lm_machine import LaMarzoccoMachine +from lmcloud.const import FirmwareType +from lmcloud.lm_machine import LaMarzoccoMachine from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import EntityDescription diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 6b226051118..bfe0d34a9e4 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -32,6 +32,6 @@ "documentation": "https://www.home-assistant.io/integrations/lamarzocco", "integration_type": "device", "iot_class": "cloud_polling", - "loggers": ["pylamarzocco"], - "requirements": ["pylamarzocco==1.2.3"] + "loggers": ["lmcloud"], + "requirements": ["lmcloud==1.2.3"] } diff --git a/homeassistant/components/lamarzocco/number.py b/homeassistant/components/lamarzocco/number.py index 825c5d6deb0..df75147e7e1 100644 --- a/homeassistant/components/lamarzocco/number.py +++ b/homeassistant/components/lamarzocco/number.py @@ -4,16 +4,16 @@ from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from pylamarzocco.const import ( +from lmcloud.const import ( KEYS_PER_MODEL, BoilerType, MachineModel, PhysicalKey, PrebrewMode, ) -from pylamarzocco.exceptions import RequestNotSuccessful -from pylamarzocco.lm_machine import LaMarzoccoMachine -from pylamarzocco.models import LaMarzoccoMachineConfig +from lmcloud.exceptions import RequestNotSuccessful +from lmcloud.lm_machine import LaMarzoccoMachine +from lmcloud.models import LaMarzoccoMachineConfig from homeassistant.components.number import ( NumberDeviceClass, diff --git a/homeassistant/components/lamarzocco/select.py b/homeassistant/components/lamarzocco/select.py index 1889ba38d6b..1958fa6f210 100644 --- a/homeassistant/components/lamarzocco/select.py +++ b/homeassistant/components/lamarzocco/select.py @@ -4,10 +4,10 @@ from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from pylamarzocco.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel -from pylamarzocco.exceptions import RequestNotSuccessful -from pylamarzocco.lm_machine import LaMarzoccoMachine -from pylamarzocco.models import LaMarzoccoMachineConfig +from lmcloud.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel +from lmcloud.exceptions import RequestNotSuccessful +from lmcloud.lm_machine import LaMarzoccoMachine +from lmcloud.models import LaMarzoccoMachineConfig from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.const import EntityCategory diff --git a/homeassistant/components/lamarzocco/sensor.py 
b/homeassistant/components/lamarzocco/sensor.py index 04b095e798c..ca8a118c1ee 100644 --- a/homeassistant/components/lamarzocco/sensor.py +++ b/homeassistant/components/lamarzocco/sensor.py @@ -3,8 +3,8 @@ from collections.abc import Callable from dataclasses import dataclass -from pylamarzocco.const import BoilerType, MachineModel, PhysicalKey -from pylamarzocco.lm_machine import LaMarzoccoMachine +from lmcloud.const import BoilerType, MachineModel, PhysicalKey +from lmcloud.lm_machine import LaMarzoccoMachine from homeassistant.components.sensor import ( SensorDeviceClass, diff --git a/homeassistant/components/lamarzocco/switch.py b/homeassistant/components/lamarzocco/switch.py index f7690885f05..a611424418f 100644 --- a/homeassistant/components/lamarzocco/switch.py +++ b/homeassistant/components/lamarzocco/switch.py @@ -4,10 +4,10 @@ from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from pylamarzocco.const import BoilerType -from pylamarzocco.exceptions import RequestNotSuccessful -from pylamarzocco.lm_machine import LaMarzoccoMachine -from pylamarzocco.models import LaMarzoccoMachineConfig +from lmcloud.const import BoilerType +from lmcloud.exceptions import RequestNotSuccessful +from lmcloud.lm_machine import LaMarzoccoMachine +from lmcloud.models import LaMarzoccoMachineConfig from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.const import EntityCategory diff --git a/homeassistant/components/lamarzocco/update.py b/homeassistant/components/lamarzocco/update.py index 371ff679bae..61f436a7d7f 100644 --- a/homeassistant/components/lamarzocco/update.py +++ b/homeassistant/components/lamarzocco/update.py @@ -3,8 +3,8 @@ from dataclasses import dataclass from typing import Any -from pylamarzocco.const import FirmwareType -from pylamarzocco.exceptions import RequestNotSuccessful +from lmcloud.const import FirmwareType +from lmcloud.exceptions import RequestNotSuccessful from homeassistant.components.update import ( UpdateDeviceClass, diff --git a/homeassistant/components/lcn/__init__.py b/homeassistant/components/lcn/__init__.py index eb26ef48e4e..5995e06efcc 100644 --- a/homeassistant/components/lcn/__init__.py +++ b/homeassistant/components/lcn/__init__.py @@ -8,7 +8,7 @@ import logging import pypck from pypck.connection import PchkConnectionManager -from homeassistant.config_entries import ConfigEntry +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( CONF_DEVICE_ID, CONF_DOMAIN, @@ -20,7 +20,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.typing import ConfigType from .const import ( @@ -39,29 +39,40 @@ from .helpers import ( InputType, async_update_config_entry, generate_unique_id, + import_lcn_config, register_lcn_address_devices, register_lcn_host_device, ) -from .services import register_services +from .schemas import CONFIG_SCHEMA # noqa: F401 +from .services import SERVICES from .websocket import register_panel_and_ws_api _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the LCN component.""" - hass.data.setdefault(DOMAIN, {}) + if DOMAIN not in config: + return True - await 
register_services(hass) - await register_panel_and_ws_api(hass) + # initialize a config_flow for all LCN configurations read from + # configuration.yaml + config_entries_data = import_lcn_config(config[DOMAIN]) + for config_entry_data in config_entries_data: + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=config_entry_data, + ) + ) return True async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up a connection to PCHK host from a config entry.""" + hass.data.setdefault(DOMAIN, {}) if config_entry.entry_id in hass.data[DOMAIN]: return False @@ -121,6 +132,15 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b ) lcn_connection.register_for_inputs(input_received) + # register service calls + for service_name, service in SERVICES: + if not hass.services.has_service(DOMAIN, service_name): + hass.services.async_register( + DOMAIN, service_name, service(hass).async_call_service, service.schema + ) + + await register_panel_and_ws_api(hass) + return True @@ -171,6 +191,11 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> host = hass.data[DOMAIN].pop(config_entry.entry_id) await host[CONNECTION].async_close() + # unregister service calls + if unload_ok and not hass.data[DOMAIN]: # check if this is the last entry to unload + for service_name, _ in SERVICES: + hass.services.async_remove(DOMAIN, service_name) + return unload_ok diff --git a/homeassistant/components/lcn/binary_sensor.py b/homeassistant/components/lcn/binary_sensor.py index d0ce4815f19..1e29a36da4e 100644 --- a/homeassistant/components/lcn/binary_sensor.py +++ b/homeassistant/components/lcn/binary_sensor.py @@ -15,11 +15,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DOMAIN, CONF_ENTITIES, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import ( - IssueSeverity, - async_create_issue, - async_delete_issue, -) +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType from .const import ( @@ -119,9 +115,6 @@ class LcnRegulatorLockSensor(LcnEntity, BinarySensorEntity): await self.device_connection.cancel_status_request_handler( self.setpoint_variable ) - async_delete_issue( - self.hass, DOMAIN, f"deprecated_binary_sensor_{self.entity_id}" - ) def input_received(self, input_obj: InputType) -> None: """Set sensor value when LCN input object (command) is received.""" @@ -208,9 +201,6 @@ class LcnLockKeysSensor(LcnEntity, BinarySensorEntity): await super().async_will_remove_from_hass() if not self.device_connection.is_group: await self.device_connection.cancel_status_request_handler(self.source) - async_delete_issue( - self.hass, DOMAIN, f"deprecated_binary_sensor_{self.entity_id}" - ) def input_received(self, input_obj: InputType) -> None: """Set sensor value when LCN input object (command) is received.""" diff --git a/homeassistant/components/lcn/config_flow.py b/homeassistant/components/lcn/config_flow.py index 008265e62ae..e78378a61b1 100644 --- a/homeassistant/components/lcn/config_flow.py +++ b/homeassistant/components/lcn/config_flow.py @@ -9,6 +9,7 @@ import pypck import voluptuous as vol from homeassistant import config_entries +from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import ( 
CONF_BASE, CONF_DEVICES, @@ -19,12 +20,14 @@ from homeassistant.const import ( CONF_PORT, CONF_USERNAME, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType from . import PchkConnectionManager from .const import CONF_ACKNOWLEDGE, CONF_DIM_MODE, CONF_SK_NUM_TRIES, DIM_MODES, DOMAIN +from .helpers import purge_device_registry, purge_entity_registry _LOGGER = logging.getLogger(__name__) @@ -110,6 +113,55 @@ class LcnFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): VERSION = 2 MINOR_VERSION = 1 + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: + """Import existing configuration from LCN.""" + # validate the imported connection parameters + if error := await validate_connection(import_data): + async_create_issue( + self.hass, + DOMAIN, + error, + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.ERROR, + translation_key=error, + translation_placeholders={ + "url": "/config/integrations/dashboard/add?domain=lcn" + }, + ) + return self.async_abort(reason=error) + + async_create_issue( + self.hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2024.12.0", + is_fixable=False, + is_persistent=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "LCN", + }, + ) + + # check if we already have a host with the same address configured + if entry := get_config_entry(self.hass, import_data): + entry.source = config_entries.SOURCE_IMPORT + # Cleanup entity and device registry, if we imported from configuration.yaml to + # remove orphans when entities were removed from configuration + purge_entity_registry(self.hass, entry.entry_id, import_data) + purge_device_registry(self.hass, entry.entry_id, import_data) + + self.hass.config_entries.async_update_entry(entry, data=import_data) + return self.async_abort(reason="existing_configuration_updated") + + return self.async_create_entry( + title=f"{import_data[CONF_HOST]}", data=import_data + ) + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> config_entries.ConfigFlowResult: diff --git a/homeassistant/components/lcn/helpers.py b/homeassistant/components/lcn/helpers.py index 6a9c63ea212..7da047682ac 100644 --- a/homeassistant/components/lcn/helpers.py +++ b/homeassistant/components/lcn/helpers.py @@ -9,6 +9,7 @@ import re from typing import cast import pypck +import voluptuous as vol from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -18,12 +19,17 @@ from homeassistant.const import ( CONF_DEVICES, CONF_DOMAIN, CONF_ENTITIES, + CONF_HOST, + CONF_IP_ADDRESS, CONF_LIGHTS, CONF_NAME, + CONF_PASSWORD, + CONF_PORT, CONF_RESOURCE, CONF_SENSORS, CONF_SOURCE, CONF_SWITCHES, + CONF_USERNAME, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -31,13 +37,19 @@ from homeassistant.helpers.typing import ConfigType from .const import ( BINSENSOR_PORTS, + CONF_ACKNOWLEDGE, CONF_CLIMATES, + CONF_CONNECTIONS, + CONF_DIM_MODE, + CONF_DOMAIN_DATA, CONF_HARDWARE_SERIAL, CONF_HARDWARE_TYPE, CONF_OUTPUT, CONF_SCENES, + CONF_SK_NUM_TRIES, CONF_SOFTWARE_SERIAL, CONNECTION, + 
DEFAULT_NAME, DOMAIN, LED_PORTS, LOGICOP_PORTS, @@ -134,6 +146,110 @@ def generate_unique_id( return unique_id +def import_lcn_config(lcn_config: ConfigType) -> list[ConfigType]: + """Convert lcn settings from configuration.yaml to config_entries data. + + Create a list of config_entry data structures like: + + "data": { + "host": "pchk", + "ip_address": "192.168.2.41", + "port": 4114, + "username": "lcn", + "password": "lcn, + "sk_num_tries: 0, + "dim_mode: "STEPS200", + "acknowledge": False, + "devices": [ + { + "address": (0, 7, False) + "name": "", + "hardware_serial": -1, + "software_serial": -1, + "hardware_type": -1 + }, ... + ], + "entities": [ + { + "address": (0, 7, False) + "name": "Light_Output1", + "resource": "output1", + "domain": "light", + "domain_data": { + "output": "OUTPUT1", + "dimmable": True, + "transition": 5000.0 + } + }, ... + ] + } + """ + data = {} + for connection in lcn_config[CONF_CONNECTIONS]: + host = { + CONF_HOST: connection[CONF_NAME], + CONF_IP_ADDRESS: connection[CONF_HOST], + CONF_PORT: connection[CONF_PORT], + CONF_USERNAME: connection[CONF_USERNAME], + CONF_PASSWORD: connection[CONF_PASSWORD], + CONF_SK_NUM_TRIES: connection[CONF_SK_NUM_TRIES], + CONF_DIM_MODE: connection[CONF_DIM_MODE], + CONF_ACKNOWLEDGE: False, + CONF_DEVICES: [], + CONF_ENTITIES: [], + } + data[connection[CONF_NAME]] = host + + for confkey, domain_config in lcn_config.items(): + if confkey == CONF_CONNECTIONS: + continue + domain = DOMAIN_LOOKUP[confkey] + # loop over entities in configuration.yaml + for domain_data in domain_config: + # remove name and address from domain_data + entity_name = domain_data.pop(CONF_NAME) + address, host_name = domain_data.pop(CONF_ADDRESS) + + if host_name is None: + host_name = DEFAULT_NAME + + # check if we have a new device config + for device_config in data[host_name][CONF_DEVICES]: + if address == device_config[CONF_ADDRESS]: + break + else: # create new device_config + device_config = { + CONF_ADDRESS: address, + CONF_NAME: "", + CONF_HARDWARE_SERIAL: -1, + CONF_SOFTWARE_SERIAL: -1, + CONF_HARDWARE_TYPE: -1, + } + + data[host_name][CONF_DEVICES].append(device_config) + + # insert entity config + resource = get_resource(domain, domain_data).lower() + for entity_config in data[host_name][CONF_ENTITIES]: + if ( + address == entity_config[CONF_ADDRESS] + and resource == entity_config[CONF_RESOURCE] + and domain == entity_config[CONF_DOMAIN] + ): + break + else: # create new entity_config + entity_config = { + CONF_ADDRESS: address, + CONF_NAME: entity_name, + CONF_RESOURCE: resource, + CONF_DOMAIN: domain, + CONF_DOMAIN_DATA: domain_data.copy(), + } + data[host_name][CONF_ENTITIES].append(entity_config) + + return list(data.values()) + + def purge_entity_registry( hass: HomeAssistant, entry_id: str, imported_entry_data: ConfigType ) -> None: @@ -320,6 +436,26 @@ def get_device_config( return None +def has_unique_host_names(hosts: list[ConfigType]) -> list[ConfigType]: + """Validate that all connection names are unique. + + Use 'pchk' as default connection_name (or add a numeric suffix if + pchk' is already in use. + """ + suffix = 0 + for host in hosts: + if host.get(CONF_NAME) is None: + if suffix == 0: + host[CONF_NAME] = DEFAULT_NAME + else: + host[CONF_NAME] = f"{DEFAULT_NAME}{suffix:d}" + suffix += 1 + + schema = vol.Schema(vol.Unique()) + schema([host.get(CONF_NAME) for host in hosts]) + return hosts + + def is_address(value: str) -> tuple[AddressType, str]: """Validate the given address string. 
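Aside: the `import_lcn_config` helper added above flattens the YAML `lcn:` configuration into one config-entry data dict per connection, as outlined in its docstring. Below is a minimal standalone sketch of that mapping — plain string keys and dicts stand in for the real `CONF_*` constants, the `DOMAIN_LOOKUP` table, and voluptuous validation, so this illustrates only the shape of the transformation and is not the integration's actual code.

# Minimal sketch of the YAML -> config-entry mapping described in the
# import_lcn_config docstring above. String keys replace the CONF_* constants;
# the real helper also tracks serial numbers, dim mode, resources, etc.
def sketch_import_lcn_config(lcn_config: dict) -> list[dict]:
    """Group YAML connections and their entities into per-host config-entry data."""
    data: dict[str, dict] = {}
    for connection in lcn_config["connections"]:
        name = connection.get("name", "pchk")
        data[name] = {
            "host": name,
            "ip_address": connection["host"],
            "port": connection["port"],
            "username": connection["username"],
            "password": connection["password"],
            "devices": [],
            "entities": [],
        }

    for domain, entities in lcn_config.items():
        if domain == "connections":
            continue
        for entity in entities:
            address, host_name = entity["address"]  # e.g. ((0, 7, False), None)
            host = data[host_name or "pchk"]
            # register each hardware address as a device exactly once
            if not any(dev["address"] == address for dev in host["devices"]):
                host["devices"].append({"address": address, "name": ""})
            # everything that is neither name nor address becomes domain_data
            host["entities"].append(
                {
                    "address": address,
                    "name": entity["name"],
                    "domain": domain,
                    "domain_data": {
                        key: value
                        for key, value in entity.items()
                        if key not in ("name", "address")
                    },
                }
            )
    return list(data.values())


# Example: one PCHK connection and a single dimmable light from configuration.yaml.
print(
    sketch_import_lcn_config(
        {
            "connections": [
                {"host": "192.168.2.41", "port": 4114, "username": "lcn", "password": "lcn"}
            ],
            "lights": [
                {
                    "name": "Light_Output1",
                    "address": ((0, 7, False), None),
                    "output": "output1",
                    "dimmable": True,
                }
            ],
        }
    )
)

Running the sketch prints a single host dict with one device and one light entity, which is the structure that `async_step_import` in config_flow.py then turns into a config entry (or uses to update an existing one).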
diff --git a/homeassistant/components/lcn/manifest.json b/homeassistant/components/lcn/manifest.json index 695a35df871..6ce41a2d08d 100644 --- a/homeassistant/components/lcn/manifest.json +++ b/homeassistant/components/lcn/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/lcn", "iot_class": "local_push", "loggers": ["pypck"], - "requirements": ["pypck==0.7.24", "lcn-frontend==0.2.2"] + "requirements": ["pypck==0.7.24", "lcn-frontend==0.2.1"] } diff --git a/homeassistant/components/lcn/schemas.py b/homeassistant/components/lcn/schemas.py index c9c91b9843d..3b4d2333970 100644 --- a/homeassistant/components/lcn/schemas.py +++ b/homeassistant/components/lcn/schemas.py @@ -4,9 +4,20 @@ import voluptuous as vol from homeassistant.components.climate import DEFAULT_MAX_TEMP, DEFAULT_MIN_TEMP from homeassistant.const import ( + CONF_ADDRESS, + CONF_BINARY_SENSORS, + CONF_COVERS, + CONF_HOST, + CONF_LIGHTS, + CONF_NAME, + CONF_PASSWORD, + CONF_PORT, CONF_SCENE, + CONF_SENSORS, CONF_SOURCE, + CONF_SWITCHES, CONF_UNIT_OF_MEASUREMENT, + CONF_USERNAME, UnitOfTemperature, ) import homeassistant.helpers.config_validation as cv @@ -14,6 +25,9 @@ from homeassistant.helpers.typing import VolDictType from .const import ( BINSENSOR_PORTS, + CONF_CLIMATES, + CONF_CONNECTIONS, + CONF_DIM_MODE, CONF_DIMMABLE, CONF_LOCKABLE, CONF_MAX_TEMP, @@ -23,8 +37,12 @@ from .const import ( CONF_OUTPUTS, CONF_REGISTER, CONF_REVERSE_TIME, + CONF_SCENES, CONF_SETPOINT, + CONF_SK_NUM_TRIES, CONF_TRANSITION, + DIM_MODES, + DOMAIN, KEYS, LED_PORTS, LOGICOP_PORTS, @@ -38,6 +56,7 @@ from .const import ( VAR_UNITS, VARIABLES, ) +from .helpers import has_unique_host_names, is_address ADDRESS_SCHEMA = vol.Coerce(tuple) @@ -111,3 +130,72 @@ DOMAIN_DATA_SWITCH: VolDictType = { vol.In(OUTPUT_PORTS + RELAY_PORTS + SETPOINTS + KEYS), ), } + + +# +# Configuration +# + +DOMAIN_DATA_BASE: VolDictType = { + vol.Required(CONF_NAME): cv.string, + vol.Required(CONF_ADDRESS): is_address, +} + +BINARY_SENSORS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_BINARY_SENSOR}) + +CLIMATES_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_CLIMATE}) + +COVERS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_COVER}) + +LIGHTS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_LIGHT}) + +SCENES_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_SCENE}) + +SENSORS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_SENSOR}) + +SWITCHES_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_SWITCH}) + +CONNECTION_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): cv.string, + vol.Required(CONF_PORT): cv.port, + vol.Required(CONF_USERNAME): cv.string, + vol.Required(CONF_PASSWORD): cv.string, + vol.Optional(CONF_SK_NUM_TRIES, default=0): cv.positive_int, + vol.Optional(CONF_DIM_MODE, default="steps50"): vol.All( + vol.Upper, vol.In(DIM_MODES) + ), + vol.Optional(CONF_NAME): cv.string, + } +) + +CONFIG_SCHEMA = vol.Schema( + vol.All( + cv.deprecated(DOMAIN), + { + DOMAIN: vol.Schema( + { + vol.Required(CONF_CONNECTIONS): vol.All( + cv.ensure_list, has_unique_host_names, [CONNECTION_SCHEMA] + ), + vol.Optional(CONF_BINARY_SENSORS): vol.All( + cv.ensure_list, [BINARY_SENSORS_SCHEMA] + ), + vol.Optional(CONF_CLIMATES): vol.All( + cv.ensure_list, [CLIMATES_SCHEMA] + ), + vol.Optional(CONF_COVERS): vol.All(cv.ensure_list, [COVERS_SCHEMA]), + vol.Optional(CONF_LIGHTS): vol.All(cv.ensure_list, [LIGHTS_SCHEMA]), + vol.Optional(CONF_SCENES): vol.All(cv.ensure_list, [SCENES_SCHEMA]), + 
vol.Optional(CONF_SENSORS): vol.All( + cv.ensure_list, [SENSORS_SCHEMA] + ), + vol.Optional(CONF_SWITCHES): vol.All( + cv.ensure_list, [SWITCHES_SCHEMA] + ), + }, + ) + }, + ), + extra=vol.ALLOW_EXTRA, +) diff --git a/homeassistant/components/lcn/services.py b/homeassistant/components/lcn/services.py index 92f5863c47e..611a7353bcd 100644 --- a/homeassistant/components/lcn/services.py +++ b/homeassistant/components/lcn/services.py @@ -429,11 +429,3 @@ SERVICES = ( (LcnService.DYN_TEXT, DynText), (LcnService.PCK, Pck), ) - - -async def register_services(hass: HomeAssistant) -> None: - """Register services for LCN.""" - for service_name, service in SERVICES: - hass.services.async_register( - DOMAIN, service_name, service(hass).async_call_service, service.schema - ) diff --git a/homeassistant/components/lcn/strings.json b/homeassistant/components/lcn/strings.json index 088a3654500..ae0b1b01f9a 100644 --- a/homeassistant/components/lcn/strings.json +++ b/homeassistant/components/lcn/strings.json @@ -63,6 +63,18 @@ } }, "issues": { + "authentication_error": { + "title": "Authentication failed.", + "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure username and password are correct.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + }, + "license_error": { + "title": "Maximum number of connections was reached.", + "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure sufficient PCHK licenses are registered and restart Home Assistant.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + }, + "connection_refused": { + "title": "Unable to connect to PCHK.", + "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure the connection (IP and port) to the LCN bus coupler is correct.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + }, "deprecated_regulatorlock_sensor": { "title": "Deprecated LCN regulator lock binary sensor", "description": "Your LCN regulator lock binary sensor entity `{entity}` is beeing used in automations or scripts. A regulator lock switch entity is available and should be used going forward.\n\nPlease adjust your automations or scripts to fix this issue." diff --git a/homeassistant/components/lektrico/__init__.py b/homeassistant/components/lektrico/__init__.py index 475b6132541..c309bb42ece 100644 --- a/homeassistant/components/lektrico/__init__.py +++ b/homeassistant/components/lektrico/__init__.py @@ -12,7 +12,6 @@ from .coordinator import LektricoDeviceDataUpdateCoordinator # List the platforms that charger supports. 
CHARGERS_PLATFORMS: list[Platform] = [ - Platform.BINARY_SENSOR, Platform.BUTTON, Platform.NUMBER, Platform.SENSOR, diff --git a/homeassistant/components/lektrico/binary_sensor.py b/homeassistant/components/lektrico/binary_sensor.py deleted file mode 100644 index d0a3e39690c..00000000000 --- a/homeassistant/components/lektrico/binary_sensor.py +++ /dev/null @@ -1,139 +0,0 @@ -"""Support for Lektrico binary sensors entities.""" - -from collections.abc import Callable -from dataclasses import dataclass -from typing import Any - -from homeassistant.components.binary_sensor import ( - BinarySensorDeviceClass, - BinarySensorEntity, - BinarySensorEntityDescription, -) -from homeassistant.const import ATTR_SERIAL_NUMBER, CONF_TYPE, EntityCategory -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from . import LektricoConfigEntry, LektricoDeviceDataUpdateCoordinator -from .entity import LektricoEntity - - -@dataclass(frozen=True, kw_only=True) -class LektricoBinarySensorEntityDescription(BinarySensorEntityDescription): - """Describes Lektrico binary sensor entity.""" - - value_fn: Callable[[dict[str, Any]], bool] - - -BINARY_SENSORS: tuple[LektricoBinarySensorEntityDescription, ...] = ( - LektricoBinarySensorEntityDescription( - key="state_e_activated", - translation_key="state_e_activated", - entity_category=EntityCategory.DIAGNOSTIC, - device_class=BinarySensorDeviceClass.PROBLEM, - value_fn=lambda data: bool(data["state_e_activated"]), - ), - LektricoBinarySensorEntityDescription( - key="overtemp", - translation_key="overtemp", - entity_category=EntityCategory.DIAGNOSTIC, - device_class=BinarySensorDeviceClass.PROBLEM, - value_fn=lambda data: bool(data["overtemp"]), - ), - LektricoBinarySensorEntityDescription( - key="critical_temp", - translation_key="critical_temp", - entity_category=EntityCategory.DIAGNOSTIC, - device_class=BinarySensorDeviceClass.PROBLEM, - value_fn=lambda data: bool(data["critical_temp"]), - ), - LektricoBinarySensorEntityDescription( - key="overcurrent", - translation_key="overcurrent", - entity_category=EntityCategory.DIAGNOSTIC, - device_class=BinarySensorDeviceClass.PROBLEM, - value_fn=lambda data: bool(data["overcurrent"]), - ), - LektricoBinarySensorEntityDescription( - key="meter_fault", - translation_key="meter_fault", - entity_category=EntityCategory.DIAGNOSTIC, - device_class=BinarySensorDeviceClass.PROBLEM, - value_fn=lambda data: bool(data["meter_fault"]), - ), - LektricoBinarySensorEntityDescription( - key="undervoltage", - translation_key="undervoltage", - entity_category=EntityCategory.DIAGNOSTIC, - device_class=BinarySensorDeviceClass.PROBLEM, - value_fn=lambda data: bool(data["undervoltage_error"]), - ), - LektricoBinarySensorEntityDescription( - key="overvoltage", - translation_key="overvoltage", - entity_category=EntityCategory.DIAGNOSTIC, - device_class=BinarySensorDeviceClass.PROBLEM, - value_fn=lambda data: bool(data["overvoltage_error"]), - ), - LektricoBinarySensorEntityDescription( - key="rcd_error", - translation_key="rcd_error", - entity_category=EntityCategory.DIAGNOSTIC, - device_class=BinarySensorDeviceClass.PROBLEM, - value_fn=lambda data: bool(data["rcd_error"]), - ), - LektricoBinarySensorEntityDescription( - key="cp_diode_failure", - translation_key="cp_diode_failure", - entity_category=EntityCategory.DIAGNOSTIC, - device_class=BinarySensorDeviceClass.PROBLEM, - value_fn=lambda data: bool(data["cp_diode_failure"]), - ), - LektricoBinarySensorEntityDescription( - 
key="contactor_failure", - translation_key="contactor_failure", - entity_category=EntityCategory.DIAGNOSTIC, - device_class=BinarySensorDeviceClass.PROBLEM, - value_fn=lambda data: bool(data["contactor_failure"]), - ), -) - - -async def async_setup_entry( - hass: HomeAssistant, - entry: LektricoConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up Lektrico binary sensor entities based on a config entry.""" - coordinator = entry.runtime_data - - async_add_entities( - LektricoBinarySensor( - description, - coordinator, - f"{entry.data[CONF_TYPE]}_{entry.data[ATTR_SERIAL_NUMBER]}", - ) - for description in BINARY_SENSORS - ) - - -class LektricoBinarySensor(LektricoEntity, BinarySensorEntity): - """Defines a Lektrico binary sensor entity.""" - - entity_description: LektricoBinarySensorEntityDescription - - def __init__( - self, - description: LektricoBinarySensorEntityDescription, - coordinator: LektricoDeviceDataUpdateCoordinator, - device_name: str, - ) -> None: - """Initialize Lektrico binary sensor.""" - super().__init__(coordinator, device_name) - self.entity_description = description - self._coordinator = coordinator - self._attr_unique_id = f"{coordinator.serial_number}_{description.key}" - - @property - def is_on(self) -> bool: - """Return the state of the binary sensor.""" - return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/lektrico/strings.json b/homeassistant/components/lektrico/strings.json index e24700c9b09..e6dc7b9eb46 100644 --- a/homeassistant/components/lektrico/strings.json +++ b/homeassistant/components/lektrico/strings.json @@ -22,38 +22,6 @@ } }, "entity": { - "binary_sensor": { - "state_e_activated": { - "name": "Ev error" - }, - "overtemp": { - "name": "Thermal throttling" - }, - "critical_temp": { - "name": "Overheating" - }, - "overcurrent": { - "name": "Overcurrent" - }, - "meter_fault": { - "name": "Metering error" - }, - "undervoltage": { - "name": "Undervoltage" - }, - "overvoltage": { - "name": "Overvoltage" - }, - "rcd_error": { - "name": "Rcd error" - }, - "cp_diode_failure": { - "name": "Ev diode short" - }, - "contactor_failure": { - "name": "Relay contacts welded" - } - }, "button": { "charge_start": { "name": "Charge start" diff --git a/homeassistant/components/lg_thinq/fan.py b/homeassistant/components/lg_thinq/fan.py index edcadf2598a..187cc74b3eb 100644 --- a/homeassistant/components/lg_thinq/fan.py +++ b/homeassistant/components/lg_thinq/fan.py @@ -72,11 +72,8 @@ class ThinQFanEntity(ThinQEntity, FanEntity): super().__init__(coordinator, entity_description, property_id) self._ordered_named_fan_speeds = [] - self._attr_supported_features = ( - FanEntityFeature.SET_SPEED - | FanEntityFeature.TURN_ON - | FanEntityFeature.TURN_OFF - ) + self._attr_supported_features |= FanEntityFeature.SET_SPEED + if (fan_modes := self.data.fan_modes) is not None: self._attr_speed_count = len(fan_modes) if self.speed_count == 4: @@ -101,7 +98,7 @@ class ThinQFanEntity(ThinQEntity, FanEntity): self._attr_percentage = 0 _LOGGER.debug( - "[%s:%s] update status: %s -> %s (percentage=%s)", + "[%s:%s] update status: %s -> %s (percntage=%s)", self.coordinator.device_name, self.property_id, self.data.is_on, @@ -123,7 +120,7 @@ class ThinQFanEntity(ThinQEntity, FanEntity): return _LOGGER.debug( - "[%s:%s] async_set_percentage. percentage=%s, value=%s", + "[%s:%s] async_set_percentage. 
percntage=%s, value=%s", self.coordinator.device_name, self.property_id, percentage, diff --git a/homeassistant/components/linkplay/diagnostics.py b/homeassistant/components/linkplay/diagnostics.py deleted file mode 100644 index cfc1346aff4..00000000000 --- a/homeassistant/components/linkplay/diagnostics.py +++ /dev/null @@ -1,17 +0,0 @@ -"""Diagnostics support for Linkplay.""" - -from __future__ import annotations - -from typing import Any - -from homeassistant.core import HomeAssistant - -from . import LinkPlayConfigEntry - - -async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: LinkPlayConfigEntry -) -> dict[str, Any]: - """Return diagnostics for a config entry.""" - data = entry.runtime_data - return {"device_info": data.bridge.to_dict()} diff --git a/homeassistant/components/linkplay/manifest.json b/homeassistant/components/linkplay/manifest.json index e74d22b8207..f2b2e2da00c 100644 --- a/homeassistant/components/linkplay/manifest.json +++ b/homeassistant/components/linkplay/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["linkplay"], - "requirements": ["python-linkplay==0.0.20"], + "requirements": ["python-linkplay==0.0.17"], "zeroconf": ["_linkplay._tcp.local."] } diff --git a/homeassistant/components/linkplay/media_player.py b/homeassistant/components/linkplay/media_player.py index c29c2978522..36834610c04 100644 --- a/homeassistant/components/linkplay/media_player.py +++ b/homeassistant/components/linkplay/media_player.py @@ -9,7 +9,7 @@ from typing import Any, Concatenate from linkplay.bridge import LinkPlayBridge from linkplay.consts import EqualizerMode, LoopMode, PlayingMode, PlayingStatus from linkplay.controller import LinkPlayController, LinkPlayMultiroom -from linkplay.exceptions import LinkPlayRequestException +from linkplay.exceptions import LinkPlayException, LinkPlayRequestException import voluptuous as vol from homeassistant.components import media_source @@ -69,8 +69,6 @@ SOURCE_MAP: dict[PlayingMode, str] = { PlayingMode.FM: "FM Radio", PlayingMode.RCA: "RCA", PlayingMode.UDISK: "USB", - PlayingMode.SPOTIFY: "Spotify", - PlayingMode.TIDAL: "Tidal", PlayingMode.FOLLOWER: "Follower", } @@ -203,8 +201,9 @@ class LinkPlayMediaPlayerEntity(MediaPlayerEntity): try: await self._bridge.player.update_status() self._update_properties() - except LinkPlayRequestException: + except LinkPlayException: self._attr_available = False + raise @exception_wrap async def async_select_source(self, source: str) -> None: @@ -293,15 +292,7 @@ class LinkPlayMediaPlayerEntity(MediaPlayerEntity): @exception_wrap async def async_play_preset(self, preset_number: int) -> None: """Play preset number.""" - try: - await self._bridge.player.play_preset(preset_number) - except ValueError as err: - raise HomeAssistantError(err) from err - - @exception_wrap - async def async_media_seek(self, position: float) -> None: - """Seek to a position.""" - await self._bridge.player.seek(round(position)) + await self._bridge.player.play_preset(preset_number) @exception_wrap async def async_join_players(self, group_members: list[str]) -> None: @@ -388,9 +379,9 @@ class LinkPlayMediaPlayerEntity(MediaPlayerEntity): ) self._attr_source = SOURCE_MAP.get(self._bridge.player.play_mode, "other") - self._attr_media_position = self._bridge.player.current_position_in_seconds + self._attr_media_position = self._bridge.player.current_position / 1000 self._attr_media_position_updated_at = utcnow() - self._attr_media_duration = 
self._bridge.player.total_length_in_seconds + self._attr_media_duration = self._bridge.player.total_length / 1000 self._attr_media_artist = self._bridge.player.artist self._attr_media_title = self._bridge.player.title self._attr_media_album_name = self._bridge.player.album diff --git a/homeassistant/components/linkplay/services.yaml b/homeassistant/components/linkplay/services.yaml index 0d7335a28c8..20bc47be7a7 100644 --- a/homeassistant/components/linkplay/services.yaml +++ b/homeassistant/components/linkplay/services.yaml @@ -11,4 +11,5 @@ play_preset: selector: number: min: 1 + max: 10 mode: box diff --git a/homeassistant/components/matrix/manifest.json b/homeassistant/components/matrix/manifest.json index 43c151c7c23..520bd0550cc 100644 --- a/homeassistant/components/matrix/manifest.json +++ b/homeassistant/components/matrix/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/matrix", "iot_class": "cloud_push", "loggers": ["matrix_client"], - "requirements": ["matrix-nio==0.25.2", "Pillow==11.0.0"] + "requirements": ["matrix-nio==0.25.2", "Pillow==10.4.0"] } diff --git a/homeassistant/components/media_source/__init__.py b/homeassistant/components/media_source/__init__.py index 3ea8f581245..604f9b7cc88 100644 --- a/homeassistant/components/media_source/__init__.py +++ b/homeassistant/components/media_source/__init__.py @@ -18,7 +18,7 @@ from homeassistant.components.media_player import ( from homeassistant.components.websocket_api import ActiveConnection from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.frame import report_usage +from homeassistant.helpers.frame import report from homeassistant.helpers.integration_platform import ( async_process_integration_platforms, ) @@ -156,7 +156,7 @@ async def async_resolve_media( raise Unresolvable("Media Source not loaded") if target_media_player is UNDEFINED: - report_usage( + report( "calls media_source.async_resolve_media without passing an entity_id", exclude_integrations={DOMAIN}, ) diff --git a/homeassistant/components/mill/manifest.json b/homeassistant/components/mill/manifest.json index 6316eb72096..16e7bf552ba 100644 --- a/homeassistant/components/mill/manifest.json +++ b/homeassistant/components/mill/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/mill", "iot_class": "local_polling", "loggers": ["mill", "mill_local"], - "requirements": ["millheater==0.12.2", "mill-local==0.3.0"] + "requirements": ["millheater==0.11.8", "mill-local==0.3.0"] } diff --git a/homeassistant/components/modern_forms/config_flow.py b/homeassistant/components/modern_forms/config_flow.py index 33e814efb51..dee08736234 100644 --- a/homeassistant/components/modern_forms/config_flow.py +++ b/homeassistant/components/modern_forms/config_flow.py @@ -9,13 +9,11 @@ import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.config_entries import SOURCE_ZEROCONF, ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_MAC +from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN -USER_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) - class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a ModernForms config flow.""" @@ -57,21 +55,17 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | 
None = None, prepare: bool = False ) -> ConfigFlowResult: """Config flow handler for ModernForms.""" + source = self.context["source"] + # Request user input, unless we are preparing discovery flow if user_input is None: user_input = {} if not prepare: - if self.source == SOURCE_ZEROCONF: - return self.async_show_form( - step_id="zeroconf_confirm", - description_placeholders={"name": self.name}, - ) - return self.async_show_form( - step_id="user", - data_schema=USER_SCHEMA, - ) + if source == SOURCE_ZEROCONF: + return self._show_confirm_dialog() + return self._show_setup_form() - if self.source == SOURCE_ZEROCONF: + if source == SOURCE_ZEROCONF: user_input[CONF_HOST] = self.host user_input[CONF_MAC] = self.mac @@ -81,21 +75,18 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): try: device = await device.update() except ModernFormsConnectionError: - if self.source == SOURCE_ZEROCONF: + if source == SOURCE_ZEROCONF: return self.async_abort(reason="cannot_connect") - return self.async_show_form( - step_id="user", - data_schema=USER_SCHEMA, - errors={"base": "cannot_connect"}, - ) + return self._show_setup_form({"base": "cannot_connect"}) user_input[CONF_MAC] = device.info.mac_address + user_input[CONF_NAME] = device.info.device_name # Check if already configured await self.async_set_unique_id(user_input[CONF_MAC]) self._abort_if_unique_id_configured(updates={CONF_HOST: user_input[CONF_HOST]}) title = device.info.device_name - if self.source == SOURCE_ZEROCONF: + if source == SOURCE_ZEROCONF: title = self.name if prepare: @@ -105,3 +96,19 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): title=title, data={CONF_HOST: user_input[CONF_HOST], CONF_MAC: user_input[CONF_MAC]}, ) + + def _show_setup_form(self, errors: dict | None = None) -> ConfigFlowResult: + """Show the setup form to the user.""" + return self.async_show_form( + step_id="user", + data_schema=vol.Schema({vol.Required(CONF_HOST): str}), + errors=errors or {}, + ) + + def _show_confirm_dialog(self, errors: dict | None = None) -> ConfigFlowResult: + """Show the confirm dialog to the user.""" + return self.async_show_form( + step_id="zeroconf_confirm", + description_placeholders={"name": self.name}, + errors=errors or {}, + ) diff --git a/homeassistant/components/music_assistant/manifest.json b/homeassistant/components/music_assistant/manifest.json index 65e6652407f..23401f30abc 100644 --- a/homeassistant/components/music_assistant/manifest.json +++ b/homeassistant/components/music_assistant/manifest.json @@ -4,8 +4,9 @@ "after_dependencies": ["media_source", "media_player"], "codeowners": ["@music-assistant"], "config_flow": true, - "documentation": "https://www.home-assistant.io/integrations/music_assistant", + "documentation": "https://music-assistant.io", "iot_class": "local_push", + "issue_tracker": "https://github.com/music-assistant/hass-music-assistant/issues", "loggers": ["music_assistant"], "requirements": ["music-assistant-client==1.0.5"], "zeroconf": ["_mass._tcp.local."] diff --git a/homeassistant/components/myuplink/binary_sensor.py b/homeassistant/components/myuplink/binary_sensor.py index 953859986d0..0ba6ac7b078 100644 --- a/homeassistant/components/myuplink/binary_sensor.py +++ b/homeassistant/components/myuplink/binary_sensor.py @@ -12,12 +12,11 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import MyUplinkConfigEntry, MyUplinkDataCoordinator -from .const import F_SERIES from .entity import MyUplinkEntity, MyUplinkSystemEntity -from .helpers import find_matching_platform, transform_model_series +from .helpers import find_matching_platform CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, BinarySensorEntityDescription]] = { - F_SERIES: { + "F730": { "43161": BinarySensorEntityDescription( key="elect_add", translation_key="elect_add", @@ -51,7 +50,6 @@ def get_description(device_point: DevicePoint) -> BinarySensorEntityDescription 2. Default to None """ prefix, _, _ = device_point.category.partition(" ") - prefix = transform_model_series(prefix) return CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get(device_point.parameter_id) diff --git a/homeassistant/components/myuplink/const.py b/homeassistant/components/myuplink/const.py index 6fd354a21ec..3541a8078c3 100644 --- a/homeassistant/components/myuplink/const.py +++ b/homeassistant/components/myuplink/const.py @@ -6,5 +6,3 @@ API_ENDPOINT = "https://api.myuplink.com" OAUTH2_AUTHORIZE = "https://api.myuplink.com/oauth/authorize" OAUTH2_TOKEN = "https://api.myuplink.com/oauth/token" OAUTH2_SCOPES = ["WRITESYSTEM", "READSYSTEM", "offline_access"] - -F_SERIES = "f-series" diff --git a/homeassistant/components/myuplink/helpers.py b/homeassistant/components/myuplink/helpers.py index de5486d8dea..eb4881c410e 100644 --- a/homeassistant/components/myuplink/helpers.py +++ b/homeassistant/components/myuplink/helpers.py @@ -6,8 +6,6 @@ from homeassistant.components.number import NumberEntityDescription from homeassistant.components.sensor import SensorEntityDescription from homeassistant.const import Platform -from .const import F_SERIES - def find_matching_platform( device_point: DevicePoint, @@ -88,9 +86,8 @@ PARAMETER_ID_TO_EXCLUDE_F730 = ( "47941", "47975", "48009", + "48042", "48072", - "48442", - "49909", "50113", ) @@ -113,7 +110,7 @@ def skip_entity(model: str, device_point: DevicePoint) -> bool: ): return False return True - if model.lower().startswith("f"): + if "F730" in model: # Entity names containing weekdays are used for advanced scheduling in the # heat pump and should not be exposed in the integration if any(d in device_point.parameter_name.lower() for d in WEEKDAYS): @@ -121,10 +118,3 @@ def skip_entity(model: str, device_point: DevicePoint) -> bool: if device_point.parameter_id in PARAMETER_ID_TO_EXCLUDE_F730: return True return False - - -def transform_model_series(prefix: str) -> str: - """Remap all F-series models.""" - if prefix.lower().startswith("f"): - return F_SERIES - return prefix diff --git a/homeassistant/components/myuplink/number.py b/homeassistant/components/myuplink/number.py index b05ab5d46c9..0c7da0c716f 100644 --- a/homeassistant/components/myuplink/number.py +++ b/homeassistant/components/myuplink/number.py @@ -10,9 +10,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import MyUplinkConfigEntry, MyUplinkDataCoordinator -from .const import F_SERIES from .entity import MyUplinkEntity -from .helpers import find_matching_platform, skip_entity, transform_model_series +from .helpers import find_matching_platform, skip_entity DEVICE_POINT_UNIT_DESCRIPTIONS: dict[str, NumberEntityDescription] = { "DM": NumberEntityDescription( @@ -23,7 +22,7 @@ DEVICE_POINT_UNIT_DESCRIPTIONS: dict[str, NumberEntityDescription] = { } CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, NumberEntityDescription]] = { - F_SERIES: { + "F730": { "40940": NumberEntityDescription( key="degree_minutes", translation_key="degree_minutes", @@ -49,7 +48,6 @@ def get_description(device_point: DevicePoint) -> NumberEntityDescription | None 3. Default to None """ prefix, _, _ = device_point.category.partition(" ") - prefix = transform_model_series(prefix) description = CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get( device_point.parameter_id ) diff --git a/homeassistant/components/myuplink/sensor.py b/homeassistant/components/myuplink/sensor.py index ef827fc1fb1..7feb20bc093 100644 --- a/homeassistant/components/myuplink/sensor.py +++ b/homeassistant/components/myuplink/sensor.py @@ -25,9 +25,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from . import MyUplinkConfigEntry, MyUplinkDataCoordinator -from .const import F_SERIES from .entity import MyUplinkEntity -from .helpers import find_matching_platform, skip_entity, transform_model_series +from .helpers import find_matching_platform, skip_entity DEVICE_POINT_UNIT_DESCRIPTIONS: dict[str, SensorEntityDescription] = { "°C": SensorEntityDescription( @@ -140,7 +139,7 @@ DEVICE_POINT_UNIT_DESCRIPTIONS: dict[str, SensorEntityDescription] = { MARKER_FOR_UNKNOWN_VALUE = -32768 CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, SensorEntityDescription]] = { - F_SERIES: { + "F730": { "43108": SensorEntityDescription( key="fan_mode", translation_key="fan_mode", @@ -201,7 +200,6 @@ def get_description(device_point: DevicePoint) -> SensorEntityDescription | None """ description = None prefix, _, _ = device_point.category.partition(" ") - prefix = transform_model_series(prefix) description = CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get( device_point.parameter_id ) diff --git a/homeassistant/components/myuplink/switch.py b/homeassistant/components/myuplink/switch.py index 75ba6bd7819..5c47c8294fe 100644 --- a/homeassistant/components/myuplink/switch.py +++ b/homeassistant/components/myuplink/switch.py @@ -12,12 +12,11 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import MyUplinkConfigEntry, MyUplinkDataCoordinator -from .const import F_SERIES from .entity import MyUplinkEntity -from .helpers import find_matching_platform, skip_entity, transform_model_series +from .helpers import find_matching_platform, skip_entity CATEGORY_BASED_DESCRIPTIONS: dict[str, dict[str, SwitchEntityDescription]] = { - F_SERIES: { + "F730": { "50004": SwitchEntityDescription( key="temporary_lux", translation_key="temporary_lux", @@ -48,7 +47,6 @@ def get_description(device_point: DevicePoint) -> SwitchEntityDescription | None 2. 
Default to None """ prefix, _, _ = device_point.category.partition(" ") - prefix = transform_model_series(prefix) return CATEGORY_BASED_DESCRIPTIONS.get(prefix, {}).get(device_point.parameter_id) diff --git a/homeassistant/components/nasweb/__init__.py b/homeassistant/components/nasweb/__init__.py deleted file mode 100644 index 1992cc41c75..00000000000 --- a/homeassistant/components/nasweb/__init__.py +++ /dev/null @@ -1,125 +0,0 @@ -"""The NASweb integration.""" - -from __future__ import annotations - -import logging - -from webio_api import WebioAPI -from webio_api.api_client import AuthError - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.network import NoURLAvailableError -from homeassistant.util.hass_dict import HassKey - -from .const import DOMAIN, MANUFACTURER, SUPPORT_EMAIL -from .coordinator import NASwebCoordinator -from .nasweb_data import NASwebData - -PLATFORMS: list[Platform] = [Platform.SWITCH] - -NASWEB_CONFIG_URL = "https://{host}/page" - -_LOGGER = logging.getLogger(__name__) -type NASwebConfigEntry = ConfigEntry[NASwebCoordinator] -DATA_NASWEB: HassKey[NASwebData] = HassKey(DOMAIN) - - -async def async_setup_entry(hass: HomeAssistant, entry: NASwebConfigEntry) -> bool: - """Set up NASweb from a config entry.""" - - if DATA_NASWEB not in hass.data: - data = NASwebData() - data.initialize(hass) - hass.data[DATA_NASWEB] = data - nasweb_data = hass.data[DATA_NASWEB] - - webio_api = WebioAPI( - entry.data[CONF_HOST], entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD] - ) - try: - if not await webio_api.check_connection(): - raise ConfigEntryNotReady( - f"[{entry.data[CONF_HOST]}] Check connection failed" - ) - if not await webio_api.refresh_device_info(): - _LOGGER.error("[%s] Refresh device info failed", entry.data[CONF_HOST]) - raise ConfigEntryError( - translation_key="config_entry_error_internal_error", - translation_placeholders={"support_email": SUPPORT_EMAIL}, - ) - webio_serial = webio_api.get_serial_number() - if webio_serial is None: - _LOGGER.error("[%s] Serial number not available", entry.data[CONF_HOST]) - raise ConfigEntryError( - translation_key="config_entry_error_internal_error", - translation_placeholders={"support_email": SUPPORT_EMAIL}, - ) - if entry.unique_id != webio_serial: - _LOGGER.error( - "[%s] Serial number doesn't match config entry", entry.data[CONF_HOST] - ) - raise ConfigEntryError(translation_key="config_entry_error_serial_mismatch") - - coordinator = NASwebCoordinator( - hass, webio_api, name=f"NASweb[{webio_api.get_name()}]" - ) - entry.runtime_data = coordinator - nasweb_data.notify_coordinator.add_coordinator(webio_serial, entry.runtime_data) - - webhook_url = nasweb_data.get_webhook_url(hass) - if not await webio_api.status_subscription(webhook_url, True): - _LOGGER.error("Failed to subscribe for status updates from webio") - raise ConfigEntryError( - translation_key="config_entry_error_internal_error", - translation_placeholders={"support_email": SUPPORT_EMAIL}, - ) - if not await nasweb_data.notify_coordinator.check_connection(webio_serial): - _LOGGER.error("Did not receive status from device") - raise ConfigEntryError( - translation_key="config_entry_error_no_status_update", - translation_placeholders={"support_email": SUPPORT_EMAIL}, - ) - 
except TimeoutError as error: - raise ConfigEntryNotReady( - f"[{entry.data[CONF_HOST]}] Check connection reached timeout" - ) from error - except AuthError as error: - raise ConfigEntryError( - translation_key="config_entry_error_invalid_authentication" - ) from error - except NoURLAvailableError as error: - raise ConfigEntryError( - translation_key="config_entry_error_missing_internal_url" - ) from error - - device_registry = dr.async_get(hass) - device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - identifiers={(DOMAIN, webio_serial)}, - manufacturer=MANUFACTURER, - name=webio_api.get_name(), - configuration_url=NASWEB_CONFIG_URL.format(host=entry.data[CONF_HOST]), - ) - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True - - -async def async_unload_entry(hass: HomeAssistant, entry: NASwebConfigEntry) -> bool: - """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - nasweb_data = hass.data[DATA_NASWEB] - coordinator = entry.runtime_data - serial = entry.unique_id - if serial is not None: - nasweb_data.notify_coordinator.remove_coordinator(serial) - if nasweb_data.can_be_deinitialized(): - nasweb_data.deinitialize(hass) - hass.data.pop(DATA_NASWEB) - webhook_url = nasweb_data.get_webhook_url(hass) - await coordinator.webio_api.status_subscription(webhook_url, False) - - return unload_ok diff --git a/homeassistant/components/nasweb/config_flow.py b/homeassistant/components/nasweb/config_flow.py deleted file mode 100644 index 3a9ad3f7d49..00000000000 --- a/homeassistant/components/nasweb/config_flow.py +++ /dev/null @@ -1,137 +0,0 @@ -"""Config flow for NASweb integration.""" - -from __future__ import annotations - -import logging -from typing import Any - -import voluptuous as vol -from webio_api import WebioAPI -from webio_api.api_client import AuthError - -from homeassistant import config_entries -from homeassistant.config_entries import ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_UNIQUE_ID, CONF_USERNAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import AbortFlow -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.network import NoURLAvailableError - -from .const import DOMAIN -from .coordinator import NASwebCoordinator -from .nasweb_data import NASwebData - -NASWEB_SCHEMA_IMG_URL = ( - "https://home-assistant.io/images/integrations/nasweb/nasweb_scheme.png" -) - -_LOGGER = logging.getLogger(__name__) - -STEP_USER_DATA_SCHEMA = vol.Schema( - { - vol.Required(CONF_HOST): str, - vol.Required(CONF_USERNAME): str, - vol.Required(CONF_PASSWORD): str, - } -) - - -async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]: - """Validate user-provided data.""" - webio_api = WebioAPI(data[CONF_HOST], data[CONF_USERNAME], data[CONF_PASSWORD]) - if not await webio_api.check_connection(): - raise CannotConnect - try: - await webio_api.refresh_device_info() - except AuthError as e: - raise InvalidAuth from e - - nasweb_data = NASwebData() - nasweb_data.initialize(hass) - try: - webio_serial = webio_api.get_serial_number() - if webio_serial is None: - raise MissingNASwebData("Device serial number is not available") - - coordinator = NASwebCoordinator(hass, webio_api) - webhook_url = nasweb_data.get_webhook_url(hass) - nasweb_data.notify_coordinator.add_coordinator(webio_serial, coordinator) - subscription = await 
webio_api.status_subscription(webhook_url, True) - if not subscription: - nasweb_data.notify_coordinator.remove_coordinator(webio_serial) - raise MissingNASwebData( - "Failed to subscribe for status updates from device" - ) - - result = await nasweb_data.notify_coordinator.check_connection(webio_serial) - nasweb_data.notify_coordinator.remove_coordinator(webio_serial) - if not result: - if subscription: - await webio_api.status_subscription(webhook_url, False) - raise MissingNASwebStatus("Did not receive status from device") - - name = webio_api.get_name() - finally: - nasweb_data.deinitialize(hass) - return {"title": name, CONF_UNIQUE_ID: webio_serial} - - -class NASwebConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): - """Handle a config flow for NASweb.""" - - VERSION = 1 - - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle the initial step.""" - errors: dict[str, str] = {} - if user_input is not None: - try: - info = await validate_input(self.hass, user_input) - await self.async_set_unique_id(info[CONF_UNIQUE_ID]) - self._abort_if_unique_id_configured() - except CannotConnect: - errors["base"] = "cannot_connect" - except InvalidAuth: - errors["base"] = "invalid_auth" - except NoURLAvailableError: - errors["base"] = "missing_internal_url" - except MissingNASwebData: - errors["base"] = "missing_nasweb_data" - except MissingNASwebStatus: - errors["base"] = "missing_status" - except AbortFlow: - raise - except Exception: # pylint: disable=broad-except - _LOGGER.exception("Unexpected exception") - errors["base"] = "unknown" - else: - return self.async_create_entry(title=info["title"], data=user_input) - - return self.async_show_form( - step_id="user", - data_schema=self.add_suggested_values_to_schema( - STEP_USER_DATA_SCHEMA, user_input - ), - errors=errors, - description_placeholders={ - "nasweb_schema_img": '
', - }, - ) - - -class CannotConnect(HomeAssistantError): - """Error to indicate we cannot connect.""" - - -class InvalidAuth(HomeAssistantError): - """Error to indicate there is invalid auth.""" - - -class MissingNASwebData(HomeAssistantError): - """Error to indicate missing information from NASweb.""" - - -class MissingNASwebStatus(HomeAssistantError): - """Error to indicate there was no status received from NASweb.""" diff --git a/homeassistant/components/nasweb/const.py b/homeassistant/components/nasweb/const.py deleted file mode 100644 index ec750c90c8c..00000000000 --- a/homeassistant/components/nasweb/const.py +++ /dev/null @@ -1,7 +0,0 @@ -"""Constants for the NASweb integration.""" - -DOMAIN = "nasweb" -MANUFACTURER = "chomtech.pl" -STATUS_UPDATE_MAX_TIME_INTERVAL = 60 -SUPPORT_EMAIL = "support@chomtech.eu" -WEBHOOK_URL = "{internal_url}/api/webhook/{webhook_id}" diff --git a/homeassistant/components/nasweb/coordinator.py b/homeassistant/components/nasweb/coordinator.py deleted file mode 100644 index 90dca0f3022..00000000000 --- a/homeassistant/components/nasweb/coordinator.py +++ /dev/null @@ -1,191 +0,0 @@ -"""Message routing coordinators for handling NASweb push notifications.""" - -from __future__ import annotations - -import asyncio -from collections.abc import Callable -from datetime import datetime, timedelta -import logging -import time -from typing import Any - -from aiohttp.web import Request, Response -from webio_api import WebioAPI -from webio_api.const import KEY_DEVICE_SERIAL, KEY_OUTPUTS, KEY_TYPE, TYPE_STATUS_UPDATE - -from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback -from homeassistant.helpers import event -from homeassistant.helpers.update_coordinator import BaseDataUpdateCoordinatorProtocol - -from .const import STATUS_UPDATE_MAX_TIME_INTERVAL - -_LOGGER = logging.getLogger(__name__) - - -class NotificationCoordinator: - """Coordinator redirecting push notifications for this integration to appropriate NASwebCoordinator.""" - - def __init__(self) -> None: - """Initialize coordinator.""" - self._coordinators: dict[str, NASwebCoordinator] = {} - - def add_coordinator(self, serial: str, coordinator: NASwebCoordinator) -> None: - """Add NASwebCoordinator to possible notification targets.""" - self._coordinators[serial] = coordinator - _LOGGER.debug("Added NASwebCoordinator for NASweb[%s]", serial) - - def remove_coordinator(self, serial: str) -> None: - """Remove NASwebCoordinator from possible notification targets.""" - self._coordinators.pop(serial) - _LOGGER.debug("Removed NASwebCoordinator for NASweb[%s]", serial) - - def has_coordinators(self) -> bool: - """Check if there is any registered coordinator for push notifications.""" - return len(self._coordinators) > 0 - - async def check_connection(self, serial: str) -> bool: - """Wait for first status update to confirm connection with NASweb.""" - nasweb_coordinator = self._coordinators.get(serial) - if nasweb_coordinator is None: - _LOGGER.error("Cannot check connection. 
No device match serial number") - return False - for counter in range(10): - _LOGGER.debug("Checking connection with: %s (%s)", serial, counter) - if nasweb_coordinator.is_connection_confirmed(): - return True - await asyncio.sleep(1) - return False - - async def handle_webhook_request( - self, hass: HomeAssistant, webhook_id: str, request: Request - ) -> Response | None: - """Handle webhook request from Push API.""" - if not self.has_coordinators(): - return None - notification = await request.json() - serial = notification.get(KEY_DEVICE_SERIAL, None) - _LOGGER.debug("Received push: %s", notification) - if serial is None: - _LOGGER.warning("Received notification without nasweb identifier") - return None - nasweb_coordinator = self._coordinators.get(serial) - if nasweb_coordinator is None: - _LOGGER.warning("Received notification for not registered nasweb") - return None - await nasweb_coordinator.handle_push_notification(notification) - return Response(body='{"response": "ok"}', content_type="application/json") - - -class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol): - """Coordinator managing status of single NASweb device. - - Since status updates are managed through push notifications, this class schedules - periodic checks to ensure that devices are marked unavailable if updates - haven't been received for a prolonged period. - """ - - def __init__( - self, hass: HomeAssistant, webio_api: WebioAPI, name: str = "NASweb[default]" - ) -> None: - """Initialize NASweb coordinator.""" - self._hass = hass - self.name = name - self.webio_api = webio_api - self._last_update: float | None = None - job_name = f"NASwebCoordinator[{name}]" - self._job = HassJob(self._handle_max_update_interval, job_name) - self._unsub_last_update_check: CALLBACK_TYPE | None = None - self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {} - data: dict[str, Any] = {} - data[KEY_OUTPUTS] = self.webio_api.outputs - self.async_set_updated_data(data) - - def is_connection_confirmed(self) -> bool: - """Check whether coordinator received status update from NASweb.""" - return self._last_update is not None - - @callback - def async_add_listener( - self, update_callback: CALLBACK_TYPE, context: Any = None - ) -> Callable[[], None]: - """Listen for data updates.""" - schedule_update_check = not self._listeners - - @callback - def remove_listener() -> None: - """Remove update listener.""" - self._listeners.pop(remove_listener) - if not self._listeners: - self._async_unsub_last_update_check() - - self._listeners[remove_listener] = (update_callback, context) - # This is the first listener, set up interval. - if schedule_update_check: - self._schedule_last_update_check() - return remove_listener - - @callback - def async_set_updated_data(self, data: dict[str, Any]) -> None: - """Update data and notify listeners.""" - self.data = data - self.last_update = self._hass.loop.time() - _LOGGER.debug("Updated %s data", self.name) - if self._listeners: - self._schedule_last_update_check() - self.async_update_listeners() - - @callback - def async_update_listeners(self) -> None: - """Update all registered listeners.""" - for update_callback, _ in list(self._listeners.values()): - update_callback() - - async def _handle_max_update_interval(self, now: datetime) -> None: - """Handle max update interval occurrence. - - This method is called when `STATUS_UPDATE_MAX_TIME_INTERVAL` has passed without - receiving a status update. 
It only needs to trigger state update of entities - which then change their state accordingly. - """ - self._unsub_last_update_check = None - if self._listeners: - self.async_update_listeners() - - def _schedule_last_update_check(self) -> None: - """Schedule a task to trigger entities state update after `STATUS_UPDATE_MAX_TIME_INTERVAL`. - - This method schedules a task (`_handle_max_update_interval`) to be executed after - `STATUS_UPDATE_MAX_TIME_INTERVAL` seconds without status update, which enables entities - to change their state to unavailable. After each status update this task is rescheduled. - """ - self._async_unsub_last_update_check() - now = self._hass.loop.time() - next_check = ( - now + timedelta(seconds=STATUS_UPDATE_MAX_TIME_INTERVAL).total_seconds() - ) - self._unsub_last_update_check = event.async_call_at( - self._hass, - self._job, - next_check, - ) - - def _async_unsub_last_update_check(self) -> None: - """Cancel any scheduled update check call.""" - if self._unsub_last_update_check: - self._unsub_last_update_check() - self._unsub_last_update_check = None - - async def handle_push_notification(self, notification: dict) -> None: - """Handle incoming push notification from NASweb.""" - msg_type = notification.get(KEY_TYPE) - _LOGGER.debug("Received push notification: %s", msg_type) - - if msg_type == TYPE_STATUS_UPDATE: - await self.process_status_update(notification) - self._last_update = time.time() - - async def process_status_update(self, new_status: dict) -> None: - """Process status update from NASweb.""" - self.webio_api.update_device_status(new_status) - new_data = {KEY_OUTPUTS: self.webio_api.outputs} - self.async_set_updated_data(new_data) diff --git a/homeassistant/components/nasweb/manifest.json b/homeassistant/components/nasweb/manifest.json deleted file mode 100644 index e7e06419dad..00000000000 --- a/homeassistant/components/nasweb/manifest.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "domain": "nasweb", - "name": "NASweb", - "codeowners": ["@nasWebio"], - "config_flow": true, - "dependencies": ["webhook"], - "documentation": "https://www.home-assistant.io/integrations/nasweb", - "homekit": {}, - "integration_type": "hub", - "iot_class": "local_push", - "requirements": ["webio-api==0.1.8"], - "ssdp": [], - "zeroconf": [] -} diff --git a/homeassistant/components/nasweb/nasweb_data.py b/homeassistant/components/nasweb/nasweb_data.py deleted file mode 100644 index 4f6a37e6cc7..00000000000 --- a/homeassistant/components/nasweb/nasweb_data.py +++ /dev/null @@ -1,64 +0,0 @@ -"""Dataclass storing integration data in hass.data[DOMAIN].""" - -from dataclasses import dataclass, field -import logging - -from aiohttp.hdrs import METH_POST - -from homeassistant.components.webhook import ( - async_generate_id, - async_register as webhook_register, - async_unregister as webhook_unregister, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers.network import get_url - -from .const import DOMAIN, WEBHOOK_URL -from .coordinator import NotificationCoordinator - -_LOGGER = logging.getLogger(__name__) - - -@dataclass -class NASwebData: - """Class storing integration data.""" - - notify_coordinator: NotificationCoordinator = field( - default_factory=NotificationCoordinator - ) - webhook_id = "" - - def is_initialized(self) -> bool: - """Return True if instance was initialized and is ready for use.""" - return bool(self.webhook_id) - - def can_be_deinitialized(self) -> bool: - """Return whether this instance can be deinitialized.""" - return not 
self.notify_coordinator.has_coordinators() - - def initialize(self, hass: HomeAssistant) -> None: - """Initialize NASwebData instance.""" - if self.is_initialized(): - return - new_webhook_id = async_generate_id() - webhook_register( - hass, - DOMAIN, - "NASweb", - new_webhook_id, - self.notify_coordinator.handle_webhook_request, - allowed_methods=[METH_POST], - ) - self.webhook_id = new_webhook_id - _LOGGER.debug("Registered webhook: %s", self.webhook_id) - - def deinitialize(self, hass: HomeAssistant) -> None: - """Deinitialize NASwebData instance.""" - if not self.is_initialized(): - return - webhook_unregister(hass, self.webhook_id) - - def get_webhook_url(self, hass: HomeAssistant) -> str: - """Return webhook url for Push API.""" - hass_url = get_url(hass, allow_external=False) - return WEBHOOK_URL.format(internal_url=hass_url, webhook_id=self.webhook_id) diff --git a/homeassistant/components/nasweb/strings.json b/homeassistant/components/nasweb/strings.json deleted file mode 100644 index b8af8cd54db..00000000000 --- a/homeassistant/components/nasweb/strings.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "Add NASweb device", - "description": "{nasweb_schema_img}NASweb combines the functions of a control panel and the ability to manage building automation. The device monitors the flow of information from sensors and programmable switches and stores settings, definitions and configured actions.", - "data": { - "host": "[%key:common::config_flow::data::host%]", - "username": "[%key:common::config_flow::data::username%]", - "password": "[%key:common::config_flow::data::password%]" - } - } - }, - "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "missing_internal_url": "Make sure Home Assistant has valid internal url", - "missing_nasweb_data": "Something isn't right with device internal configuration. Try restarting the device and HomeAssistant.", - "missing_status": "Did not received any status updates within the expected time window. Make sure the Home Assistant Internal URL is reachable from the NASweb device.", - "unknown": "[%key:common::config_flow::error::unknown%]" - }, - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" - } - }, - "exceptions": { - "config_entry_error_invalid_authentication": { - "message": "Invalid username/password. Most likely user changed password or was removed. Delete this entry and create new one with correct username/password." - }, - "config_entry_error_internal_error": { - "message": "Something isn't right with device internal configuration. Try restarting the device and HomeAssistant. If the issue persists contact support at {support_email}" - }, - "config_entry_error_no_status_update": { - "message": "Did not received any status updates within the expected time window. Make sure the Home Assistant Internal URL is reachable from the NASweb device. If the issue persists contact support at {support_email}" - }, - "config_entry_error_missing_internal_url": { - "message": "[%key:component::nasweb::config::error::missing_internal_url%]" - }, - "serial_mismatch": { - "message": "Connected to different NASweb device (serial number mismatch)." 
- } - }, - "entity": { - "switch": { - "switch_output": { - "name": "Relay Switch {index}" - } - } - } -} diff --git a/homeassistant/components/nasweb/switch.py b/homeassistant/components/nasweb/switch.py deleted file mode 100644 index 00e5a21da18..00000000000 --- a/homeassistant/components/nasweb/switch.py +++ /dev/null @@ -1,133 +0,0 @@ -"""Platform for NASweb output.""" - -from __future__ import annotations - -import logging -import time -from typing import Any - -from webio_api import Output as NASwebOutput - -from homeassistant.components.switch import DOMAIN as DOMAIN_SWITCH, SwitchEntity -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity_platform import AddEntitiesCallback -import homeassistant.helpers.entity_registry as er -from homeassistant.helpers.typing import DiscoveryInfoType -from homeassistant.helpers.update_coordinator import ( - BaseCoordinatorEntity, - BaseDataUpdateCoordinatorProtocol, -) - -from . import NASwebConfigEntry -from .const import DOMAIN, STATUS_UPDATE_MAX_TIME_INTERVAL -from .coordinator import NASwebCoordinator - -OUTPUT_TRANSLATION_KEY = "switch_output" - -_LOGGER = logging.getLogger(__name__) - - -def _get_output(coordinator: NASwebCoordinator, index: int) -> NASwebOutput | None: - for out in coordinator.webio_api.outputs: - if out.index == index: - return out - return None - - -async def async_setup_entry( - hass: HomeAssistant, - config: NASwebConfigEntry, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up switch platform.""" - coordinator = config.runtime_data - current_outputs: set[int] = set() - - @callback - def _check_entities() -> None: - received_outputs = {out.index for out in coordinator.webio_api.outputs} - added = {i for i in received_outputs if i not in current_outputs} - removed = {i for i in current_outputs if i not in received_outputs} - entities_to_add: list[RelaySwitch] = [] - for index in added: - webio_output = _get_output(coordinator, index) - if not isinstance(webio_output, NASwebOutput): - _LOGGER.error("Cannot create RelaySwitch entity without NASwebOutput") - continue - new_output = RelaySwitch(coordinator, webio_output) - entities_to_add.append(new_output) - current_outputs.add(index) - async_add_entities(entities_to_add) - entity_registry = er.async_get(hass) - for index in removed: - unique_id = f"{DOMAIN}.{config.unique_id}.relay_switch.{index}" - if entity_id := entity_registry.async_get_entity_id( - DOMAIN_SWITCH, DOMAIN, unique_id - ): - entity_registry.async_remove(entity_id) - current_outputs.remove(index) - else: - _LOGGER.warning("Failed to remove old output: no entity_id") - - coordinator.async_add_listener(_check_entities) - _check_entities() - - -class RelaySwitch(SwitchEntity, BaseCoordinatorEntity): - """Entity representing NASweb Output.""" - - def __init__( - self, - coordinator: BaseDataUpdateCoordinatorProtocol, - nasweb_output: NASwebOutput, - ) -> None: - """Initialize RelaySwitch.""" - super().__init__(coordinator) - self._output = nasweb_output - self._attr_icon = "mdi:export" - self._attr_has_entity_name = True - self._attr_translation_key = OUTPUT_TRANSLATION_KEY - self._attr_translation_placeholders = {"index": f"{nasweb_output.index:2d}"} - self._attr_unique_id = ( - f"{DOMAIN}.{self._output.webio_serial}.relay_switch.{self._output.index}" - ) - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, self._output.webio_serial)}, - 
) - - async def async_added_to_hass(self) -> None: - """When entity is added to hass.""" - await super().async_added_to_hass() - self._handle_coordinator_update() - - @callback - def _handle_coordinator_update(self) -> None: - """Handle updated data from the coordinator.""" - self._attr_is_on = self._output.state - if ( - self.coordinator.last_update is None - or time.time() - self._output.last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL - ): - self._attr_available = False - else: - self._attr_available = ( - self._output.available if self._output.available is not None else False - ) - self.async_write_ha_state() - - async def async_update(self) -> None: - """Update the entity. - - Only used by the generic entity update service. - Scheduling updates is not necessary, the coordinator takes care of updates via push notifications. - """ - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn On RelaySwitch.""" - await self._output.turn_on() - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn Off RelaySwitch.""" - await self._output.turn_off() diff --git a/homeassistant/components/nest/api.py b/homeassistant/components/nest/api.py index 5c65a70c75d..aa359dcd167 100644 --- a/homeassistant/components/nest/api.py +++ b/homeassistant/components/nest/api.py @@ -114,8 +114,9 @@ async def new_subscriber( implementation, config_entry_oauth2_flow.LocalOAuth2Implementation ): raise TypeError(f"Unexpected auth implementation {implementation}") - if (subscription_name := entry.data.get(CONF_SUBSCRIPTION_NAME)) is None: - subscription_name = entry.data[CONF_SUBSCRIBER_ID] + subscription_name = entry.data.get( + CONF_SUBSCRIPTION_NAME, entry.data[CONF_SUBSCRIBER_ID] + ) auth = AsyncConfigEntryAuth( aiohttp_client.async_get_clientsession(hass), config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation), diff --git a/homeassistant/components/nest/camera.py b/homeassistant/components/nest/camera.py index 0a46d67a3ad..30f96f819c1 100644 --- a/homeassistant/components/nest/camera.py +++ b/homeassistant/components/nest/camera.py @@ -2,9 +2,9 @@ from __future__ import annotations -from abc import ABC +from abc import ABC, abstractmethod import asyncio -from collections.abc import Awaitable, Callable +from collections.abc import Callable import datetime import functools import logging @@ -19,7 +19,6 @@ from google_nest_sdm.camera_traits import ( from google_nest_sdm.device import Device from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.exceptions import ApiException -from webrtc_models import RTCIceCandidate from homeassistant.components.camera import ( Camera, @@ -47,11 +46,6 @@ PLACEHOLDER = Path(__file__).parent / "placeholder.png" # Used to schedule an alarm to refresh the stream before expiration STREAM_EXPIRATION_BUFFER = datetime.timedelta(seconds=30) -# Refresh streams with a bounded interval and backoff on failure -MIN_REFRESH_BACKOFF_INTERVAL = datetime.timedelta(minutes=1) -MAX_REFRESH_BACKOFF_INTERVAL = datetime.timedelta(minutes=10) -BACKOFF_MULTIPLIER = 1.5 - async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback @@ -73,68 +67,6 @@ async def async_setup_entry( async_add_entities(entities) -class StreamRefresh: - """Class that will refresh an expiring stream. - - This class will schedule an alarm for the next expiration time of a stream. - When the alarm fires, it runs the provided `refresh_cb` to extend the - lifetime of the stream and return a new expiration time. 
- - A simple backoff will be applied when the refresh callback fails. - """ - - def __init__( - self, - hass: HomeAssistant, - expires_at: datetime.datetime, - refresh_cb: Callable[[], Awaitable[datetime.datetime | None]], - ) -> None: - """Initialize StreamRefresh.""" - self._hass = hass - self._unsub: Callable[[], None] | None = None - self._min_refresh_interval = MIN_REFRESH_BACKOFF_INTERVAL - self._refresh_cb = refresh_cb - self._schedule_stream_refresh(expires_at - STREAM_EXPIRATION_BUFFER) - - def unsub(self) -> None: - """Invalidates the stream.""" - if self._unsub: - self._unsub() - - async def _handle_refresh(self, _: datetime.datetime) -> None: - """Alarm that fires to check if the stream should be refreshed.""" - self._unsub = None - try: - expires_at = await self._refresh_cb() - except ApiException as err: - _LOGGER.debug("Failed to refresh stream: %s", err) - # Increase backoff until the max backoff interval is reached - self._min_refresh_interval = min( - self._min_refresh_interval * BACKOFF_MULTIPLIER, - MAX_REFRESH_BACKOFF_INTERVAL, - ) - refresh_time = utcnow() + self._min_refresh_interval - else: - if expires_at is None: - return - self._min_refresh_interval = MIN_REFRESH_BACKOFF_INTERVAL # Reset backoff - # Defend against invalid stream expiration time in the past - refresh_time = max( - expires_at - STREAM_EXPIRATION_BUFFER, - utcnow() + self._min_refresh_interval, - ) - self._schedule_stream_refresh(refresh_time) - - def _schedule_stream_refresh(self, refresh_time: datetime.datetime) -> None: - """Schedules an alarm to refresh any streams before expiration.""" - _LOGGER.debug("Scheduling stream refresh for %s", refresh_time) - self._unsub = async_track_point_in_utc_time( - self._hass, - self._handle_refresh, - refresh_time, - ) - - class NestCameraBaseEntity(Camera, ABC): """Devices that support cameras.""" @@ -154,6 +86,41 @@ class NestCameraBaseEntity(Camera, ABC): self.stream_options[CONF_EXTRA_PART_WAIT_TIME] = 3 # The API "name" field is a unique device identifier. 
self._attr_unique_id = f"{self._device.name}-camera" + self._stream_refresh_unsub: Callable[[], None] | None = None + + @abstractmethod + def _stream_expires_at(self) -> datetime.datetime | None: + """Next time when a stream expires.""" + + @abstractmethod + async def _async_refresh_stream(self) -> None: + """Refresh any stream to extend expiration time.""" + + def _schedule_stream_refresh(self) -> None: + """Schedules an alarm to refresh any streams before expiration.""" + if self._stream_refresh_unsub is not None: + self._stream_refresh_unsub() + + expiration_time = self._stream_expires_at() + if not expiration_time: + return + refresh_time = expiration_time - STREAM_EXPIRATION_BUFFER + _LOGGER.debug("Scheduled next stream refresh for %s", refresh_time) + + self._stream_refresh_unsub = async_track_point_in_utc_time( + self.hass, + self._handle_stream_refresh, + refresh_time, + ) + + async def _handle_stream_refresh(self, _: datetime.datetime) -> None: + """Alarm that fires to check if the stream should be refreshed.""" + _LOGGER.debug("Examining streams to refresh") + self._stream_refresh_unsub = None + try: + await self._async_refresh_stream() + finally: + self._schedule_stream_refresh() async def async_added_to_hass(self) -> None: """Run when entity is added to register update signal handler.""" @@ -161,6 +128,12 @@ class NestCameraBaseEntity(Camera, ABC): self._device.add_update_listener(self.async_write_ha_state) ) + async def async_will_remove_from_hass(self) -> None: + """Invalidates the RTSP token when unloaded.""" + await super().async_will_remove_from_hass() + if self._stream_refresh_unsub: + self._stream_refresh_unsub() + class NestRTSPEntity(NestCameraBaseEntity): """Nest cameras that use RTSP.""" @@ -173,7 +146,6 @@ class NestRTSPEntity(NestCameraBaseEntity): super().__init__(device) self._create_stream_url_lock = asyncio.Lock() self._rtsp_live_stream_trait = device.traits[CameraLiveStreamTrait.NAME] - self._refresh_unsub: Callable[[], None] | None = None @property def use_stream_for_stills(self) -> bool: @@ -201,21 +173,20 @@ class NestRTSPEntity(NestCameraBaseEntity): ) except ApiException as err: raise HomeAssistantError(f"Nest API error: {err}") from err - refresh = StreamRefresh( - self.hass, - self._rtsp_stream.expires_at, - self._async_refresh_stream, - ) - self._refresh_unsub = refresh.unsub + self._schedule_stream_refresh() assert self._rtsp_stream if self._rtsp_stream.expires_at < utcnow(): _LOGGER.warning("Stream already expired") return self._rtsp_stream.rtsp_stream_url - async def _async_refresh_stream(self) -> datetime.datetime | None: + def _stream_expires_at(self) -> datetime.datetime | None: + """Next time when a stream expires.""" + return self._rtsp_stream.expires_at if self._rtsp_stream else None + + async def _async_refresh_stream(self) -> None: """Refresh stream to extend expiration time.""" if not self._rtsp_stream: - return None + return _LOGGER.debug("Extending RTSP stream") try: self._rtsp_stream = await self._rtsp_stream.extend_rtsp_stream() @@ -226,17 +197,14 @@ class NestRTSPEntity(NestCameraBaseEntity): if self.stream: await self.stream.stop() self.stream = None - return None + return # Update the stream worker with the latest valid url if self.stream: self.stream.update_source(self._rtsp_stream.rtsp_stream_url) - return self._rtsp_stream.expires_at async def async_will_remove_from_hass(self) -> None: """Invalidates the RTSP token when unloaded.""" await super().async_will_remove_from_hass() - if self._refresh_unsub is not None: - 
self._refresh_unsub() if self._rtsp_stream: try: await self._rtsp_stream.stop_stream() @@ -252,23 +220,34 @@ class NestWebRTCEntity(NestCameraBaseEntity): """Initialize the camera.""" super().__init__(device) self._webrtc_sessions: dict[str, WebRtcStream] = {} - self._refresh_unsub: dict[str, Callable[[], None]] = {} @property def frontend_stream_type(self) -> StreamType | None: """Return the type of stream supported by this camera.""" return StreamType.WEB_RTC - async def _async_refresh_stream(self, session_id: str) -> datetime.datetime | None: - """Refresh stream to extend expiration time.""" - if not (webrtc_stream := self._webrtc_sessions.get(session_id)): + def _stream_expires_at(self) -> datetime.datetime | None: + """Next time when a stream expires.""" + if not self._webrtc_sessions: return None - _LOGGER.debug("Extending WebRTC stream %s", webrtc_stream.media_session_id) - webrtc_stream = await webrtc_stream.extend_stream() - if session_id in self._webrtc_sessions: - self._webrtc_sessions[session_id] = webrtc_stream - return webrtc_stream.expires_at - return None + return min(stream.expires_at for stream in self._webrtc_sessions.values()) + + async def _async_refresh_stream(self) -> None: + """Refresh stream to extend expiration time.""" + now = utcnow() + for webrtc_stream in list(self._webrtc_sessions.values()): + if now < (webrtc_stream.expires_at - STREAM_EXPIRATION_BUFFER): + _LOGGER.debug( + "Stream does not yet expire: %s", webrtc_stream.expires_at + ) + continue + _LOGGER.debug("Extending WebRTC stream %s", webrtc_stream.media_session_id) + try: + webrtc_stream = await webrtc_stream.extend_stream() + except ApiException as err: + _LOGGER.debug("Failed to extend stream: %s", err) + else: + self._webrtc_sessions[webrtc_stream.media_session_id] = webrtc_stream async def async_camera_image( self, width: int | None = None, height: int | None = None @@ -296,18 +275,7 @@ class NestWebRTCEntity(NestCameraBaseEntity): ) self._webrtc_sessions[session_id] = stream send_message(WebRTCAnswer(stream.answer_sdp)) - refresh = StreamRefresh( - self.hass, - stream.expires_at, - functools.partial(self._async_refresh_stream, session_id), - ) - self._refresh_unsub[session_id] = refresh.unsub - - async def async_on_webrtc_candidate( - self, session_id: str, candidate: RTCIceCandidate - ) -> None: - """Ignore WebRTC candidates for Nest cloud based cameras.""" - return + self._schedule_stream_refresh() @callback def close_webrtc_session(self, session_id: str) -> None: @@ -316,8 +284,6 @@ class NestWebRTCEntity(NestCameraBaseEntity): _LOGGER.debug( "Closing WebRTC session %s, %s", session_id, stream.media_session_id ) - unsub = self._refresh_unsub.pop(session_id) - unsub() async def stop_stream() -> None: try: diff --git a/homeassistant/components/nest/manifest.json b/homeassistant/components/nest/manifest.json index 44eaeeaf62d..976e870cc83 100644 --- a/homeassistant/components/nest/manifest.json +++ b/homeassistant/components/nest/manifest.json @@ -20,5 +20,5 @@ "iot_class": "cloud_push", "loggers": ["google_nest_sdm"], "quality_scale": "platinum", - "requirements": ["google-nest-sdm==6.1.5"] + "requirements": ["google-nest-sdm==6.1.3"] } diff --git a/homeassistant/components/nice_go/manifest.json b/homeassistant/components/nice_go/manifest.json index 817d7ef9bc9..d3f54e5e668 100644 --- a/homeassistant/components/nice_go/manifest.json +++ b/homeassistant/components/nice_go/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["nice_go"], - "requirements": 
["nice-go==0.3.10"] + "requirements": ["nice-go==0.3.9"] } diff --git a/homeassistant/components/nightscout/sensor.py b/homeassistant/components/nightscout/sensor.py index 620349ec3c3..92291bdc4f9 100644 --- a/homeassistant/components/nightscout/sensor.py +++ b/homeassistant/components/nightscout/sensor.py @@ -9,9 +9,9 @@ from typing import Any from aiohttp import ClientError from py_nightscout import Api as NightscoutAPI -from homeassistant.components.sensor import SensorDeviceClass, SensorEntity +from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_DATE, UnitOfBloodGlucoseConcentration +from homeassistant.const import ATTR_DATE from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -37,10 +37,7 @@ async def async_setup_entry( class NightscoutSensor(SensorEntity): """Implementation of a Nightscout sensor.""" - _attr_device_class = SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION - _attr_native_unit_of_measurement = ( - UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER - ) + _attr_native_unit_of_measurement = "mg/dL" _attr_icon = "mdi:cloud-question" def __init__(self, api: NightscoutAPI, name: str, unique_id: str | None) -> None: diff --git a/homeassistant/components/nina/strings.json b/homeassistant/components/nina/strings.json index 98ea88d8798..9747feaddb7 100644 --- a/homeassistant/components/nina/strings.json +++ b/homeassistant/components/nina/strings.json @@ -38,10 +38,12 @@ } } }, + "abort": { + "unknown": "[%key:common::config_flow::error::unknown%]" + }, "error": { "no_selection": "[%key:component::nina::config::error::no_selection%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "unknown": "[%key:common::config_flow::error::unknown%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } } } diff --git a/homeassistant/components/nmap_tracker/config_flow.py b/homeassistant/components/nmap_tracker/config_flow.py index e05150995aa..36645278bae 100644 --- a/homeassistant/components/nmap_tracker/config_flow.py +++ b/homeassistant/components/nmap_tracker/config_flow.py @@ -141,10 +141,6 @@ async def _async_build_schema_with_user_input( class OptionsFlowHandler(OptionsFlow): """Handle a option flow for homekit.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.options = dict(config_entry.options) - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -215,4 +211,4 @@ class NmapTrackerConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() diff --git a/homeassistant/components/nordpool/__init__.py b/homeassistant/components/nordpool/__init__.py deleted file mode 100644 index b688bf74a37..00000000000 --- a/homeassistant/components/nordpool/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -"""The Nord Pool component.""" - -from __future__ import annotations - -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util - -from .const import PLATFORMS -from .coordinator import NordPoolDataUpdateCoordinator - -type NordPoolConfigEntry = ConfigEntry[NordPoolDataUpdateCoordinator] - - -async def async_setup_entry(hass: 
HomeAssistant, entry: NordPoolConfigEntry) -> bool: - """Set up Nord Pool from a config entry.""" - - coordinator = NordPoolDataUpdateCoordinator(hass, entry) - await coordinator.fetch_data(dt_util.utcnow()) - entry.runtime_data = coordinator - - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - - return True - - -async def async_unload_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool: - """Unload Nord Pool config entry.""" - return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/nordpool/config_flow.py b/homeassistant/components/nordpool/config_flow.py deleted file mode 100644 index 1d75d825e47..00000000000 --- a/homeassistant/components/nordpool/config_flow.py +++ /dev/null @@ -1,115 +0,0 @@ -"""Adds config flow for Nord Pool integration.""" - -from __future__ import annotations - -from typing import Any - -from pynordpool import ( - Currency, - NordPoolClient, - NordPoolEmptyResponseError, - NordPoolError, -) -from pynordpool.const import AREAS -import voluptuous as vol - -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_CURRENCY -from homeassistant.core import HomeAssistant -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.selector import ( - SelectOptionDict, - SelectSelector, - SelectSelectorConfig, - SelectSelectorMode, -) -from homeassistant.util import dt as dt_util - -from .const import CONF_AREAS, DEFAULT_NAME, DOMAIN - -SELECT_AREAS = [ - SelectOptionDict(value=area, label=name) for area, name in AREAS.items() -] -SELECT_CURRENCY = [currency.value for currency in Currency] - -DATA_SCHEMA = vol.Schema( - { - vol.Required(CONF_AREAS, default=[]): SelectSelector( - SelectSelectorConfig( - options=SELECT_AREAS, - multiple=True, - mode=SelectSelectorMode.DROPDOWN, - sort=True, - ) - ), - vol.Required(CONF_CURRENCY, default="SEK"): SelectSelector( - SelectSelectorConfig( - options=SELECT_CURRENCY, - multiple=False, - mode=SelectSelectorMode.DROPDOWN, - sort=True, - ) - ), - } -) - - -async def test_api(hass: HomeAssistant, user_input: dict[str, Any]) -> dict[str, str]: - """Test fetch data from Nord Pool.""" - client = NordPoolClient(async_get_clientsession(hass)) - try: - await client.async_get_delivery_period( - dt_util.now(), - Currency(user_input[CONF_CURRENCY]), - user_input[CONF_AREAS], - ) - except NordPoolEmptyResponseError: - return {"base": "no_data"} - except NordPoolError: - return {"base": "cannot_connect"} - - return {} - - -class NordpoolConfigFlow(ConfigFlow, domain=DOMAIN): - """Handle a config flow for Nord Pool integration.""" - - VERSION = 1 - - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle the initial step.""" - errors: dict[str, str] = {} - if user_input: - errors = await test_api(self.hass, user_input) - if not errors: - return self.async_create_entry( - title=DEFAULT_NAME, - data=user_input, - ) - - return self.async_show_form( - step_id="user", - data_schema=DATA_SCHEMA, - errors=errors, - ) - - async def async_step_reconfigure( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle the reconfiguration step.""" - errors: dict[str, str] = {} - if user_input: - errors = await test_api(self.hass, user_input) - reconfigure_entry = self._get_reconfigure_entry() - if not errors: - return self.async_update_reload_and_abort( - reconfigure_entry, data_updates=user_input 
- ) - - return self.async_show_form( - step_id="reconfigure", - data_schema=DATA_SCHEMA, - errors=errors, - ) diff --git a/homeassistant/components/nordpool/const.py b/homeassistant/components/nordpool/const.py deleted file mode 100644 index 19a978d946c..00000000000 --- a/homeassistant/components/nordpool/const.py +++ /dev/null @@ -1,14 +0,0 @@ -"""Constants for Nord Pool.""" - -import logging - -from homeassistant.const import Platform - -LOGGER = logging.getLogger(__package__) - -DEFAULT_SCAN_INTERVAL = 60 -DOMAIN = "nordpool" -PLATFORMS = [Platform.SENSOR] -DEFAULT_NAME = "Nord Pool" - -CONF_AREAS = "areas" diff --git a/homeassistant/components/nordpool/coordinator.py b/homeassistant/components/nordpool/coordinator.py deleted file mode 100644 index fa4e9ca2548..00000000000 --- a/homeassistant/components/nordpool/coordinator.py +++ /dev/null @@ -1,91 +0,0 @@ -"""DataUpdateCoordinator for the Nord Pool integration.""" - -from __future__ import annotations - -from collections.abc import Callable -from datetime import datetime, timedelta -from typing import TYPE_CHECKING - -from pynordpool import ( - Currency, - DeliveryPeriodData, - NordPoolClient, - NordPoolEmptyResponseError, - NordPoolError, - NordPoolResponseError, -) - -from homeassistant.const import CONF_CURRENCY -from homeassistant.core import HomeAssistant -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.event import async_track_point_in_utc_time -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from homeassistant.util import dt as dt_util - -from .const import CONF_AREAS, DOMAIN, LOGGER - -if TYPE_CHECKING: - from . import NordPoolConfigEntry - - -class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodData]): - """A Nord Pool Data Update Coordinator.""" - - config_entry: NordPoolConfigEntry - - def __init__(self, hass: HomeAssistant, config_entry: NordPoolConfigEntry) -> None: - """Initialize the Nord Pool coordinator.""" - super().__init__( - hass, - LOGGER, - config_entry=config_entry, - name=DOMAIN, - ) - self.client = NordPoolClient(session=async_get_clientsession(hass)) - self.unsub: Callable[[], None] | None = None - - def get_next_interval(self, now: datetime) -> datetime: - """Compute next time an update should occur.""" - next_hour = dt_util.utcnow() + timedelta(hours=1) - next_run = datetime( - next_hour.year, - next_hour.month, - next_hour.day, - next_hour.hour, - tzinfo=dt_util.UTC, - ) - LOGGER.debug("Next update at %s", next_run) - return next_run - - async def async_shutdown(self) -> None: - """Cancel any scheduled call, and ignore new runs.""" - await super().async_shutdown() - if self.unsub: - self.unsub() - self.unsub = None - - async def fetch_data(self, now: datetime) -> None: - """Fetch data from Nord Pool.""" - self.unsub = async_track_point_in_utc_time( - self.hass, self.fetch_data, self.get_next_interval(dt_util.utcnow()) - ) - try: - data = await self.client.async_get_delivery_period( - dt_util.now(), - Currency(self.config_entry.data[CONF_CURRENCY]), - self.config_entry.data[CONF_AREAS], - ) - except NordPoolEmptyResponseError as error: - LOGGER.debug("Empty response error: %s", error) - self.async_set_update_error(error) - return - except NordPoolResponseError as error: - LOGGER.debug("Response error: %s", error) - self.async_set_update_error(error) - return - except NordPoolError as error: - LOGGER.debug("Connection error: %s", error) - self.async_set_update_error(error) - return - - 
self.async_set_updated_data(data) diff --git a/homeassistant/components/nordpool/diagnostics.py b/homeassistant/components/nordpool/diagnostics.py deleted file mode 100644 index 3160c2bfa6d..00000000000 --- a/homeassistant/components/nordpool/diagnostics.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Diagnostics support for Nord Pool.""" - -from __future__ import annotations - -from typing import Any - -from homeassistant.core import HomeAssistant - -from . import NordPoolConfigEntry - - -async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: NordPoolConfigEntry -) -> dict[str, Any]: - """Return diagnostics for Nord Pool config entry.""" - return {"raw": entry.runtime_data.data.raw} diff --git a/homeassistant/components/nordpool/entity.py b/homeassistant/components/nordpool/entity.py deleted file mode 100644 index 32240aad12c..00000000000 --- a/homeassistant/components/nordpool/entity.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Base entity for Nord Pool.""" - -from __future__ import annotations - -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity import EntityDescription -from homeassistant.helpers.update_coordinator import CoordinatorEntity - -from .const import DOMAIN -from .coordinator import NordPoolDataUpdateCoordinator - - -class NordpoolBaseEntity(CoordinatorEntity[NordPoolDataUpdateCoordinator]): - """Representation of a Nord Pool base entity.""" - - _attr_has_entity_name = True - - def __init__( - self, - coordinator: NordPoolDataUpdateCoordinator, - entity_description: EntityDescription, - area: str, - ) -> None: - """Initiate Nord Pool base entity.""" - super().__init__(coordinator) - self.entity_description = entity_description - self._attr_unique_id = f"{area}-{entity_description.key}" - self.area = area - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, area)}, - name=f"Nord Pool {area}", - ) diff --git a/homeassistant/components/nordpool/icons.json b/homeassistant/components/nordpool/icons.json deleted file mode 100644 index 85434a2d09b..00000000000 --- a/homeassistant/components/nordpool/icons.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "entity": { - "sensor": { - "updated_at": { - "default": "mdi:clock-outline" - }, - "currency": { - "default": "mdi:currency-usd" - }, - "exchange_rate": { - "default": "mdi:currency-usd" - }, - "current_price": { - "default": "mdi:cash" - }, - "last_price": { - "default": "mdi:cash" - }, - "next_price": { - "default": "mdi:cash" - }, - "block_average": { - "default": "mdi:cash-multiple" - }, - "block_min": { - "default": "mdi:cash-multiple" - }, - "block_max": { - "default": "mdi:cash-multiple" - }, - "block_start_time": { - "default": "mdi:clock-time-twelve-outline" - }, - "block_end_time": { - "default": "mdi:clock-time-two-outline" - }, - "daily_average": { - "default": "mdi:cash-multiple" - } - } - } -} diff --git a/homeassistant/components/nordpool/manifest.json b/homeassistant/components/nordpool/manifest.json deleted file mode 100644 index bf093eb3ee9..00000000000 --- a/homeassistant/components/nordpool/manifest.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "domain": "nordpool", - "name": "Nord Pool", - "codeowners": ["@gjohansson-ST"], - "config_flow": true, - "documentation": "https://www.home-assistant.io/integrations/nordpool", - "integration_type": "hub", - "iot_class": "cloud_polling", - "loggers": ["pynordpool"], - "requirements": ["pynordpool==0.2.2"], - "single_config_entry": true -} diff --git a/homeassistant/components/nordpool/sensor.py 
b/homeassistant/components/nordpool/sensor.py deleted file mode 100644 index e7e655a6657..00000000000 --- a/homeassistant/components/nordpool/sensor.py +++ /dev/null @@ -1,328 +0,0 @@ -"""Sensor platform for Nord Pool integration.""" - -from __future__ import annotations - -from collections.abc import Callable -from dataclasses import dataclass -from datetime import datetime, timedelta - -from pynordpool import DeliveryPeriodData - -from homeassistant.components.sensor import ( - EntityCategory, - SensorDeviceClass, - SensorEntity, - SensorEntityDescription, - SensorStateClass, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util import dt as dt_util, slugify - -from . import NordPoolConfigEntry -from .const import LOGGER -from .coordinator import NordPoolDataUpdateCoordinator -from .entity import NordpoolBaseEntity - -PARALLEL_UPDATES = 0 - - -def get_prices(data: DeliveryPeriodData) -> dict[str, tuple[float, float, float]]: - """Return previous, current and next prices. - - Output: {"SE3": (10.0, 10.5, 12.1)} - """ - last_price_entries: dict[str, float] = {} - current_price_entries: dict[str, float] = {} - next_price_entries: dict[str, float] = {} - current_time = dt_util.utcnow() - previous_time = current_time - timedelta(hours=1) - next_time = current_time + timedelta(hours=1) - price_data = data.entries - for entry in price_data: - if entry.start <= current_time <= entry.end: - current_price_entries = entry.entry - if entry.start <= previous_time <= entry.end: - last_price_entries = entry.entry - if entry.start <= next_time <= entry.end: - next_price_entries = entry.entry - - result = {} - for area, price in current_price_entries.items(): - result[area] = (last_price_entries[area], price, next_price_entries[area]) - LOGGER.debug("Prices: %s", result) - return result - - -def get_blockprices( - data: DeliveryPeriodData, -) -> dict[str, dict[str, tuple[datetime, datetime, float, float, float]]]: - """Return average, min and max for block prices. - - Output: {"SE3": {"Off-peak 1": (_datetime_, _datetime_, 9.3, 10.5, 12.1)}} - """ - result: dict[str, dict[str, tuple[datetime, datetime, float, float, float]]] = {} - block_prices = data.block_prices - for entry in block_prices: - for _area in entry.average: - if _area not in result: - result[_area] = {} - result[_area][entry.name] = ( - entry.start, - entry.end, - entry.average[_area]["average"], - entry.average[_area]["min"], - entry.average[_area]["max"], - ) - - LOGGER.debug("Block prices: %s", result) - return result - - -@dataclass(frozen=True, kw_only=True) -class NordpoolDefaultSensorEntityDescription(SensorEntityDescription): - """Describes Nord Pool default sensor entity.""" - - value_fn: Callable[[DeliveryPeriodData], str | float | datetime | None] - - -@dataclass(frozen=True, kw_only=True) -class NordpoolPricesSensorEntityDescription(SensorEntityDescription): - """Describes Nord Pool prices sensor entity.""" - - value_fn: Callable[[tuple[float, float, float]], float | None] - - -@dataclass(frozen=True, kw_only=True) -class NordpoolBlockPricesSensorEntityDescription(SensorEntityDescription): - """Describes Nord Pool block prices sensor entity.""" - - value_fn: Callable[ - [tuple[datetime, datetime, float, float, float]], float | datetime | None - ] - - -DEFAULT_SENSOR_TYPES: tuple[NordpoolDefaultSensorEntityDescription, ...] 
= ( - NordpoolDefaultSensorEntityDescription( - key="updated_at", - translation_key="updated_at", - device_class=SensorDeviceClass.TIMESTAMP, - value_fn=lambda data: data.updated_at, - entity_category=EntityCategory.DIAGNOSTIC, - ), - NordpoolDefaultSensorEntityDescription( - key="currency", - translation_key="currency", - value_fn=lambda data: data.currency, - entity_category=EntityCategory.DIAGNOSTIC, - ), - NordpoolDefaultSensorEntityDescription( - key="exchange_rate", - translation_key="exchange_rate", - value_fn=lambda data: data.exchange_rate, - state_class=SensorStateClass.MEASUREMENT, - entity_registry_enabled_default=False, - entity_category=EntityCategory.DIAGNOSTIC, - ), -) -PRICES_SENSOR_TYPES: tuple[NordpoolPricesSensorEntityDescription, ...] = ( - NordpoolPricesSensorEntityDescription( - key="current_price", - translation_key="current_price", - value_fn=lambda data: data[1] / 1000, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=2, - ), - NordpoolPricesSensorEntityDescription( - key="last_price", - translation_key="last_price", - value_fn=lambda data: data[0] / 1000, - suggested_display_precision=2, - ), - NordpoolPricesSensorEntityDescription( - key="next_price", - translation_key="next_price", - value_fn=lambda data: data[2] / 1000, - suggested_display_precision=2, - ), -) -BLOCK_PRICES_SENSOR_TYPES: tuple[NordpoolBlockPricesSensorEntityDescription, ...] = ( - NordpoolBlockPricesSensorEntityDescription( - key="block_average", - translation_key="block_average", - value_fn=lambda data: data[2] / 1000, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=2, - entity_registry_enabled_default=False, - ), - NordpoolBlockPricesSensorEntityDescription( - key="block_min", - translation_key="block_min", - value_fn=lambda data: data[3] / 1000, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=2, - entity_registry_enabled_default=False, - ), - NordpoolBlockPricesSensorEntityDescription( - key="block_max", - translation_key="block_max", - value_fn=lambda data: data[4] / 1000, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=2, - entity_registry_enabled_default=False, - ), - NordpoolBlockPricesSensorEntityDescription( - key="block_start_time", - translation_key="block_start_time", - value_fn=lambda data: data[0], - device_class=SensorDeviceClass.TIMESTAMP, - entity_registry_enabled_default=False, - ), - NordpoolBlockPricesSensorEntityDescription( - key="block_end_time", - translation_key="block_end_time", - value_fn=lambda data: data[1], - device_class=SensorDeviceClass.TIMESTAMP, - entity_registry_enabled_default=False, - ), -) -DAILY_AVERAGE_PRICES_SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( - SensorEntityDescription( - key="daily_average", - translation_key="daily_average", - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=2, - entity_registry_enabled_default=False, - ), -) - - -async def async_setup_entry( - hass: HomeAssistant, - entry: NordPoolConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up Nord Pool sensor platform.""" - - coordinator = entry.runtime_data - - entities: list[NordpoolBaseEntity] = [] - currency = entry.runtime_data.data.currency - - for area in get_prices(entry.runtime_data.data): - LOGGER.debug("Setting up base sensors for area %s", area) - entities.extend( - NordpoolSensor(coordinator, description, area) - for description in DEFAULT_SENSOR_TYPES - ) - LOGGER.debug( - "Setting up price sensors for area %s with currency %s", area, currency - ) - entities.extend( - NordpoolPriceSensor(coordinator, description, area, currency) - for description in PRICES_SENSOR_TYPES - ) - entities.extend( - NordpoolDailyAveragePriceSensor(coordinator, description, area, currency) - for description in DAILY_AVERAGE_PRICES_SENSOR_TYPES - ) - for block_name in get_blockprices(coordinator.data)[area]: - LOGGER.debug( - "Setting up block price sensors for area %s with currency %s in block %s", - area, - currency, - block_name, - ) - entities.extend( - NordpoolBlockPriceSensor( - coordinator, description, area, currency, block_name - ) - for description in BLOCK_PRICES_SENSOR_TYPES - ) - async_add_entities(entities) - - -class NordpoolSensor(NordpoolBaseEntity, SensorEntity): - """Representation of a Nord Pool sensor.""" - - entity_description: NordpoolDefaultSensorEntityDescription - - @property - def native_value(self) -> str | float | datetime | None: - """Return value of sensor.""" - return self.entity_description.value_fn(self.coordinator.data) - - -class NordpoolPriceSensor(NordpoolBaseEntity, SensorEntity): - """Representation of a Nord Pool price sensor.""" - - entity_description: NordpoolPricesSensorEntityDescription - - def __init__( - self, - coordinator: NordPoolDataUpdateCoordinator, - entity_description: NordpoolPricesSensorEntityDescription, - area: str, - currency: str, - ) -> None: - """Initiate Nord Pool sensor.""" - super().__init__(coordinator, entity_description, area) - self._attr_native_unit_of_measurement = f"{currency}/kWh" - - @property - def native_value(self) -> float | None: - """Return value of sensor.""" - return self.entity_description.value_fn( - get_prices(self.coordinator.data)[self.area] - ) - - -class NordpoolBlockPriceSensor(NordpoolBaseEntity, SensorEntity): - """Representation of a Nord Pool block price sensor.""" - - entity_description: NordpoolBlockPricesSensorEntityDescription - - def __init__( - self, - coordinator: NordPoolDataUpdateCoordinator, - entity_description: NordpoolBlockPricesSensorEntityDescription, - area: str, - currency: str, - block_name: str, - ) -> None: - """Initiate Nord Pool sensor.""" - super().__init__(coordinator, entity_description, area) - if entity_description.device_class is not SensorDeviceClass.TIMESTAMP: - self._attr_native_unit_of_measurement = f"{currency}/kWh" - self._attr_unique_id = f"{slugify(block_name)}-{area}-{entity_description.key}" - self.block_name = block_name - self._attr_translation_placeholders = {"block": block_name} - - @property - def native_value(self) -> float | datetime | None: - """Return value of sensor.""" - return self.entity_description.value_fn( - get_blockprices(self.coordinator.data)[self.area][self.block_name] - ) - 
- -class NordpoolDailyAveragePriceSensor(NordpoolBaseEntity, SensorEntity): - """Representation of a Nord Pool daily average price sensor.""" - - entity_description: SensorEntityDescription - - def __init__( - self, - coordinator: NordPoolDataUpdateCoordinator, - entity_description: SensorEntityDescription, - area: str, - currency: str, - ) -> None: - """Initiate Nord Pool sensor.""" - super().__init__(coordinator, entity_description, area) - self._attr_native_unit_of_measurement = f"{currency}/kWh" - - @property - def native_value(self) -> float | None: - """Return value of sensor.""" - return self.coordinator.data.area_average[self.area] / 1000 diff --git a/homeassistant/components/nordpool/strings.json b/homeassistant/components/nordpool/strings.json deleted file mode 100644 index 59ba009eb90..00000000000 --- a/homeassistant/components/nordpool/strings.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "config": { - "abort": { - "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" - }, - "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "no_data": "API connected but the response was empty" - }, - "step": { - "user": { - "data": { - "currency": "Currency", - "areas": "Areas" - } - }, - "reconfigure": { - "data": { - "currency": "[%key:component::nordpool::config::step::user::data::currency%]", - "areas": "[%key:component::nordpool::config::step::user::data::areas%]" - } - } - } - }, - "entity": { - "sensor": { - "updated_at": { - "name": "Last updated" - }, - "currency": { - "name": "Currency" - }, - "exchange_rate": { - "name": "Exchange rate" - }, - "current_price": { - "name": "Current price" - }, - "last_price": { - "name": "Previous price" - }, - "next_price": { - "name": "Next price" - }, - "block_average": { - "name": "{block} average" - }, - "block_min": { - "name": "{block} lowest price" - }, - "block_max": { - "name": "{block} highest price" - }, - "block_start_time": { - "name": "{block} time from" - }, - "block_end_time": { - "name": "{block} time until" - }, - "daily_average": { - "name": "Daily average" - } - } - } -} diff --git a/homeassistant/components/number/const.py b/homeassistant/components/number/const.py index 23e3ce0910b..ad95c9b5358 100644 --- a/homeassistant/components/number/const.py +++ b/homeassistant/components/number/const.py @@ -17,7 +17,6 @@ from homeassistant.const import ( SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, UnitOfApparentPower, - UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -110,12 +109,6 @@ class NumberDeviceClass(StrEnum): Unit of measurement: `%` """ - BLOOD_GLUCOSE_CONCENTRATION = "blood_glucose_concentration" - """Blood glucose concentration. - - Unit of measurement: `mg/dL`, `mmol/L` - """ - CO = "carbon_monoxide" """Carbon Monoxide gas concentration. @@ -169,7 +162,7 @@ class NumberDeviceClass(StrEnum): ENERGY = "energy" """Energy. - Unit of measurement: `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `MJ`, `GJ` + Unit of measurement: `Wh`, `kWh`, `MWh`, `MJ`, `GJ` """ ENERGY_STORAGE = "energy_storage" @@ -178,7 +171,7 @@ class NumberDeviceClass(StrEnum): Use this device class for sensors measuring stored energy, for example the amount of electric energy currently stored in a battery or the capacity of a battery. 
- Unit of measurement: `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `MJ`, `GJ` + Unit of measurement: `Wh`, `kWh`, `MWh`, `MJ`, `GJ` """ FREQUENCY = "frequency" @@ -286,7 +279,7 @@ class NumberDeviceClass(StrEnum): POWER = "power" """Power. - Unit of measurement: `W`, `kW`, `MW`, `GW`, `TW` + Unit of measurement: `W`, `kW` """ PRECIPITATION = "precipitation" @@ -436,7 +429,6 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = { NumberDeviceClass.AQI: {None}, NumberDeviceClass.ATMOSPHERIC_PRESSURE: set(UnitOfPressure), NumberDeviceClass.BATTERY: {PERCENTAGE}, - NumberDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration), NumberDeviceClass.CO: {CONCENTRATION_PARTS_PER_MILLION}, NumberDeviceClass.CO2: {CONCENTRATION_PARTS_PER_MILLION}, NumberDeviceClass.CONDUCTIVITY: set(UnitOfConductivity), diff --git a/homeassistant/components/number/icons.json b/homeassistant/components/number/icons.json index 5e0fc6e44d2..a122aaecb09 100644 --- a/homeassistant/components/number/icons.json +++ b/homeassistant/components/number/icons.json @@ -15,9 +15,6 @@ "battery": { "default": "mdi:battery" }, - "blood_glucose_concentration": { - "default": "mdi:spoon-sugar" - }, "carbon_dioxide": { "default": "mdi:molecule-co2" }, diff --git a/homeassistant/components/number/strings.json b/homeassistant/components/number/strings.json index b9aec880ecc..580385172e3 100644 --- a/homeassistant/components/number/strings.json +++ b/homeassistant/components/number/strings.json @@ -43,9 +43,6 @@ "battery": { "name": "[%key:component::sensor::entity_component::battery::name%]" }, - "blood_glucose_concentration": { - "name": "[%key:component::sensor::entity_component::blood_glucose_concentration::name%]" - }, "carbon_dioxide": { "name": "[%key:component::sensor::entity_component::carbon_dioxide::name%]" }, diff --git a/homeassistant/components/ollama/strings.json b/homeassistant/components/ollama/strings.json index 248cac34f11..c307f160228 100644 --- a/homeassistant/components/ollama/strings.json +++ b/homeassistant/components/ollama/strings.json @@ -11,11 +11,9 @@ "title": "Downloading model" } }, - "abort": { - "download_failed": "Model downloading failed" - }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "download_failed": "Model downloading failed", "unknown": "[%key:common::config_flow::error::unknown%]" }, "progress": { diff --git a/homeassistant/components/onewire/config_flow.py b/homeassistant/components/onewire/config_flow.py index abb4c884974..3ee0563410c 100644 --- a/homeassistant/components/onewire/config_flow.py +++ b/homeassistant/components/onewire/config_flow.py @@ -2,7 +2,6 @@ from __future__ import annotations -from copy import deepcopy from typing import Any import voluptuous as vol @@ -105,7 +104,7 @@ class OneWireFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OnewireOptionsFlowHandler: """Get the options flow for this handler.""" - return OnewireOptionsFlowHandler(config_entry) + return OnewireOptionsFlowHandler() class OnewireOptionsFlowHandler(OptionsFlow): @@ -126,10 +125,6 @@ class OnewireOptionsFlowHandler(OptionsFlow): current_device: str """Friendly name of the currently selected device.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.options = deepcopy(dict(config_entry.options)) - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/onkyo/config_flow.py 
b/homeassistant/components/onkyo/config_flow.py index a8ced6fae64..623fa9b2a90 100644 --- a/homeassistant/components/onkyo/config_flow.py +++ b/homeassistant/components/onkyo/config_flow.py @@ -343,9 +343,7 @@ class OnkyoOptionsFlowHandler(OptionsFlow): return self.async_create_entry( data={ - OPTION_VOLUME_RESOLUTION: self.config_entry.options[ - OPTION_VOLUME_RESOLUTION - ], + OPTION_VOLUME_RESOLUTION: self.options[OPTION_VOLUME_RESOLUTION], OPTION_MAX_VOLUME: user_input[OPTION_MAX_VOLUME], OPTION_INPUT_SOURCES: sources_store, } @@ -353,7 +351,7 @@ class OnkyoOptionsFlowHandler(OptionsFlow): schema_dict: dict[Any, Selector] = {} - max_volume: float = self.config_entry.options[OPTION_MAX_VOLUME] + max_volume: float = self.options[OPTION_MAX_VOLUME] schema_dict[vol.Required(OPTION_MAX_VOLUME, default=max_volume)] = ( NumberSelector( NumberSelectorConfig(min=1, max=100, mode=NumberSelectorMode.BOX) diff --git a/homeassistant/components/onvif/config_flow.py b/homeassistant/components/onvif/config_flow.py index 66e566af0bf..830f74b94e8 100644 --- a/homeassistant/components/onvif/config_flow.py +++ b/homeassistant/components/onvif/config_flow.py @@ -109,7 +109,7 @@ class OnvifFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OnvifOptionsFlowHandler: """Get the options flow for this handler.""" - return OnvifOptionsFlowHandler(config_entry) + return OnvifOptionsFlowHandler() def __init__(self) -> None: """Initialize the ONVIF config flow.""" @@ -389,10 +389,6 @@ class OnvifFlowHandler(ConfigFlow, domain=DOMAIN): class OnvifOptionsFlowHandler(OptionsFlow): """Handle ONVIF options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize ONVIF options flow.""" - self.options = dict(config_entry.options) - async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the ONVIF options.""" return await self.async_step_onvif_devices() diff --git a/homeassistant/components/p1_monitor/config_flow.py b/homeassistant/components/p1_monitor/config_flow.py index a7ede186d72..055973e8e37 100644 --- a/homeassistant/components/p1_monitor/config_flow.py +++ b/homeassistant/components/p1_monitor/config_flow.py @@ -57,13 +57,10 @@ class P1MonitorFlowHandler(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema( { vol.Required(CONF_HOST): TextSelector(), - vol.Required(CONF_PORT, default=80): vol.All( - NumberSelector( - NumberSelectorConfig( - min=1, max=65535, mode=NumberSelectorMode.BOX - ), - ), - vol.Coerce(int), + vol.Required(CONF_PORT, default=80): NumberSelector( + NumberSelectorConfig( + mode=NumberSelectorMode.BOX, + ) ), } ), diff --git a/homeassistant/components/palazzetti/config_flow.py b/homeassistant/components/palazzetti/config_flow.py index fe892b6624d..a58461b9ca7 100644 --- a/homeassistant/components/palazzetti/config_flow.py +++ b/homeassistant/components/palazzetti/config_flow.py @@ -6,7 +6,6 @@ from pypalazzetti.client import PalazzettiClient from pypalazzetti.exceptions import CommunicationError import voluptuous as vol -from homeassistant.components import dhcp from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST from homeassistant.helpers import device_registry as dr @@ -17,8 +16,6 @@ from .const import DOMAIN, LOGGER class PalazzettiConfigFlow(ConfigFlow, domain=DOMAIN): """Palazzetti config flow.""" - _discovered_device: PalazzettiClient - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -51,41 +48,3 @@ class 
PalazzettiConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema({vol.Required(CONF_HOST): str}), errors=errors, ) - - async def async_step_dhcp( - self, discovery_info: dhcp.DhcpServiceInfo - ) -> ConfigFlowResult: - """Handle DHCP discovery.""" - - LOGGER.debug( - "DHCP discovery detected Palazzetti: %s", discovery_info.macaddress - ) - - await self.async_set_unique_id(dr.format_mac(discovery_info.macaddress)) - self._abort_if_unique_id_configured() - self._discovered_device = PalazzettiClient(hostname=discovery_info.ip) - try: - await self._discovered_device.connect() - except CommunicationError: - return self.async_abort(reason="cannot_connect") - - return await self.async_step_discovery_confirm() - - async def async_step_discovery_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Confirm discovery.""" - if user_input is not None: - return self.async_create_entry( - title=self._discovered_device.name, - data={CONF_HOST: self._discovered_device.host}, - ) - - self._set_confirm_only() - return self.async_show_form( - step_id="discovery_confirm", - description_placeholders={ - "name": self._discovered_device.name, - "host": self._discovered_device.host, - }, - ) diff --git a/homeassistant/components/palazzetti/manifest.json b/homeassistant/components/palazzetti/manifest.json index aff82275e2e..a1b25f563bf 100644 --- a/homeassistant/components/palazzetti/manifest.json +++ b/homeassistant/components/palazzetti/manifest.json @@ -3,17 +3,8 @@ "name": "Palazzetti", "codeowners": ["@dotvav"], "config_flow": true, - "dhcp": [ - { - "hostname": "connbox*", - "macaddress": "40F3857*" - }, - { - "registered_devices": true - } - ], "documentation": "https://www.home-assistant.io/integrations/palazzetti", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["pypalazzetti==0.1.11"] + "requirements": ["pypalazzetti==0.1.10"] } diff --git a/homeassistant/components/palazzetti/strings.json b/homeassistant/components/palazzetti/strings.json index cc10c8ed5c6..fdf50f29f0d 100644 --- a/homeassistant/components/palazzetti/strings.json +++ b/homeassistant/components/palazzetti/strings.json @@ -8,9 +8,6 @@ "data_description": { "host": "The host name or the IP address of the Palazzetti CBox" } - }, - "discovery_confirm": { - "description": "Do you want to add {name} ({host}) to Home Assistant?" 
} }, "abort": { diff --git a/homeassistant/components/plex/config_flow.py b/homeassistant/components/plex/config_flow.py index ae7cbb12574..22069310804 100644 --- a/homeassistant/components/plex/config_flow.py +++ b/homeassistant/components/plex/config_flow.py @@ -3,7 +3,6 @@ from __future__ import annotations from collections.abc import Mapping -from copy import deepcopy import logging from typing import TYPE_CHECKING, Any @@ -385,7 +384,6 @@ class PlexOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize Plex options flow.""" - self.options = deepcopy(dict(config_entry.options)) self.server_id = config_entry.data[CONF_SERVER_IDENTIFIER] async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: diff --git a/homeassistant/components/powerwall/config_flow.py b/homeassistant/components/powerwall/config_flow.py index 0c39392ca19..bacbff63211 100644 --- a/homeassistant/components/powerwall/config_flow.py +++ b/homeassistant/components/powerwall/config_flow.py @@ -251,8 +251,8 @@ class PowerwallConfigFlow(ConfigFlow, domain=DOMAIN): """Handle reauth confirmation.""" errors: dict[str, str] | None = {} description_placeholders: dict[str, str] = {} - reauth_entry = self._get_reauth_entry() if user_input is not None: + reauth_entry = self._get_reauth_entry() errors, _, description_placeholders = await self._async_try_connect( {CONF_IP_ADDRESS: reauth_entry.data[CONF_IP_ADDRESS], **user_input} ) @@ -261,10 +261,6 @@ class PowerwallConfigFlow(ConfigFlow, domain=DOMAIN): reauth_entry, data_updates=user_input ) - self.context["title_placeholders"] = { - "name": reauth_entry.title, - "ip_address": reauth_entry.data[CONF_IP_ADDRESS], - } return self.async_show_form( step_id="reauth_confirm", data_schema=vol.Schema({vol.Optional(CONF_PASSWORD): str}), diff --git a/homeassistant/components/profiler/__init__.py b/homeassistant/components/profiler/__init__.py index 389e3384ad9..9b2b9736574 100644 --- a/homeassistant/components/profiler/__init__.py +++ b/homeassistant/components/profiler/__init__.py @@ -436,10 +436,6 @@ async def _async_generate_memory_profile(hass: HomeAssistant, call: ServiceCall) # Imports deferred to avoid loading modules # in memory since usually only one part of this # integration is used at a time - if sys.version_info >= (3, 13): - raise HomeAssistantError( - "Memory profiling is not supported on Python 3.13. Please use Python 3.12." 
- ) from guppy import hpy # pylint: disable=import-outside-toplevel start_time = int(time.time() * 1000000) diff --git a/homeassistant/components/profiler/manifest.json b/homeassistant/components/profiler/manifest.json index 8d2814c8c7f..9f27ee7f7d0 100644 --- a/homeassistant/components/profiler/manifest.json +++ b/homeassistant/components/profiler/manifest.json @@ -7,7 +7,7 @@ "quality_scale": "internal", "requirements": [ "pyprof2calltree==1.4.5", - "guppy3==3.1.4.post1;python_version<'3.13'", + "guppy3==3.1.4.post1", "objgraph==3.5.0" ], "single_config_entry": true diff --git a/homeassistant/components/proxy/manifest.json b/homeassistant/components/proxy/manifest.json index f13799422df..1e70c4d3e10 100644 --- a/homeassistant/components/proxy/manifest.json +++ b/homeassistant/components/proxy/manifest.json @@ -3,5 +3,5 @@ "name": "Camera Proxy", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/proxy", - "requirements": ["Pillow==11.0.0"] + "requirements": ["Pillow==10.4.0"] } diff --git a/homeassistant/components/qnap/sensor.py b/homeassistant/components/qnap/sensor.py index 383a4e5f572..526516bfcdd 100644 --- a/homeassistant/components/qnap/sensor.py +++ b/homeassistant/components/qnap/sensor.py @@ -13,6 +13,7 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import ( + ATTR_NAME, PERCENTAGE, EntityCategory, UnitOfDataRate, @@ -374,6 +375,17 @@ class QNAPMemorySensor(QNAPSensor): return None + # Deprecated since Home Assistant 2024.6.0 + # Can be removed completely in 2024.12.0 + @property + def extra_state_attributes(self) -> dict[str, Any] | None: + """Return the state attributes.""" + if self.coordinator.data: + data = self.coordinator.data["system_stats"]["memory"] + size = round(float(data["total"]) / 1024, 2) + return {ATTR_MEMORY_SIZE: f"{size} {UnitOfInformation.GIBIBYTES}"} + return None + class QNAPNetworkSensor(QNAPSensor): """A QNAP sensor that monitors network stats.""" @@ -402,6 +414,22 @@ class QNAPNetworkSensor(QNAPSensor): return None + # Deprecated since Home Assistant 2024.6.0 + # Can be removed completely in 2024.12.0 + @property + def extra_state_attributes(self) -> dict[str, Any] | None: + """Return the state attributes.""" + if self.coordinator.data: + data = self.coordinator.data["system_stats"]["nics"][self.monitor_device] + return { + ATTR_IP: data["ip"], + ATTR_MASK: data["mask"], + ATTR_MAC: data["mac"], + ATTR_MAX_SPEED: data["max_speed"], + ATTR_PACKETS_ERR: data["err_packets"], + } + return None + class QNAPSystemSensor(QNAPSensor): """A QNAP sensor that monitors overall system health.""" @@ -427,6 +455,25 @@ class QNAPSystemSensor(QNAPSensor): return None + # Deprecated since Home Assistant 2024.6.0 + # Can be removed completely in 2024.12.0 + @property + def extra_state_attributes(self) -> dict[str, Any] | None: + """Return the state attributes.""" + if self.coordinator.data: + data = self.coordinator.data["system_stats"] + days = int(data["uptime"]["days"]) + hours = int(data["uptime"]["hours"]) + minutes = int(data["uptime"]["minutes"]) + + return { + ATTR_NAME: data["system"]["name"], + ATTR_MODEL: data["system"]["model"], + ATTR_SERIAL: data["system"]["serial_number"], + ATTR_UPTIME: f"{days:0>2d}d {hours:0>2d}h {minutes:0>2d}m", + } + return None + class QNAPDriveSensor(QNAPSensor): """A QNAP sensor that monitors HDD/SSD drive stats.""" @@ -486,3 +533,17 @@ class QNAPVolumeSensor(QNAPSensor): return used_gb / total_gb * 100 return None + + # Deprecated since Home Assistant 
2024.6.0 + # Can be removed completely in 2024.12.0 + @property + def extra_state_attributes(self) -> dict[str, Any] | None: + """Return the state attributes.""" + if self.coordinator.data: + data = self.coordinator.data["volumes"][self.monitor_device] + total_gb = int(data["total_size"]) / 1024 / 1024 / 1024 + + return { + ATTR_VOLUME_SIZE: f"{round(total_gb, 1)} {UnitOfInformation.GIBIBYTES}" + } + return None diff --git a/homeassistant/components/qrcode/manifest.json b/homeassistant/components/qrcode/manifest.json index 3fcc895c2b9..14f2d093f37 100644 --- a/homeassistant/components/qrcode/manifest.json +++ b/homeassistant/components/qrcode/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/qrcode", "iot_class": "calculated", "loggers": ["pyzbar"], - "requirements": ["Pillow==11.0.0", "pyzbar==0.1.7"] + "requirements": ["Pillow==10.4.0", "pyzbar==0.1.7"] } diff --git a/homeassistant/components/recorder/pool.py b/homeassistant/components/recorder/pool.py index fc2a8ccb1cc..30f8fa8d07a 100644 --- a/homeassistant/components/recorder/pool.py +++ b/homeassistant/components/recorder/pool.py @@ -16,7 +16,7 @@ from sqlalchemy.pool import ( StaticPool, ) -from homeassistant.helpers.frame import ReportBehavior, report_usage +from homeassistant.helpers.frame import report from homeassistant.util.loop import raise_for_blocking_call _LOGGER = logging.getLogger(__name__) @@ -108,14 +108,14 @@ class RecorderPool(SingletonThreadPool, NullPool): # raise_for_blocking_call will raise an exception def _do_get_db_connection_protected(self) -> ConnectionPoolEntry: - report_usage( + report( ( "accesses the database without the database executor; " f"{ADVISE_MSG} " "for faster database operations" ), exclude_integrations={"recorder"}, - core_behavior=ReportBehavior.LOG, + error_if_core=False, ) return NullPool._create_connection(self) # noqa: SLF001 diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index 7243af9d4d5..4ffe7c72971 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -28,7 +28,6 @@ from homeassistant.helpers.typing import UNDEFINED, UndefinedType from homeassistant.util import dt as dt_util from homeassistant.util.unit_conversion import ( BaseUnitConverter, - BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -129,10 +128,6 @@ QUERY_STATISTICS_SUMMARY_SUM = ( STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = { - **{ - unit: BloodGlucoseConcentrationConverter - for unit in BloodGlucoseConcentrationConverter.VALID_UNITS - }, **{unit: ConductivityConverter for unit in ConductivityConverter.VALID_UNITS}, **{unit: DataRateConverter for unit in DataRateConverter.VALID_UNITS}, **{unit: DistanceConverter for unit in DistanceConverter.VALID_UNITS}, diff --git a/homeassistant/components/recorder/websocket_api.py b/homeassistant/components/recorder/websocket_api.py index f4dce73fa47..ac917e903df 100644 --- a/homeassistant/components/recorder/websocket_api.py +++ b/homeassistant/components/recorder/websocket_api.py @@ -16,7 +16,6 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.json import json_bytes from homeassistant.util import dt as dt_util from homeassistant.util.unit_conversion import ( - BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -55,9 +54,6 @@ 
UPDATE_STATISTICS_METADATA_TIME_OUT = 10 UNIT_SCHEMA = vol.Schema( { - vol.Optional("blood_glucose_concentration"): vol.In( - BloodGlucoseConcentrationConverter.VALID_UNITS - ), vol.Optional("conductivity"): vol.In(ConductivityConverter.VALID_UNITS), vol.Optional("data_rate"): vol.In(DataRateConverter.VALID_UNITS), vol.Optional("distance"): vol.In(DistanceConverter.VALID_UNITS), diff --git a/homeassistant/components/reolink/icons.json b/homeassistant/components/reolink/icons.json index d333a8a0201..7f4a15ffe21 100644 --- a/homeassistant/components/reolink/icons.json +++ b/homeassistant/components/reolink/icons.json @@ -246,12 +246,6 @@ "off": "mdi:music-note-off" } }, - "vehicle_tone": { - "default": "mdi:music-note", - "state": { - "off": "mdi:music-note-off" - } - }, "visitor_tone": { "default": "mdi:music-note", "state": { diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 7921bdb6ed5..23a46c5e1c9 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -18,5 +18,5 @@ "documentation": "https://www.home-assistant.io/integrations/reolink", "iot_class": "local_push", "loggers": ["reolink_aio"], - "requirements": ["reolink-aio==0.11.1"] + "requirements": ["reolink-aio==0.10.4"] } diff --git a/homeassistant/components/reolink/select.py b/homeassistant/components/reolink/select.py index a444997a907..1306c881059 100644 --- a/homeassistant/components/reolink/select.py +++ b/homeassistant/components/reolink/select.py @@ -197,16 +197,6 @@ CHIME_SELECT_ENTITIES = ( value=lambda chime: ChimeToneEnum(chime.tone("people")).name, method=lambda chime, name: chime.set_tone("people", ChimeToneEnum[name].value), ), - ReolinkChimeSelectEntityDescription( - key="vehicle_tone", - cmd_key="GetDingDongCfg", - translation_key="vehicle_tone", - entity_category=EntityCategory.CONFIG, - get_options=[method.name for method in ChimeToneEnum], - supported=lambda chime: "vehicle" in chime.chime_event_types, - value=lambda chime: ChimeToneEnum(chime.tone("vehicle")).name, - method=lambda chime, name: chime.set_tone("vehicle", ChimeToneEnum[name].value), - ), ReolinkChimeSelectEntityDescription( key="visitor_tone", cmd_key="GetDingDongCfg", diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index 1d699b7b658..fbc88ed1b50 100644 --- a/homeassistant/components/reolink/strings.json +++ b/homeassistant/components/reolink/strings.json @@ -606,22 +606,6 @@ "waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" } }, - "vehicle_tone": { - "name": "Vehicle ringtone", - "state": { - "off": "[%key:common::state::off%]", - "citybird": "[%key:component::reolink::entity::select::motion_tone::state::citybird%]", - "originaltune": "[%key:component::reolink::entity::select::motion_tone::state::originaltune%]", - "pianokey": "[%key:component::reolink::entity::select::motion_tone::state::pianokey%]", - "loop": "[%key:component::reolink::entity::select::motion_tone::state::loop%]", - "attraction": "[%key:component::reolink::entity::select::motion_tone::state::attraction%]", - "hophop": "[%key:component::reolink::entity::select::motion_tone::state::hophop%]", - "goodday": "[%key:component::reolink::entity::select::motion_tone::state::goodday%]", - "operetta": "[%key:component::reolink::entity::select::motion_tone::state::operetta%]", - "moonlight": "[%key:component::reolink::entity::select::motion_tone::state::moonlight%]", - 
"waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" - } - }, "visitor_tone": { "name": "Visitor ringtone", "state": { diff --git a/homeassistant/components/ring/event.py b/homeassistant/components/ring/event.py index 71a4bc8aea5..e6d9d25542f 100644 --- a/homeassistant/components/ring/event.py +++ b/homeassistant/components/ring/event.py @@ -96,7 +96,7 @@ class RingEvent(RingBaseEntity[RingListenCoordinator, RingDeviceT], EventEntity) @callback def _handle_coordinator_update(self) -> None: - if (alert := self._get_coordinator_alert()) and not alert.is_update: + if alert := self._get_coordinator_alert(): self._async_handle_event(alert.kind) super()._handle_coordinator_update() diff --git a/homeassistant/components/ring/manifest.json b/homeassistant/components/ring/manifest.json index e431c680081..4e0514ba7f9 100644 --- a/homeassistant/components/ring/manifest.json +++ b/homeassistant/components/ring/manifest.json @@ -30,5 +30,5 @@ "iot_class": "cloud_polling", "loggers": ["ring_doorbell"], "quality_scale": "silver", - "requirements": ["ring-doorbell==0.9.12"] + "requirements": ["ring-doorbell==0.9.8"] } diff --git a/homeassistant/components/roborock/config_flow.py b/homeassistant/components/roborock/config_flow.py index 200614b024e..e01bb904adf 100644 --- a/homeassistant/components/roborock/config_flow.py +++ b/homeassistant/components/roborock/config_flow.py @@ -3,7 +3,6 @@ from __future__ import annotations from collections.abc import Mapping -from copy import deepcopy import logging from typing import Any @@ -173,16 +172,12 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> RoborockOptionsFlowHandler: """Create the options flow.""" - return RoborockOptionsFlowHandler(config_entry) + return RoborockOptionsFlowHandler() class RoborockOptionsFlowHandler(OptionsFlow): """Handle an option flow for Roborock.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.options = deepcopy(dict(config_entry.options)) - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/roborock/coordinator.py b/homeassistant/components/roborock/coordinator.py index fe592074f71..20bc50f9855 100644 --- a/homeassistant/components/roborock/coordinator.py +++ b/homeassistant/components/roborock/coordinator.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio from datetime import timedelta import logging @@ -106,12 +107,8 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): async def _async_update_data(self) -> DeviceProp: """Update data via library.""" try: - # Update device props and standard api information - await self._update_device_prop() - # Set the new map id from the updated device props + await asyncio.gather(*(self._update_device_prop(), self.get_rooms())) self._set_current_map() - # Get the rooms for that map id. 
- await self.get_rooms() except RoborockException as ex: raise UpdateFailed(ex) from ex return self.roborock_device_info.props diff --git a/homeassistant/components/roborock/manifest.json b/homeassistant/components/roborock/manifest.json index c305e4710fc..79a9bf77578 100644 --- a/homeassistant/components/roborock/manifest.json +++ b/homeassistant/components/roborock/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_polling", "loggers": ["roborock"], "requirements": [ - "python-roborock==2.7.2", + "python-roborock==2.6.1", "vacuum-map-parser-roborock==0.1.2" ] } diff --git a/homeassistant/components/roborock/select.py b/homeassistant/components/roborock/select.py index 73cb95d2d7c..3dfe0e72a7b 100644 --- a/homeassistant/components/roborock/select.py +++ b/homeassistant/components/roborock/select.py @@ -135,9 +135,6 @@ class RoborockCurrentMapSelectEntity(RoborockCoordinatedEntityV1, SelectEntity): RoborockCommand.LOAD_MULTI_MAP, [map_id], ) - # Update the current map id manually so that nothing gets broken - # if another service hits the api. - self.coordinator.current_map = map_id # We need to wait after updating the map # so that other commands will be executed correctly. await asyncio.sleep(MAP_SLEEP) @@ -151,9 +148,6 @@ class RoborockCurrentMapSelectEntity(RoborockCoordinatedEntityV1, SelectEntity): @property def current_option(self) -> str | None: """Get the current status of the select entity from device_status.""" - if ( - (current_map := self.coordinator.current_map) is not None - and current_map in self.coordinator.maps - ): # 63 means it is searching for a map. + if (current_map := self.coordinator.current_map) is not None: return self.coordinator.maps[current_map].name return None diff --git a/homeassistant/components/ruckus_unleashed/manifest.json b/homeassistant/components/ruckus_unleashed/manifest.json index 8d56f3a5563..2066b65221e 100644 --- a/homeassistant/components/ruckus_unleashed/manifest.json +++ b/homeassistant/components/ruckus_unleashed/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aioruckus"], - "requirements": ["aioruckus==0.42"] + "requirements": ["aioruckus==0.41"] } diff --git a/homeassistant/components/russound_rio/const.py b/homeassistant/components/russound_rio/const.py index af52e89d399..1b38dc8ce5c 100644 --- a/homeassistant/components/russound_rio/const.py +++ b/homeassistant/components/russound_rio/const.py @@ -17,7 +17,7 @@ RUSSOUND_RIO_EXCEPTIONS = ( ) -CONNECT_TIMEOUT = 15 +CONNECT_TIMEOUT = 5 MP_FEATURES_BY_FLAG = { FeatureFlag.COMMANDS_ZONE_MUTE_OFF_ON: MediaPlayerEntityFeature.VOLUME_MUTE diff --git a/homeassistant/components/russound_rio/entity.py b/homeassistant/components/russound_rio/entity.py index 9790ff43e68..0233305bb1f 100644 --- a/homeassistant/components/russound_rio/entity.py +++ b/homeassistant/components/russound_rio/entity.py @@ -96,4 +96,6 @@ class RussoundBaseEntity(Entity): async def async_will_remove_from_hass(self) -> None: """Remove callbacks.""" - self._client.unregister_state_update_callbacks(self._state_update_callback) + await self._client.unregister_state_update_callbacks( + self._state_update_callback + ) diff --git a/homeassistant/components/russound_rio/manifest.json b/homeassistant/components/russound_rio/manifest.json index ab77ca3ab6a..96fc0fb53db 100644 --- a/homeassistant/components/russound_rio/manifest.json +++ b/homeassistant/components/russound_rio/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_push", "loggers": ["aiorussound"], "quality_scale": 
"silver", - "requirements": ["aiorussound==4.1.0"] + "requirements": ["aiorussound==4.0.5"] } diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index 45818d3e25b..316e4d2be7c 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -5,7 +5,7 @@ from __future__ import annotations import logging from aiorussound import Controller -from aiorussound.models import PlayStatus, Source +from aiorussound.models import Source from aiorussound.rio import ZoneControlSurface from homeassistant.components.media_player import ( @@ -132,18 +132,11 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): def state(self) -> MediaPlayerState | None: """Return the state of the device.""" status = self._zone.status - play_status = self._source.play_status - if not status: + if status == "ON": + return MediaPlayerState.ON + if status == "OFF": return MediaPlayerState.OFF - if play_status == PlayStatus.PLAYING: - return MediaPlayerState.PLAYING - if play_status == PlayStatus.PAUSED: - return MediaPlayerState.PAUSED - if play_status == PlayStatus.TRANSITIONING: - return MediaPlayerState.BUFFERING - if play_status == PlayStatus.STOPPED: - return MediaPlayerState.IDLE - return MediaPlayerState.ON + return None @property def source(self): @@ -182,7 +175,7 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): Value is returned based on a range (0..50). Therefore float divide by 50 to get to the required range. """ - return self._zone.volume / 50.0 + return float(self._zone.volume or "0") / 50.0 @command async def async_turn_off(self) -> None: diff --git a/homeassistant/components/sense/manifest.json b/homeassistant/components/sense/manifest.json index df2317c3a6c..72d1d045c9a 100644 --- a/homeassistant/components/sense/manifest.json +++ b/homeassistant/components/sense/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/sense", "iot_class": "cloud_polling", "loggers": ["sense_energy"], - "requirements": ["sense-energy==0.13.3"] + "requirements": ["sense-energy==0.13.2"] } diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index f4573f873a2..da0b48a23a0 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -17,7 +17,6 @@ from homeassistant.const import ( SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, UnitOfApparentPower, - UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -48,7 +47,6 @@ from homeassistant.helpers.deprecation import ( ) from homeassistant.util.unit_conversion import ( BaseUnitConverter, - BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -129,12 +127,6 @@ class SensorDeviceClass(StrEnum): Unit of measurement: `%` """ - BLOOD_GLUCOSE_CONCENTRATION = "blood_glucose_concentration" - """Blood glucose concentration. - - Unit of measurement: `mg/dL`, `mmol/L` - """ - CO = "carbon_monoxide" """Carbon Monoxide gas concentration. @@ -190,7 +182,7 @@ class SensorDeviceClass(StrEnum): Use this device class for sensors measuring energy consumption, for example electric energy consumption. 
- Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` + Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `Wh`, `kWh`, `MWh`, `cal`, `kcal`, `Mcal`, `Gcal` """ ENERGY_STORAGE = "energy_storage" @@ -199,7 +191,7 @@ class SensorDeviceClass(StrEnum): Use this device class for sensors measuring stored energy, for example the amount of electric energy currently stored in a battery or the capacity of a battery. - Unit of measurement: `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `MJ`, `GJ` + Unit of measurement: `Wh`, `kWh`, `MWh`, `MJ`, `GJ` """ FREQUENCY = "frequency" @@ -307,7 +299,7 @@ class SensorDeviceClass(StrEnum): POWER = "power" """Power. - Unit of measurement: `W`, `kW`, `MW`, `GW`, `TW` + Unit of measurement: `W`, `kW` """ PRECIPITATION = "precipitation" @@ -501,7 +493,6 @@ STATE_CLASSES: Final[list[str]] = [cls.value for cls in SensorStateClass] UNIT_CONVERTERS: dict[SensorDeviceClass | str | None, type[BaseUnitConverter]] = { SensorDeviceClass.ATMOSPHERIC_PRESSURE: PressureConverter, - SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: BloodGlucoseConcentrationConverter, SensorDeviceClass.CONDUCTIVITY: ConductivityConverter, SensorDeviceClass.CURRENT: ElectricCurrentConverter, SensorDeviceClass.DATA_RATE: DataRateConverter, @@ -533,7 +524,6 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = { SensorDeviceClass.AQI: {None}, SensorDeviceClass.ATMOSPHERIC_PRESSURE: set(UnitOfPressure), SensorDeviceClass.BATTERY: {PERCENTAGE}, - SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration), SensorDeviceClass.CO: {CONCENTRATION_PARTS_PER_MILLION}, SensorDeviceClass.CO2: {CONCENTRATION_PARTS_PER_MILLION}, SensorDeviceClass.CONDUCTIVITY: set(UnitOfConductivity), @@ -609,7 +599,6 @@ DEVICE_CLASS_STATE_CLASSES: dict[SensorDeviceClass, set[SensorStateClass]] = { SensorDeviceClass.AQI: {SensorStateClass.MEASUREMENT}, SensorDeviceClass.ATMOSPHERIC_PRESSURE: {SensorStateClass.MEASUREMENT}, SensorDeviceClass.BATTERY: {SensorStateClass.MEASUREMENT}, - SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: {SensorStateClass.MEASUREMENT}, SensorDeviceClass.CO: {SensorStateClass.MEASUREMENT}, SensorDeviceClass.CO2: {SensorStateClass.MEASUREMENT}, SensorDeviceClass.CONDUCTIVITY: {SensorStateClass.MEASUREMENT}, diff --git a/homeassistant/components/sensor/device_condition.py b/homeassistant/components/sensor/device_condition.py index 56ecb36adb3..f2b51899312 100644 --- a/homeassistant/components/sensor/device_condition.py +++ b/homeassistant/components/sensor/device_condition.py @@ -37,7 +37,6 @@ CONF_IS_APPARENT_POWER = "is_apparent_power" CONF_IS_AQI = "is_aqi" CONF_IS_ATMOSPHERIC_PRESSURE = "is_atmospheric_pressure" CONF_IS_BATTERY_LEVEL = "is_battery_level" -CONF_IS_BLOOD_GLUCOSE_CONCENTRATION = "is_blood_glucose_concentration" CONF_IS_CO = "is_carbon_monoxide" CONF_IS_CO2 = "is_carbon_dioxide" CONF_IS_CONDUCTIVITY = "is_conductivity" @@ -88,9 +87,6 @@ ENTITY_CONDITIONS = { SensorDeviceClass.AQI: [{CONF_TYPE: CONF_IS_AQI}], SensorDeviceClass.ATMOSPHERIC_PRESSURE: [{CONF_TYPE: CONF_IS_ATMOSPHERIC_PRESSURE}], SensorDeviceClass.BATTERY: [{CONF_TYPE: CONF_IS_BATTERY_LEVEL}], - SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: [ - {CONF_TYPE: CONF_IS_BLOOD_GLUCOSE_CONCENTRATION} - ], SensorDeviceClass.CO: [{CONF_TYPE: CONF_IS_CO}], SensorDeviceClass.CO2: [{CONF_TYPE: CONF_IS_CO2}], SensorDeviceClass.CONDUCTIVITY: [{CONF_TYPE: CONF_IS_CONDUCTIVITY}], @@ -155,7 +151,6 @@ CONDITION_SCHEMA = vol.All( CONF_IS_AQI, 
CONF_IS_ATMOSPHERIC_PRESSURE, CONF_IS_BATTERY_LEVEL, - CONF_IS_BLOOD_GLUCOSE_CONCENTRATION, CONF_IS_CO, CONF_IS_CO2, CONF_IS_CONDUCTIVITY, diff --git a/homeassistant/components/sensor/device_trigger.py b/homeassistant/components/sensor/device_trigger.py index ffee10d9f40..b07b3fac11e 100644 --- a/homeassistant/components/sensor/device_trigger.py +++ b/homeassistant/components/sensor/device_trigger.py @@ -36,7 +36,6 @@ CONF_APPARENT_POWER = "apparent_power" CONF_AQI = "aqi" CONF_ATMOSPHERIC_PRESSURE = "atmospheric_pressure" CONF_BATTERY_LEVEL = "battery_level" -CONF_BLOOD_GLUCOSE_CONCENTRATION = "blood_glucose_concentration" CONF_CO = "carbon_monoxide" CONF_CO2 = "carbon_dioxide" CONF_CONDUCTIVITY = "conductivity" @@ -87,9 +86,6 @@ ENTITY_TRIGGERS = { SensorDeviceClass.AQI: [{CONF_TYPE: CONF_AQI}], SensorDeviceClass.ATMOSPHERIC_PRESSURE: [{CONF_TYPE: CONF_ATMOSPHERIC_PRESSURE}], SensorDeviceClass.BATTERY: [{CONF_TYPE: CONF_BATTERY_LEVEL}], - SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: [ - {CONF_TYPE: CONF_BLOOD_GLUCOSE_CONCENTRATION} - ], SensorDeviceClass.CO: [{CONF_TYPE: CONF_CO}], SensorDeviceClass.CO2: [{CONF_TYPE: CONF_CO2}], SensorDeviceClass.CONDUCTIVITY: [{CONF_TYPE: CONF_CONDUCTIVITY}], @@ -155,7 +151,6 @@ TRIGGER_SCHEMA = vol.All( CONF_AQI, CONF_ATMOSPHERIC_PRESSURE, CONF_BATTERY_LEVEL, - CONF_BLOOD_GLUCOSE_CONCENTRATION, CONF_CO, CONF_CO2, CONF_CONDUCTIVITY, diff --git a/homeassistant/components/sensor/icons.json b/homeassistant/components/sensor/icons.json index ea4c902e665..6132fcbc1e9 100644 --- a/homeassistant/components/sensor/icons.json +++ b/homeassistant/components/sensor/icons.json @@ -12,9 +12,6 @@ "atmospheric_pressure": { "default": "mdi:thermometer-lines" }, - "blood_glucose_concentration": { - "default": "mdi:spoon-sugar" - }, "carbon_dioxide": { "default": "mdi:molecule-co2" }, diff --git a/homeassistant/components/sensor/strings.json b/homeassistant/components/sensor/strings.json index 6d529e72c3b..71bead342c4 100644 --- a/homeassistant/components/sensor/strings.json +++ b/homeassistant/components/sensor/strings.json @@ -6,7 +6,6 @@ "is_aqi": "Current {entity_name} air quality index", "is_atmospheric_pressure": "Current {entity_name} atmospheric pressure", "is_battery_level": "Current {entity_name} battery level", - "is_blood_glucose_concentration": "Current {entity_name} blood glucose concentration", "is_carbon_monoxide": "Current {entity_name} carbon monoxide concentration level", "is_carbon_dioxide": "Current {entity_name} carbon dioxide concentration level", "is_conductivity": "Current {entity_name} conductivity", @@ -57,7 +56,6 @@ "aqi": "{entity_name} air quality index changes", "atmospheric_pressure": "{entity_name} atmospheric pressure changes", "battery_level": "{entity_name} battery level changes", - "blood_glucose_concentration": "{entity_name} blood glucose concentration changes", "carbon_monoxide": "{entity_name} carbon monoxide concentration changes", "carbon_dioxide": "{entity_name} carbon dioxide concentration changes", "conductivity": "{entity_name} conductivity changes", @@ -151,9 +149,6 @@ "battery": { "name": "Battery" }, - "blood_glucose_concentration": { - "name": "Blood glucose concentration" - }, "carbon_monoxide": { "name": "Carbon monoxide" }, diff --git a/homeassistant/components/seven_segments/manifest.json b/homeassistant/components/seven_segments/manifest.json index af00a1fdfed..2f39644d6d3 100644 --- a/homeassistant/components/seven_segments/manifest.json +++ b/homeassistant/components/seven_segments/manifest.json @@ -4,5 +4,5 @@ 
"codeowners": ["@fabaff"], "documentation": "https://www.home-assistant.io/integrations/seven_segments", "iot_class": "local_polling", - "requirements": ["Pillow==11.0.0"] + "requirements": ["Pillow==10.4.0"] } diff --git a/homeassistant/components/seventeentrack/services.py b/homeassistant/components/seventeentrack/services.py index 54c23e6d619..0833bc0a97b 100644 --- a/homeassistant/components/seventeentrack/services.py +++ b/homeassistant/components/seventeentrack/services.py @@ -1,8 +1,8 @@ """Services for the seventeentrack integration.""" -from typing import Any, Final +from typing import Final -from pyseventeentrack.package import PACKAGE_STATUS_MAP, Package +from pyseventeentrack.package import PACKAGE_STATUS_MAP import voluptuous as vol from homeassistant.config_entries import ConfigEntry, ConfigEntryState @@ -81,7 +81,18 @@ def setup_services(hass: HomeAssistant) -> None: return { "packages": [ - package_to_dict(package) + { + ATTR_DESTINATION_COUNTRY: package.destination_country, + ATTR_ORIGIN_COUNTRY: package.origin_country, + ATTR_PACKAGE_TYPE: package.package_type, + ATTR_TRACKING_INFO_LANGUAGE: package.tracking_info_language, + ATTR_TRACKING_NUMBER: package.tracking_number, + ATTR_LOCATION: package.location, + ATTR_STATUS: package.status, + ATTR_TIMESTAMP: package.timestamp.isoformat(), + ATTR_INFO_TEXT: package.info_text, + ATTR_FRIENDLY_NAME: package.friendly_name, + } for package in live_packages if slugify(package.status) in package_states or package_states == [] ] @@ -99,22 +110,6 @@ def setup_services(hass: HomeAssistant) -> None: await seventeen_coordinator.client.profile.archive_package(tracking_number) - def package_to_dict(package: Package) -> dict[str, Any]: - result = { - ATTR_DESTINATION_COUNTRY: package.destination_country, - ATTR_ORIGIN_COUNTRY: package.origin_country, - ATTR_PACKAGE_TYPE: package.package_type, - ATTR_TRACKING_INFO_LANGUAGE: package.tracking_info_language, - ATTR_TRACKING_NUMBER: package.tracking_number, - ATTR_LOCATION: package.location, - ATTR_STATUS: package.status, - ATTR_INFO_TEXT: package.info_text, - ATTR_FRIENDLY_NAME: package.friendly_name, - } - if timestamp := package.timestamp: - result[ATTR_TIMESTAMP] = timestamp.isoformat() - return result - async def _validate_service(config_entry_id): entry: ConfigEntry | None = hass.config_entries.async_get_entry(config_entry_id) if not entry: diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index a66fbb20f48..6332e139244 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -603,7 +603,7 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): async def _async_update_data(self) -> None: """Fetch data.""" - if self.update_sleep_period() or self.hass.is_stopping: + if self.update_sleep_period(): return if self.sleep_period: diff --git a/homeassistant/components/shelly/update.py b/homeassistant/components/shelly/update.py index f22547acf50..fb586ae8b85 100644 --- a/homeassistant/components/shelly/update.py +++ b/homeassistant/components/shelly/update.py @@ -238,8 +238,7 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): ) -> None: """Initialize update entity.""" super().__init__(coordinator, key, attribute, description) - self._ota_in_progress = False - self._ota_progress_percentage: int | None = None + self._ota_in_progress: bool | int = False self._attr_release_url = get_release_url( coordinator.device.gen, coordinator.model, description.beta ) @@ 
-257,12 +256,11 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): if self.in_progress is not False: event_type = event["event"] if event_type == OTA_BEGIN: - self._ota_progress_percentage = 0 + self._ota_in_progress = 0 elif event_type == OTA_PROGRESS: - self._ota_progress_percentage = event["progress_percent"] + self._ota_in_progress = event["progress_percent"] elif event_type in (OTA_ERROR, OTA_SUCCESS): self._ota_in_progress = False - self._ota_progress_percentage = None self.async_write_ha_state() @property @@ -280,15 +278,10 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): return self.installed_version @property - def in_progress(self) -> bool: + def in_progress(self) -> bool | int: """Update installation in progress.""" return self._ota_in_progress - @property - def update_percentage(self) -> int | None: - """Update installation progress.""" - return self._ota_progress_percentage - async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: @@ -317,7 +310,6 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): await self.coordinator.async_shutdown_device_and_start_reauth() else: self._ota_in_progress = True - self._ota_progress_percentage = None LOGGER.debug("OTA update call for %s successful", self.coordinator.name) diff --git a/homeassistant/components/sia/config_flow.py b/homeassistant/components/sia/config_flow.py index a23978145e7..c421151f7bb 100644 --- a/homeassistant/components/sia/config_flow.py +++ b/homeassistant/components/sia/config_flow.py @@ -103,7 +103,7 @@ class SIAConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> SIAOptionsFlowHandler: """Get the options flow for this handler.""" - return SIAOptionsFlowHandler(config_entry) + return SIAOptionsFlowHandler() def __init__(self) -> None: """Initialize the config flow.""" @@ -179,9 +179,8 @@ class SIAConfigFlow(ConfigFlow, domain=DOMAIN): class SIAOptionsFlowHandler(OptionsFlow): """Handle SIA options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize SIA options flow.""" - self.options = deepcopy(dict(config_entry.options)) self.hub: SIAHub | None = None self.accounts_todo: list = [] diff --git a/homeassistant/components/sighthound/manifest.json b/homeassistant/components/sighthound/manifest.json index 7d08367cf7d..875c98acb6d 100644 --- a/homeassistant/components/sighthound/manifest.json +++ b/homeassistant/components/sighthound/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/sighthound", "iot_class": "cloud_polling", "loggers": ["simplehound"], - "requirements": ["Pillow==11.0.0", "simplehound==0.3"] + "requirements": ["Pillow==10.4.0", "simplehound==0.3"] } diff --git a/homeassistant/components/sky_remote/__init__.py b/homeassistant/components/sky_remote/__init__.py deleted file mode 100644 index 4daad78c558..00000000000 --- a/homeassistant/components/sky_remote/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -"""The Sky Remote Control integration.""" - -import logging - -from skyboxremote import RemoteControl, SkyBoxConnectionError - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST, CONF_PORT, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady - -PLATFORMS = [Platform.REMOTE] - -_LOGGER = logging.getLogger(__name__) - - -type SkyRemoteConfigEntry = ConfigEntry[RemoteControl] - - -async def async_setup_entry(hass: 
HomeAssistant, entry: SkyRemoteConfigEntry) -> bool: - """Set up Sky remote.""" - host = entry.data[CONF_HOST] - port = entry.data[CONF_PORT] - - _LOGGER.debug("Setting up Host: %s, Port: %s", host, port) - remote = RemoteControl(host, port) - try: - await remote.check_connectable() - except SkyBoxConnectionError as e: - raise ConfigEntryNotReady from e - - entry.runtime_data = remote - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True - - -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Unload a config entry.""" - return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/sky_remote/config_flow.py b/homeassistant/components/sky_remote/config_flow.py deleted file mode 100644 index a55dfb2a52b..00000000000 --- a/homeassistant/components/sky_remote/config_flow.py +++ /dev/null @@ -1,64 +0,0 @@ -"""Config flow for sky_remote.""" - -import logging -from typing import Any - -from skyboxremote import RemoteControl, SkyBoxConnectionError -import voluptuous as vol - -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_PORT -import homeassistant.helpers.config_validation as cv - -from .const import DEFAULT_PORT, DOMAIN, LEGACY_PORT - -DATA_SCHEMA = vol.Schema( - { - vol.Required(CONF_HOST): cv.string, - } -) - - -async def async_find_box_port(host: str) -> int: - """Find port box uses for communication.""" - logging.debug("Attempting to find port to connect to %s on", host) - remote = RemoteControl(host, DEFAULT_PORT) - try: - await remote.check_connectable() - except SkyBoxConnectionError: - # Try legacy port if the default one failed - remote = RemoteControl(host, LEGACY_PORT) - await remote.check_connectable() - return LEGACY_PORT - return DEFAULT_PORT - - -class SkyRemoteConfigFlow(ConfigFlow, domain=DOMAIN): - """Handle a config flow for Sky Remote.""" - - VERSION = 1 - MINOR_VERSION = 1 - - async def async_step_user( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle the user step.""" - - errors: dict[str, str] = {} - if user_input is not None: - logging.debug("user_input: %s", user_input) - self._async_abort_entries_match(user_input) - try: - port = await async_find_box_port(user_input[CONF_HOST]) - except SkyBoxConnectionError: - logging.exception("while finding port of skybox") - errors["base"] = "cannot_connect" - else: - return self.async_create_entry( - title=user_input[CONF_HOST], - data={**user_input, CONF_PORT: port}, - ) - - return self.async_show_form( - step_id="user", data_schema=DATA_SCHEMA, errors=errors - ) diff --git a/homeassistant/components/sky_remote/const.py b/homeassistant/components/sky_remote/const.py deleted file mode 100644 index e67744a741b..00000000000 --- a/homeassistant/components/sky_remote/const.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Constants.""" - -DOMAIN = "sky_remote" - -DEFAULT_PORT = 49160 -LEGACY_PORT = 5900 diff --git a/homeassistant/components/sky_remote/manifest.json b/homeassistant/components/sky_remote/manifest.json deleted file mode 100644 index b00ff309b10..00000000000 --- a/homeassistant/components/sky_remote/manifest.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "domain": "sky_remote", - "name": "Sky Remote Control", - "codeowners": ["@dunnmj", "@saty9"], - "config_flow": true, - "documentation": "https://www.home-assistant.io/integrations/sky_remote", - "integration_type": "device", - "iot_class": "assumed_state", - 
"requirements": ["skyboxremote==0.0.6"] -} diff --git a/homeassistant/components/sky_remote/remote.py b/homeassistant/components/sky_remote/remote.py deleted file mode 100644 index 05a464f73a6..00000000000 --- a/homeassistant/components/sky_remote/remote.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Home Assistant integration to control a sky box using the remote platform.""" - -from collections.abc import Iterable -import logging -from typing import Any - -from skyboxremote import VALID_KEYS, RemoteControl - -from homeassistant.components.remote import RemoteEntity -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from . import SkyRemoteConfigEntry -from .const import DOMAIN - -_LOGGER = logging.getLogger(__name__) - - -async def async_setup_entry( - hass: HomeAssistant, - config: SkyRemoteConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the Sky remote platform.""" - async_add_entities( - [SkyRemote(config.runtime_data, config.entry_id)], - True, - ) - - -class SkyRemote(RemoteEntity): - """Representation of a Sky Remote.""" - - _attr_has_entity_name = True - _attr_name = None - - def __init__(self, remote: RemoteControl, unique_id: str) -> None: - """Initialize the Sky Remote.""" - self._remote = remote - self._attr_unique_id = unique_id - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, unique_id)}, - manufacturer="SKY", - model="Sky Box", - name=remote.host, - ) - - def turn_on(self, activity: str | None = None, **kwargs: Any) -> None: - """Send the power on command.""" - self.send_command(["sky"]) - - def turn_off(self, activity: str | None = None, **kwargs: Any) -> None: - """Send the power command.""" - self.send_command(["power"]) - - def send_command(self, command: Iterable[str], **kwargs: Any) -> None: - """Send a list of commands to the device.""" - for cmd in command: - if cmd not in VALID_KEYS: - raise ServiceValidationError( - f"{cmd} is not in Valid Keys: {VALID_KEYS}" - ) - try: - self._remote.send_keys(command) - except ValueError as err: - _LOGGER.error("Invalid command: %s. 
Error: %s", command, err) - return - _LOGGER.debug("Successfully sent command %s", command) diff --git a/homeassistant/components/sky_remote/strings.json b/homeassistant/components/sky_remote/strings.json deleted file mode 100644 index af794490c43..00000000000 --- a/homeassistant/components/sky_remote/strings.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "config": { - "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" - }, - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" - }, - "step": { - "user": { - "title": "Add Sky Remote", - "data": { - "host": "[%key:common::config_flow::data::host%]" - }, - "data_description": { - "host": "Hostname or IP address of your Sky device" - } - } - } - } -} diff --git a/homeassistant/components/smarty/__init__.py b/homeassistant/components/smarty/__init__.py index 0d043804c3d..0e5ca216621 100644 --- a/homeassistant/components/smarty/__init__.py +++ b/homeassistant/components/smarty/__init__.py @@ -30,13 +30,7 @@ CONFIG_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) -PLATFORMS = [ - Platform.BINARY_SENSOR, - Platform.BUTTON, - Platform.FAN, - Platform.SENSOR, - Platform.SWITCH, -] +PLATFORMS = [Platform.BINARY_SENSOR, Platform.FAN, Platform.SENSOR, Platform.SWITCH] async def async_setup(hass: HomeAssistant, hass_config: ConfigType) -> bool: diff --git a/homeassistant/components/smarty/button.py b/homeassistant/components/smarty/button.py deleted file mode 100644 index b8e31cf6fc8..00000000000 --- a/homeassistant/components/smarty/button.py +++ /dev/null @@ -1,74 +0,0 @@ -"""Platform to control a Salda Smarty XP/XV ventilation unit.""" - -from __future__ import annotations - -from collections.abc import Callable -from dataclasses import dataclass -import logging -from typing import Any - -from pysmarty2 import Smarty - -from homeassistant.components.button import ButtonEntity, ButtonEntityDescription -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .coordinator import SmartyConfigEntry, SmartyCoordinator -from .entity import SmartyEntity - -_LOGGER = logging.getLogger(__name__) - - -@dataclass(frozen=True, kw_only=True) -class SmartyButtonDescription(ButtonEntityDescription): - """Class describing Smarty button.""" - - press_fn: Callable[[Smarty], bool | None] - - -ENTITIES: tuple[SmartyButtonDescription, ...] 
= ( - SmartyButtonDescription( - key="reset_filters_timer", - translation_key="reset_filters_timer", - press_fn=lambda smarty: smarty.reset_filters_timer(), - ), -) - - -async def async_setup_entry( - hass: HomeAssistant, - entry: SmartyConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the Smarty Button Platform.""" - - coordinator = entry.runtime_data - - async_add_entities( - SmartyButton(coordinator, description) for description in ENTITIES - ) - - -class SmartyButton(SmartyEntity, ButtonEntity): - """Representation of a Smarty Button.""" - - entity_description: SmartyButtonDescription - - def __init__( - self, - coordinator: SmartyCoordinator, - entity_description: SmartyButtonDescription, - ) -> None: - """Initialize the entity.""" - super().__init__(coordinator) - self.entity_description = entity_description - self._attr_unique_id = ( - f"{coordinator.config_entry.entry_id}_{entity_description.key}" - ) - - async def async_press(self, **kwargs: Any) -> None: - """Press the button.""" - await self.hass.async_add_executor_job( - self.entity_description.press_fn, self.coordinator.client - ) - await self.coordinator.async_refresh() diff --git a/homeassistant/components/smarty/strings.json b/homeassistant/components/smarty/strings.json index 341a300a26e..5553a1c0135 100644 --- a/homeassistant/components/smarty/strings.json +++ b/homeassistant/components/smarty/strings.json @@ -28,10 +28,6 @@ "deprecated_yaml_import_issue_auth_error": { "title": "YAML import failed due to an authentication error", "description": "Configuring {integration_title} using YAML is being removed but there was an authentication error while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually." - }, - "deprecated_yaml_import_issue_cannot_connect": { - "title": "YAML import failed due to a connection error", - "description": "Configuring {integration_title} using YAML is being removed but there was a connect error while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually." 
} }, "entity": { @@ -46,11 +42,6 @@ "name": "Boost state" } }, - "button": { - "reset_filters_timer": { - "name": "Reset filters timer" - } - }, "sensor": { "supply_air_temperature": { "name": "Supply air temperature" diff --git a/homeassistant/components/somfy_mylink/config_flow.py b/homeassistant/components/somfy_mylink/config_flow.py index c2d85160175..f92c4909dd5 100644 --- a/homeassistant/components/somfy_mylink/config_flow.py +++ b/homeassistant/components/somfy_mylink/config_flow.py @@ -2,7 +2,6 @@ from __future__ import annotations -from copy import deepcopy import logging from typing import Any @@ -122,15 +121,14 @@ class SomfyConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for somfy_mylink.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize options flow.""" - self.options = deepcopy(dict(config_entry.options)) self._target_id: str | None = None @callback diff --git a/homeassistant/components/sonarr/config_flow.py b/homeassistant/components/sonarr/config_flow.py index e1cedba10e7..c868c04f7d0 100644 --- a/homeassistant/components/sonarr/config_flow.py +++ b/homeassistant/components/sonarr/config_flow.py @@ -93,13 +93,6 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN): errors = {} if user_input is not None: - # aiopyarr defaults to the service port if one isn't given - # this is counter to standard practice where http = 80 - # and https = 443. - if CONF_URL in user_input: - url = yarl.URL(user_input[CONF_URL]) - user_input[CONF_URL] = f"{url.scheme}://{url.host}:{url.port}{url.path}" - if self.source == SOURCE_REAUTH: user_input = {**self._get_reauth_entry().data, **user_input} diff --git a/homeassistant/components/sonos/manifest.json b/homeassistant/components/sonos/manifest.json index 76a7d0bfa91..d6c5eb298d8 100644 --- a/homeassistant/components/sonos/manifest.json +++ b/homeassistant/components/sonos/manifest.json @@ -8,7 +8,7 @@ "documentation": "https://www.home-assistant.io/integrations/sonos", "iot_class": "local_push", "loggers": ["soco"], - "requirements": ["soco==0.30.6", "sonos-websocket==0.1.3"], + "requirements": ["soco==0.30.4", "sonos-websocket==0.1.3"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:ZonePlayer:1" diff --git a/homeassistant/components/spotify/manifest.json b/homeassistant/components/spotify/manifest.json index 8f8f7e0d588..8cf8d735553 100644 --- a/homeassistant/components/spotify/manifest.json +++ b/homeassistant/components/spotify/manifest.json @@ -9,6 +9,6 @@ "iot_class": "cloud_polling", "loggers": ["spotipy"], "quality_scale": "silver", - "requirements": ["spotifyaio==0.8.8"], + "requirements": ["spotifyaio==0.8.5"], "zeroconf": ["_spotify-connect._tcp.local."] } diff --git a/homeassistant/components/srp_energy/strings.json b/homeassistant/components/srp_energy/strings.json index eca4f465435..191d10a70dd 100644 --- a/homeassistant/components/srp_energy/strings.json +++ b/homeassistant/components/srp_energy/strings.json @@ -17,8 +17,7 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", - "unknown": "Unexpected error" + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" } }, "entity": { diff --git 
a/homeassistant/components/statistics/strings.json b/homeassistant/components/statistics/strings.json index 3e6fec9d986..a060c88da24 100644 --- a/homeassistant/components/statistics/strings.json +++ b/homeassistant/components/statistics/strings.json @@ -23,10 +23,10 @@ "state_characteristic": { "description": "Read the documention for further details on available options and how to use them.", "data": { - "state_characteristic": "Statistic characteristic" + "state_characteristic": "State_characteristic" }, "data_description": { - "state_characteristic": "The statistic characteristic that should be used as the state of the sensor." + "state_characteristic": "The characteristic that should be used as the state of the statistics sensor." } }, "options": { diff --git a/homeassistant/components/steam_online/config_flow.py b/homeassistant/components/steam_online/config_flow.py index 69009fca8c4..605f27edb19 100644 --- a/homeassistant/components/steam_online/config_flow.py +++ b/homeassistant/components/steam_online/config_flow.py @@ -42,7 +42,7 @@ class SteamFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: SteamConfigEntry, ) -> SteamOptionsFlowHandler: """Get the options flow for this handler.""" - return SteamOptionsFlowHandler(config_entry) + return SteamOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -121,10 +121,6 @@ def _batch_ids(ids: list[str]) -> Iterator[list[str]]: class SteamOptionsFlowHandler(OptionsFlow): """Handle Steam client options.""" - def __init__(self, entry: SteamConfigEntry) -> None: - """Initialize options flow.""" - self.options = dict(entry.options) - async def async_step_init( self, user_input: dict[str, dict[str, str]] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/stream/manifest.json b/homeassistant/components/stream/manifest.json index fdf81d99e65..23494a06744 100644 --- a/homeassistant/components/stream/manifest.json +++ b/homeassistant/components/stream/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["PyTurboJPEG==1.7.5", "av==13.1.0", "numpy==2.1.3"] + "requirements": ["PyTurboJPEG==1.7.5", "av==13.1.0", "numpy==1.26.4"] } diff --git a/homeassistant/components/subaru/strings.json b/homeassistant/components/subaru/strings.json index 00da729dccd..78625192e4a 100644 --- a/homeassistant/components/subaru/strings.json +++ b/homeassistant/components/subaru/strings.json @@ -37,13 +37,13 @@ "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "incorrect_pin": "Incorrect PIN", "bad_pin_format": "PIN should be 4 digits", + "two_factor_request_failed": "Request for 2FA code failed, please try again", "bad_validation_code_format": "Validation code should be 6 digits", "incorrect_validation_code": "Incorrect validation code" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "two_factor_request_failed": "Request for 2FA code failed, please try again" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, "options": { diff --git a/homeassistant/components/suez_water/config_flow.py b/homeassistant/components/suez_water/config_flow.py index ac09cf4a1d3..28b211dc808 100644 --- a/homeassistant/components/suez_water/config_flow.py +++ b/homeassistant/components/suez_water/config_flow.py @@ -5,7 +5,8 @@ from __future__ import annotations import logging 
from typing import Any -from pysuez import PySuezError, SuezClient +from pysuez import SuezClient +from pysuez.client import PySuezError import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -20,34 +21,28 @@ STEP_USER_DATA_SCHEMA = vol.Schema( { vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, - vol.Optional(CONF_COUNTER_ID): str, + vol.Required(CONF_COUNTER_ID): str, } ) -async def validate_input(data: dict[str, Any]) -> None: +def validate_input(data: dict[str, Any]) -> None: """Validate the user input allows us to connect. Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. """ try: - counter_id = data.get(CONF_COUNTER_ID) client = SuezClient( data[CONF_USERNAME], data[CONF_PASSWORD], - counter_id, + data[CONF_COUNTER_ID], + provider=None, ) - if not await client.check_credentials(): + if not client.check_credentials(): raise InvalidAuth except PySuezError as ex: raise CannotConnect from ex - if counter_id is None: - try: - data[CONF_COUNTER_ID] = await client.find_counter() - except PySuezError as ex: - raise CounterNotFound from ex - class SuezWaterConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Suez Water.""" @@ -63,13 +58,11 @@ class SuezWaterConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(user_input[CONF_USERNAME]) self._abort_if_unique_id_configured() try: - await validate_input(user_input) + await self.hass.async_add_executor_job(validate_input, user_input) except CannotConnect: errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" - except CounterNotFound: - errors["base"] = "counter_not_found" except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" @@ -89,7 +82,3 @@ class CannotConnect(HomeAssistantError): class InvalidAuth(HomeAssistantError): """Error to indicate there is invalid auth.""" - - -class CounterNotFound(HomeAssistantError): - """Error to indicate we cannot automatically found the counter id.""" diff --git a/homeassistant/components/suez_water/coordinator.py b/homeassistant/components/suez_water/coordinator.py index 224929c606e..adcbd39c01b 100644 --- a/homeassistant/components/suez_water/coordinator.py +++ b/homeassistant/components/suez_water/coordinator.py @@ -1,46 +1,39 @@ """Suez water update coordinator.""" -from collections.abc import Mapping +import asyncio from dataclasses import dataclass from datetime import date -from typing import Any -from pysuez import PySuezError, SuezClient +from pysuez import SuezClient +from pysuez.client import PySuezError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import _LOGGER, HomeAssistant -from homeassistant.exceptions import ConfigEntryError +from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import CONF_COUNTER_ID, DATA_REFRESH_INTERVAL, DOMAIN @dataclass -class SuezWaterAggregatedAttributes: - """Class containing aggregated sensor extra attributes.""" +class AggregatedSensorData: + """Hold suez water aggregated sensor data.""" - this_month_consumption: dict[date, float] - previous_month_consumption: dict[date, float] - last_year_overall: dict[str, float] - this_year_overall: dict[str, float] + value: float + current_month: dict[date, float] + previous_month: dict[date, float] + previous_year: dict[str, float] + 
current_year: dict[str, float] history: dict[date, float] highest_monthly_consumption: float + attribution: str -@dataclass -class SuezWaterData: - """Class used to hold all fetch data from suez api.""" - - aggregated_value: float - aggregated_attr: Mapping[str, Any] - price: float - - -class SuezWaterCoordinator(DataUpdateCoordinator[SuezWaterData]): +class SuezWaterCoordinator(DataUpdateCoordinator[AggregatedSensorData]): """Suez water coordinator.""" - _suez_client: SuezClient + _sync_client: SuezClient config_entry: ConfigEntry def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: @@ -55,34 +48,61 @@ class SuezWaterCoordinator(DataUpdateCoordinator[SuezWaterData]): ) async def _async_setup(self) -> None: - self._suez_client = SuezClient( - username=self.config_entry.data[CONF_USERNAME], - password=self.config_entry.data[CONF_PASSWORD], - counter_id=self.config_entry.data[CONF_COUNTER_ID], - ) - if not await self._suez_client.check_credentials(): - raise ConfigEntryError("Invalid credentials for suez water") + self._sync_client = await self.hass.async_add_executor_job(self._get_client) - async def _async_update_data(self) -> SuezWaterData: + async def _async_update_data(self) -> AggregatedSensorData: """Fetch data from API endpoint.""" + async with asyncio.timeout(30): + return await self.hass.async_add_executor_job(self._fetch_data) + + def _fetch_data(self) -> AggregatedSensorData: + """Fetch latest data from Suez.""" try: - aggregated = await self._suez_client.fetch_aggregated_data() - data = SuezWaterData( - aggregated_value=aggregated.value, - aggregated_attr={ - "this_month_consumption": aggregated.current_month, - "previous_month_consumption": aggregated.previous_month, - "highest_monthly_consumption": aggregated.highest_monthly_consumption, - "last_year_overall": aggregated.previous_year, - "this_year_overall": aggregated.current_year, - "history": aggregated.history, - }, - price=(await self._suez_client.get_price()).price, - ) + self._sync_client.update() except PySuezError as err: - _LOGGER.exception(err) raise UpdateFailed( f"Suez coordinator error communicating with API: {err}" ) from err - _LOGGER.debug("Successfully fetched suez data") - return data + current_month = {} + for item in self._sync_client.attributes["thisMonthConsumption"]: + current_month[item] = self._sync_client.attributes["thisMonthConsumption"][ + item + ] + previous_month = {} + for item in self._sync_client.attributes["previousMonthConsumption"]: + previous_month[item] = self._sync_client.attributes[ + "previousMonthConsumption" + ][item] + highest_monthly_consumption = self._sync_client.attributes[ + "highestMonthlyConsumption" + ] + previous_year = self._sync_client.attributes["lastYearOverAll"] + current_year = self._sync_client.attributes["thisYearOverAll"] + history = {} + for item in self._sync_client.attributes["history"]: + history[item] = self._sync_client.attributes["history"][item] + _LOGGER.debug("Retrieved consumption: " + str(self._sync_client.state)) + return AggregatedSensorData( + self._sync_client.state, + current_month, + previous_month, + previous_year, + current_year, + history, + highest_monthly_consumption, + self._sync_client.attributes["attribution"], + ) + + def _get_client(self) -> SuezClient: + try: + client = SuezClient( + username=self.config_entry.data[CONF_USERNAME], + password=self.config_entry.data[CONF_PASSWORD], + counter_id=self.config_entry.data[CONF_COUNTER_ID], + provider=None, + ) + if not client.check_credentials(): + raise 
ConfigEntryError + except PySuezError as ex: + raise ConfigEntryNotReady from ex + return client diff --git a/homeassistant/components/suez_water/manifest.json b/homeassistant/components/suez_water/manifest.json index 5eb05b9acb7..fa7f8f6461d 100644 --- a/homeassistant/components/suez_water/manifest.json +++ b/homeassistant/components/suez_water/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/suez_water", "iot_class": "cloud_polling", "loggers": ["pysuez", "regex"], - "requirements": ["pysuezV2==1.3.1"] + "requirements": ["pysuezV2==0.2.2"] } diff --git a/homeassistant/components/suez_water/sensor.py b/homeassistant/components/suez_water/sensor.py index 2ba699a9af1..22a61c835e1 100644 --- a/homeassistant/components/suez_water/sensor.py +++ b/homeassistant/components/suez_water/sensor.py @@ -2,53 +2,19 @@ from __future__ import annotations -from collections.abc import Callable, Mapping -from dataclasses import dataclass +from collections.abc import Mapping from typing import Any -from pysuez.const import ATTRIBUTION - -from homeassistant.components.sensor import ( - SensorDeviceClass, - SensorEntity, - SensorEntityDescription, -) +from homeassistant.components.sensor import SensorDeviceClass, SensorEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CURRENCY_EURO, UnitOfVolume +from homeassistant.const import UnitOfVolume from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import CONF_COUNTER_ID, DOMAIN -from .coordinator import SuezWaterCoordinator, SuezWaterData - - -@dataclass(frozen=True, kw_only=True) -class SuezWaterSensorEntityDescription(SensorEntityDescription): - """Describes Suez water sensor entity.""" - - value_fn: Callable[[SuezWaterData], float | str | None] - attr_fn: Callable[[SuezWaterData], Mapping[str, Any] | None] = lambda _: None - - -SENSORS: tuple[SuezWaterSensorEntityDescription, ...] 
= ( - SuezWaterSensorEntityDescription( - key="water_usage_yesterday", - translation_key="water_usage_yesterday", - native_unit_of_measurement=UnitOfVolume.LITERS, - device_class=SensorDeviceClass.WATER, - value_fn=lambda suez_data: suez_data.aggregated_value, - attr_fn=lambda suez_data: suez_data.aggregated_attr, - ), - SuezWaterSensorEntityDescription( - key="water_price", - translation_key="water_price", - native_unit_of_measurement=CURRENCY_EURO, - device_class=SensorDeviceClass.MONETARY, - value_fn=lambda suez_data: suez_data.price, - ), -) +from .coordinator import SuezWaterCoordinator async def async_setup_entry( @@ -58,42 +24,46 @@ async def async_setup_entry( ) -> None: """Set up Suez Water sensor from a config entry.""" coordinator = hass.data[DOMAIN][entry.entry_id] - counter_id = entry.data[CONF_COUNTER_ID] - - async_add_entities( - SuezWaterSensor(coordinator, counter_id, description) for description in SENSORS - ) + async_add_entities([SuezAggregatedSensor(coordinator, entry.data[CONF_COUNTER_ID])]) -class SuezWaterSensor(CoordinatorEntity[SuezWaterCoordinator], SensorEntity): - """Representation of a Suez water sensor.""" +class SuezAggregatedSensor(CoordinatorEntity[SuezWaterCoordinator], SensorEntity): + """Representation of a Sensor.""" _attr_has_entity_name = True - _attr_attribution = ATTRIBUTION - entity_description: SuezWaterSensorEntityDescription + _attr_translation_key = "water_usage_yesterday" + _attr_native_unit_of_measurement = UnitOfVolume.LITERS + _attr_device_class = SensorDeviceClass.WATER - def __init__( - self, - coordinator: SuezWaterCoordinator, - counter_id: int, - entity_description: SuezWaterSensorEntityDescription, - ) -> None: - """Initialize the suez water sensor entity.""" + def __init__(self, coordinator: SuezWaterCoordinator, counter_id: int) -> None: + """Initialize the data object.""" super().__init__(coordinator) - self._attr_unique_id = f"{counter_id}_{entity_description.key}" + self._attr_extra_state_attributes = {} + self._attr_unique_id = f"{counter_id}_water_usage_yesterday" self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, str(counter_id))}, entry_type=DeviceEntryType.SERVICE, manufacturer="Suez", ) - self.entity_description = entity_description @property - def native_value(self) -> float | str | None: - """Return the state of the sensor.""" - return self.entity_description.value_fn(self.coordinator.data) + def native_value(self) -> float: + """Return the current daily usage.""" + return self.coordinator.data.value @property - def extra_state_attributes(self) -> Mapping[str, Any] | None: - """Return extra state of the sensor.""" - return self.entity_description.attr_fn(self.coordinator.data) + def attribution(self) -> str: + """Return data attribution message.""" + return self.coordinator.data.attribution + + @property + def extra_state_attributes(self) -> Mapping[str, Any]: + """Return aggregated data.""" + return { + "this_month_consumption": self.coordinator.data.current_month, + "previous_month_consumption": self.coordinator.data.previous_month, + "highest_monthly_consumption": self.coordinator.data.highest_monthly_consumption, + "last_year_overall": self.coordinator.data.previous_year, + "this_year_overall": self.coordinator.data.current_year, + "history": self.coordinator.data.history, + } diff --git a/homeassistant/components/suez_water/strings.json b/homeassistant/components/suez_water/strings.json index 6be2affab97..f9abd70fc19 100644 --- a/homeassistant/components/suez_water/strings.json +++ 
b/homeassistant/components/suez_water/strings.json @@ -12,8 +12,7 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "unknown": "[%key:common::config_flow::error::unknown%]", - "counter_not_found": "Could not find counter id automatically" + "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" @@ -23,9 +22,6 @@ "sensor": { "water_usage_yesterday": { "name": "Water usage yesterday" - }, - "water_price": { - "name": "Water price" } } } diff --git a/homeassistant/components/switchbot_cloud/__init__.py b/homeassistant/components/switchbot_cloud/__init__.py index 625b4698301..a2738ed446f 100644 --- a/homeassistant/components/switchbot_cloud/__init__.py +++ b/homeassistant/components/switchbot_cloud/__init__.py @@ -85,9 +85,6 @@ def make_device_data( "Meter", "MeterPlus", "WoIOSensor", - "Hub 2", - "MeterPro", - "MeterPro(CO2)", ]: devices_data.sensors.append( prepare_device(hass, api, device, coordinators_by_id) diff --git a/homeassistant/components/switchbot_cloud/sensor.py b/homeassistant/components/switchbot_cloud/sensor.py index 90135ad96b3..ac612aea119 100644 --- a/homeassistant/components/switchbot_cloud/sensor.py +++ b/homeassistant/components/switchbot_cloud/sensor.py @@ -9,11 +9,7 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONCENTRATION_PARTS_PER_MILLION, - PERCENTAGE, - UnitOfTemperature, -) +from homeassistant.const import PERCENTAGE, UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -25,7 +21,6 @@ from .entity import SwitchBotCloudEntity SENSOR_TYPE_TEMPERATURE = "temperature" SENSOR_TYPE_HUMIDITY = "humidity" SENSOR_TYPE_BATTERY = "battery" -SENSOR_TYPE_CO2 = "CO2" METER_PLUS_SENSOR_DESCRIPTIONS = ( SensorEntityDescription( @@ -48,16 +43,6 @@ METER_PLUS_SENSOR_DESCRIPTIONS = ( ), ) -METER_PRO_CO2_SENSOR_DESCRIPTIONS = ( - *METER_PLUS_SENSOR_DESCRIPTIONS, - SensorEntityDescription( - key=SENSOR_TYPE_CO2, - native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, - state_class=SensorStateClass.MEASUREMENT, - device_class=SensorDeviceClass.CO2, - ), -) - async def async_setup_entry( hass: HomeAssistant, @@ -70,11 +55,7 @@ async def async_setup_entry( async_add_entities( SwitchBotCloudSensor(data.api, device, coordinator, description) for device, coordinator in data.devices.sensors - for description in ( - METER_PRO_CO2_SENSOR_DESCRIPTIONS - if device.device_type == "MeterPro(CO2)" - else METER_PLUS_SENSOR_DESCRIPTIONS - ) + for description in METER_PLUS_SENSOR_DESCRIPTIONS ) diff --git a/homeassistant/components/switcher_kis/cover.py b/homeassistant/components/switcher_kis/cover.py index dc3b6d96aed..c56fa7442fb 100644 --- a/homeassistant/components/switcher_kis/cover.py +++ b/homeassistant/components/switcher_kis/cover.py @@ -41,20 +41,16 @@ async def async_setup_entry( def async_add_cover(coordinator: SwitcherDataUpdateCoordinator) -> None: """Add cover from Switcher device.""" entities: list[CoverEntity] = [] - if coordinator.data.device_type.category in ( DeviceCategory.SHUTTER, DeviceCategory.SINGLE_SHUTTER_DUAL_LIGHT, - DeviceCategory.DUAL_SHUTTER_SINGLE_LIGHT, ): - number_of_covers = len(cast(SwitcherShutter, coordinator.data).position) - if number_of_covers == 
1: - entities.append(SwitcherSingleCoverEntity(coordinator, 0)) - else: - entities.extend( - SwitcherMultiCoverEntity(coordinator, i) - for i in range(number_of_covers) - ) + entities.append(SwitcherSingleCoverEntity(coordinator, 0)) + if ( + coordinator.data.device_type.category + == DeviceCategory.DUAL_SHUTTER_SINGLE_LIGHT + ): + entities.extend(SwitcherDualCoverEntity(coordinator, i) for i in range(2)) async_add_entities(entities) config_entry.async_on_unload( @@ -156,8 +152,8 @@ class SwitcherSingleCoverEntity(SwitcherBaseCoverEntity): self._update_data() -class SwitcherMultiCoverEntity(SwitcherBaseCoverEntity): - """Representation of a Switcher multiple cover entity.""" +class SwitcherDualCoverEntity(SwitcherBaseCoverEntity): + """Representation of a Switcher dual cover entity.""" _attr_translation_key = "cover" diff --git a/homeassistant/components/switcher_kis/light.py b/homeassistant/components/switcher_kis/light.py index bd87176bcf0..4b6df6db6ed 100644 --- a/homeassistant/components/switcher_kis/light.py +++ b/homeassistant/components/switcher_kis/light.py @@ -35,20 +35,16 @@ async def async_setup_entry( def async_add_light(coordinator: SwitcherDataUpdateCoordinator) -> None: """Add light from Switcher device.""" entities: list[LightEntity] = [] - - if coordinator.data.device_type.category in ( - DeviceCategory.SINGLE_SHUTTER_DUAL_LIGHT, - DeviceCategory.DUAL_SHUTTER_SINGLE_LIGHT, - DeviceCategory.LIGHT, + if ( + coordinator.data.device_type.category + == DeviceCategory.SINGLE_SHUTTER_DUAL_LIGHT ): - number_of_lights = len(cast(SwitcherLight, coordinator.data).light) - if number_of_lights == 1: - entities.append(SwitcherSingleLightEntity(coordinator, 0)) - else: - entities.extend( - SwitcherMultiLightEntity(coordinator, i) - for i in range(number_of_lights) - ) + entities.extend(SwitcherDualLightEntity(coordinator, i) for i in range(2)) + if ( + coordinator.data.device_type.category + == DeviceCategory.DUAL_SHUTTER_SINGLE_LIGHT + ): + entities.append(SwitcherSingleLightEntity(coordinator, 0)) async_add_entities(entities) config_entry.async_on_unload( @@ -137,8 +133,8 @@ class SwitcherSingleLightEntity(SwitcherBaseLightEntity): self._attr_unique_id = f"{coordinator.device_id}-{coordinator.mac_address}" -class SwitcherMultiLightEntity(SwitcherBaseLightEntity): - """Representation of a Switcher multiple light entity.""" +class SwitcherDualLightEntity(SwitcherBaseLightEntity): + """Representation of a Switcher dual light entity.""" _attr_translation_key = "light" diff --git a/homeassistant/components/systemmonitor/manifest.json b/homeassistant/components/systemmonitor/manifest.json index 4c6ae0653d3..236f25bb1ed 100644 --- a/homeassistant/components/systemmonitor/manifest.json +++ b/homeassistant/components/systemmonitor/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/systemmonitor", "iot_class": "local_push", "loggers": ["psutil"], - "requirements": ["psutil-home-assistant==0.0.1", "psutil==6.1.0"] + "requirements": ["psutil-home-assistant==0.0.1", "psutil==6.0.0"] } diff --git a/homeassistant/components/tedee/__init__.py b/homeassistant/components/tedee/__init__.py index 528a5052678..cd593f68e3a 100644 --- a/homeassistant/components/tedee/__init__.py +++ b/homeassistant/components/tedee/__init__.py @@ -7,7 +7,7 @@ from typing import Any from aiohttp.hdrs import METH_POST from aiohttp.web import Request, Response -from aiotedee.exception import TedeeDataUpdateException, TedeeWebhookException +from pytedee_async.exception import 
TedeeDataUpdateException, TedeeWebhookException from homeassistant.components.http import HomeAssistantView from homeassistant.components.webhook import ( diff --git a/homeassistant/components/tedee/binary_sensor.py b/homeassistant/components/tedee/binary_sensor.py index b586db7c2a7..5eab7bfa254 100644 --- a/homeassistant/components/tedee/binary_sensor.py +++ b/homeassistant/components/tedee/binary_sensor.py @@ -3,8 +3,8 @@ from collections.abc import Callable from dataclasses import dataclass -from aiotedee import TedeeLock -from aiotedee.lock import TedeeLockState +from pytedee_async import TedeeLock +from pytedee_async.lock import TedeeLockState from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, diff --git a/homeassistant/components/tedee/config_flow.py b/homeassistant/components/tedee/config_flow.py index 422d818d1b5..65d4ec12e80 100644 --- a/homeassistant/components/tedee/config_flow.py +++ b/homeassistant/components/tedee/config_flow.py @@ -4,7 +4,7 @@ from collections.abc import Mapping import logging from typing import Any -from aiotedee import ( +from pytedee_async import ( TedeeAuthException, TedeeClient, TedeeClientException, diff --git a/homeassistant/components/tedee/coordinator.py b/homeassistant/components/tedee/coordinator.py index 445585a1a2c..de3090a3f78 100644 --- a/homeassistant/components/tedee/coordinator.py +++ b/homeassistant/components/tedee/coordinator.py @@ -8,7 +8,7 @@ import logging import time from typing import Any -from aiotedee import ( +from pytedee_async import ( TedeeClient, TedeeClientException, TedeeDataUpdateException, @@ -16,7 +16,7 @@ from aiotedee import ( TedeeLock, TedeeWebhookException, ) -from aiotedee.bridge import TedeeBridge +from pytedee_async.bridge import TedeeBridge from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST diff --git a/homeassistant/components/tedee/entity.py b/homeassistant/components/tedee/entity.py index 96cc6f2b3f5..c72e293a292 100644 --- a/homeassistant/components/tedee/entity.py +++ b/homeassistant/components/tedee/entity.py @@ -1,6 +1,6 @@ """Bases for Tedee entities.""" -from aiotedee.lock import TedeeLock +from pytedee_async.lock import TedeeLock from homeassistant.core import callback from homeassistant.helpers.device_registry import DeviceInfo diff --git a/homeassistant/components/tedee/lock.py b/homeassistant/components/tedee/lock.py index 6e89a48f2a0..34d313f3e48 100644 --- a/homeassistant/components/tedee/lock.py +++ b/homeassistant/components/tedee/lock.py @@ -2,7 +2,7 @@ from typing import Any -from aiotedee import TedeeClientException, TedeeLock, TedeeLockState +from pytedee_async import TedeeClientException, TedeeLock, TedeeLockState from homeassistant.components.lock import LockEntity, LockEntityFeature from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/tedee/manifest.json b/homeassistant/components/tedee/manifest.json index bca51f08f93..4f071267a25 100644 --- a/homeassistant/components/tedee/manifest.json +++ b/homeassistant/components/tedee/manifest.json @@ -6,7 +6,7 @@ "dependencies": ["http", "webhook"], "documentation": "https://www.home-assistant.io/integrations/tedee", "iot_class": "local_push", - "loggers": ["aiotedee"], + "loggers": ["pytedee_async"], "quality_scale": "platinum", - "requirements": ["aiotedee==0.2.20"] + "requirements": ["pytedee-async==0.2.20"] } diff --git a/homeassistant/components/tedee/sensor.py b/homeassistant/components/tedee/sensor.py index 90f76317fff..33894a5eb52 100644 --- 
a/homeassistant/components/tedee/sensor.py +++ b/homeassistant/components/tedee/sensor.py @@ -3,7 +3,7 @@ from collections.abc import Callable from dataclasses import dataclass -from aiotedee import TedeeLock +from pytedee_async import TedeeLock from homeassistant.components.sensor import ( SensorDeviceClass, diff --git a/homeassistant/components/tedee/strings.json b/homeassistant/components/tedee/strings.json index b6966fa2933..2dc0e23968c 100644 --- a/homeassistant/components/tedee/strings.json +++ b/homeassistant/components/tedee/strings.json @@ -38,8 +38,7 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", - "unique_id_mismatch": "You selected a different bridge than the one this config entry was configured with, this is not allowed." + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]", diff --git a/homeassistant/components/template/manifest.json b/homeassistant/components/template/manifest.json index f1225f74f06..57188aebaa3 100644 --- a/homeassistant/components/template/manifest.json +++ b/homeassistant/components/template/manifest.json @@ -2,7 +2,7 @@ "domain": "template", "name": "Template", "after_dependencies": ["group"], - "codeowners": ["@PhracturedBlue", "@home-assistant/core"], + "codeowners": ["@PhracturedBlue", "@tetienne", "@home-assistant/core"], "config_flow": true, "dependencies": ["blueprint"], "documentation": "https://www.home-assistant.io/integrations/template", diff --git a/homeassistant/components/template/trigger_entity.py b/homeassistant/components/template/trigger_entity.py index 5130f332d5b..df84ce057c3 100644 --- a/homeassistant/components/template/trigger_entity.py +++ b/homeassistant/components/template/trigger_entity.py @@ -3,7 +3,6 @@ from __future__ import annotations from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.template import TemplateStateFromEntityId from homeassistant.helpers.trigger_template_entity import TriggerBaseEntity from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -42,11 +41,11 @@ class TriggerEntity( # pylint: disable=hass-enforce-class-module def _process_data(self) -> None: """Process new data.""" + this = None + if state := self.hass.states.get(self.entity_id): + this = state.as_dict() run_variables = self.coordinator.data["run_variables"] - variables = { - "this": TemplateStateFromEntityId(self.hass, self.entity_id), - **(run_variables or {}), - } + variables = {"this": this, **(run_variables or {})} self._render_templates(variables) diff --git a/homeassistant/components/tensorflow/manifest.json b/homeassistant/components/tensorflow/manifest.json index 86fd83ad088..4f2b6f19285 100644 --- a/homeassistant/components/tensorflow/manifest.json +++ b/homeassistant/components/tensorflow/manifest.json @@ -9,7 +9,7 @@ "tensorflow==2.5.0", "tf-models-official==2.5.0", "pycocotools==2.0.6", - "numpy==2.1.3", - "Pillow==11.0.0" + "numpy==1.26.4", + "Pillow==10.4.0" ] } diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index e7030b568b3..70db4a183aa 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -134,7 +134,7 @@ async 
def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - signing = product["command_signing"] == "required" if signing: if not tesla.private_key: - await tesla.get_private_key(hass.config.path("tesla_fleet.key")) + await tesla.get_private_key("config/tesla_fleet.key") api = VehicleSigned(tesla.vehicle, vin) else: api = VehicleSpecific(tesla.vehicle, vin) diff --git a/homeassistant/components/tesla_fleet/cover.py b/homeassistant/components/tesla_fleet/cover.py index f270734424f..2a14c4f039b 100644 --- a/homeassistant/components/tesla_fleet/cover.py +++ b/homeassistant/components/tesla_fleet/cover.py @@ -177,7 +177,13 @@ class TeslaFleetRearTrunkEntity(TeslaFleetVehicleEntity, CoverEntity): def _async_update_attrs(self) -> None: """Update the entity attributes.""" - self._attr_is_closed = self._value == CLOSED + value = self._value + if value == CLOSED: + self._attr_is_closed = True + elif value == OPEN: + self._attr_is_closed = False + else: + self._attr_is_closed = None async def async_open_cover(self, **kwargs: Any) -> None: """Open rear trunk.""" diff --git a/homeassistant/components/tesla_fleet/oauth.py b/homeassistant/components/tesla_fleet/oauth.py index 8b43460436b..00976abf56f 100644 --- a/homeassistant/components/tesla_fleet/oauth.py +++ b/homeassistant/components/tesla_fleet/oauth.py @@ -49,7 +49,6 @@ class TeslaSystemImplementation(config_entry_oauth2_flow.LocalOAuth2Implementati def extra_authorize_data(self) -> dict[str, Any]: """Extra data that needs to be appended to the authorize url.""" return { - "prompt": "login", "scope": " ".join(SCOPES), "code_challenge": self.code_challenge, # PKCE } @@ -84,4 +83,4 @@ class TeslaUserImplementation(AuthImplementation): @property def extra_authorize_data(self) -> dict[str, Any]: """Extra data that needs to be appended to the authorize url.""" - return {"prompt": "login", "scope": " ".join(SCOPES)} + return {"scope": " ".join(SCOPES)} diff --git a/homeassistant/components/teslemetry/__init__.py b/homeassistant/components/teslemetry/__init__.py index aa1d2b42660..b884f9bbc5c 100644 --- a/homeassistant/components/teslemetry/__init__.py +++ b/homeassistant/components/teslemetry/__init__.py @@ -135,11 +135,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - elif "energy_site_id" in product and Scope.ENERGY_DEVICE_DATA in scopes: site_id = product["energy_site_id"] - powerwall = ( - product["components"]["battery"] or product["components"]["solar"] - ) - wall_connector = "wall_connectors" in product["components"] - if not powerwall and not wall_connector: + if not ( + product["components"]["battery"] + or product["components"]["solar"] + or "wall_connectors" in product["components"] + ): LOGGER.debug( "Skipping Energy Site %s as it has no components", site_id, @@ -162,11 +162,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - info_coordinator=TeslemetryEnergySiteInfoCoordinator( hass, api, product ), - history_coordinator=( - TeslemetryEnergyHistoryCoordinator(hass, api) - if powerwall - else None - ), + history_coordinator=TeslemetryEnergyHistoryCoordinator(hass, api), id=site_id, device=device, ) @@ -189,7 +185,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - *( energysite.history_coordinator.async_config_entry_first_refresh() for energysite in energysites - if energysite.history_coordinator ), ) diff --git a/homeassistant/components/teslemetry/cover.py b/homeassistant/components/teslemetry/cover.py index 
8775da931d5..190f729d99f 100644 --- a/homeassistant/components/teslemetry/cover.py +++ b/homeassistant/components/teslemetry/cover.py @@ -182,7 +182,13 @@ class TeslemetryRearTrunkEntity(TeslemetryVehicleEntity, CoverEntity): def _async_update_attrs(self) -> None: """Update the entity attributes.""" - self._attr_is_closed = self._value == CLOSED + value = self._value + if value == CLOSED: + self._attr_is_closed = True + elif value == OPEN: + self._attr_is_closed = False + else: + self._attr_is_closed = None async def async_open_cover(self, **kwargs: Any) -> None: """Open rear trunk.""" diff --git a/homeassistant/components/teslemetry/entity.py b/homeassistant/components/teslemetry/entity.py index d14f3a42734..ca40d4d00ce 100644 --- a/homeassistant/components/teslemetry/entity.py +++ b/homeassistant/components/teslemetry/entity.py @@ -175,8 +175,6 @@ class TeslemetryEnergyHistoryEntity(TeslemetryEntity): ) -> None: """Initialize common aspects of a Teslemetry Energy Site Info entity.""" - assert data.history_coordinator - self.api = data.api self._attr_unique_id = f"{data.id}-{key}" self._attr_device_info = data.device diff --git a/homeassistant/components/teslemetry/models.py b/homeassistant/components/teslemetry/models.py index d3969b30a7c..7f8bd37425a 100644 --- a/homeassistant/components/teslemetry/models.py +++ b/homeassistant/components/teslemetry/models.py @@ -49,6 +49,6 @@ class TeslemetryEnergyData: api: EnergySpecific live_coordinator: TeslemetryEnergySiteLiveCoordinator info_coordinator: TeslemetryEnergySiteInfoCoordinator - history_coordinator: TeslemetryEnergyHistoryCoordinator | None + history_coordinator: TeslemetryEnergyHistoryCoordinator id: int device: DeviceInfo diff --git a/homeassistant/components/teslemetry/sensor.py b/homeassistant/components/teslemetry/sensor.py index 95876cc2cf9..ba7d930fcd0 100644 --- a/homeassistant/components/teslemetry/sensor.py +++ b/homeassistant/components/teslemetry/sensor.py @@ -482,7 +482,8 @@ async def async_setup_entry( TeslemetryEnergyHistorySensorEntity(energysite, description) for energysite in entry.runtime_data.energysites for description in ENERGY_HISTORY_DESCRIPTIONS - if energysite.history_coordinator + if energysite.info_coordinator.data.get("components_battery") + or energysite.info_coordinator.data.get("components_solar") ), ) ) diff --git a/homeassistant/components/thethingsnetwork/__init__.py b/homeassistant/components/thethingsnetwork/__init__.py index d3c6c8356cb..253ce7a052e 100644 --- a/homeassistant/components/thethingsnetwork/__init__.py +++ b/homeassistant/components/thethingsnetwork/__init__.py @@ -2,15 +2,55 @@ import logging +import voluptuous as vol + from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_HOST from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN, PLATFORMS, TTN_API_HOST +from .const import CONF_APP_ID, DOMAIN, PLATFORMS, TTN_API_HOST from .coordinator import TTNCoordinator _LOGGER = logging.getLogger(__name__) +CONFIG_SCHEMA = vol.Schema( + { + # Configuration via yaml not longer supported - keeping to warn about migration + DOMAIN: vol.Schema( + { + vol.Required(CONF_APP_ID): cv.string, + vol.Required("access_key"): cv.string, + } + ) + }, + extra=vol.ALLOW_EXTRA, +) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Initialize of The 
Things Network component."""
+
+    if DOMAIN in config:
+        ir.async_create_issue(
+            hass,
+            DOMAIN,
+            "manual_migration",
+            breaks_in_ha_version="2024.12.0",
+            is_fixable=False,
+            severity=ir.IssueSeverity.ERROR,
+            translation_key="manual_migration",
+            translation_placeholders={
+                "domain": DOMAIN,
+                "v2_v3_migration_url": "https://www.thethingsnetwork.org/forum/c/v2-to-v3-upgrade/102",
+                "v2_deprecation_url": "https://www.thethingsnetwork.org/forum/t/the-things-network-v2-is-permanently-shutting-down-completed/50710",
+            },
+        )
+
+    return True
+
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Establish connection with The Things Network."""
diff --git a/homeassistant/components/thethingsnetwork/strings.json b/homeassistant/components/thethingsnetwork/strings.json
index f5a4fcef8fd..98572cb318c 100644
--- a/homeassistant/components/thethingsnetwork/strings.json
+++ b/homeassistant/components/thethingsnetwork/strings.json
@@ -22,5 +22,11 @@
       "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
       "unknown": "[%key:common::config_flow::error::unknown%]"
     }
+  },
+  "issues": {
+    "manual_migration": {
+      "description": "Configuring {domain} using YAML was removed as part of migrating to [The Things Network v3]({v2_v3_migration_url}). [The Things Network v2 has shut down]({v2_deprecation_url}).\n\nPlease remove the {domain} entry from the configuration.yaml and re-add the integration using the config flow",
+      "title": "The {domain} YAML configuration is not supported"
+    }
   }
 }
diff --git a/homeassistant/components/threshold/binary_sensor.py b/homeassistant/components/threshold/binary_sensor.py
index 3d52d2225be..da7d92f7051 100644
--- a/homeassistant/components/threshold/binary_sensor.py
+++ b/homeassistant/components/threshold/binary_sensor.py
@@ -61,29 +61,15 @@ _LOGGER = logging.getLogger(__name__)
 
 DEFAULT_NAME: Final = "Threshold"
 
-
-def no_missing_threshold(value: dict) -> dict:
-    """Validate data point list is greater than polynomial degrees."""
-    if value.get(CONF_LOWER) is None and value.get(CONF_UPPER) is None:
-        raise vol.Invalid("Lower or Upper thresholds are not provided")
-
-    return value
-
-
-PLATFORM_SCHEMA = vol.All(
-    BINARY_SENSOR_PLATFORM_SCHEMA.extend(
-        {
-            vol.Required(CONF_ENTITY_ID): cv.entity_id,
-            vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
-            vol.Optional(CONF_HYSTERESIS, default=DEFAULT_HYSTERESIS): vol.Coerce(
-                float
-            ),
-            vol.Optional(CONF_LOWER): vol.Coerce(float),
-            vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
-            vol.Optional(CONF_UPPER): vol.Coerce(float),
-        }
-    ),
-    no_missing_threshold,
+PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend(
+    {
+        vol.Required(CONF_ENTITY_ID): cv.entity_id,
+        vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
+        vol.Optional(CONF_HYSTERESIS, default=DEFAULT_HYSTERESIS): vol.Coerce(float),
+        vol.Optional(CONF_LOWER): vol.Coerce(float),
+        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
+        vol.Optional(CONF_UPPER): vol.Coerce(float),
+    }
 )
 
 
@@ -140,6 +126,9 @@ async def async_setup_platform(
     hysteresis: float = config[CONF_HYSTERESIS]
     device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS)
 
+    if lower is None and upper is None:
+        raise ValueError("Lower or Upper thresholds not provided")
+
     async_add_entities(
         [
            ThresholdSensor(
diff --git a/homeassistant/components/tibber/__init__.py b/homeassistant/components/tibber/__init__.py
index 9b5c7ee1168..ce05b8070f6 100644
--- a/homeassistant/components/tibber/__init__.py
+++ 
b/homeassistant/components/tibber/__init__.py @@ -6,9 +6,15 @@ import aiohttp import tibber from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ACCESS_TOKEN, EVENT_HOMEASSISTANT_STOP, Platform +from homeassistant.const import ( + CONF_ACCESS_TOKEN, + CONF_NAME, + EVENT_HOMEASSISTANT_STOP, + Platform, +) from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import discovery from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType @@ -67,6 +73,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + # Use discovery to load platform legacy notify platform + # The use of the legacy notify service was deprecated with HA Core 2024.6 + # Support will be removed with HA Core 2024.12 + hass.async_create_task( + discovery.async_load_platform( + hass, + Platform.NOTIFY, + DOMAIN, + {CONF_NAME: DOMAIN}, + hass.data[DATA_HASS_CONFIG], + ) + ) + return True diff --git a/homeassistant/components/tibber/manifest.json b/homeassistant/components/tibber/manifest.json index bc9304ab59d..205bc1352eb 100644 --- a/homeassistant/components/tibber/manifest.json +++ b/homeassistant/components/tibber/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["tibber"], "quality_scale": "silver", - "requirements": ["pyTibber==0.30.8"] + "requirements": ["pyTibber==0.30.4"] } diff --git a/homeassistant/components/tibber/notify.py b/homeassistant/components/tibber/notify.py index fdeeeba68ef..1c9f86ed502 100644 --- a/homeassistant/components/tibber/notify.py +++ b/homeassistant/components/tibber/notify.py @@ -2,21 +2,38 @@ from __future__ import annotations +from collections.abc import Callable +from typing import Any + from tibber import Tibber from homeassistant.components.notify import ( + ATTR_TITLE, ATTR_TITLE_DEFAULT, + BaseNotificationService, NotifyEntity, NotifyEntityFeature, + migrate_notify_issue, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . 
import DOMAIN as TIBBER_DOMAIN +async def async_get_service( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, +) -> TibberNotificationService: + """Get the Tibber notification service.""" + tibber_connection: Tibber = hass.data[TIBBER_DOMAIN] + return TibberNotificationService(tibber_connection.send_notification) + + async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: @@ -24,6 +41,31 @@ async def async_setup_entry( async_add_entities([TibberNotificationEntity(entry.entry_id)]) +class TibberNotificationService(BaseNotificationService): + """Implement the notification service for Tibber.""" + + def __init__(self, notify: Callable) -> None: + """Initialize the service.""" + self._notify = notify + + async def async_send_message(self, message: str = "", **kwargs: Any) -> None: + """Send a message to Tibber devices.""" + migrate_notify_issue( + self.hass, + TIBBER_DOMAIN, + "Tibber", + "2024.12.0", + service_name=self._service_name, + ) + title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT) + try: + await self._notify(title=title, message=message) + except TimeoutError as exc: + raise HomeAssistantError( + translation_domain=TIBBER_DOMAIN, translation_key="send_message_timeout" + ) from exc + + class TibberNotificationEntity(NotifyEntity): """Implement the notification entity service for Tibber.""" diff --git a/homeassistant/components/todoist/strings.json b/homeassistant/components/todoist/strings.json index 721b491bbf5..5b083ac58bf 100644 --- a/homeassistant/components/todoist/strings.json +++ b/homeassistant/components/todoist/strings.json @@ -78,7 +78,7 @@ "description": "When should user be reminded of this task, in natural language." }, "reminder_date_lang": { - "name": "Reminder date language", + "name": "Reminder data language", "description": "The language of reminder_date_string." 
}, "reminder_date": { diff --git a/homeassistant/components/tplink/__init__.py b/homeassistant/components/tplink/__init__.py index ee1d90e70b4..ceeb1120ed8 100644 --- a/homeassistant/components/tplink/__init__.py +++ b/homeassistant/components/tplink/__init__.py @@ -31,7 +31,6 @@ from homeassistant.const import ( CONF_MAC, CONF_MODEL, CONF_PASSWORD, - CONF_PORT, CONF_USERNAME, ) from homeassistant.core import HomeAssistant, callback @@ -142,7 +141,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TPLinkConfigEntry) -> bo entry_credentials_hash = entry.data.get(CONF_CREDENTIALS_HASH) entry_use_http = entry.data.get(CONF_USES_HTTP, False) entry_aes_keys = entry.data.get(CONF_AES_KEYS) - port_override = entry.data.get(CONF_PORT) conn_params: Device.ConnectionParameters | None = None if conn_params_dict := entry.data.get(CONF_CONNECTION_PARAMETERS): @@ -159,7 +157,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TPLinkConfigEntry) -> bo timeout=CONNECT_TIMEOUT, http_client=client, aes_keys=entry_aes_keys, - port_override=port_override, ) if conn_params: config.connection_type = conn_params diff --git a/homeassistant/components/tplink/config_flow.py b/homeassistant/components/tplink/config_flow.py index 63f1b4e125b..a9f665e12fd 100644 --- a/homeassistant/components/tplink/config_flow.py +++ b/homeassistant/components/tplink/config_flow.py @@ -32,7 +32,6 @@ from homeassistant.const import ( CONF_MAC, CONF_MODEL, CONF_PASSWORD, - CONF_PORT, CONF_USERNAME, ) from homeassistant.core import callback @@ -70,7 +69,6 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): MINOR_VERSION = CONF_CONFIG_ENTRY_MINOR_VERSION host: str | None = None - port: int | None = None def __init__(self) -> None: """Initialize the config flow.""" @@ -262,26 +260,6 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): step_id="discovery_confirm", description_placeholders=placeholders ) - @staticmethod - def _async_get_host_port(host_str: str) -> tuple[str, int | None]: - """Parse the host string for host and port.""" - if "[" in host_str: - _, _, bracketed = host_str.partition("[") - host, _, port_str = bracketed.partition("]") - _, _, port_str = port_str.partition(":") - else: - host, _, port_str = host_str.partition(":") - - if not port_str: - return host, None - - try: - port = int(port_str) - except ValueError: - return host, None - - return host, port - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -292,29 +270,14 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: if not (host := user_input[CONF_HOST]): return await self.async_step_pick_device() - - host, port = self._async_get_host_port(host) - - match_dict = {CONF_HOST: host} - if port: - self.port = port - match_dict[CONF_PORT] = port - self._async_abort_entries_match(match_dict) - + self._async_abort_entries_match({CONF_HOST: host}) self.host = host credentials = await get_credentials(self.hass) try: device = await self._async_try_discover_and_update( - host, - credentials, - raise_on_progress=False, - raise_on_timeout=False, - port=port, + host, credentials, raise_on_progress=False, raise_on_timeout=False ) or await self._async_try_connect_all( - host, - credentials=credentials, - raise_on_progress=False, - port=port, + host, credentials=credentials, raise_on_progress=False ) except AuthenticationError: return await self.async_step_user_auth_confirm() @@ -355,10 +318,7 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): ) else: device = await 
self._async_try_connect_all( - self.host, - credentials=credentials, - raise_on_progress=False, - port=self.port, + self.host, credentials=credentials, raise_on_progress=False ) except AuthenticationError as ex: errors[CONF_PASSWORD] = "invalid_auth" @@ -460,8 +420,6 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): data[CONF_AES_KEYS] = device.config.aes_keys if device.credentials_hash: data[CONF_CREDENTIALS_HASH] = device.credentials_hash - if port := device.config.port_override: - data[CONF_PORT] = port return self.async_create_entry( title=f"{device.alias} {device.model}", data=data, @@ -472,8 +430,6 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): host: str, credentials: Credentials | None, raise_on_progress: bool, - *, - port: int | None = None, ) -> Device | None: """Try to connect to the device speculatively. @@ -485,15 +441,12 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): host, credentials=credentials, http_client=create_async_tplink_clientsession(self.hass), - port=port, ) else: # This will just try the legacy protocol that doesn't require auth # and doesn't use http try: - device = await Device.connect( - config=DeviceConfig(host, port_override=port) - ) + device = await Device.connect(config=DeviceConfig(host)) except Exception: # noqa: BLE001 return None if device: @@ -509,8 +462,6 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): credentials: Credentials | None, raise_on_progress: bool, raise_on_timeout: bool, - *, - port: int | None = None, ) -> Device | None: """Try to discover the device and call update. @@ -519,9 +470,7 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): self._discovered_device = None try: self._discovered_device = await Discover.discover_single( - host, - credentials=credentials, - port=port, + host, credentials=credentials ) except TimeoutError as ex: if raise_on_timeout: @@ -577,7 +526,6 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): reauth_entry = self._get_reauth_entry() entry_data = reauth_entry.data host = entry_data[CONF_HOST] - port = entry_data.get(CONF_PORT) if user_input: username = user_input[CONF_USERNAME] @@ -589,12 +537,8 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): credentials=credentials, raise_on_progress=False, raise_on_timeout=False, - port=port, ) or await self._async_try_connect_all( - host, - credentials=credentials, - raise_on_progress=False, - port=port, + host, credentials=credentials, raise_on_progress=False ) except AuthenticationError as ex: errors[CONF_PASSWORD] = "invalid_auth" diff --git a/homeassistant/components/tplink_omada/manifest.json b/homeassistant/components/tplink_omada/manifest.json index af20b54675b..6bde656dc30 100644 --- a/homeassistant/components/tplink_omada/manifest.json +++ b/homeassistant/components/tplink_omada/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tplink_omada", "integration_type": "hub", "iot_class": "local_polling", - "requirements": ["tplink-omada-client==1.4.3"] + "requirements": ["tplink-omada-client==1.4.2"] } diff --git a/homeassistant/components/trend/manifest.json b/homeassistant/components/trend/manifest.json index d7981105fd2..56b4b811171 100644 --- a/homeassistant/components/trend/manifest.json +++ b/homeassistant/components/trend/manifest.json @@ -7,5 +7,5 @@ "integration_type": "helper", "iot_class": "calculated", "quality_scale": "internal", - "requirements": ["numpy==2.1.3"] + "requirements": ["numpy==1.26.4"] } diff --git a/homeassistant/components/tuya/__init__.py 
b/homeassistant/components/tuya/__init__.py index c8a639cd239..47143f3595c 100644 --- a/homeassistant/components/tuya/__init__.py +++ b/homeassistant/components/tuya/__init__.py @@ -146,21 +146,14 @@ class DeviceListener(SharingDeviceListener): self.hass = hass self.manager = manager - def update_device( - self, device: CustomerDevice, updated_status_properties: list[str] | None - ) -> None: + def update_device(self, device: CustomerDevice) -> None: """Update device status.""" LOGGER.debug( - "Received update for device %s: %s (updated properties: %s)", + "Received update for device %s: %s", device.id, self.manager.device_map[device.id].status, - updated_status_properties, - ) - dispatcher_send( - self.hass, - f"{TUYA_HA_SIGNAL_UPDATE_ENTITY}_{device.id}", - updated_status_properties, ) + dispatcher_send(self.hass, f"{TUYA_HA_SIGNAL_UPDATE_ENTITY}_{device.id}") def add_device(self, device: CustomerDevice) -> None: """Add device added listener.""" diff --git a/homeassistant/components/tuya/entity.py b/homeassistant/components/tuya/entity.py index cc258560067..4d3710f7570 100644 --- a/homeassistant/components/tuya/entity.py +++ b/homeassistant/components/tuya/entity.py @@ -283,15 +283,10 @@ class TuyaEntity(Entity): async_dispatcher_connect( self.hass, f"{TUYA_HA_SIGNAL_UPDATE_ENTITY}_{self.device.id}", - self._handle_state_update, + self.async_write_ha_state, ) ) - async def _handle_state_update( - self, updated_status_properties: list[str] | None - ) -> None: - self.async_write_ha_state() - def _send_command(self, commands: list[dict[str, Any]]) -> None: """Send command to the device.""" LOGGER.debug("Sending commands for device %s: %s", self.device.id, commands) diff --git a/homeassistant/components/tuya/manifest.json b/homeassistant/components/tuya/manifest.json index b53e6fa27d8..305a74160de 100644 --- a/homeassistant/components/tuya/manifest.json +++ b/homeassistant/components/tuya/manifest.json @@ -43,5 +43,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["tuya_iot"], - "requirements": ["tuya-device-sharing-sdk==0.2.1"] + "requirements": ["tuya-device-sharing-sdk==0.1.9"] } diff --git a/homeassistant/components/tuya/sensor.py b/homeassistant/components/tuya/sensor.py index b9677037b7e..fd8efcac95d 100644 --- a/homeassistant/components/tuya/sensor.py +++ b/homeassistant/components/tuya/sensor.py @@ -203,17 +203,6 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { device_class=SensorDeviceClass.CO2, state_class=SensorStateClass.MEASUREMENT, ), - TuyaSensorEntityDescription( - key=DPCode.CH2O_VALUE, - translation_key="formaldehyde", - state_class=SensorStateClass.MEASUREMENT, - ), - TuyaSensorEntityDescription( - key=DPCode.VOC_VALUE, - translation_key="voc", - device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, - state_class=SensorStateClass.MEASUREMENT, - ), *BATTERY_SENSORS, ), # Two-way temperature and humidity switch diff --git a/homeassistant/components/twitch/config_flow.py b/homeassistant/components/twitch/config_flow.py index ed196897c11..dbaef59c236 100644 --- a/homeassistant/components/twitch/config_flow.py +++ b/homeassistant/components/twitch/config_flow.py @@ -78,10 +78,7 @@ class OAuth2FlowHandler( reauth_entry = self._get_reauth_entry() self._abort_if_unique_id_mismatch( reason="wrong_account", - description_placeholders={ - "title": reauth_entry.title, - "username": str(reauth_entry.unique_id), - }, + description_placeholders={"title": reauth_entry.title}, ) new_channels = reauth_entry.options[CONF_CHANNELS] diff --git 
a/homeassistant/components/unifi/config_flow.py b/homeassistant/components/unifi/config_flow.py index 63c8533aa2e..44969191fe6 100644 --- a/homeassistant/components/unifi/config_flow.py +++ b/homeassistant/components/unifi/config_flow.py @@ -21,6 +21,7 @@ import voluptuous as vol from homeassistant.components import ssdp from homeassistant.config_entries import ( SOURCE_REAUTH, + ConfigEntry, ConfigEntryState, ConfigFlow, ConfigFlowResult, @@ -37,7 +38,6 @@ from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import format_mac -from . import UnifiConfigEntry from .const import ( CONF_ALLOW_BANDWIDTH_SENSORS, CONF_ALLOW_UPTIME_SENSORS, @@ -78,10 +78,10 @@ class UnifiFlowHandler(ConfigFlow, domain=UNIFI_DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: UnifiConfigEntry, + config_entry: ConfigEntry, ) -> UnifiOptionsFlowHandler: """Get the options flow for this handler.""" - return UnifiOptionsFlowHandler(config_entry) + return UnifiOptionsFlowHandler() def __init__(self) -> None: """Initialize the UniFi Network flow.""" @@ -247,10 +247,6 @@ class UnifiOptionsFlowHandler(OptionsFlow): hub: UnifiHub - def __init__(self, config_entry: UnifiConfigEntry) -> None: - """Initialize UniFi Network options flow.""" - self.options = dict(config_entry.options) - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/utility_meter/manifest.json b/homeassistant/components/utility_meter/manifest.json index 31a2d4e9584..25e803e6a2d 100644 --- a/homeassistant/components/utility_meter/manifest.json +++ b/homeassistant/components/utility_meter/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["croniter"], "quality_scale": "internal", - "requirements": ["cronsim==2.6"] + "requirements": ["croniter==2.0.2"] } diff --git a/homeassistant/components/utility_meter/sensor.py b/homeassistant/components/utility_meter/sensor.py index 19ef3c1f3a8..6b8c07c7ef7 100644 --- a/homeassistant/components/utility_meter/sensor.py +++ b/homeassistant/components/utility_meter/sensor.py @@ -9,7 +9,7 @@ from decimal import Decimal, DecimalException, InvalidOperation import logging from typing import Any, Self -from cronsim import CronSim +from croniter import croniter import voluptuous as vol from homeassistant.components.sensor import ( @@ -379,13 +379,14 @@ class UtilityMeterSensor(RestoreSensor): self.entity_id = suggested_entity_id self._parent_meter = parent_meter self._sensor_source_id = source_entity + self._state = None self._last_period = Decimal(0) self._last_reset = dt_util.utcnow() self._last_valid_state = None self._collecting = None - self._attr_name = name + self._name = name self._input_device_class = None - self._attr_native_unit_of_measurement = None + self._unit_of_measurement = None self._period = meter_type if meter_type is not None: # For backwards compatibility reasons we convert the period and offset into a cron pattern @@ -404,22 +405,12 @@ class UtilityMeterSensor(RestoreSensor): self._tariff = tariff self._tariff_entity = tariff_entity self._next_reset = None - self.scheduler = ( - CronSim( - self._cron_pattern, - dt_util.now( - dt_util.get_default_time_zone() - ), # we need timezone for DST purposes (see issue #102984) - ) - if self._cron_pattern - else None - ) def start(self, attributes: Mapping[str, Any]) -> None: """Initialize unit and state upon source initial update.""" 
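Note on the scheduler change in the utility_meter hunks here: the next reset is computed with croniter again instead of CronSim, and the start time passed in is timezone-aware precisely so DST shifts keep the reset on local wall-clock time (issue #102984, referenced in the removed comment). A minimal standalone sketch of that calculation, assuming croniter is installed; the cron pattern and timezone are illustrative:

    from datetime import datetime
    from zoneinfo import ZoneInfo

    from croniter import croniter

    cron_pattern = "0 0 * * *"  # illustrative: reset at local midnight every day
    now = datetime.now(ZoneInfo("Europe/Amsterdam"))  # timezone-aware, as in the hunk

    # croniter keeps the tzinfo of the start time, so the next occurrence it
    # returns tracks local wall-clock time across DST transitions.
    next_reset = croniter(cron_pattern, now).get_next(datetime)
    print("next reset:", next_reset.isoformat())
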
self._input_device_class = attributes.get(ATTR_DEVICE_CLASS) - self._attr_native_unit_of_measurement = attributes.get(ATTR_UNIT_OF_MEASUREMENT) - self._attr_native_value = 0 + self._unit_of_measurement = attributes.get(ATTR_UNIT_OF_MEASUREMENT) + self._state = 0 self.async_write_ha_state() @staticmethod @@ -494,13 +485,13 @@ class UtilityMeterSensor(RestoreSensor): ) return - if self.native_value is None: + if self._state is None: # First state update initializes the utility_meter sensors for sensor in self.hass.data[DATA_UTILITY][self._parent_meter][ DATA_TARIFF_SENSORS ]: sensor.start(new_state_attributes) - if self.native_unit_of_measurement is None: + if self._unit_of_measurement is None: _LOGGER.warning( "Source sensor %s has no unit of measurement. Please %s", self._sensor_source_id, @@ -511,12 +502,10 @@ class UtilityMeterSensor(RestoreSensor): adjustment := self.calculate_adjustment(old_state, new_state) ) is not None and (self._sensor_net_consumption or adjustment >= 0): # If net_consumption is off, the adjustment must be non-negative - self._attr_native_value += adjustment # type: ignore[operator] # self._attr_native_value will be set to by the start function if it is None, therefore it always has a valid Decimal value at this line + self._state += adjustment # type: ignore[operator] # self._state will be set to by the start function if it is None, therefore it always has a valid Decimal value at this line self._input_device_class = new_state_attributes.get(ATTR_DEVICE_CLASS) - self._attr_native_unit_of_measurement = new_state_attributes.get( - ATTR_UNIT_OF_MEASUREMENT - ) + self._unit_of_measurement = new_state_attributes.get(ATTR_UNIT_OF_MEASUREMENT) self._last_valid_state = new_state_val self.async_write_ha_state() @@ -545,7 +534,7 @@ class UtilityMeterSensor(RestoreSensor): _LOGGER.debug( "%s - %s - source <%s>", - self.name, + self._name, COLLECTING if self._collecting is not None else PAUSED, self._sensor_source_id, ) @@ -554,10 +543,11 @@ class UtilityMeterSensor(RestoreSensor): async def _program_reset(self): """Program the reset of the utility meter.""" - if self.scheduler: - self._next_reset = next(self.scheduler) - - _LOGGER.debug("Next reset of %s is %s", self.entity_id, self._next_reset) + if self._cron_pattern is not None: + tz = dt_util.get_default_time_zone() + self._next_reset = croniter(self._cron_pattern, dt_util.now(tz)).get_next( + datetime + ) # we need timezone for DST purposes (see issue #102984) self.async_on_remove( async_track_point_in_time( self.hass, @@ -585,16 +575,14 @@ class UtilityMeterSensor(RestoreSensor): return _LOGGER.debug("Reset utility meter <%s>", self.entity_id) self._last_reset = dt_util.utcnow() - self._last_period = ( - Decimal(self.native_value) if self.native_value else Decimal(0) - ) - self._attr_native_value = 0 + self._last_period = Decimal(self._state) if self._state else Decimal(0) + self._state = 0 self.async_write_ha_state() async def async_calibrate(self, value): """Calibrate the Utility Meter with a given value.""" - _LOGGER.debug("Calibrate %s = %s type(%s)", self.name, value, type(value)) - self._attr_native_value = Decimal(str(value)) + _LOGGER.debug("Calibrate %s = %s type(%s)", self._name, value, type(value)) + self._state = Decimal(str(value)) self.async_write_ha_state() async def async_added_to_hass(self): @@ -610,11 +598,10 @@ class UtilityMeterSensor(RestoreSensor): ) if (last_sensor_data := await self.async_get_last_sensor_data()) is not None: - self._attr_native_value = last_sensor_data.native_value + # new 
introduced in 2022.04 + self._state = last_sensor_data.native_value self._input_device_class = last_sensor_data.input_device_class - self._attr_native_unit_of_measurement = ( - last_sensor_data.native_unit_of_measurement - ) + self._unit_of_measurement = last_sensor_data.native_unit_of_measurement self._last_period = last_sensor_data.last_period self._last_reset = last_sensor_data.last_reset self._last_valid_state = last_sensor_data.last_valid_state @@ -622,6 +609,39 @@ class UtilityMeterSensor(RestoreSensor): # Null lambda to allow cancelling the collection on tariff change self._collecting = lambda: None + elif state := await self.async_get_last_state(): + # legacy to be removed on 2022.10 (we are keeping this to avoid utility_meter counter losses) + try: + self._state = Decimal(state.state) + except InvalidOperation: + _LOGGER.error( + "Could not restore state <%s>. Resetting utility_meter.%s", + state.state, + self.name, + ) + else: + self._unit_of_measurement = state.attributes.get( + ATTR_UNIT_OF_MEASUREMENT + ) + self._last_period = ( + Decimal(state.attributes[ATTR_LAST_PERIOD]) + if state.attributes.get(ATTR_LAST_PERIOD) + and is_number(state.attributes[ATTR_LAST_PERIOD]) + else Decimal(0) + ) + self._last_valid_state = ( + Decimal(state.attributes[ATTR_LAST_VALID_STATE]) + if state.attributes.get(ATTR_LAST_VALID_STATE) + and is_number(state.attributes[ATTR_LAST_VALID_STATE]) + else None + ) + self._last_reset = dt_util.as_utc( + dt_util.parse_datetime(state.attributes.get(ATTR_LAST_RESET)) + ) + if state.attributes.get(ATTR_STATUS) == COLLECTING: + # Null lambda to allow cancelling the collection on tariff change + self._collecting = lambda: None + @callback def async_source_tracking(event): """Wait for source to be ready, then start meter.""" @@ -646,7 +666,7 @@ class UtilityMeterSensor(RestoreSensor): _LOGGER.debug( "<%s> collecting %s from %s", self.name, - self.native_unit_of_measurement, + self._unit_of_measurement, self._sensor_source_id, ) self._collecting = async_track_state_change_event( @@ -661,15 +681,22 @@ class UtilityMeterSensor(RestoreSensor): self._collecting() self._collecting = None + @property + def name(self): + """Return the name of the sensor.""" + return self._name + + @property + def native_value(self): + """Return the state of the sensor.""" + return self._state + @property def device_class(self): """Return the device class of the sensor.""" if self._input_device_class is not None: return self._input_device_class - if ( - self.native_unit_of_measurement - in DEVICE_CLASS_UNITS[SensorDeviceClass.ENERGY] - ): + if self._unit_of_measurement in DEVICE_CLASS_UNITS[SensorDeviceClass.ENERGY]: return SensorDeviceClass.ENERGY return None @@ -682,6 +709,11 @@ class UtilityMeterSensor(RestoreSensor): else SensorStateClass.TOTAL_INCREASING ) + @property + def native_unit_of_measurement(self): + """Return the unit the value is expressed in.""" + return self._unit_of_measurement + @property def extra_state_attributes(self): """Return the state attributes of the sensor.""" diff --git a/homeassistant/components/vesync/fan.py b/homeassistant/components/vesync/fan.py index 098a17e90f0..58a262e769f 100644 --- a/homeassistant/components/vesync/fan.py +++ b/homeassistant/components/vesync/fan.py @@ -94,7 +94,6 @@ class VeSyncFanHA(VeSyncDevice, FanEntity): | FanEntityFeature.TURN_ON ) _attr_name = None - _attr_translation_key = "vesync" _enable_turn_on_off_backwards_compatibility = False def __init__(self, fan) -> None: diff --git a/homeassistant/components/vesync/icons.json 
b/homeassistant/components/vesync/icons.json index e4769acc9a5..cfdefb2ed09 100644 --- a/homeassistant/components/vesync/icons.json +++ b/homeassistant/components/vesync/icons.json @@ -1,20 +1,4 @@ { - "entity": { - "fan": { - "vesync": { - "state_attributes": { - "preset_mode": { - "state": { - "auto": "mdi:fan-auto", - "sleep": "mdi:sleep", - "pet": "mdi:paw", - "turbo": "mdi:weather-tornado" - } - } - } - } - } - }, "services": { "update_devices": { "service": "mdi:update" diff --git a/homeassistant/components/vesync/strings.json b/homeassistant/components/vesync/strings.json index b6e4e2fd957..5ff0aa58722 100644 --- a/homeassistant/components/vesync/strings.json +++ b/homeassistant/components/vesync/strings.json @@ -42,20 +42,6 @@ "current_voltage": { "name": "Current voltage" } - }, - "fan": { - "vesync": { - "state_attributes": { - "preset_mode": { - "state": { - "auto": "Auto", - "sleep": "Sleep", - "pet": "Pet", - "turbo": "Turbo" - } - } - } - } } }, "services": { diff --git a/homeassistant/components/vodafone_station/sensor.py b/homeassistant/components/vodafone_station/sensor.py index 307fcaf0ea8..136aa94b43a 100644 --- a/homeassistant/components/vodafone_station/sensor.py +++ b/homeassistant/components/vodafone_station/sensor.py @@ -22,7 +22,7 @@ from .const import _LOGGER, DOMAIN, LINE_TYPES from .coordinator import VodafoneStationRouter NOT_AVAILABLE: list = ["", "N/A", "0.0.0.0"] -UPTIME_DEVIATION = 60 +UPTIME_DEVIATION = 45 @dataclass(frozen=True, kw_only=True) @@ -43,10 +43,12 @@ def _calculate_uptime( ) -> datetime: """Calculate device uptime.""" + assert isinstance(last_value, datetime) + delta_uptime = coordinator.api.convert_uptime(coordinator.data.sensors[key]) if ( - not isinstance(last_value, datetime) + not last_value or abs((delta_uptime - last_value).total_seconds()) > UPTIME_DEVIATION ): return delta_uptime diff --git a/homeassistant/components/water_heater/strings.json b/homeassistant/components/water_heater/strings.json index 07e132a0b5b..741b277d84d 100644 --- a/homeassistant/components/water_heater/strings.json +++ b/homeassistant/components/water_heater/strings.json @@ -1,5 +1,4 @@ { - "title": "Water heater", "device_automation": { "action_type": { "turn_on": "[%key:common::device_automation::action_type::turn_on%]", @@ -8,7 +7,7 @@ }, "entity_component": { "_": { - "name": "[%key:component::water_heater::title%]", + "name": "Water heater", "state": { "off": "[%key:common::state::off%]", "eco": "Eco", diff --git a/homeassistant/components/websocket_api/http.py b/homeassistant/components/websocket_api/http.py index e7d57aebab6..11aca19bab9 100644 --- a/homeassistant/components/websocket_api/http.py +++ b/homeassistant/components/websocket_api/http.py @@ -330,7 +330,13 @@ class WebSocketHandler: if TYPE_CHECKING: assert writer is not None - send_bytes_text = partial(writer.send_frame, opcode=WSMsgType.TEXT) + # aiohttp 3.11.0 changed the method name from _send_frame to send_frame + if hasattr(writer, "send_frame"): + send_frame = writer.send_frame # pragma: no cover + else: + send_frame = writer._send_frame # noqa: SLF001 + + send_bytes_text = partial(send_frame, opcode=WSMsgType.TEXT) auth = AuthPhase( logger, hass, self._send_message, self._cancel, request, send_bytes_text ) diff --git a/homeassistant/components/weheat/manifest.json b/homeassistant/components/weheat/manifest.json index ef89a2f1acb..d32e0ce4047 100644 --- a/homeassistant/components/weheat/manifest.json +++ b/homeassistant/components/weheat/manifest.json @@ -6,5 +6,5 @@ "dependencies": 
["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/weheat", "iot_class": "cloud_polling", - "requirements": ["weheat==2024.11.02"] + "requirements": ["weheat==2024.09.23"] } diff --git a/homeassistant/components/withings/manifest.json b/homeassistant/components/withings/manifest.json index f9e8328ae53..a0a86be5da3 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -9,5 +9,5 @@ "iot_class": "cloud_push", "loggers": ["aiowithings"], "quality_scale": "platinum", - "requirements": ["aiowithings==3.1.3"] + "requirements": ["aiowithings==3.1.1"] } diff --git a/homeassistant/components/zeroconf/usage.py b/homeassistant/components/zeroconf/usage.py index 8ddfdbd592d..b9d51cd3c36 100644 --- a/homeassistant/components/zeroconf/usage.py +++ b/homeassistant/components/zeroconf/usage.py @@ -4,7 +4,7 @@ from typing import Any import zeroconf -from homeassistant.helpers.frame import ReportBehavior, report_usage +from homeassistant.helpers.frame import report from .models import HaZeroconf @@ -16,14 +16,14 @@ def install_multiple_zeroconf_catcher(hass_zc: HaZeroconf) -> None: """ def new_zeroconf_new(self: zeroconf.Zeroconf, *k: Any, **kw: Any) -> HaZeroconf: - report_usage( + report( ( "attempted to create another Zeroconf instance. Please use the shared" " Zeroconf via await" " homeassistant.components.zeroconf.async_get_instance(hass)" ), exclude_integrations={"zeroconf"}, - core_behavior=ReportBehavior.LOG, + error_if_core=False, ) return hass_zc diff --git a/homeassistant/components/zha/config_flow.py b/homeassistant/components/zha/config_flow.py index f3f7f38772d..1c7e0d105c4 100644 --- a/homeassistant/components/zha/config_flow.py +++ b/homeassistant/components/zha/config_flow.py @@ -33,7 +33,6 @@ from homeassistant.config_entries import ( from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.selector import FileSelector, FileSelectorConfig from homeassistant.util import dt as dt_util @@ -105,26 +104,25 @@ async def list_serial_ports(hass: HomeAssistant) -> list[ListPortInfo]: yellow_radio.description = "Yellow Zigbee module" yellow_radio.manufacturer = "Nabu Casa" - if is_hassio(hass): - # Present the multi-PAN addon as a setup option, if it's available - multipan_manager = ( - await silabs_multiprotocol_addon.get_multiprotocol_addon_manager(hass) + # Present the multi-PAN addon as a setup option, if it's available + multipan_manager = await silabs_multiprotocol_addon.get_multiprotocol_addon_manager( + hass + ) + + try: + addon_info = await multipan_manager.async_get_addon_info() + except (AddonError, KeyError): + addon_info = None + + if addon_info is not None and addon_info.state != AddonState.NOT_INSTALLED: + addon_port = ListPortInfo( + device=silabs_multiprotocol_addon.get_zigbee_socket(), + skip_link_detection=True, ) - try: - addon_info = await multipan_manager.async_get_addon_info() - except (AddonError, KeyError): - addon_info = None - - if addon_info is not None and addon_info.state != AddonState.NOT_INSTALLED: - addon_port = ListPortInfo( - device=silabs_multiprotocol_addon.get_zigbee_socket(), - skip_link_detection=True, - ) - - addon_port.description = "Multiprotocol add-on" - addon_port.manufacturer = "Nabu Casa" - ports.append(addon_port) + addon_port.description = "Multiprotocol add-on" + 
addon_port.manufacturer = "Nabu Casa" + ports.append(addon_port) return ports diff --git a/homeassistant/components/zwave_js/api.py b/homeassistant/components/zwave_js/api.py index bd49e85b601..7d3bd8273ec 100644 --- a/homeassistant/components/zwave_js/api.py +++ b/homeassistant/components/zwave_js/api.py @@ -56,7 +56,6 @@ from zwave_js_server.model.utils import ( async_parse_qr_code_string, async_try_parse_dsk_from_qr_code_string, ) -from zwave_js_server.model.value import ConfigurationValueFormat from zwave_js_server.util.node import async_set_config_parameter from homeassistant.components import websocket_api @@ -107,8 +106,6 @@ PROPERTY = "property" PROPERTY_KEY = "property_key" ENDPOINT = "endpoint" VALUE = "value" -VALUE_SIZE = "value_size" -VALUE_FORMAT = "value_format" # constants for log config commands CONFIG = "config" @@ -419,8 +416,6 @@ def async_register_api(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_rebuild_node_routes) websocket_api.async_register_command(hass, websocket_set_config_parameter) websocket_api.async_register_command(hass, websocket_get_config_parameters) - websocket_api.async_register_command(hass, websocket_get_raw_config_parameter) - websocket_api.async_register_command(hass, websocket_set_raw_config_parameter) websocket_api.async_register_command(hass, websocket_subscribe_log_updates) websocket_api.async_register_command(hass, websocket_update_log_config) websocket_api.async_register_command(hass, websocket_get_log_config) @@ -1765,72 +1760,6 @@ async def websocket_get_config_parameters( ) -@websocket_api.require_admin -@websocket_api.websocket_command( - { - vol.Required(TYPE): "zwave_js/set_raw_config_parameter", - vol.Required(DEVICE_ID): str, - vol.Required(PROPERTY): int, - vol.Required(VALUE): int, - vol.Required(VALUE_SIZE): vol.All(vol.Coerce(int), vol.Range(min=1, max=4)), - vol.Required(VALUE_FORMAT): vol.Coerce(ConfigurationValueFormat), - } -) -@websocket_api.async_response -@async_handle_failed_command -@async_get_node -async def websocket_set_raw_config_parameter( - hass: HomeAssistant, - connection: ActiveConnection, - msg: dict[str, Any], - node: Node, -) -> None: - """Set a custom config parameter value for a Z-Wave node.""" - result = await node.async_set_raw_config_parameter_value( - msg[VALUE], - msg[PROPERTY], - value_size=msg[VALUE_SIZE], - value_format=msg[VALUE_FORMAT], - ) - - connection.send_result( - msg[ID], - { - STATUS: result.status, - }, - ) - - -@websocket_api.require_admin -@websocket_api.websocket_command( - { - vol.Required(TYPE): "zwave_js/get_raw_config_parameter", - vol.Required(DEVICE_ID): str, - vol.Required(PROPERTY): int, - } -) -@websocket_api.async_response -@async_handle_failed_command -@async_get_node -async def websocket_get_raw_config_parameter( - hass: HomeAssistant, - connection: ActiveConnection, - msg: dict[str, Any], - node: Node, -) -> None: - """Get a custom config parameter value for a Z-Wave node.""" - value = await node.async_get_raw_config_parameter_value( - msg[PROPERTY], - ) - - connection.send_result( - msg[ID], - { - VALUE: value, - }, - ) - - def filename_is_present_if_logging_to_file(obj: dict) -> dict: """Validate that filename is provided if log_to_file is True.""" if obj.get(LOG_TO_FILE, False) and FILENAME not in obj: diff --git a/homeassistant/components/zwave_js/manifest.json b/homeassistant/components/zwave_js/manifest.json index 3631bf1163b..a37b3560526 100644 --- a/homeassistant/components/zwave_js/manifest.json +++ 
b/homeassistant/components/zwave_js/manifest.json @@ -10,7 +10,7 @@ "iot_class": "local_push", "loggers": ["zwave_js_server"], "quality_scale": "platinum", - "requirements": ["pyserial==3.5", "zwave-js-server-python==0.59.1"], + "requirements": ["pyserial==3.5", "zwave-js-server-python==0.58.1"], "usb": [ { "vid": "0658", diff --git a/homeassistant/components/zwave_js/services.py b/homeassistant/components/zwave_js/services.py index d1cb66ceafc..969a235bb41 100644 --- a/homeassistant/components/zwave_js/services.py +++ b/homeassistant/components/zwave_js/services.py @@ -529,15 +529,8 @@ class ZWaveServices: for node_or_endpoint, result in get_valid_responses_from_results( nodes_or_endpoints_list, _results ): - if value_size is None: - # async_set_config_parameter still returns (Value, SetConfigParameterResult) - zwave_value = result[0] - cmd_status = result[1] - else: - # async_set_raw_config_parameter_value now returns just SetConfigParameterResult - cmd_status = result - zwave_value = f"parameter {property_or_property_name}" - + zwave_value = result[0] + cmd_status = result[1] if cmd_status.status == CommandStatus.ACCEPTED: msg = "Set configuration parameter %s on Node %s with value %s" else: diff --git a/homeassistant/components/zwave_js/services.yaml b/homeassistant/components/zwave_js/services.yaml index acf6e9a0665..f5063fdfd93 100644 --- a/homeassistant/components/zwave_js/services.yaml +++ b/homeassistant/components/zwave_js/services.yaml @@ -51,6 +51,16 @@ set_lock_configuration: min: 0 max: 65535 unit_of_measurement: sec + outside_handles_can_open_door_configuration: + required: false + example: [true, true, true, false] + selector: + object: + inside_handles_can_open_door_configuration: + required: false + example: [true, true, true, false] + selector: + object: auto_relock_time: required: false example: 1 diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index 28789bbf9f4..ca7d5153e6e 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -523,6 +523,10 @@ "description": "Duration in seconds the latch stays retracted.", "name": "Hold and release time" }, + "inside_handles_can_open_door_configuration": { + "description": "A list of four booleans which indicate which inside handles can open the door.", + "name": "Inside handles can open door configuration" + }, "lock_timeout": { "description": "Seconds until lock mode times out. 
Should only be used if operation type is `timed`.", "name": "Lock timeout" @@ -531,6 +535,10 @@ "description": "The operation type of the lock.", "name": "Operation Type" }, + "outside_handles_can_open_door_configuration": { + "description": "A list of four booleans which indicate which outside handles can open the door.", + "name": "Outside handles can open door configuration" + }, "twist_assist": { "description": "Enable Twist Assist.", "name": "Twist assist" diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index f1748c6b7fb..6a95707dcda 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -63,7 +63,7 @@ from .helpers.event import ( RANDOM_MICROSECOND_MIN, async_call_later, ) -from .helpers.frame import ReportBehavior, report, report_usage +from .helpers.frame import report from .helpers.json import json_bytes, json_bytes_sorted, json_fragment from .helpers.typing import UNDEFINED, ConfigType, DiscoveryInfoType, UndefinedType from .loader import async_suggest_report_issue @@ -1507,14 +1507,10 @@ class ConfigEntriesFlowManager( version=result["version"], ) - if existing_entry is not None: - # Unload and remove the existing entry - await self.config_entries._async_remove(existing_entry.entry_id) # noqa: SLF001 await self.config_entries.async_add(entry) if existing_entry is not None: - # Clean up devices and entities belonging to the existing entry - self.config_entries._async_clean_up(existing_entry) # noqa: SLF001 + await self.config_entries.async_remove(existing_entry.entry_id) result["result"] = entry return result @@ -1904,21 +1900,7 @@ class ConfigEntries: self._async_schedule_save() async def async_remove(self, entry_id: str) -> dict[str, Any]: - """Remove, unload and clean up after an entry.""" - unload_success, entry = await self._async_remove(entry_id) - self._async_clean_up(entry) - - for discovery_domain in entry.discovery_keys: - async_dispatcher_send_internal( - self.hass, - signal_discovered_config_entry_removed(discovery_domain), - entry, - ) - - return {"require_restart": not unload_success} - - async def _async_remove(self, entry_id: str) -> tuple[bool, ConfigEntry]: - """Remove and unload an entry.""" + """Remove an entry.""" if (entry := self.async_get_entry(entry_id)) is None: raise UnknownEntry @@ -1934,13 +1916,6 @@ class ConfigEntries: self.async_update_issues() self._async_schedule_save() - return (unload_success, entry) - - @callback - def _async_clean_up(self, entry: ConfigEntry) -> None: - """Clean up after an entry.""" - entry_id = entry.entry_id - dev_reg = device_registry.async_get(self.hass) ent_reg = entity_registry.async_get(self.hass) @@ -1959,6 +1934,13 @@ class ConfigEntries: ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id) self._async_dispatch(ConfigEntryChange.REMOVED, entry) + for discovery_domain in entry.discovery_keys: + async_dispatcher_send_internal( + self.hass, + signal_discovered_config_entry_removed(discovery_domain), + entry, + ) + return {"require_restart": not unload_success} @callback def _async_shutdown(self, event: Event) -> None: @@ -2176,12 +2158,7 @@ class ConfigEntries: if unique_id is not UNDEFINED and entry.unique_id != unique_id: # Deprecated in 2024.11, should fail in 2025.11 if ( - # flipr creates duplicates during migration, and asks users to - # remove the duplicate. We don't need warn about it here too. 
- # We should remove the special case for "flipr" in HA Core 2025.4, - # when the flipr migration period ends - entry.domain != "flipr" - and unique_id is not None + unique_id is not None and self.async_entry_for_domain_unique_id(entry.domain, unique_id) is not None ): @@ -2459,24 +2436,7 @@ class ConfigEntries: issues.add(issue.issue_id) for domain, unique_ids in self._entries._domain_unique_id_index.items(): # noqa: SLF001 - # flipr creates duplicates during migration, and asks users to - # remove the duplicate. We don't need warn about it here too. - # We should remove the special case for "flipr" in HA Core 2025.4, - # when the flipr migration period ends - if domain == "flipr": - continue for unique_id, entries in unique_ids.items(): - # We might mutate the list of entries, so we need a copy to not mess up - # the index - entries = list(entries) - - # There's no need to raise an issue for ignored entries, we can - # safely remove them once we no longer allow unique id collisions. - # Iterate over a copy of the copy to allow mutating while iterating - for entry in list(entries): - if entry.source == SOURCE_IGNORE: - entries.remove(entry) - if len(entries) < 2: continue issue_id = f"{ISSUE_UNIQUE_ID_COLLISION}_{domain}_{unique_id}" @@ -3100,6 +3060,7 @@ class OptionsFlowManager( class OptionsFlow(ConfigEntryBaseFlow): """Base class for config options flows.""" + _options: dict[str, Any] handler: str _config_entry: ConfigEntry @@ -3158,39 +3119,51 @@ class OptionsFlow(ConfigEntryBaseFlow): @config_entry.setter def config_entry(self, value: ConfigEntry) -> None: """Set the config entry value.""" - report_usage( + report( "sets option flow config_entry explicitly, which is deprecated " "and will stop working in 2025.12", - core_behavior=ReportBehavior.ERROR, - core_integration_behavior=ReportBehavior.ERROR, - custom_integration_behavior=ReportBehavior.LOG, + error_if_integration=False, + error_if_core=True, ) self._config_entry = value + @property + def options(self) -> dict[str, Any]: + """Return a mutable copy of the config entry options. + + Please note that this is not available inside `__init__` method, and + can only be referenced after initialisation. + """ + if not hasattr(self, "_options"): + self._options = deepcopy(dict(self.config_entry.options)) + return self._options + + @options.setter + def options(self, value: dict[str, Any]) -> None: + """Set the options value.""" + report( + "sets option flow options explicitly, which is deprecated " + "and will stop working in 2025.12", + error_if_integration=False, + error_if_core=True, + ) + self._options = value + class OptionsFlowWithConfigEntry(OptionsFlow): - """Base class for options flows with config entry and options. - - This class is being phased out, and should not be referenced in new code. - It is kept only for backward compatibility, and only for custom integrations. 
- """ + """Base class for options flows with config entry and options.""" def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" self._config_entry = config_entry self._options = deepcopy(dict(config_entry.options)) - report_usage( - "inherits from OptionsFlowWithConfigEntry", - core_behavior=ReportBehavior.ERROR, - core_integration_behavior=ReportBehavior.ERROR, - custom_integration_behavior=ReportBehavior.IGNORE, + report( + "inherits from OptionsFlowWithConfigEntry, which is deprecated " + "and will stop working in 2025.12", + error_if_integration=False, + error_if_core=True, ) - @property - def options(self) -> dict[str, Any]: - """Return a mutable copy of the config entry options.""" - return self._options - class EntityRegistryDisabledHandler: """Handler when entities related to config entries updated disabled_by.""" diff --git a/homeassistant/const.py b/homeassistant/const.py index 4082a076b94..1da3b819f9f 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -29,9 +29,9 @@ PATCH_VERSION: Final = "0.dev0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) -REQUIRED_NEXT_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 0) +REQUIRED_NEXT_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) # Truthy date string triggers showing related deprecation warning messages. -REQUIRED_NEXT_PYTHON_HA_RELEASE: Final = "2025.2" +REQUIRED_NEXT_PYTHON_HA_RELEASE: Final = "" # Format for platform files PLATFORM_FORMAT: Final = "{platform}.{domain}" @@ -725,9 +725,6 @@ class UnitOfPower(StrEnum): WATT = "W" KILO_WATT = "kW" - MEGA_WATT = "MW" - GIGA_WATT = "GW" - TERA_WATT = "TW" BTU_PER_HOUR = "BTU/h" @@ -773,8 +770,6 @@ class UnitOfEnergy(StrEnum): WATT_HOUR = "Wh" KILO_WATT_HOUR = "kWh" MEGA_WATT_HOUR = "MWh" - GIGA_WATT_HOUR = "GWh" - TERA_WATT_HOUR = "TWh" CALORIE = "cal" KILO_CALORIE = "kcal" MEGA_CALORIE = "Mcal" @@ -1358,13 +1353,6 @@ CONCENTRATION_PARTS_PER_MILLION: Final = "ppm" CONCENTRATION_PARTS_PER_BILLION: Final = "ppb" -class UnitOfBloodGlucoseConcentration(StrEnum): - """Blood glucose concentration units.""" - - MILLIGRAMS_PER_DECILITER = "mg/dL" - MILLIMOLE_PER_LITER = "mmol/L" - - # Speed units class UnitOfSpeed(StrEnum): """Speed units.""" diff --git a/homeassistant/core.py b/homeassistant/core.py index cdfb5570b44..ab852056353 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -656,12 +656,12 @@ class HomeAssistant: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report_usage( + frame.report( "calls `async_add_job`, which is deprecated and will be removed in Home " "Assistant 2025.4; Please review " "https://developers.home-assistant.io/blog/2024/03/13/deprecate_add_run_job" " for replacement options", - core_behavior=frame.ReportBehavior.LOG, + error_if_core=False, ) if target is None: @@ -712,12 +712,12 @@ class HomeAssistant: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report_usage( + frame.report( "calls `async_add_hass_job`, which is deprecated and will be removed in Home " "Assistant 2025.5; Please review " "https://developers.home-assistant.io/blog/2024/04/07/deprecate_add_hass_job" " for replacement options", - core_behavior=frame.ReportBehavior.LOG, + error_if_core=False, ) return self._async_add_hass_job(hassjob, *args, background=background) 
@@ -986,12 +986,12 @@ class HomeAssistant: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report_usage( + frame.report( "calls `async_run_job`, which is deprecated and will be removed in Home " "Assistant 2025.4; Please review " "https://developers.home-assistant.io/blog/2024/03/13/deprecate_add_run_job" " for replacement options", - core_behavior=frame.ReportBehavior.LOG, + error_if_core=False, ) if asyncio.iscoroutine(target): @@ -1635,10 +1635,10 @@ class EventBus: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report_usage( + frame.report( "calls `async_listen` with run_immediately, which is" " deprecated and will be removed in Home Assistant 2025.5", - core_behavior=frame.ReportBehavior.LOG, + error_if_core=False, ) if event_filter is not None and not is_callback_check_partial(event_filter): @@ -1705,10 +1705,10 @@ class EventBus: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report_usage( + frame.report( "calls `async_listen_once` with run_immediately, which is " "deprecated and will be removed in Home Assistant 2025.5", - core_behavior=frame.ReportBehavior.LOG, + error_if_core=False, ) one_time_listener: _OneTimeListener[_DataT] = _OneTimeListener( diff --git a/homeassistant/core_config.py b/homeassistant/core_config.py index 5c773c57bc4..25f745f110c 100644 --- a/homeassistant/core_config.py +++ b/homeassistant/core_config.py @@ -60,7 +60,7 @@ from .core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from .generated.currencies import HISTORIC_CURRENCIES from .helpers import config_validation as cv, issue_registry as ir from .helpers.entity_values import EntityValues -from .helpers.frame import ReportBehavior, report_usage +from .helpers.frame import report from .helpers.storage import Store from .helpers.typing import UNDEFINED, UndefinedType from .util import dt as dt_util, location @@ -695,11 +695,11 @@ class Config: It will be removed in Home Assistant 2025.6. """ - report_usage( + report( "set the time zone using set_time_zone instead of async_set_time_zone" " which will stop working in Home Assistant 2025.6", - core_integration_behavior=ReportBehavior.ERROR, - custom_integration_behavior=ReportBehavior.ERROR, + error_if_core=True, + error_if_integration=True, ) if time_zone := dt_util.get_time_zone(time_zone_str): self.time_zone = time_zone_str diff --git a/homeassistant/data_entry_flow.py b/homeassistant/data_entry_flow.py index 9d041c9b8d3..1fb6439a8c4 100644 --- a/homeassistant/data_entry_flow.py +++ b/homeassistant/data_entry_flow.py @@ -26,7 +26,7 @@ from .helpers.deprecation import ( check_if_deprecated_constant, dir_with_deprecated_constants, ) -from .helpers.frame import ReportBehavior, report_usage +from .helpers.frame import report from .loader import async_suggest_report_issue from .util import uuid as uuid_util @@ -530,12 +530,12 @@ class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): if not isinstance(result["type"], FlowResultType): result["type"] = FlowResultType(result["type"]) # type: ignore[unreachable] - report_usage( + report( ( "does not use FlowResultType enum for data entry flow result type. 
" "This is deprecated and will stop working in Home Assistant 2025.1" ), - core_behavior=ReportBehavior.LOG, + error_if_core=False, ) if ( diff --git a/homeassistant/generated/bluetooth.py b/homeassistant/generated/bluetooth.py index a105efc2685..c4612898cb2 100644 --- a/homeassistant/generated/bluetooth.py +++ b/homeassistant/generated/bluetooth.py @@ -8,26 +8,6 @@ from __future__ import annotations from typing import Final BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ - { - "domain": "acaia", - "manufacturer_id": 16962, - }, - { - "domain": "acaia", - "local_name": "ACAIA*", - }, - { - "domain": "acaia", - "local_name": "PYXIS-*", - }, - { - "domain": "acaia", - "local_name": "LUNAR-*", - }, - { - "domain": "acaia", - "local_name": "PROCHBT001", - }, { "domain": "airthings_ble", "manufacturer_id": 820, diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index ffe61b915c6..923b2ec1606 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -24,7 +24,6 @@ FLOWS = { ], "integration": [ "abode", - "acaia", "accuweather", "acmeda", "adax", @@ -392,7 +391,6 @@ FLOWS = { "myuplink", "nam", "nanoleaf", - "nasweb", "neato", "nest", "netatmo", @@ -409,7 +407,6 @@ FLOWS = { "nina", "nmap_tracker", "nobo_hub", - "nordpool", "notion", "nuheat", "nuki", @@ -538,7 +535,6 @@ FLOWS = { "simplefin", "simplepush", "simplisafe", - "sky_remote", "skybell", "slack", "sleepiq", diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index 7dacf9a0bca..cd20b88b285 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -379,15 +379,6 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "gateway*", "macaddress": "F8811A*", }, - { - "domain": "palazzetti", - "hostname": "connbox*", - "macaddress": "40F3857*", - }, - { - "domain": "palazzetti", - "registered_devices": True, - }, { "domain": "powerwall", "hostname": "1118431-*", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index f007db87868..449d36da474 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -11,12 +11,6 @@ "config_flow": true, "iot_class": "cloud_push" }, - "acaia": { - "name": "Acaia", - "integration_type": "device", - "config_flow": true, - "iot_class": "local_push" - }, "accuweather": { "name": "AccuWeather", "integration_type": "service", @@ -4022,12 +4016,6 @@ "config_flow": true, "iot_class": "local_push" }, - "nasweb": { - "name": "NASweb", - "integration_type": "hub", - "config_flow": true, - "iot_class": "local_push" - }, "neato": { "name": "Neato Botvac", "integration_type": "hub", @@ -4193,13 +4181,6 @@ "config_flow": true, "iot_class": "local_push" }, - "nordpool": { - "name": "Nord Pool", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling", - "single_config_entry": true - }, "norway_air": { "name": "Om Luftkvalitet i Norge (Norway Air)", "integration_type": "hub", @@ -5614,22 +5595,11 @@ "config_flow": false, "iot_class": "local_push" }, - "sky": { - "name": "Sky", - "integrations": { - "sky_hub": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling", - "name": "Sky Hub" - }, - "sky_remote": { - "integration_type": "device", - "config_flow": true, - "iot_class": "assumed_state", - "name": "Sky Remote Control" - } - } + "sky_hub": { + "name": "Sky Hub", + "integration_type": "hub", + "config_flow": 
false, + "iot_class": "local_polling" }, "skybeacon": { "name": "Skybeacon", diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 2b35ebade76..81ac10f86cc 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -719,14 +719,14 @@ def template(value: Any | None) -> template_helper.Template: raise vol.Invalid("template value should be a string") if not (hass := _async_get_hass_or_none()): # pylint: disable-next=import-outside-toplevel - from .frame import ReportBehavior, report_usage + from .frame import report - report_usage( + report( ( "validates schema outside the event loop, " "which will stop working in HA Core 2025.10" ), - core_behavior=ReportBehavior.LOG, + error_if_core=False, ) template_value = template_helper.Template(str(value), hass) @@ -748,14 +748,14 @@ def dynamic_template(value: Any | None) -> template_helper.Template: raise vol.Invalid("template value does not contain a dynamic template") if not (hass := _async_get_hass_or_none()): # pylint: disable-next=import-outside-toplevel - from .frame import ReportBehavior, report_usage + from .frame import report - report_usage( + report( ( "validates schema outside the event loop, " "which will stop working in HA Core 2025.10" ), - core_behavior=ReportBehavior.LOG, + error_if_core=False, ) template_value = template_helper.Template(str(value), hass) diff --git a/homeassistant/helpers/event.py b/homeassistant/helpers/event.py index 61a798dbd75..02ea8103192 100644 --- a/homeassistant/helpers/event.py +++ b/homeassistant/helpers/event.py @@ -997,14 +997,14 @@ class TrackTemplateResultInfo: continue # pylint: disable-next=import-outside-toplevel - from .frame import ReportBehavior, report_usage + from .frame import report - report_usage( + report( ( "calls async_track_template_result with template without hass, " "which will stop working in HA Core 2025.10" ), - core_behavior=ReportBehavior.LOG, + error_if_core=False, ) track_template_.template.hass = hass diff --git a/homeassistant/helpers/frame.py b/homeassistant/helpers/frame.py index eda98099713..fd7e014b2ff 100644 --- a/homeassistant/helpers/frame.py +++ b/homeassistant/helpers/frame.py @@ -5,7 +5,6 @@ from __future__ import annotations import asyncio from collections.abc import Callable from dataclasses import dataclass -import enum import functools import linecache import logging @@ -145,72 +144,24 @@ def report( If error_if_integration is True, raise instead of log if an integration is found when unwinding the stack frame. 
""" - core_behavior = ReportBehavior.ERROR if error_if_core else ReportBehavior.LOG - core_integration_behavior = ( - ReportBehavior.ERROR if error_if_integration else ReportBehavior.LOG - ) - custom_integration_behavior = core_integration_behavior - - if log_custom_component_only: - if core_behavior is ReportBehavior.LOG: - core_behavior = ReportBehavior.IGNORE - if core_integration_behavior is ReportBehavior.LOG: - core_integration_behavior = ReportBehavior.IGNORE - - report_usage( - what, - core_behavior=core_behavior, - core_integration_behavior=core_integration_behavior, - custom_integration_behavior=custom_integration_behavior, - exclude_integrations=exclude_integrations, - level=level, - ) - - -class ReportBehavior(enum.Enum): - """Enum for behavior on code usage.""" - - IGNORE = enum.auto() - """Ignore the code usage.""" - LOG = enum.auto() - """Log the code usage.""" - ERROR = enum.auto() - """Raise an error on code usage.""" - - -def report_usage( - what: str, - *, - core_behavior: ReportBehavior = ReportBehavior.ERROR, - core_integration_behavior: ReportBehavior = ReportBehavior.LOG, - custom_integration_behavior: ReportBehavior = ReportBehavior.LOG, - exclude_integrations: set[str] | None = None, - level: int = logging.WARNING, -) -> None: - """Report incorrect code usage. - - Similar to `report` but allows more fine-grained reporting. - """ try: integration_frame = get_integration_frame( exclude_integrations=exclude_integrations ) except MissingIntegrationFrame as err: msg = f"Detected code that {what}. Please report this issue." - if core_behavior is ReportBehavior.ERROR: + if error_if_core: raise RuntimeError(msg) from err - if core_behavior is ReportBehavior.LOG: + if not log_custom_component_only: _LOGGER.warning(msg, stack_info=True) return - integration_behavior = core_integration_behavior - if integration_frame.custom_integration: - integration_behavior = custom_integration_behavior - - if integration_behavior is not ReportBehavior.IGNORE: - _report_integration( - what, integration_frame, level, integration_behavior is ReportBehavior.ERROR - ) + if ( + error_if_integration + or not log_custom_component_only + or integration_frame.custom_integration + ): + _report_integration(what, integration_frame, level, error_if_integration) def _report_integration( diff --git a/homeassistant/helpers/schema_config_entry_flow.py b/homeassistant/helpers/schema_config_entry_flow.py index af8c4c6402d..b956a58398a 100644 --- a/homeassistant/helpers/schema_config_entry_flow.py +++ b/homeassistant/helpers/schema_config_entry_flow.py @@ -421,6 +421,8 @@ class SchemaOptionsFlowHandler(OptionsFlow): options, which is the union of stored options and user input from the options flow steps. 
""" + # Although `self.options` is most likely unused, it is safer to keep both + # `self.options` and `self._common_handler.options` referring to the same object self._options = copy.deepcopy(dict(config_entry.options)) self._common_handler = SchemaCommonFlowHandler(self, options_flow, self.options) self._async_options_flow_finished = async_options_flow_finished @@ -435,11 +437,6 @@ class SchemaOptionsFlowHandler(OptionsFlow): if async_setup_preview: setattr(self, "async_setup_preview", async_setup_preview) - @property - def options(self) -> dict[str, Any]: - """Return a mutable copy of the config entry options.""" - return self._options - @staticmethod def _async_step( step_id: str, diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index e3da52604cb..33e8f3d3d6e 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -1277,14 +1277,14 @@ def async_register_entity_service( schema = cv.make_entity_service_schema(schema) elif not cv.is_entity_service_schema(schema): # pylint: disable-next=import-outside-toplevel - from .frame import ReportBehavior, report_usage + from .frame import report - report_usage( + report( ( "registers an entity service with a non entity service schema " "which will stop working in HA Core 2025.9" ), - core_behavior=ReportBehavior.LOG, + error_if_core=False, ) service_func: str | HassJob[..., Any] diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 2eab666bbd4..753464c35d5 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -515,18 +515,18 @@ class Template: will be non optional in Home Assistant Core 2025.10. """ # pylint: disable-next=import-outside-toplevel - from .frame import ReportBehavior, report_usage + from .frame import report if not isinstance(template, str): raise TypeError("Expected template to be a string") if not hass: - report_usage( + report( ( "creates a template object without passing hass, " "which will stop working in HA Core 2025.10" ), - core_behavior=ReportBehavior.LOG, + error_if_core=False, ) self.template: str = template.strip() diff --git a/homeassistant/helpers/update_coordinator.py b/homeassistant/helpers/update_coordinator.py index 87d55891e90..f5c2a2a1288 100644 --- a/homeassistant/helpers/update_coordinator.py +++ b/homeassistant/helpers/update_coordinator.py @@ -29,7 +29,7 @@ from homeassistant.util.dt import utcnow from . import entity, event from .debounce import Debouncer -from .frame import report_usage +from .frame import report from .typing import UNDEFINED, UndefinedType REQUEST_REFRESH_DEFAULT_COOLDOWN = 10 @@ -286,20 +286,24 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): to ensure that multiple retries do not cause log spam. 
""" if self.config_entry is None: - report_usage( + report( "uses `async_config_entry_first_refresh`, which is only supported " "for coordinators with a config entry and will stop working in " - "Home Assistant 2025.11" + "Home Assistant 2025.11", + error_if_core=True, + error_if_integration=False, ) elif ( self.config_entry.state is not config_entries.ConfigEntryState.SETUP_IN_PROGRESS ): - report_usage( + report( "uses `async_config_entry_first_refresh`, which is only supported " f"when entry state is {config_entries.ConfigEntryState.SETUP_IN_PROGRESS}, " f"but it is in state {self.config_entry.state}, " "This will stop working in Home Assistant 2025.11", + error_if_core=True, + error_if_integration=False, ) if await self.__wrap_async_setup(): await self._async_refresh( diff --git a/homeassistant/loader.py b/homeassistant/loader.py index d2e04df04c4..221a2c7ce19 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -1556,18 +1556,16 @@ class Components: raise ImportError(f"Unable to load {comp_name}") # Local import to avoid circular dependencies - # pylint: disable-next=import-outside-toplevel - from .helpers.frame import ReportBehavior, report_usage + from .helpers.frame import report # pylint: disable=import-outside-toplevel - report_usage( + report( ( f"accesses hass.components.{comp_name}." " This is deprecated and will stop working in Home Assistant 2025.3, it" f" should be updated to import functions used from {comp_name} directly" ), - core_behavior=ReportBehavior.IGNORE, - core_integration_behavior=ReportBehavior.IGNORE, - custom_integration_behavior=ReportBehavior.LOG, + error_if_core=False, + log_custom_component_only=True, ) wrapped = ModuleWrapper(self._hass, component) @@ -1587,18 +1585,16 @@ class Helpers: helper = importlib.import_module(f"homeassistant.helpers.{helper_name}") # Local import to avoid circular dependencies - # pylint: disable-next=import-outside-toplevel - from .helpers.frame import ReportBehavior, report_usage + from .helpers.frame import report # pylint: disable=import-outside-toplevel - report_usage( + report( ( f"accesses hass.helpers.{helper_name}." 
" This is deprecated and will stop working in Home Assistant 2025.5, it" f" should be updated to import functions used from {helper_name} directly" ), - core_behavior=ReportBehavior.IGNORE, - core_integration_behavior=ReportBehavior.IGNORE, - custom_integration_behavior=ReportBehavior.LOG, + error_if_core=False, + log_custom_component_only=True, ) wrapped = ModuleWrapper(self._hass, helper) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 5bc539beb86..aeaa4aa7dcd 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -5,7 +5,7 @@ aiodiscover==2.1.0 aiodns==3.2.0 aiohasupervisor==0.2.1 aiohttp-fast-zlib==0.1.1 -aiohttp==3.11.0 +aiohttp==3.10.10 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 @@ -13,7 +13,6 @@ async-interrupt==1.2.0 async-upnp-client==0.41.0 atomicwrites-homeassistant==1.4.1 attrs==24.2.0 -audioop-lts==0.2.1;python_version>='3.13' av==13.1.0 awesomeversion==24.6.0 bcrypt==4.2.0 @@ -28,23 +27,23 @@ ciso8601==2.3.1 cryptography==43.0.1 dbus-fast==2.24.3 fnv-hash-fast==1.0.2 -go2rtc-client==0.1.1 -ha-ffmpeg==3.2.2 +go2rtc-client==0.0.1b4 +ha-ffmpeg==3.2.1 habluetooth==3.6.0 hass-nabucasa==0.84.0 -hassil==2.0.1 +hassil==1.7.4 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241106.2 -home-assistant-intents==2024.11.13 +home-assistant-frontend==20241105.0 +home-assistant-intents==2024.11.4 httpx==0.27.2 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 mutagen==1.47.0 -orjson==3.10.11 +orjson==3.10.10 packaging>=23.1 paho-mqtt==1.6.1 -Pillow==11.0.0 +Pillow==10.4.0 propcache==0.2.0 psutil-home-assistant==0.0.1 PyJWT==2.9.0 @@ -60,12 +59,10 @@ PyYAML==6.0.2 requests==2.32.3 securetar==2024.2.1 SQLAlchemy==2.0.31 -standard-aifc==3.13.0;python_version>='3.13' -standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 ulid-transform==1.0.2 urllib3>=1.26.5,<2 -uv==0.5.0 +uv==0.4.28 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 @@ -84,9 +81,9 @@ httplib2>=0.19.0 # gRPC is an implicit dependency that we want to make explicit so we manage # upgrades intentionally. It is a large package to build from source and we # want to ensure we have wheels built. -grpcio==1.67.1 -grpcio-status==1.67.1 -grpcio-reflection==1.67.1 +grpcio==1.66.2 +grpcio-status==1.66.2 +grpcio-reflection==1.66.2 # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 @@ -115,8 +112,7 @@ httpcore==1.0.5 hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env -numpy==2.1.3 -pandas~=2.2.3 +numpy==1.26.4 # Constrain multidict to avoid typing issues # https://github.com/home-assistant/core/pull/67046 @@ -127,7 +123,7 @@ backoff>=2.0 # Required to avoid breaking (#101042). # v2 has breaking changes (#99218). -pydantic==1.10.19 +pydantic==1.10.18 # Required for Python 3.12.4 compatibility (#119223). mashumaro>=3.13.1 @@ -171,9 +167,12 @@ get-mac==1000000000.0.0 charset-normalizer==3.4.0 # dacite: Ensure we have a version that is able to handle type unions for -# NAM, Brother, and GIOS. +# Roborock, NAM, Brother, and GIOS. dacite>=1.7.0 +# Musle wheels for pandas 2.2.0 cannot be build for any architecture. 
+pandas==2.1.4 + # chacha20poly1305-reuseable==0.12.x is incompatible with cryptography==43.0.x chacha20poly1305-reuseable>=0.13.0 @@ -181,8 +180,8 @@ chacha20poly1305-reuseable>=0.13.0 # https://github.com/pycountry/pycountry/blob/ea69bab36f00df58624a0e490fdad4ccdc14268b/HISTORY.txt#L39 pycountry>=23.12.11 -# scapy==2.6.0 causes CI failures due to a race condition -scapy>=2.6.1 +# scapy<2.5.0 will not work with python3.12 +scapy>=2.5.0 # tuf isn't updated to deal with breaking changes in securesystemslib==1.0. # Only tuf>=4 includes a constraint to <1.0. diff --git a/homeassistant/util/unit_conversion.py b/homeassistant/util/unit_conversion.py index 1bf3561e66a..6bc595bd487 100644 --- a/homeassistant/util/unit_conversion.py +++ b/homeassistant/util/unit_conversion.py @@ -10,7 +10,6 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, UNIT_NOT_RECOGNIZED_TEMPLATE, - UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -174,17 +173,6 @@ class DistanceConverter(BaseUnitConverter): } -class BloodGlucoseConcentrationConverter(BaseUnitConverter): - """Utility to convert blood glucose concentration values.""" - - UNIT_CLASS = "blood_glucose_concentration" - _UNIT_CONVERSION: dict[str | None, float] = { - UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER: 18, - UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER: 1, - } - VALID_UNITS = set(UnitOfBloodGlucoseConcentration) - - class ConductivityConverter(BaseUnitConverter): """Utility to convert electric current values.""" @@ -234,8 +222,6 @@ class EnergyConverter(BaseUnitConverter): UnitOfEnergy.WATT_HOUR: 1e3, UnitOfEnergy.KILO_WATT_HOUR: 1, UnitOfEnergy.MEGA_WATT_HOUR: 1 / 1e3, - UnitOfEnergy.GIGA_WATT_HOUR: 1 / 1e6, - UnitOfEnergy.TERA_WATT_HOUR: 1 / 1e9, UnitOfEnergy.CALORIE: _WH_TO_CAL * 1e3, UnitOfEnergy.KILO_CALORIE: _WH_TO_CAL, UnitOfEnergy.MEGA_CALORIE: _WH_TO_CAL / 1e3, @@ -306,16 +292,10 @@ class PowerConverter(BaseUnitConverter): _UNIT_CONVERSION: dict[str | None, float] = { UnitOfPower.WATT: 1, UnitOfPower.KILO_WATT: 1 / 1000, - UnitOfPower.MEGA_WATT: 1 / 1e6, - UnitOfPower.GIGA_WATT: 1 / 1e9, - UnitOfPower.TERA_WATT: 1 / 1e12, } VALID_UNITS = { UnitOfPower.WATT, UnitOfPower.KILO_WATT, - UnitOfPower.MEGA_WATT, - UnitOfPower.GIGA_WATT, - UnitOfPower.TERA_WATT, } diff --git a/homeassistant/util/yaml/loader.py b/homeassistant/util/yaml/loader.py index 39d38a8f47d..39ac17d94f9 100644 --- a/homeassistant/util/yaml/loader.py +++ b/homeassistant/util/yaml/loader.py @@ -25,6 +25,7 @@ except ImportError: from propcache import cached_property from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.frame import report from .const import SECRET_YAML from .objects import Input, NodeDictClass, NodeListClass, NodeStrClass @@ -143,6 +144,37 @@ class FastSafeLoader(FastestAvailableSafeLoader, _LoaderMixin): self.secrets = secrets +class SafeLoader(FastSafeLoader): + """Provided for backwards compatibility. 
Logs when instantiated."""
+
+    def __init__(*args: Any, **kwargs: Any) -> None:
+        """Log a warning and call super."""
+        SafeLoader.__report_deprecated()
+        FastSafeLoader.__init__(*args, **kwargs)
+
+    @classmethod
+    def add_constructor(cls, tag: str, constructor: Callable) -> None:
+        """Log a warning and call super."""
+        SafeLoader.__report_deprecated()
+        FastSafeLoader.add_constructor(tag, constructor)
+
+    @classmethod
+    def add_multi_constructor(
+        cls, tag_prefix: str, multi_constructor: Callable
+    ) -> None:
+        """Log a warning and call super."""
+        SafeLoader.__report_deprecated()
+        FastSafeLoader.add_multi_constructor(tag_prefix, multi_constructor)
+
+    @staticmethod
+    def __report_deprecated() -> None:
+        """Log deprecation warning."""
+        report(
+            "uses deprecated 'SafeLoader' instead of 'FastSafeLoader', "
+            "which will stop working in HA Core 2024.6,"
+        )
+
+
 class PythonSafeLoader(yaml.SafeLoader, _LoaderMixin):
     """Python safe loader."""
 
@@ -152,6 +184,37 @@ class PythonSafeLoader(yaml.SafeLoader, _LoaderMixin):
         self.secrets = secrets
 
 
+class SafeLineLoader(PythonSafeLoader):
+    """Provided for backwards compatibility. Logs when instantiated."""
+
+    def __init__(*args: Any, **kwargs: Any) -> None:
+        """Log a warning and call super."""
+        SafeLineLoader.__report_deprecated()
+        PythonSafeLoader.__init__(*args, **kwargs)
+
+    @classmethod
+    def add_constructor(cls, tag: str, constructor: Callable) -> None:
+        """Log a warning and call super."""
+        SafeLineLoader.__report_deprecated()
+        PythonSafeLoader.add_constructor(tag, constructor)
+
+    @classmethod
+    def add_multi_constructor(
+        cls, tag_prefix: str, multi_constructor: Callable
+    ) -> None:
+        """Log a warning and call super."""
+        SafeLineLoader.__report_deprecated()
+        PythonSafeLoader.add_multi_constructor(tag_prefix, multi_constructor)
+
+    @staticmethod
+    def __report_deprecated() -> None:
+        """Log deprecation warning."""
+        report(
+            "uses deprecated 'SafeLineLoader' instead of 'PythonSafeLoader', "
+            "which will stop working in HA Core 2024.6,"
+        )
+
+
 type LoaderType = FastSafeLoader | PythonSafeLoader
 
 
diff --git a/mypy.ini b/mypy.ini
index 4d33f16d968..c851e586246 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -3056,16 +3056,6 @@ disallow_untyped_defs = true
 warn_return_any = true
 warn_unreachable = true
 
-[mypy-homeassistant.components.nasweb.*]
-check_untyped_defs = true
-disallow_incomplete_defs = true
-disallow_subclassing_any = true
-disallow_untyped_calls = true
-disallow_untyped_decorators = true
-disallow_untyped_defs = true
-warn_return_any = true
-warn_unreachable = true
-
 [mypy-homeassistant.components.neato.*]
 check_untyped_defs = true
 disallow_incomplete_defs = true
@@ -3156,16 +3146,6 @@ disallow_untyped_defs = true
 warn_return_any = true
 warn_unreachable = true
 
-[mypy-homeassistant.components.nordpool.*]
-check_untyped_defs = true
-disallow_incomplete_defs = true
-disallow_subclassing_any = true
-disallow_untyped_calls = true
-disallow_untyped_decorators = true
-disallow_untyped_defs = true
-warn_return_any = true
-warn_unreachable = true
-
 [mypy-homeassistant.components.notify.*]
 check_untyped_defs = true
 disallow_incomplete_defs = true
diff --git a/pyproject.toml b/pyproject.toml
index ebf22a93d7d..4a2857b5065 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -19,7 +19,6 @@ classifiers = [
     "License :: OSI Approved :: Apache Software License",
     "Operating System :: OS Independent",
     "Programming Language :: Python :: 3.12",
-    "Programming Language :: Python :: 3.13",
     "Topic :: Home Automation",
 ]
 requires-python = ">=3.12.0"
@@
-29,7 +28,7 @@ dependencies = [ # change behavior based on presence of supervisor. Deprecated with #127228 # Lib can be removed with 2025.11 "aiohasupervisor==0.2.1", - "aiohttp==3.11.0", + "aiohttp==3.10.10", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.1.1", "aiozoneinfo==0.2.1", @@ -37,7 +36,6 @@ dependencies = [ "async-interrupt==1.2.0", "attrs==24.2.0", "atomicwrites-homeassistant==1.4.1", - "audioop-lts==0.2.1;python_version>='3.13'", "awesomeversion==24.6.0", "bcrypt==4.2.0", "certifi>=2021.5.30", @@ -56,10 +54,10 @@ dependencies = [ "PyJWT==2.9.0", # PyJWT has loose dependency. We want the latest one. "cryptography==43.0.1", - "Pillow==11.0.0", + "Pillow==10.4.0", "propcache==0.2.0", "pyOpenSSL==24.2.1", - "orjson==3.10.11", + "orjson==3.10.10", "packaging>=23.1", "psutil-home-assistant==0.0.1", "python-slugify==8.0.4", @@ -67,15 +65,13 @@ dependencies = [ "requests==2.32.3", "securetar==2024.2.1", "SQLAlchemy==2.0.31", - "standard-aifc==3.13.0;python_version>='3.13'", - "standard-telnetlib==3.13.0;python_version>='3.13'", "typing-extensions>=4.12.2,<5.0", "ulid-transform==1.0.2", # Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503 # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 # https://github.com/home-assistant/core/issues/97248 "urllib3>=1.26.5,<2", - "uv==0.5.0", + "uv==0.4.28", "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.5", @@ -490,13 +486,10 @@ filterwarnings = [ "ignore:Deprecated call to `pkg_resources.declare_namespace\\(('azure'|'google.*'|'pywinusb'|'repoze'|'xbox'|'zope')\\)`:DeprecationWarning:pkg_resources", # -- tracked upstream / open PRs - # - pyOpenSSL v24.2.1 # https://github.com/certbot/certbot/issues/9828 - v2.11.0 - # https://github.com/certbot/certbot/issues/9992 "ignore:X509Extension support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - "ignore:CSR support in pyOpenSSL is deprecated. 
You should use the APIs in cryptography:DeprecationWarning:josepy.util", - # - other + # https://github.com/beetbox/mediafile/issues/67 - v0.12.0 + "ignore:'imghdr' is deprecated and slated for removal in Python 3.13:DeprecationWarning:mediafile", # https://github.com/foxel/python_ndms2_client/issues/6 - v0.1.3 # https://github.com/foxel/python_ndms2_client/pull/8 "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:ndms2_client.connection", @@ -533,8 +526,6 @@ filterwarnings = [ "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:onvif.client", # https://github.com/okunishinishi/python-stringcase/commit/6a5c5bbd3fe5337862abc7fd0853a0f36e18b2e1 - >1.2.0 "ignore:invalid escape sequence:SyntaxWarning:.*stringcase", - # https://github.com/cereal2nd/velbus-aio/pull/126 - >2024.10.0 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:velbusaio.handler", # -- fixed for Python 3.13 # https://github.com/rhasspy/wyoming/commit/e34af30d455b6f2bb9e5cfb25fad8d276914bc54 - >=1.4.2 @@ -558,7 +549,7 @@ filterwarnings = [ "ignore:setDaemon\\(\\) is deprecated, set the daemon attribute instead:DeprecationWarning:pylutron", # https://github.com/pschmitt/pynuki/blob/1.6.3/pynuki/utils.py#L21 - v1.6.3 - 2024-02-24 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pynuki.utils", - # https://github.com/lextudio/pysnmp/blob/v7.1.10/pysnmp/smi/compiler.py#L23-L31 - v7.1.10 - 2024-11-04 + # https://github.com/lextudio/pysnmp/blob/v7.1.8/pysnmp/smi/compiler.py#L23-L31 - v7.1.8 - 2024-10-15 "ignore:smiV1Relaxed is deprecated. Please use smi_v1_relaxed instead:DeprecationWarning:pysnmp.smi.compiler", "ignore:getReadersFromUrls is deprecated. Please use get_readers_from_urls instead:DeprecationWarning:pysmi.reader.url", # wrong stacklevel # https://github.com/briis/pyweatherflowudp/blob/v1.4.5/pyweatherflowudp/const.py#L20 - v1.4.5 - 2023-10-10 @@ -588,7 +579,7 @@ filterwarnings = [ # - pkg_resources # https://pypi.org/project/aiomusiccast/ - v0.14.8 - 2023-03-20 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:aiomusiccast", - # https://pypi.org/project/habitipy/ - v0.3.3 - 2024-10-28 + # https://pypi.org/project/habitipy/ - v0.3.1 - 2019-01-14 / 2024-04-28 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:habitipy.api", # https://github.com/eavanvalkenburg/pysiaalarm/blob/v3.1.1/src/pysiaalarm/data/data.py#L7 - v3.1.1 - 2023-04-17 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pysiaalarm.data.data", @@ -596,6 +587,14 @@ filterwarnings = [ "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pybotvac.version", # https://github.com/home-assistant-ecosystem/python-mystrom/blob/2.2.0/pymystrom/__init__.py#L10 - v2.2.0 - 2023-05-21 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pymystrom", + # https://pypi.org/project/velbus-aio/ - v2024.7.6 - 2024-07-31 + # https://github.com/Cereal2nd/velbus-aio/blob/2024.7.6/velbusaio/handler.py#L22 + "ignore:pkg_resources is deprecated as an API:DeprecationWarning:velbusaio.handler", + # - pyOpenSSL v24.2.1 + # https://pypi.org/project/acme/ - v2.11.0 - 2024-06-06 + "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", + # https://pypi.org/project/josepy/ - v1.14.0 - 2023-11-01 + "ignore:CSR support in pyOpenSSL is deprecated. 
You should use the APIs in cryptography:DeprecationWarning:josepy.util", # -- Python 3.13 # HomeAssistant @@ -609,7 +608,7 @@ filterwarnings = [ # https://github.com/Uberi/speech_recognition/blob/3.11.0/speech_recognition/__init__.py#L7 "ignore:'aifc' is deprecated and slated for removal in Python 3.13:DeprecationWarning:speech_recognition", # https://pypi.org/project/voip-utils/ - v0.2.0 - 2024-09-06 - # https://github.com/home-assistant-libs/voip-utils/blob/0.2.0/voip_utils/rtp_audio.py#L3 + # https://github.com/home-assistant-libs/voip-utils/blob/v0.2.0/voip_utils/rtp_audio.py#L3 "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:voip_utils.rtp_audio", # -- Python 3.13 - unmaintained projects, last release about 2+ years @@ -621,17 +620,6 @@ filterwarnings = [ # https://github.com/ssaenger/pyws66i/blob/v1.1/pyws66i/__init__.py#L2 "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pyws66i", - # -- New in Python 3.13 - # https://github.com/kurtmckee/feedparser/pull/389 - >6.0.11 - # https://github.com/kurtmckee/feedparser/issues/481 - "ignore:'count' is passed as positional argument:DeprecationWarning:feedparser.html", - # https://github.com/youknowone/python-deadlib - Backports for aifc, telnetlib - "ignore:aifc was removed in Python 3.13.*'standard-aifc':DeprecationWarning:speech_recognition", - "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:homeassistant.components.hddtemp.sensor", - "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:ndms2_client.connection", - "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:plumlightpad.lightpad", - "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:pyws66i", - # -- unmaintained projects, last release about 2+ years # https://pypi.org/project/agent-py/ - v0.0.23 - 2020-06-04 "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:agent.a", diff --git a/requirements.txt b/requirements.txt index b97c8dc57a0..a5beecec8ff 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ # Home Assistant Core aiodns==3.2.0 aiohasupervisor==0.2.1 -aiohttp==3.11.0 +aiohttp==3.10.10 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.1.1 aiozoneinfo==0.2.1 @@ -13,7 +13,6 @@ astral==2.2 async-interrupt==1.2.0 attrs==24.2.0 atomicwrites-homeassistant==1.4.1 -audioop-lts==0.2.1;python_version>='3.13' awesomeversion==24.6.0 bcrypt==4.2.0 certifi>=2021.5.30 @@ -27,10 +26,10 @@ Jinja2==3.1.4 lru-dict==1.3.0 PyJWT==2.9.0 cryptography==43.0.1 -Pillow==11.0.0 +Pillow==10.4.0 propcache==0.2.0 pyOpenSSL==24.2.1 -orjson==3.10.11 +orjson==3.10.10 packaging>=23.1 psutil-home-assistant==0.0.1 python-slugify==8.0.4 @@ -38,12 +37,10 @@ PyYAML==6.0.2 requests==2.32.3 securetar==2024.2.1 SQLAlchemy==2.0.31 -standard-aifc==3.13.0;python_version>='3.13' -standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 ulid-transform==1.0.2 urllib3>=1.26.5,<2 -uv==0.5.0 +uv==0.4.28 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.5 diff --git a/requirements_all.txt b/requirements_all.txt index 65ef5f1ebf2..3ac09644b5d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -33,7 +33,7 @@ Mastodon.py==1.8.1 # homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==11.0.0 +Pillow==10.4.0 # homeassistant.components.plex PlexAPI==4.15.16 @@ -152,7 +152,7 
@@ advantage-air==0.4.4 afsapi==0.2.7 # homeassistant.components.agent_dvr -agent-py==0.0.24 +agent-py==0.0.23 # homeassistant.components.geo_json_events aio-geojson-generic-client==0.4 @@ -172,9 +172,6 @@ aio-geojson-usgs-earthquakes==0.3 # homeassistant.components.gdacs aio-georss-gdacs==0.10 -# homeassistant.components.acaia -aioacaia==0.1.6 - # homeassistant.components.airq aioairq==0.3.2 @@ -182,7 +179,7 @@ aioairq==0.3.2 aioairzone-cloud==0.6.10 # homeassistant.components.airzone -aioairzone==0.9.6 +aioairzone==0.9.5 # homeassistant.components.ambient_network # homeassistant.components.ambient_station @@ -357,10 +354,10 @@ aiorecollect==2023.09.0 aioridwell==2024.01.0 # homeassistant.components.ruckus_unleashed -aioruckus==0.42 +aioruckus==0.41 # homeassistant.components.russound_rio -aiorussound==4.1.0 +aiorussound==4.0.5 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -384,7 +381,7 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.cambridge_audio -aiostreammagic==2.8.5 +aiostreammagic==2.8.4 # homeassistant.components.switcher_kis aioswitcher==4.4.0 @@ -395,9 +392,6 @@ aiosyncthing==0.5.1 # homeassistant.components.tankerkoenig aiotankerkoenig==0.4.2 -# homeassistant.components.tedee -aiotedee==0.2.20 - # homeassistant.components.tractive aiotractive==0.6.0 @@ -420,7 +414,7 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.1.3 +aiowithings==3.1.1 # homeassistant.components.yandex_transport aioymaps==1.2.5 @@ -708,7 +702,7 @@ connect-box==0.3.1 construct==2.10.68 # homeassistant.components.utility_meter -cronsim==2.6 +croniter==2.0.2 # homeassistant.components.crownstone crownstone-cloud==1.4.11 @@ -738,7 +732,7 @@ debugpy==1.8.6 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==8.4.1 +deebot-client==8.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -863,7 +857,7 @@ epion==0.0.3 epson-projector==0.5.1 # homeassistant.components.eq3btsmart -eq3btsmart==1.4.1 +eq3btsmart==1.2.0 # homeassistant.components.esphome esphome-dashboard-api==1.2.3 @@ -996,7 +990,7 @@ gitterpy==0.1.7 glances-api==0.8.0 # homeassistant.components.go2rtc -go2rtc-client==0.1.1 +go2rtc-client==0.0.1b4 # homeassistant.components.goalzero goalzero==0.2.2 @@ -1021,7 +1015,7 @@ google-cloud-texttospeech==2.17.2 google-generativeai==0.8.2 # homeassistant.components.nest -google-nest-sdm==6.1.5 +google-nest-sdm==6.1.3 # homeassistant.components.google_photos google-photos-library-api==0.12.1 @@ -1069,13 +1063,13 @@ gspread==5.5.0 gstreamer-player==1.1.2 # homeassistant.components.profiler -guppy3==3.1.4.post1;python_version<'3.13' +guppy3==3.1.4.post1 # homeassistant.components.iaqualink h2==4.1.0 # homeassistant.components.ffmpeg -ha-ffmpeg==3.2.2 +ha-ffmpeg==3.2.1 # homeassistant.components.iotawatt ha-iotawattpy==0.1.2 @@ -1096,7 +1090,7 @@ hass-nabucasa==0.84.0 hass-splunk==0.1.1 # homeassistant.components.conversation -hassil==2.0.1 +hassil==1.7.4 # homeassistant.components.jewish_calendar hdate==0.10.9 @@ -1130,10 +1124,10 @@ hole==0.8.0 holidays==0.60 # homeassistant.components.frontend -home-assistant-frontend==20241106.2 +home-assistant-frontend==20241105.0 # homeassistant.components.conversation -home-assistant-intents==2024.11.13 +home-assistant-intents==2024.11.4 # homeassistant.components.home_connect homeconnect==0.8.0 @@ -1151,7 +1145,7 @@ httplib2==0.20.4 huawei-lte-api==1.10.0 # homeassistant.components.huum -huum==0.7.12 +huum==0.7.10 # homeassistant.components.hyperion 
hyperion-py==0.7.5 @@ -1271,7 +1265,7 @@ lakeside==0.13 laundrify-aio==1.2.2 # homeassistant.components.lcn -lcn-frontend==0.2.2 +lcn-frontend==0.2.1 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1312,6 +1306,9 @@ linear-garage-door==0.2.9 # homeassistant.components.linode linode-api==4.1.9b1 +# homeassistant.components.lamarzocco +lmcloud==1.2.3 + # homeassistant.components.google_maps locationsharinglib==5.0.1 @@ -1376,7 +1373,7 @@ microBeesPy==0.3.2 mill-local==0.3.0 # homeassistant.components.mill -millheater==0.12.2 +millheater==0.11.8 # homeassistant.components.minio minio==7.1.12 @@ -1460,7 +1457,7 @@ nextdns==3.3.0 nibe==2.11.0 # homeassistant.components.nice_go -nice-go==0.3.10 +nice-go==0.3.9 # homeassistant.components.niko_home_control niko-home-control==0.2.1 @@ -1494,7 +1491,7 @@ numato-gpio==0.13.0 # homeassistant.components.stream # homeassistant.components.tensorflow # homeassistant.components.trend -numpy==2.1.3 +numpy==1.26.4 # homeassistant.components.nyt_games nyt_games==0.4.4 @@ -1654,7 +1651,7 @@ proxmoxer==2.0.1 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.1.0 +psutil==6.0.0 # homeassistant.components.pulseaudio_loopback pulsectl==23.5.2 @@ -1741,7 +1738,7 @@ pyRFXtrx==0.31.1 pySDCP==1 # homeassistant.components.tibber -pyTibber==0.30.8 +pyTibber==0.30.4 # homeassistant.components.dlink pyW215==0.7.0 @@ -2026,9 +2023,6 @@ pykwb==0.0.8 # homeassistant.components.lacrosse pylacrosse==0.4 -# homeassistant.components.lamarzocco -pylamarzocco==1.2.3 - # homeassistant.components.lastfm pylast==5.1.0 @@ -2101,9 +2095,6 @@ pynetio==0.1.9.1 # homeassistant.components.nobo_hub pynobo==1.8.1 -# homeassistant.components.nordpool -pynordpool==0.2.2 - # homeassistant.components.nuki pynuki==1.6.3 @@ -2155,7 +2146,7 @@ pyoverkiz==1.14.1 pyownet==0.10.0.post1 # homeassistant.components.palazzetti -pypalazzetti==0.1.11 +pypalazzetti==0.1.10 # homeassistant.components.elv pypca==0.0.7 @@ -2293,7 +2284,7 @@ pysqueezebox==0.10.0 pystiebeleltron==0.0.1.dev2 # homeassistant.components.suez_water -pysuezV2==1.3.1 +pysuezV2==0.2.2 # homeassistant.components.switchbee pyswitchbee==1.8.3 @@ -2301,6 +2292,9 @@ pyswitchbee==1.8.3 # homeassistant.components.tautulli pytautulli==23.1.1 +# homeassistant.components.tedee +pytedee-async==0.2.20 + # homeassistant.components.thinkingcleaner pythinkingcleaner==0.0.3 @@ -2365,7 +2359,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.7.7 # homeassistant.components.linkplay -python-linkplay==0.0.20 +python-linkplay==0.0.17 # homeassistant.components.lirc # python-lirc==1.2.3 @@ -2402,7 +2396,7 @@ python-rabbitair==0.0.8 python-ripple-api==0.0.3 # homeassistant.components.roborock -python-roborock==2.7.2 +python-roborock==2.6.1 # homeassistant.components.smarttub python-smarttub==0.0.36 @@ -2556,7 +2550,7 @@ renault-api==0.2.7 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.11.1 +reolink-aio==0.10.4 # homeassistant.components.idteck_prox rfk101py==0.0.1 @@ -2565,7 +2559,7 @@ rfk101py==0.0.1 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell==0.9.12 +ring-doorbell==0.9.8 # homeassistant.components.fleetgo ritassist==0.9.2 @@ -2632,7 +2626,7 @@ sendgrid==6.8.2 # homeassistant.components.emulated_kasa # homeassistant.components.sense -sense-energy==0.13.3 +sense-energy==0.13.2 # homeassistant.components.sensirion_ble sensirion-ble==0.1.1 @@ -2676,9 +2670,6 @@ simplisafe-python==2024.01.0 # homeassistant.components.sisyphus sisyphus-control==3.1.4 -# 
homeassistant.components.sky_remote -skyboxremote==0.0.6 - # homeassistant.components.slack slackclient==2.5.0 @@ -2695,7 +2686,7 @@ smhi-pkg==1.0.18 snapcast==2.3.6 # homeassistant.components.sonos -soco==0.30.6 +soco==0.30.4 # homeassistant.components.solaredge_local solaredge-local==0.2.3 @@ -2719,7 +2710,7 @@ speak2mary==1.4.0 speedtest-cli==2.1.3 # homeassistant.components.spotify -spotifyaio==0.8.8 +spotifyaio==0.8.5 # homeassistant.components.sql sqlparse==0.5.0 @@ -2864,7 +2855,7 @@ total-connect-client==2024.5 tp-connected==0.0.4 # homeassistant.components.tplink_omada -tplink-omada-client==1.4.3 +tplink-omada-client==1.4.2 # homeassistant.components.transmission transmission-rpc==7.0.3 @@ -2879,7 +2870,7 @@ ttls==1.8.3 ttn_client==1.2.0 # homeassistant.components.tuya -tuya-device-sharing-sdk==0.2.1 +tuya-device-sharing-sdk==0.1.9 # homeassistant.components.twentemilieu twentemilieu==2.0.1 @@ -2986,14 +2977,11 @@ weatherflow4py==1.0.6 # homeassistant.components.cisco_webex_teams webexpythonsdk==2.0.1 -# homeassistant.components.nasweb -webio-api==0.1.8 - # homeassistant.components.webmin webmin-xmlrpc==0.0.2 # homeassistant.components.weheat -weheat==2024.11.02 +weheat==2024.09.23 # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 @@ -3093,7 +3081,7 @@ ziggo-mediabox-xl==1.1.0 zm-py==0.5.4 # homeassistant.components.zwave_js -zwave-js-server-python==0.59.1 +zwave-js-server-python==0.58.1 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test.txt b/requirements_test.txt index 166fd965e2c..241fff89ac3 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -14,7 +14,7 @@ license-expression==30.4.0 mock-open==1.4.0 mypy-dev==1.14.0a2 pre-commit==4.0.0 -pydantic==1.10.19 +pydantic==1.10.18 pylint==3.3.1 pylint-per-file-ignores==1.3.2 pipdeptree==2.23.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b61e65f3c68..d8b4a50c254 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -33,7 +33,7 @@ Mastodon.py==1.8.1 # homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==11.0.0 +Pillow==10.4.0 # homeassistant.components.plex PlexAPI==4.15.16 @@ -140,7 +140,7 @@ advantage-air==0.4.4 afsapi==0.2.7 # homeassistant.components.agent_dvr -agent-py==0.0.24 +agent-py==0.0.23 # homeassistant.components.geo_json_events aio-geojson-generic-client==0.4 @@ -160,9 +160,6 @@ aio-geojson-usgs-earthquakes==0.3 # homeassistant.components.gdacs aio-georss-gdacs==0.10 -# homeassistant.components.acaia -aioacaia==0.1.6 - # homeassistant.components.airq aioairq==0.3.2 @@ -170,7 +167,7 @@ aioairq==0.3.2 aioairzone-cloud==0.6.10 # homeassistant.components.airzone -aioairzone==0.9.6 +aioairzone==0.9.5 # homeassistant.components.ambient_network # homeassistant.components.ambient_station @@ -339,10 +336,10 @@ aiorecollect==2023.09.0 aioridwell==2024.01.0 # homeassistant.components.ruckus_unleashed -aioruckus==0.42 +aioruckus==0.41 # homeassistant.components.russound_rio -aiorussound==4.1.0 +aiorussound==4.0.5 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -366,7 +363,7 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.cambridge_audio -aiostreammagic==2.8.5 +aiostreammagic==2.8.4 # homeassistant.components.switcher_kis aioswitcher==4.4.0 @@ -377,9 +374,6 @@ aiosyncthing==0.5.1 # homeassistant.components.tankerkoenig aiotankerkoenig==0.4.2 -# homeassistant.components.tedee -aiotedee==0.2.20 - # 
homeassistant.components.tractive aiotractive==0.6.0 @@ -402,7 +396,7 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.1.3 +aiowithings==3.1.1 # homeassistant.components.yandex_transport aioymaps==1.2.5 @@ -604,7 +598,7 @@ colorthief==0.2.1 construct==2.10.68 # homeassistant.components.utility_meter -cronsim==2.6 +croniter==2.0.2 # homeassistant.components.crownstone crownstone-cloud==1.4.11 @@ -628,7 +622,7 @@ dbus-fast==2.24.3 debugpy==1.8.6 # homeassistant.components.ecovacs -deebot-client==8.4.1 +deebot-client==8.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -732,7 +726,7 @@ epion==0.0.3 epson-projector==0.5.1 # homeassistant.components.eq3btsmart -eq3btsmart==1.4.1 +eq3btsmart==1.2.0 # homeassistant.components.esphome esphome-dashboard-api==1.2.3 @@ -846,7 +840,7 @@ gios==5.0.0 glances-api==0.8.0 # homeassistant.components.go2rtc -go2rtc-client==0.1.1 +go2rtc-client==0.0.1b4 # homeassistant.components.goalzero goalzero==0.2.2 @@ -871,7 +865,7 @@ google-cloud-texttospeech==2.17.2 google-generativeai==0.8.2 # homeassistant.components.nest -google-nest-sdm==6.1.5 +google-nest-sdm==6.1.3 # homeassistant.components.google_photos google-photos-library-api==0.12.1 @@ -907,13 +901,13 @@ growattServer==1.5.0 gspread==5.5.0 # homeassistant.components.profiler -guppy3==3.1.4.post1;python_version<'3.13' +guppy3==3.1.4.post1 # homeassistant.components.iaqualink h2==4.1.0 # homeassistant.components.ffmpeg -ha-ffmpeg==3.2.2 +ha-ffmpeg==3.2.1 # homeassistant.components.iotawatt ha-iotawattpy==0.1.2 @@ -931,7 +925,7 @@ habluetooth==3.6.0 hass-nabucasa==0.84.0 # homeassistant.components.conversation -hassil==2.0.1 +hassil==1.7.4 # homeassistant.components.jewish_calendar hdate==0.10.9 @@ -956,10 +950,10 @@ hole==0.8.0 holidays==0.60 # homeassistant.components.frontend -home-assistant-frontend==20241106.2 +home-assistant-frontend==20241105.0 # homeassistant.components.conversation -home-assistant-intents==2024.11.13 +home-assistant-intents==2024.11.4 # homeassistant.components.home_connect homeconnect==0.8.0 @@ -974,7 +968,7 @@ httplib2==0.20.4 huawei-lte-api==1.10.0 # homeassistant.components.huum -huum==0.7.12 +huum==0.7.10 # homeassistant.components.hyperion hyperion-py==0.7.5 @@ -1067,7 +1061,7 @@ lacrosse-view==1.0.3 laundrify-aio==1.2.2 # homeassistant.components.lcn -lcn-frontend==0.2.2 +lcn-frontend==0.2.1 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1093,6 +1087,9 @@ libsoundtouch==0.8 # homeassistant.components.linear_garage_door linear-garage-door==0.2.9 +# homeassistant.components.lamarzocco +lmcloud==1.2.3 + # homeassistant.components.london_underground london-tube-status==0.5 @@ -1145,7 +1142,7 @@ microBeesPy==0.3.2 mill-local==0.3.0 # homeassistant.components.mill -millheater==0.12.2 +millheater==0.11.8 # homeassistant.components.minio minio==7.1.12 @@ -1220,7 +1217,7 @@ nextdns==3.3.0 nibe==2.11.0 # homeassistant.components.nice_go -nice-go==0.3.10 +nice-go==0.3.9 # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 @@ -1242,7 +1239,7 @@ numato-gpio==0.13.0 # homeassistant.components.stream # homeassistant.components.tensorflow # homeassistant.components.trend -numpy==2.1.3 +numpy==1.26.4 # homeassistant.components.nyt_games nyt_games==0.4.4 @@ -1352,7 +1349,7 @@ prometheus-client==0.21.0 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.1.0 +psutil==6.0.0 # homeassistant.components.androidtv pure-python-adb[async]==0.3.0.dev0 @@ -1418,7 +1415,7 @@ 
pyElectra==1.2.4 pyRFXtrx==0.31.1 # homeassistant.components.tibber -pyTibber==0.30.8 +pyTibber==0.30.4 # homeassistant.components.dlink pyW215==0.7.0 @@ -1631,9 +1628,6 @@ pykrakenapi==0.1.8 # homeassistant.components.kulersky pykulersky==0.5.2 -# homeassistant.components.lamarzocco -pylamarzocco==1.2.3 - # homeassistant.components.lastfm pylast==5.1.0 @@ -1694,9 +1688,6 @@ pynetgear==0.10.10 # homeassistant.components.nobo_hub pynobo==1.8.1 -# homeassistant.components.nordpool -pynordpool==0.2.2 - # homeassistant.components.nuki pynuki==1.6.3 @@ -1742,7 +1733,7 @@ pyoverkiz==1.14.1 pyownet==0.10.0.post1 # homeassistant.components.palazzetti -pypalazzetti==0.1.11 +pypalazzetti==0.1.10 # homeassistant.components.lcn pypck==0.7.24 @@ -1850,7 +1841,7 @@ pyspeex-noise==1.0.2 pysqueezebox==0.10.0 # homeassistant.components.suez_water -pysuezV2==1.3.1 +pysuezV2==0.2.2 # homeassistant.components.switchbee pyswitchbee==1.8.3 @@ -1858,6 +1849,9 @@ pyswitchbee==1.8.3 # homeassistant.components.tautulli pytautulli==23.1.1 +# homeassistant.components.tedee +pytedee-async==0.2.20 + # homeassistant.components.motionmount python-MotionMount==2.2.0 @@ -1892,7 +1886,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.7.7 # homeassistant.components.linkplay -python-linkplay==0.0.20 +python-linkplay==0.0.17 # homeassistant.components.matter python-matter-server==6.6.0 @@ -1923,7 +1917,7 @@ python-picnic-api==1.1.0 python-rabbitair==0.0.8 # homeassistant.components.roborock -python-roborock==2.7.2 +python-roborock==2.6.1 # homeassistant.components.smarttub python-smarttub==0.0.36 @@ -2047,13 +2041,13 @@ renault-api==0.2.7 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.11.1 +reolink-aio==0.10.4 # homeassistant.components.rflink rflink==0.0.66 # homeassistant.components.ring -ring-doorbell==0.9.12 +ring-doorbell==0.9.8 # homeassistant.components.roku rokuecp==0.19.3 @@ -2099,7 +2093,7 @@ securetar==2024.2.1 # homeassistant.components.emulated_kasa # homeassistant.components.sense -sense-energy==0.13.3 +sense-energy==0.13.2 # homeassistant.components.sensirion_ble sensirion-ble==0.1.1 @@ -2134,9 +2128,6 @@ simplepush==2.2.3 # homeassistant.components.simplisafe simplisafe-python==2024.01.0 -# homeassistant.components.sky_remote -skyboxremote==0.0.6 - # homeassistant.components.slack slackclient==2.5.0 @@ -2150,7 +2141,7 @@ smhi-pkg==1.0.18 snapcast==2.3.6 # homeassistant.components.sonos -soco==0.30.6 +soco==0.30.4 # homeassistant.components.solarlog solarlog_cli==0.3.2 @@ -2171,7 +2162,7 @@ speak2mary==1.4.0 speedtest-cli==2.1.3 # homeassistant.components.spotify -spotifyaio==0.8.8 +spotifyaio==0.8.5 # homeassistant.components.sql sqlparse==0.5.0 @@ -2277,7 +2268,7 @@ toonapi==0.3.0 total-connect-client==2024.5 # homeassistant.components.tplink_omada -tplink-omada-client==1.4.3 +tplink-omada-client==1.4.2 # homeassistant.components.transmission transmission-rpc==7.0.3 @@ -2292,7 +2283,7 @@ ttls==1.8.3 ttn_client==1.2.0 # homeassistant.components.tuya -tuya-device-sharing-sdk==0.2.1 +tuya-device-sharing-sdk==0.1.9 # homeassistant.components.twentemilieu twentemilieu==2.0.1 @@ -2381,14 +2372,11 @@ watchdog==2.3.1 # homeassistant.components.weatherflow_cloud weatherflow4py==1.0.6 -# homeassistant.components.nasweb -webio-api==0.1.8 - # homeassistant.components.webmin webmin-xmlrpc==0.0.2 # homeassistant.components.weheat -weheat==2024.11.02 +weheat==2024.09.23 # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 @@ -2467,7 +2455,7 @@ zeversolar==0.3.2 zha==0.0.37 # 
homeassistant.components.zwave_js -zwave-js-server-python==0.59.1 +zwave-js-server-python==0.58.1 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index 23f584dd0de..bab89d20584 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.7.3 +ruff==0.7.2 yamllint==1.35.1 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 7d53741c661..0f8354e1f60 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -58,16 +58,8 @@ INCLUDED_REQUIREMENTS_WHEELS = { # will be included in requirements_all_{action}.txt OVERRIDDEN_REQUIREMENTS_ACTIONS = { - "pytest": { - "exclude": set(), - "include": {"python-gammu"}, - "markers": {}, - }, - "wheels_aarch64": { - "exclude": set(), - "include": INCLUDED_REQUIREMENTS_WHEELS, - "markers": {}, - }, + "pytest": {"exclude": set(), "include": {"python-gammu"}}, + "wheels_aarch64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, # Pandas has issues building on armhf, it is expected they # will drop the platform in the near future (they consider it # "flimsy" on 386). The following packages depend on pandas, @@ -75,23 +67,10 @@ OVERRIDDEN_REQUIREMENTS_ACTIONS = { "wheels_armhf": { "exclude": {"env-canada", "noaa-coops", "pyezviz", "pykrakenapi"}, "include": INCLUDED_REQUIREMENTS_WHEELS, - "markers": {}, - }, - "wheels_armv7": { - "exclude": set(), - "include": INCLUDED_REQUIREMENTS_WHEELS, - "markers": {}, - }, - "wheels_amd64": { - "exclude": set(), - "include": INCLUDED_REQUIREMENTS_WHEELS, - "markers": {}, - }, - "wheels_i386": { - "exclude": set(), - "include": INCLUDED_REQUIREMENTS_WHEELS, - "markers": {}, }, + "wheels_armv7": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, + "wheels_amd64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, + "wheels_i386": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, } IGNORE_PIN = ("colorlog>2.1,<3", "urllib3") @@ -117,9 +96,9 @@ httplib2>=0.19.0 # gRPC is an implicit dependency that we want to make explicit so we manage # upgrades intentionally. It is a large package to build from source and we # want to ensure we have wheels built. -grpcio==1.67.1 -grpcio-status==1.67.1 -grpcio-reflection==1.67.1 +grpcio==1.66.2 +grpcio-status==1.66.2 +grpcio-reflection==1.66.2 # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 @@ -148,8 +127,7 @@ httpcore==1.0.5 hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env -numpy==2.1.3 -pandas~=2.2.3 +numpy==1.26.4 # Constrain multidict to avoid typing issues # https://github.com/home-assistant/core/pull/67046 @@ -160,7 +138,7 @@ backoff>=2.0 # Required to avoid breaking (#101042). # v2 has breaking changes (#99218). -pydantic==1.10.19 +pydantic==1.10.18 # Required for Python 3.12.4 compatibility (#119223). mashumaro>=3.13.1 @@ -204,9 +182,12 @@ get-mac==1000000000.0.0 charset-normalizer==3.4.0 # dacite: Ensure we have a version that is able to handle type unions for -# NAM, Brother, and GIOS. +# Roborock, NAM, Brother, and GIOS. dacite>=1.7.0 +# Musle wheels for pandas 2.2.0 cannot be build for any architecture. 
+pandas==2.1.4 + # chacha20poly1305-reuseable==0.12.x is incompatible with cryptography==43.0.x chacha20poly1305-reuseable>=0.13.0 @@ -214,8 +195,8 @@ chacha20poly1305-reuseable>=0.13.0 # https://github.com/pycountry/pycountry/blob/ea69bab36f00df58624a0e490fdad4ccdc14268b/HISTORY.txt#L39 pycountry>=23.12.11 -# scapy==2.6.0 causes CI failures due to a race condition -scapy>=2.6.1 +# scapy<2.5.0 will not work with python3.12 +scapy>=2.5.0 # tuf isn't updated to deal with breaking changes in securesystemslib==1.0. # Only tuf>=4 includes a constraint to <1.0. @@ -332,10 +313,6 @@ def process_action_requirement(req: str, action: str) -> str: return req if normalized_package_name in EXCLUDED_REQUIREMENTS_ALL: return f"# {req}" - if markers := OVERRIDDEN_REQUIREMENTS_ACTIONS[action]["markers"].get( - normalized_package_name, None - ): - return f"{req};{markers}" return req diff --git a/script/hassfest/docker.py b/script/hassfest/docker.py index 57d86bc4def..083cdaba1a9 100644 --- a/script/hassfest/docker.py +++ b/script/hassfest/docker.py @@ -4,7 +4,6 @@ from dataclasses import dataclass from pathlib import Path from homeassistant import core -from homeassistant.components.go2rtc.const import RECOMMENDED_VERSION as GO2RTC_VERSION from homeassistant.const import Platform from homeassistant.util import executor, thread from script.gen_requirements_all import gather_recursive_requirements @@ -80,7 +79,7 @@ WORKDIR /config _HASSFEST_TEMPLATE = r"""# Automatically generated by hassfest. # # To update, run python3 -m script.hassfest -p docker -FROM python:3.13-alpine +FROM python:3.12-alpine ENV \ UV_SYSTEM_PYTHON=true \ @@ -113,6 +112,8 @@ LABEL "com.github.actions.icon"="terminal" LABEL "com.github.actions.color"="gray-dark" """ +_GO2RTC_VERSION = "1.9.6" + def _get_package_versions(file: Path, packages: set[str]) -> dict[str, str]: package_versions: dict[str, str] = {} @@ -161,8 +162,6 @@ def _generate_hassfest_dockerimage( packages.update( gather_recursive_requirements(platform.value, already_checked_domains) ) - # Add go2rtc requirements as this file needs the go2rtc integration - packages.update(gather_recursive_requirements("go2rtc", already_checked_domains)) return File( _HASSFEST_TEMPLATE.format( @@ -198,7 +197,7 @@ def _generate_files(config: Config) -> list[File]: DOCKERFILE_TEMPLATE.format( timeout=timeout, **package_versions, - go2rtc=GO2RTC_VERSION, + go2rtc=_GO2RTC_VERSION, ), config.root / "Dockerfile", ), diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 0fa0a1a89fa..1e948c2982a 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -1,7 +1,7 @@ # Automatically generated by hassfest. # # To update, run python3 -m script.hassfest -p docker -FROM python:3.13-alpine +FROM python:3.12-alpine ENV \ UV_SYSTEM_PYTHON=true \ @@ -14,7 +14,7 @@ WORKDIR "/github/workspace" COPY . 
/usr/src/homeassistant # Uv is only needed during build -RUN --mount=from=ghcr.io/astral-sh/uv:0.5.0,source=/uv,target=/bin/uv \ +RUN --mount=from=ghcr.io/astral-sh/uv:0.4.28,source=/uv,target=/bin/uv \ # Required for PyTurboJPEG apk add --no-cache libturbojpeg \ && uv pip install \ @@ -22,8 +22,8 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.0,source=/uv,target=/bin/uv \ --no-cache \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ - stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.7.3 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.1 home-assistant-intents==2024.11.13 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.7.2 \ + PyTurboJPEG==1.7.5 ha-ffmpeg==3.2.1 hassil==1.7.4 home-assistant-intents==2024.11.4 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/script/hassfest/manifest.py b/script/hassfest/manifest.py index 4013c8a6c19..6d2f4087f59 100644 --- a/script/hassfest/manifest.py +++ b/script/hassfest/manifest.py @@ -268,6 +268,7 @@ INTEGRATION_MANIFEST_SCHEMA = vol.Schema( ) ], vol.Required("documentation"): vol.All(vol.Url(), documentation_url), + vol.Optional("issue_tracker"): vol.Url(), vol.Optional("quality_scale"): vol.In(SUPPORTED_QUALITY_SCALES), vol.Optional("requirements"): [str], vol.Optional("dependencies"): [str], @@ -303,7 +304,6 @@ def manifest_schema(value: dict[str, Any]) -> vol.Schema: CUSTOM_INTEGRATION_MANIFEST_SCHEMA = INTEGRATION_MANIFEST_SCHEMA.extend( { vol.Optional("version"): vol.All(str, verify_version), - vol.Optional("issue_tracker"): vol.Url(), vol.Optional("import_executor"): bool, } ) diff --git a/script/hassfest/services.py b/script/hassfest/services.py index 8c9ab5c0c0b..92fca14d373 100644 --- a/script/hassfest/services.py +++ b/script/hassfest/services.py @@ -75,14 +75,6 @@ CUSTOM_INTEGRATION_FIELD_SCHEMA = CORE_INTEGRATION_FIELD_SCHEMA.extend( } ) -CUSTOM_INTEGRATION_SECTION_SCHEMA = vol.Schema( - { - vol.Optional("collapsed"): bool, - vol.Required("fields"): vol.Schema({str: CUSTOM_INTEGRATION_FIELD_SCHEMA}), - } -) - - CORE_INTEGRATION_SERVICE_SCHEMA = vol.Any( vol.Schema( { @@ -113,17 +105,7 @@ CUSTOM_INTEGRATION_SERVICE_SCHEMA = vol.Any( vol.Optional("target"): vol.Any( selector.TargetSelector.CONFIG_SCHEMA, None ), - vol.Optional("fields"): vol.All( - vol.Schema( - { - str: vol.Any( - CUSTOM_INTEGRATION_FIELD_SCHEMA, - CUSTOM_INTEGRATION_SECTION_SCHEMA, - ) - } - ), - unique_field_validator, - ), + vol.Optional("fields"): vol.Schema({str: CUSTOM_INTEGRATION_FIELD_SCHEMA}), } ), None, diff --git a/script/licenses.py b/script/licenses.py index 464a2fc456b..4f5432ad519 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -84,7 +84,6 @@ OSI_APPROVED_LICENSES_SPDX = { "LGPL-3.0-only", "LGPL-3.0-or-later", "MIT", - "MIT-CMU", "MPL-1.1", "MPL-2.0", "PSF-2.0", @@ -189,6 +188,7 @@ EXCEPTIONS = { "crownstone-uart", # https://github.com/crownstone/crownstone-lib-python-uart/pull/12 "eliqonline", # https://github.com/molobrakos/eliqonline/pull/17 "enocean", # https://github.com/kipe/enocean/pull/142 + "huum", # https://github.com/frwickst/pyhuum/pull/8 "imutils", # https://github.com/PyImageSearch/imutils/pull/292 "iso4217", # Public domain "kiwiki_client", # https://github.com/c7h/kiwiki_client/pull/6 diff --git a/script/split_tests.py b/script/split_tests.py index c64de46a068..e124f722552 100755 --- 
a/script/split_tests.py +++ b/script/split_tests.py @@ -49,27 +49,16 @@ class BucketHolder: test_folder.get_all_flatten(), reverse=True, key=lambda x: x.total_tests ) for tests in sorted_tests: + print(f"{tests.total_tests:>{digits}} tests in {tests.path}") if tests.added_to_bucket: # Already added to bucket continue - print(f"{tests.total_tests:>{digits}} tests in {tests.path}") smallest_bucket = min(self._buckets, key=lambda x: x.total_tests) - is_file = isinstance(tests, TestFile) if ( smallest_bucket.total_tests + tests.total_tests < self._tests_per_bucket - ) or is_file: + ) or isinstance(tests, TestFile): smallest_bucket.add(tests) - # Ensure all files from the same folder are in the same bucket - # to ensure that syrupy correctly identifies unused snapshots - if is_file: - for other_test in tests.parent.children.values(): - if other_test is tests or isinstance(other_test, TestFolder): - continue - print( - f"{other_test.total_tests:>{digits}} tests in {other_test.path} (same bucket)" - ) - smallest_bucket.add(other_test) # verify that all tests are added to a bucket if not test_folder.added_to_bucket: @@ -90,7 +79,6 @@ class TestFile: total_tests: int path: Path added_to_bucket: bool = field(default=False, init=False) - parent: TestFolder | None = field(default=None, init=False) def add_to_bucket(self) -> None: """Add test file to bucket.""" @@ -137,7 +125,6 @@ class TestFolder: def add_test_file(self, file: TestFile) -> None: """Add test file to folder.""" path = file.path - file.parent = self relative_path = path.relative_to(self.path) if not relative_path.parts: raise ValueError("Path is not a child of this folder") diff --git a/tests/components/acaia/__init__.py b/tests/components/acaia/__init__.py deleted file mode 100644 index f4eaa39e615..00000000000 --- a/tests/components/acaia/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -"""Common test tools for the acaia integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> None: - """Set up the acaia integration for testing.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/acaia/conftest.py b/tests/components/acaia/conftest.py deleted file mode 100644 index 1dc6ff31051..00000000000 --- a/tests/components/acaia/conftest.py +++ /dev/null @@ -1,80 +0,0 @@ -"""Common fixtures for the acaia tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch - -from aioacaia.acaiascale import AcaiaDeviceState -from aioacaia.const import UnitMass as AcaiaUnitOfMass -import pytest - -from homeassistant.components.acaia.const import CONF_IS_NEW_STYLE_SCALE, DOMAIN -from homeassistant.const import CONF_ADDRESS -from homeassistant.core import HomeAssistant - -from . 
import setup_integration - -from tests.common import MockConfigEntry - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.acaia.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_verify() -> Generator[AsyncMock]: - """Override is_new_scale check.""" - with patch( - "homeassistant.components.acaia.config_flow.is_new_scale", return_value=True - ) as mock_verify: - yield mock_verify - - -@pytest.fixture -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: - """Return the default mocked config entry.""" - return MockConfigEntry( - title="LUNAR-DDEEFF", - domain=DOMAIN, - version=1, - data={ - CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", - CONF_IS_NEW_STYLE_SCALE: True, - }, - unique_id="aa:bb:cc:dd:ee:ff", - ) - - -@pytest.fixture -async def init_integration( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_scale: MagicMock -) -> None: - """Set up the acaia integration for testing.""" - await setup_integration(hass, mock_config_entry) - - -@pytest.fixture -def mock_scale() -> Generator[MagicMock]: - """Return a mocked acaia scale client.""" - with ( - patch( - "homeassistant.components.acaia.coordinator.AcaiaScale", - autospec=True, - ) as scale_mock, - ): - scale = scale_mock.return_value - scale.connected = True - scale.mac = "aa:bb:cc:dd:ee:ff" - scale.model = "Lunar" - scale.timer_running = True - scale.heartbeat_task = None - scale.process_queue_task = None - scale.device_state = AcaiaDeviceState( - battery_level=42, units=AcaiaUnitOfMass.GRAMS - ) - scale.weight = 123.45 - yield scale diff --git a/tests/components/acaia/snapshots/test_button.ambr b/tests/components/acaia/snapshots/test_button.ambr deleted file mode 100644 index cd91ca1a17a..00000000000 --- a/tests/components/acaia/snapshots/test_button.ambr +++ /dev/null @@ -1,139 +0,0 @@ -# serializer version: 1 -# name: test_buttons[button.lunar_ddeeff_reset_timer-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.lunar_ddeeff_reset_timer', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Reset timer', - 'platform': 'acaia', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reset_timer', - 'unique_id': 'aa:bb:cc:dd:ee:ff_reset_timer', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[button.lunar_ddeeff_reset_timer-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'LUNAR-DDEEFF Reset timer', - }), - 'context': , - 'entity_id': 'button.lunar_ddeeff_reset_timer', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[button.lunar_ddeeff_start_stop_timer-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.lunar_ddeeff_start_stop_timer', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, 
- 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Start/stop timer', - 'platform': 'acaia', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'start_stop', - 'unique_id': 'aa:bb:cc:dd:ee:ff_start_stop', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[button.lunar_ddeeff_start_stop_timer-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'LUNAR-DDEEFF Start/stop timer', - }), - 'context': , - 'entity_id': 'button.lunar_ddeeff_start_stop_timer', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_buttons[button.lunar_ddeeff_tare-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.lunar_ddeeff_tare', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tare', - 'platform': 'acaia', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tare', - 'unique_id': 'aa:bb:cc:dd:ee:ff_tare', - 'unit_of_measurement': None, - }) -# --- -# name: test_buttons[button.lunar_ddeeff_tare-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'LUNAR-DDEEFF Tare', - }), - 'context': , - 'entity_id': 'button.lunar_ddeeff_tare', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/acaia/snapshots/test_init.ambr b/tests/components/acaia/snapshots/test_init.ambr deleted file mode 100644 index 1cc3d8dbbc0..00000000000 --- a/tests/components/acaia/snapshots/test_init.ambr +++ /dev/null @@ -1,33 +0,0 @@ -# serializer version: 1 -# name: test_device - DeviceRegistryEntrySnapshot({ - 'area_id': 'kitchen', - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'acaia', - 'aa:bb:cc:dd:ee:ff', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Acaia', - 'model': 'Lunar', - 'model_id': None, - 'name': 'LUNAR-DDEEFF', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': 'Kitchen', - 'sw_version': None, - 'via_device_id': None, - }) -# --- diff --git a/tests/components/acaia/test_button.py b/tests/components/acaia/test_button.py deleted file mode 100644 index f68f85e253d..00000000000 --- a/tests/components/acaia/test_button.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Tests for the acaia buttons.""" - -from datetime import timedelta -from unittest.mock import MagicMock, patch - -from freezegun.api import FrozenDateTimeFactory -from syrupy import SnapshotAssertion - -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_UNAVAILABLE, - STATE_UNKNOWN, - Platform, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform - -BUTTONS = ( - "tare", - "reset_timer", - "start_stop_timer", -) - - -async def test_buttons( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - mock_scale: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the acaia buttons.""" - - with patch("homeassistant.components.acaia.PLATFORMS", [Platform.BUTTON]): - await setup_integration(hass, mock_config_entry) - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_button_presses( - hass: HomeAssistant, - mock_scale: MagicMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the acaia button presses.""" - - await setup_integration(hass, mock_config_entry) - - for button in BUTTONS: - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - { - ATTR_ENTITY_ID: f"button.lunar_ddeeff_{button}", - }, - blocking=True, - ) - - function = getattr(mock_scale, button) - function.assert_called_once() - - -async def test_buttons_unavailable_on_disconnected_scale( - hass: HomeAssistant, - mock_scale: MagicMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, -) -> None: - """Test the acaia buttons are unavailable when the scale is disconnected.""" - - await setup_integration(hass, mock_config_entry) - - for button in BUTTONS: - state = hass.states.get(f"button.lunar_ddeeff_{button}") - assert state - assert state.state == STATE_UNKNOWN - - mock_scale.connected = False - freezer.tick(timedelta(minutes=10)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - for button in BUTTONS: - state = hass.states.get(f"button.lunar_ddeeff_{button}") - assert state - assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/acaia/test_config_flow.py b/tests/components/acaia/test_config_flow.py deleted file mode 100644 index 2bf4b1dbe8a..00000000000 --- a/tests/components/acaia/test_config_flow.py +++ /dev/null @@ -1,242 +0,0 @@ -"""Test the acaia config flow.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, patch - -from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, AcaiaUnknownDevice -import pytest - -from homeassistant.components.acaia.const import CONF_IS_NEW_STYLE_SCALE, DOMAIN -from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER -from homeassistant.const import CONF_ADDRESS -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers.service_info.bluetooth import BluetoothServiceInfo - -from tests.common import MockConfigEntry - -service_info = BluetoothServiceInfo( - name="LUNAR-DDEEFF", - address="aa:bb:cc:dd:ee:ff", - rssi=-63, - manufacturer_data={}, - service_data={}, - service_uuids=[], - source="local", -) - - -@pytest.fixture -def mock_discovered_service_info() -> Generator[AsyncMock]: - """Override getting Bluetooth service info.""" - with patch( - "homeassistant.components.acaia.config_flow.async_discovered_service_info", - return_value=[service_info], - ) as mock_discovered_service_info: - yield mock_discovered_service_info - - -async def test_form( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_verify: AsyncMock, - mock_discovered_service_info: AsyncMock, -) -> None: - """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - 
assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - user_input = { - CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", - } - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=user_input, - ) - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "LUNAR-DDEEFF" - assert result2["data"] == { - **user_input, - CONF_IS_NEW_STYLE_SCALE: True, - } - - -async def test_bluetooth_discovery( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_verify: AsyncMock, -) -> None: - """Test we can discover a device.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "bluetooth_confirm" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={}, - ) - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == service_info.name - assert result2["data"] == { - CONF_ADDRESS: service_info.address, - CONF_IS_NEW_STYLE_SCALE: True, - } - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (AcaiaDeviceNotFound("Error"), "device_not_found"), - (AcaiaError, "unknown"), - (AcaiaUnknownDevice, "unsupported_device"), - ], -) -async def test_bluetooth_discovery_errors( - hass: HomeAssistant, - mock_verify: AsyncMock, - exception: Exception, - error: str, -) -> None: - """Test abortions of Bluetooth discovery.""" - mock_verify.side_effect = exception - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == error - - -async def test_already_configured( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_verify: AsyncMock, - mock_discovered_service_info: AsyncMock, -) -> None: - """Ensure we can't add the same device twice.""" - - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "already_configured" - - -async def test_already_configured_bluetooth_discovery( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, -) -> None: - """Ensure configure device is not discovered again.""" - - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (AcaiaDeviceNotFound("Error"), "device_not_found"), - (AcaiaError, "unknown"), - ], -) -async def test_recoverable_config_flow_errors( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_verify: AsyncMock, - mock_discovered_service_info: AsyncMock, - exception: Exception, - error: str, -) -> None: - """Test recoverable errors.""" - mock_verify.side_effect = exception - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - - result2 = await 
hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", - }, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": error} - - # recover - mock_verify.side_effect = None - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - { - CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", - }, - ) - assert result3["type"] is FlowResultType.CREATE_ENTRY - - -async def test_unsupported_device( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_verify: AsyncMock, - mock_discovered_service_info: AsyncMock, -) -> None: - """Test flow aborts on unsupported device.""" - mock_verify.side_effect = AcaiaUnknownDevice - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", - }, - ) - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "unsupported_device" - - -async def test_no_bluetooth_devices( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_discovered_service_info: AsyncMock, -) -> None: - """Test flow aborts on unsupported device.""" - mock_discovered_service_info.return_value = [] - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "no_devices_found" diff --git a/tests/components/acaia/test_init.py b/tests/components/acaia/test_init.py deleted file mode 100644 index 8ad988d3b9b..00000000000 --- a/tests/components/acaia/test_init.py +++ /dev/null @@ -1,65 +0,0 @@ -"""Test init of acaia integration.""" - -from datetime import timedelta -from unittest.mock import MagicMock - -from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError -from freezegun.api import FrozenDateTimeFactory -import pytest -from syrupy import SnapshotAssertion - -from homeassistant.components.acaia.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from tests.common import MockConfigEntry, async_fire_time_changed - -pytestmark = pytest.mark.usefixtures("init_integration") - - -async def test_load_unload_config_entry( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, -) -> None: - """Test loading and unloading the integration.""" - - assert mock_config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_unload(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state is ConfigEntryState.NOT_LOADED - - -@pytest.mark.parametrize( - "exception", [AcaiaError, AcaiaDeviceNotFound("Boom"), TimeoutError] -) -async def test_update_exception_leads_to_active_disconnect( - hass: HomeAssistant, - mock_scale: MagicMock, - freezer: FrozenDateTimeFactory, - exception: Exception, -) -> None: - """Test scale gets disconnected on exception.""" - - mock_scale.connect.side_effect = exception - mock_scale.connected = False - - freezer.tick(timedelta(minutes=10)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - mock_scale.device_disconnected_handler.assert_called_once() - - -async def test_device( - mock_scale: MagicMock, - device_registry: dr.DeviceRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Snapshot the device from 
registry.""" - - device = device_registry.async_get_device({(DOMAIN, mock_scale.mac)}) - assert device - assert device == snapshot diff --git a/tests/components/alarm_control_panel/test_init.py b/tests/components/alarm_control_panel/test_init.py index 89a2a2a2b1a..90b23f87ab1 100644 --- a/tests/components/alarm_control_panel/test_init.py +++ b/tests/components/alarm_control_panel/test_init.py @@ -489,96 +489,3 @@ async def test_alarm_control_panel_log_deprecated_state_warning_using_attr_state ) # Test we only log once assert "Entities should implement the 'alarm_state' property and" not in caplog.text - - -async def test_alarm_control_panel_deprecated_state_does_not_break_state( - hass: HomeAssistant, - code_format: CodeFormat | None, - supported_features: AlarmControlPanelEntityFeature, - code_arm_required: bool, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test using _attr_state attribute does not break state.""" - - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups( - config_entry, [ALARM_CONTROL_PANEL_DOMAIN] - ) - return True - - mock_integration( - hass, - MockModule( - TEST_DOMAIN, - async_setup_entry=async_setup_entry_init, - ), - ) - - class MockLegacyAlarmControlPanel(MockAlarmControlPanel): - """Mocked alarm control entity.""" - - def __init__( - self, - supported_features: AlarmControlPanelEntityFeature = AlarmControlPanelEntityFeature( - 0 - ), - code_format: CodeFormat | None = None, - code_arm_required: bool = True, - ) -> None: - """Initialize the alarm control.""" - self._attr_state = "armed_away" - super().__init__(supported_features, code_format, code_arm_required) - - def alarm_disarm(self, code: str | None = None) -> None: - """Mock alarm disarm calls.""" - self._attr_state = "disarmed" - - entity = MockLegacyAlarmControlPanel( - supported_features=supported_features, - code_format=code_format, - code_arm_required=code_arm_required, - ) - - async def async_setup_entry_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test alarm control panel platform via config entry.""" - async_add_entities([entity]) - - mock_platform( - hass, - f"{TEST_DOMAIN}.{ALARM_CONTROL_PANEL_DOMAIN}", - MockPlatform(async_setup_entry=async_setup_entry_platform), - ) - - with patch.object( - MockLegacyAlarmControlPanel, - "__module__", - "tests.custom_components.test.alarm_control_panel", - ): - config_entry = MockConfigEntry(domain=TEST_DOMAIN) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get(entity.entity_id) - assert state is not None - assert state.state == "armed_away" - - with patch.object( - MockLegacyAlarmControlPanel, - "__module__", - "tests.custom_components.test.alarm_control_panel", - ): - await help_test_async_alarm_control_panel_service( - hass, entity.entity_id, SERVICE_ALARM_DISARM - ) - - state = hass.states.get(entity.entity_id) - assert state is not None - assert state.state == "disarmed" diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr index b806c6faf23..131444c17ac 100644 --- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr +++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr @@ -697,7 +697,7 @@ 'speech': dict({ 
'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any area called Are', + 'speech': 'Sorry, I am not aware of any area called are', }), }), }), @@ -741,7 +741,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any area called Are', + 'speech': 'Sorry, I am not aware of any area called are', }), }), }), diff --git a/tests/components/backup/conftest.py b/tests/components/backup/conftest.py deleted file mode 100644 index 631c774e63c..00000000000 --- a/tests/components/backup/conftest.py +++ /dev/null @@ -1,73 +0,0 @@ -"""Test fixtures for the Backup integration.""" - -from __future__ import annotations - -from collections.abc import Generator -from pathlib import Path -from unittest.mock import MagicMock, Mock, patch - -import pytest - -from homeassistant.core import HomeAssistant - - -@pytest.fixture(name="mocked_json_bytes") -def mocked_json_bytes_fixture() -> Generator[Mock]: - """Mock json_bytes.""" - with patch( - "homeassistant.components.backup.manager.json_bytes", - return_value=b"{}", # Empty JSON - ) as mocked_json_bytes: - yield mocked_json_bytes - - -@pytest.fixture(name="mocked_tarfile") -def mocked_tarfile_fixture() -> Generator[Mock]: - """Mock tarfile.""" - with patch( - "homeassistant.components.backup.manager.SecureTarFile" - ) as mocked_tarfile: - yield mocked_tarfile - - -@pytest.fixture(name="mock_backup_generation") -def mock_backup_generation_fixture( - hass: HomeAssistant, mocked_json_bytes: Mock, mocked_tarfile: Mock -) -> Generator[None]: - """Mock backup generator.""" - - def _mock_iterdir(path: Path) -> list[Path]: - if not path.name.endswith("testing_config"): - return [] - return [ - Path("test.txt"), - Path(".DS_Store"), - Path(".storage"), - ] - - with ( - patch("pathlib.Path.iterdir", _mock_iterdir), - patch("pathlib.Path.stat", MagicMock(st_size=123)), - patch("pathlib.Path.is_file", lambda x: x.name != ".storage"), - patch( - "pathlib.Path.is_dir", - lambda x: x.name == ".storage", - ), - patch( - "pathlib.Path.exists", - lambda x: x != Path(hass.config.path("backups")), - ), - patch( - "pathlib.Path.is_symlink", - lambda _: False, - ), - patch( - "pathlib.Path.mkdir", - MagicMock(), - ), - patch( - "homeassistant.components.backup.manager.HAVERSION", - "2025.1.0", - ), - ): - yield diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 42eb524e529..096df37d704 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -210,23 +210,16 @@ dict({ 'id': 1, 'result': dict({ - 'slug': '27f5c632', + 'date': '1970-01-01T00:00:00.000Z', + 'name': 'Test', + 'path': 'abc123.tar', + 'size': 0.0, + 'slug': 'abc123', }), 'success': True, 'type': 'result', }) # --- -# name: test_generate[without_hassio].1 - dict({ - 'event': dict({ - 'done': True, - 'stage': None, - 'success': True, - }), - 'id': 1, - 'type': 'event', - }) -# --- # name: test_info[with_hassio] dict({ 'error': dict({ diff --git a/tests/components/backup/test_http.py b/tests/components/backup/test_http.py index 76b1f76b55b..93ecb27bc97 100644 --- a/tests/components/backup/test_http.py +++ b/tests/components/backup/test_http.py @@ -1,11 +1,8 @@ """Tests for the Backup integration.""" -import asyncio -from io import StringIO from unittest.mock import patch from aiohttp import web -import pytest from homeassistant.core import HomeAssistant @@ -52,12 +49,12 @@ async def 
test_downloading_backup_not_found( assert resp.status == 404 -async def test_downloading_as_non_admin( +async def test_non_admin( hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_admin_user: MockUser, ) -> None: - """Test downloading a backup file when you are not an admin.""" + """Test downloading a backup file that does not exist.""" hass_admin_user.groups = [] await setup_backup_integration(hass) @@ -65,53 +62,3 @@ async def test_downloading_as_non_admin( resp = await client.get("/api/backup/download/abc123") assert resp.status == 401 - - -async def test_uploading_a_backup_file( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, -) -> None: - """Test uploading a backup file.""" - await setup_backup_integration(hass) - - client = await hass_client() - - with patch( - "homeassistant.components.backup.manager.BackupManager.async_receive_backup", - ) as async_receive_backup_mock: - resp = await client.post( - "/api/backup/upload", - data={"file": StringIO("test")}, - ) - assert resp.status == 201 - assert async_receive_backup_mock.called - - -@pytest.mark.parametrize( - ("error", "message"), - [ - (OSError("Boom!"), "Can't write backup file Boom!"), - (asyncio.CancelledError("Boom!"), ""), - ], -) -async def test_error_handling_uploading_a_backup_file( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - error: Exception, - message: str, -) -> None: - """Test error handling when uploading a backup file.""" - await setup_backup_integration(hass) - - client = await hass_client() - - with patch( - "homeassistant.components.backup.manager.BackupManager.async_receive_backup", - side_effect=error, - ): - resp = await client.post( - "/api/backup/upload", - data={"file": StringIO("test")}, - ) - assert resp.status == 500 - assert await resp.text() == message diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 9d24964aedf..a4dba5c6936 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -2,18 +2,13 @@ from __future__ import annotations -import asyncio -from unittest.mock import AsyncMock, MagicMock, Mock, mock_open, patch +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, Mock, patch -import aiohttp -from multidict import CIMultiDict, CIMultiDictProxy import pytest from homeassistant.components.backup import BackupManager -from homeassistant.components.backup.manager import ( - BackupPlatformProtocol, - BackupProgress, -) +from homeassistant.components.backup.manager import BackupPlatformProtocol from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component @@ -23,30 +18,59 @@ from .common import TEST_BACKUP from tests.common import MockPlatform, mock_platform -async def _mock_backup_generation( - manager: BackupManager, mocked_json_bytes: Mock, mocked_tarfile: Mock -) -> None: +async def _mock_backup_generation(manager: BackupManager): """Mock backup generator.""" - progress: list[BackupProgress] = [] + def _mock_iterdir(path: Path) -> list[Path]: + if not path.name.endswith("testing_config"): + return [] + return [ + Path("test.txt"), + Path(".DS_Store"), + Path(".storage"), + ] - def on_progress(_progress: BackupProgress) -> None: - """Mock progress callback.""" - progress.append(_progress) + with ( + patch( + "homeassistant.components.backup.manager.SecureTarFile" + ) as mocked_tarfile, + patch("pathlib.Path.iterdir", _mock_iterdir), + 
patch("pathlib.Path.stat", MagicMock(st_size=123)), + patch("pathlib.Path.is_file", lambda x: x.name != ".storage"), + patch( + "pathlib.Path.is_dir", + lambda x: x.name == ".storage", + ), + patch( + "pathlib.Path.exists", + lambda x: x != manager.backup_dir, + ), + patch( + "pathlib.Path.is_symlink", + lambda _: False, + ), + patch( + "pathlib.Path.mkdir", + MagicMock(), + ), + patch( + "homeassistant.components.backup.manager.json_bytes", + return_value=b"{}", # Empty JSON + ) as mocked_json_bytes, + patch( + "homeassistant.components.backup.manager.HAVERSION", + "2025.1.0", + ), + ): + await manager.async_create_backup() - assert manager.backup_task is None - await manager.async_create_backup(on_progress=on_progress) - assert manager.backup_task is not None - assert progress == [] - - await manager.backup_task - assert progress == [BackupProgress(done=True, stage=None, success=True)] - - assert mocked_json_bytes.call_count == 1 - backup_json_dict = mocked_json_bytes.call_args[0][0] - assert isinstance(backup_json_dict, dict) - assert backup_json_dict["homeassistant"] == {"version": "2025.1.0"} - assert manager.backup_dir.as_posix() in str(mocked_tarfile.call_args_list[0][0][0]) + assert mocked_json_bytes.call_count == 1 + backup_json_dict = mocked_json_bytes.call_args[0][0] + assert isinstance(backup_json_dict, dict) + assert backup_json_dict["homeassistant"] == {"version": "2025.1.0"} + assert manager.backup_dir.as_posix() in str( + mocked_tarfile.call_args_list[0][0][0] + ) async def _setup_mock_domain( @@ -150,26 +174,21 @@ async def test_getting_backup_that_does_not_exist( async def test_async_create_backup_when_backing_up(hass: HomeAssistant) -> None: """Test generate backup.""" - event = asyncio.Event() manager = BackupManager(hass) - manager.backup_task = hass.async_create_task(event.wait()) + manager.backing_up = True with pytest.raises(HomeAssistantError, match="Backup already in progress"): - await manager.async_create_backup(on_progress=None) - event.set() + await manager.async_create_backup() -@pytest.mark.usefixtures("mock_backup_generation") async def test_async_create_backup( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - mocked_json_bytes: Mock, - mocked_tarfile: Mock, ) -> None: """Test generate backup.""" manager = BackupManager(hass) manager.loaded_backups = True - await _mock_backup_generation(manager, mocked_json_bytes, mocked_tarfile) + await _mock_backup_generation(manager) assert "Generated new backup with slug " in caplog.text assert "Creating backup directory" in caplog.text @@ -226,9 +245,7 @@ async def test_not_loading_bad_platforms( ) -async def test_exception_plaform_pre( - hass: HomeAssistant, mocked_json_bytes: Mock, mocked_tarfile: Mock -) -> None: +async def test_exception_plaform_pre(hass: HomeAssistant) -> None: """Test exception in pre step.""" manager = BackupManager(hass) manager.loaded_backups = True @@ -245,12 +262,10 @@ async def test_exception_plaform_pre( ) with pytest.raises(HomeAssistantError): - await _mock_backup_generation(manager, mocked_json_bytes, mocked_tarfile) + await _mock_backup_generation(manager) -async def test_exception_plaform_post( - hass: HomeAssistant, mocked_json_bytes: Mock, mocked_tarfile: Mock -) -> None: +async def test_exception_plaform_post(hass: HomeAssistant) -> None: """Test exception in post step.""" manager = BackupManager(hass) manager.loaded_backups = True @@ -267,7 +282,7 @@ async def test_exception_plaform_post( ) with pytest.raises(HomeAssistantError): - await _mock_backup_generation(manager, 
mocked_json_bytes, mocked_tarfile) + await _mock_backup_generation(manager) async def test_loading_platforms_when_running_async_pre_backup_actions( @@ -320,40 +335,6 @@ async def test_loading_platforms_when_running_async_post_backup_actions( assert "Loaded 1 platforms" in caplog.text -async def test_async_receive_backup( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test receiving a backup file.""" - manager = BackupManager(hass) - - size = 2 * 2**16 - protocol = Mock(_reading_paused=False) - stream = aiohttp.StreamReader(protocol, 2**16) - stream.feed_data(b"0" * size + b"\r\n--:--") - stream.feed_eof() - - open_mock = mock_open() - - with patch("pathlib.Path.open", open_mock), patch("shutil.move") as mover_mock: - await manager.async_receive_backup( - contents=aiohttp.BodyPartReader( - b"--:", - CIMultiDictProxy( - CIMultiDict( - { - aiohttp.hdrs.CONTENT_DISPOSITION: "attachment; filename=abc123.tar" - } - ) - ), - stream, - ) - ) - assert open_mock.call_count == 1 - assert mover_mock.call_count == 1 - assert mover_mock.mock_calls[0].args[1].name == "abc123.tar" - - async def test_async_trigger_restore( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 3e031f172ae..125ba8adaad 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -2,7 +2,6 @@ from unittest.mock import patch -from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion @@ -116,30 +115,29 @@ async def test_remove( @pytest.mark.parametrize( - ("with_hassio", "number_of_messages"), + "with_hassio", [ - pytest.param(True, 1, id="with_hassio"), - pytest.param(False, 2, id="without_hassio"), + pytest.param(True, id="with_hassio"), + pytest.param(False, id="without_hassio"), ], ) -@pytest.mark.usefixtures("mock_backup_generation") async def test_generate( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, with_hassio: bool, - number_of_messages: int, ) -> None: """Test generating a backup.""" await setup_backup_integration(hass, with_hassio=with_hassio) client = await hass_ws_client(hass) - freezer.move_to("2024-11-13 12:01:00+01:00") await hass.async_block_till_done() - await client.send_json_auto_id({"type": "backup/generate"}) - for _ in range(number_of_messages): + with patch( + "homeassistant.components.backup.manager.BackupManager.async_create_backup", + return_value=TEST_BACKUP, + ): + await client.send_json_auto_id({"type": "backup/generate"}) assert snapshot == await client.receive_json() diff --git a/tests/components/bang_olufsen/conftest.py b/tests/components/bang_olufsen/conftest.py index cbde856ff89..6c19a29c1da 100644 --- a/tests/components/bang_olufsen/conftest.py +++ b/tests/components/bang_olufsen/conftest.py @@ -35,13 +35,13 @@ from .const import ( TEST_DATA_CREATE_ENTRY, TEST_DATA_CREATE_ENTRY_2, TEST_FRIENDLY_NAME, + TEST_FRIENDLY_NAME_2, TEST_FRIENDLY_NAME_3, - TEST_FRIENDLY_NAME_4, + TEST_HOST_2, TEST_HOST_3, - TEST_HOST_4, TEST_JID_1, + TEST_JID_2, TEST_JID_3, - TEST_JID_4, TEST_NAME, TEST_NAME_2, TEST_SERIAL_NUMBER, @@ -267,29 +267,29 @@ def mock_mozart_client() -> Generator[AsyncMock]: } client.get_beolink_peers = AsyncMock() client.get_beolink_peers.return_value = [ + BeolinkPeer( + friendly_name=TEST_FRIENDLY_NAME_2, + jid=TEST_JID_2, + ip_address=TEST_HOST_2, + ), BeolinkPeer( 
friendly_name=TEST_FRIENDLY_NAME_3, jid=TEST_JID_3, ip_address=TEST_HOST_3, ), - BeolinkPeer( - friendly_name=TEST_FRIENDLY_NAME_4, - jid=TEST_JID_4, - ip_address=TEST_HOST_4, - ), ] client.get_beolink_listeners = AsyncMock() client.get_beolink_listeners.return_value = [ + BeolinkPeer( + friendly_name=TEST_FRIENDLY_NAME_2, + jid=TEST_JID_2, + ip_address=TEST_HOST_2, + ), BeolinkPeer( friendly_name=TEST_FRIENDLY_NAME_3, jid=TEST_JID_3, ip_address=TEST_HOST_3, ), - BeolinkPeer( - friendly_name=TEST_FRIENDLY_NAME_4, - jid=TEST_JID_4, - ip_address=TEST_HOST_4, - ), ] client.get_listening_mode_set = AsyncMock() diff --git a/tests/components/bang_olufsen/const.py b/tests/components/bang_olufsen/const.py index 6602a898eb6..3769aef5cd3 100644 --- a/tests/components/bang_olufsen/const.py +++ b/tests/components/bang_olufsen/const.py @@ -16,7 +16,6 @@ from mozart_api.models import ( PlayQueueItemType, RenderingState, SceneProperties, - Source, UserFlow, VolumeLevel, VolumeMute, @@ -126,10 +125,7 @@ TEST_DATA_ZEROCONF_IPV6 = ZeroconfServiceInfo( }, ) -TEST_SOURCE = Source( - name="Tidal", id="tidal", is_seekable=True, is_enabled=True, is_playable=True -) -TEST_AUDIO_SOURCES = [TEST_SOURCE.name, BangOlufsenSource.LINE_IN.name] +TEST_AUDIO_SOURCES = [BangOlufsenSource.TIDAL.name, BangOlufsenSource.LINE_IN.name] TEST_VIDEO_SOURCES = ["HDMI A"] TEST_SOURCES = TEST_AUDIO_SOURCES + TEST_VIDEO_SOURCES TEST_FALLBACK_SOURCES = [ diff --git a/tests/components/bang_olufsen/snapshots/test_media_player.ambr b/tests/components/bang_olufsen/snapshots/test_media_player.ambr deleted file mode 100644 index ea96e286821..00000000000 --- a/tests/components/bang_olufsen/snapshots/test_media_player.ambr +++ /dev/null @@ -1,874 +0,0 @@ -# serializer version: 1 -# name: test_async_beolink_allstandby - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_beolink_expand[all_discovered-True-None-log_messages0-2] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': 
'1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Tidal', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_beolink_expand[all_discovered-True-expand_side_effect1-log_messages1-2] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Tidal', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_beolink_expand[beolink_jids-parameter_value2-None-log_messages2-1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - 
]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Tidal', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_beolink_expand[beolink_jids-parameter_value3-expand_side_effect3-log_messages3-1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Tidal', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_beolink_join - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_beolink_unexpand - StateSnapshot({ - 
'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_join_players[group_members0-1-0] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Tidal', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_join_players[group_members0-1-0].1 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 
'media_player.beosound_balance_22222222', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_22222222', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_join_players[group_members1-0-1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Tidal', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_join_players[group_members1-0-1].1 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_22222222', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_22222222', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) 
-# --- -# name: test_async_join_players_invalid[source0-group_members0-expected_result0-invalid_source] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'media_position': 0, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Line-In', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_join_players_invalid[source0-group_members0-expected_result0-invalid_source].1 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_22222222', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_22222222', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_join_players_invalid[source1-group_members1-expected_result1-invalid_grouping_entity] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': 
'1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source': 'Tidal', - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_join_players_invalid[source1-group_members1-expected_result1-invalid_grouping_entity].1 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_22222222', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_22222222', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_unjoin_player - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_11111111', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'repeat': , - 'shuffle': False, - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 
'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_update_beolink_listener - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'leader': dict({ - 'Laundry room Balance': '1111.1111111.22222222@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_22222222', - 'media_player.beosound_balance_11111111', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_11111111', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_async_update_beolink_listener.1 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'beolink': dict({ - 'listeners': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'peers': dict({ - 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', - 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', - }), - 'self': dict({ - 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', - }), - }), - 'device_class': 'speaker', - 'entity_picture_local': None, - 'friendly_name': 'Living room Balance', - 'group_members': list([ - 'media_player.beosound_balance_22222222', - 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', - 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', - ]), - 'icon': 'mdi:speaker-wireless', - 'media_content_type': , - 'sound_mode': 'Test Listening Mode (123)', - 'sound_mode_list': list([ - 'Test Listening Mode (123)', - 'Test Listening Mode (234)', - 'Test Listening Mode 2 (345)', - ]), - 'source_list': list([ - 'Tidal', - 'Line-In', - 'HDMI A', - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'media_player.beosound_balance_22222222', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- diff --git a/tests/components/bang_olufsen/test_init.py b/tests/components/bang_olufsen/test_init.py index c8e4c05f9ab..5b809488ed8 100644 --- a/tests/components/bang_olufsen/test_init.py +++ b/tests/components/bang_olufsen/test_init.py @@ -9,7 +9,7 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceRegistry -from .const import TEST_FRIENDLY_NAME, TEST_MODEL_BALANCE, TEST_SERIAL_NUMBER +from .const import TEST_MODEL_BALANCE, TEST_NAME, TEST_SERIAL_NUMBER from tests.common import MockConfigEntry @@ -35,8 +35,7 @@ async def test_setup_entry( identifiers={(DOMAIN, TEST_SERIAL_NUMBER)} ) 
assert device is not None - # Is usually TEST_NAME, but is updated to the device's friendly name by _update_name_and_beolink - assert device.name == TEST_FRIENDLY_NAME + assert device.name == TEST_NAME assert device.model == TEST_MODEL_BALANCE # Ensure that the connection has been checked WebSocket connection has been initialized diff --git a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py index aa35b0265dc..8f23af9e04a 100644 --- a/tests/components/bang_olufsen/test_media_player.py +++ b/tests/components/bang_olufsen/test_media_player.py @@ -4,10 +4,8 @@ from contextlib import AbstractContextManager, nullcontext as does_not_raise import logging from unittest.mock import AsyncMock, patch -from mozart_api.exceptions import NotFoundException from mozart_api.models import ( BeolinkLeader, - BeolinkSelf, PlaybackContentMetadata, PlayQueueSettings, RenderingState, @@ -16,8 +14,6 @@ from mozart_api.models import ( WebsocketNotificationTag, ) import pytest -from syrupy.assertion import SnapshotAssertion -from syrupy.filters import props from homeassistant.components.bang_olufsen.const import ( BANG_OLUFSEN_REPEAT_FROM_HA, @@ -50,29 +46,24 @@ from homeassistant.components.media_player import ( ATTR_SOUND_MODE_LIST, DOMAIN as MEDIA_PLAYER_DOMAIN, SERVICE_CLEAR_PLAYLIST, - SERVICE_JOIN, SERVICE_MEDIA_NEXT_TRACK, SERVICE_MEDIA_PLAY_PAUSE, SERVICE_MEDIA_PREVIOUS_TRACK, SERVICE_MEDIA_SEEK, SERVICE_MEDIA_STOP, SERVICE_PLAY_MEDIA, - SERVICE_REPEAT_SET, SERVICE_SELECT_SOUND_MODE, SERVICE_SELECT_SOURCE, - SERVICE_SHUFFLE_SET, SERVICE_TURN_OFF, - SERVICE_UNJOIN, SERVICE_VOLUME_MUTE, SERVICE_VOLUME_SET, MediaPlayerState, MediaType, RepeatMode, ) -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_REPEAT_SET, SERVICE_SHUFFLE_SET from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.setup import async_setup_component from .const import ( @@ -85,10 +76,7 @@ from .const import ( TEST_DEEZER_TRACK, TEST_FALLBACK_SOURCES, TEST_FRIENDLY_NAME_2, - TEST_JID_1, TEST_JID_2, - TEST_JID_3, - TEST_JID_4, TEST_LISTENING_MODE_REF, TEST_MEDIA_PLAYER_ENTITY_ID, TEST_MEDIA_PLAYER_ENTITY_ID_2, @@ -105,7 +93,6 @@ from .const import ( TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT, TEST_SOUND_MODE_2, TEST_SOUND_MODES, - TEST_SOURCE, TEST_SOURCES, TEST_VIDEO_SOURCES, TEST_VOLUME, @@ -149,9 +136,6 @@ async def test_initialization( mock_mozart_client.get_remote_menu.assert_called_once() mock_mozart_client.get_listening_mode_set.assert_called_once() mock_mozart_client.get_active_listening_mode.assert_called_once() - mock_mozart_client.get_beolink_self.assert_called_once() - mock_mozart_client.get_beolink_peers.assert_called_once() - mock_mozart_client.get_beolink_listeners.assert_called_once() async def test_async_update_sources_audio_only( @@ -232,7 +216,7 @@ async def test_async_update_sources_availability( # Add a source that is available and playable mock_mozart_client.get_available_sources.return_value = SourceArray( - items=[TEST_SOURCE] + items=[BangOlufsenSource.TIDAL] ) # Send playback_source. 
The source is not actually used, so its attributes don't matter @@ -240,7 +224,7 @@ async def test_async_update_sources_availability( assert mock_mozart_client.get_available_sources.call_count == 2 assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes[ATTR_INPUT_SOURCE_LIST] == [TEST_SOURCE.name] + assert states.attributes[ATTR_INPUT_SOURCE_LIST] == [BangOlufsenSource.TIDAL.name] async def test_async_update_playback_metadata( @@ -358,17 +342,19 @@ async def test_async_update_playback_state( @pytest.mark.parametrize( - ("source", "content_type", "progress", "metadata"), + ("reported_source", "real_source", "content_type", "progress", "metadata"), [ - # Normal source, music mediatype expected + # Normal source, music mediatype expected, no progress expected ( - TEST_SOURCE, + BangOlufsenSource.TIDAL, + BangOlufsenSource.TIDAL, MediaType.MUSIC, TEST_PLAYBACK_PROGRESS.progress, PlaybackContentMetadata(), ), - # URI source, url media type expected + # URI source, url media type expected, no progress expected ( + BangOlufsenSource.URI_STREAMER, BangOlufsenSource.URI_STREAMER, MediaType.URL, TEST_PLAYBACK_PROGRESS.progress, @@ -377,17 +363,44 @@ async def test_async_update_playback_state( # Line-In source,media type expected, progress 0 expected ( BangOlufsenSource.LINE_IN, + BangOlufsenSource.CHROMECAST, MediaType.MUSIC, 0, PlaybackContentMetadata(), ), + # Chromecast as source, but metadata says Line-In. + # Progress is not set to 0 as the source is Chromecast first + ( + BangOlufsenSource.CHROMECAST, + BangOlufsenSource.LINE_IN, + MediaType.MUSIC, + TEST_PLAYBACK_PROGRESS.progress, + PlaybackContentMetadata(title=BangOlufsenSource.LINE_IN.name), + ), + # Chromecast as source, but metadata says Bluetooth + ( + BangOlufsenSource.CHROMECAST, + BangOlufsenSource.BLUETOOTH, + MediaType.MUSIC, + TEST_PLAYBACK_PROGRESS.progress, + PlaybackContentMetadata(title=BangOlufsenSource.BLUETOOTH.name), + ), + # Chromecast as source, but metadata says Bluetooth in another way + ( + BangOlufsenSource.CHROMECAST, + BangOlufsenSource.BLUETOOTH, + MediaType.MUSIC, + TEST_PLAYBACK_PROGRESS.progress, + PlaybackContentMetadata(art=[]), + ), ], ) async def test_async_update_source_change( hass: HomeAssistant, mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, - source: Source, + reported_source: Source, + real_source: Source, content_type: MediaType, progress: int, metadata: PlaybackContentMetadata, @@ -416,10 +429,10 @@ async def test_async_update_source_change( # Simulate metadata playback_metadata_callback(metadata) - source_change_callback(source) + source_change_callback(reported_source) assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes[ATTR_INPUT_SOURCE] == source.name + assert states.attributes[ATTR_INPUT_SOURCE] == real_source.name assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == content_type assert states.attributes[ATTR_MEDIA_POSITION] == progress @@ -517,14 +530,11 @@ async def test_async_update_beolink_line_in( assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert states.attributes["group_members"] == [] - # Called once during _initialize and once during _async_update_beolink - assert mock_mozart_client.get_beolink_listeners.call_count == 2 - assert mock_mozart_client.get_beolink_peers.call_count == 2 + assert mock_mozart_client.get_beolink_listeners.call_count == 1 async def test_async_update_beolink_listener( hass: HomeAssistant, - snapshot: SnapshotAssertion, mock_mozart_client: AsyncMock, 
mock_config_entry: MockConfigEntry, mock_config_entry_2: MockConfigEntry, @@ -557,56 +567,7 @@ async def test_async_update_beolink_listener( TEST_MEDIA_PLAYER_ENTITY_ID, ] - # Called once for each entity during _initialize - assert mock_mozart_client.get_beolink_listeners.call_count == 2 - # Called once for each entity during _initialize and - # once more during _async_update_beolink for the entity that has the callback associated with it. - assert mock_mozart_client.get_beolink_peers.call_count == 3 - - # Main entity - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - # Secondary entity - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID_2)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - -async def test_async_update_name_and_beolink( - hass: HomeAssistant, - device_registry: DeviceRegistry, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test _async_update_name_and_beolink.""" - # Change response to ensure device name is changed - mock_mozart_client.get_beolink_self.return_value = BeolinkSelf( - friendly_name=TEST_FRIENDLY_NAME_2, jid=TEST_JID_1 - ) - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - configuration_callback = ( - mock_mozart_client.get_notification_notifications.call_args[0][0] - ) - # Trigger callback - configuration_callback(WebsocketNotificationTag(value="configuration")) - - await hass.async_block_till_done() - - assert mock_mozart_client.get_beolink_self.call_count == 2 - assert mock_mozart_client.get_beolink_peers.call_count == 2 - assert mock_mozart_client.get_beolink_listeners.call_count == 2 - - # Check that device name has been changed - assert mock_config_entry.unique_id - assert ( - device := device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} - ) - ) - assert device.name == TEST_FRIENDLY_NAME_2 + assert mock_mozart_client.get_beolink_listeners.call_count == 0 async def test_async_mute_volume( @@ -746,7 +707,7 @@ async def test_async_media_next_track( ("source", "expected_result", "seek_called_times"), [ # Seekable source, seek expected - (TEST_SOURCE, does_not_raise(), 1), + (BangOlufsenSource.DEEZER, does_not_raise(), 1), # Non seekable source, seek shouldn't work (BangOlufsenSource.LINE_IN, pytest.raises(HomeAssistantError), 0), # Malformed source, seek shouldn't work @@ -834,7 +795,7 @@ async def test_async_clear_playlist( # Invalid source ("Test source", pytest.raises(ServiceValidationError), 0, 0), # Valid audio source - (TEST_SOURCE.name, does_not_raise(), 1, 0), + (BangOlufsenSource.TIDAL.name, does_not_raise(), 1, 0), # Valid video source (TEST_VIDEO_SOURCES[0], does_not_raise(), 0, 1), ], @@ -1382,7 +1343,6 @@ async def test_async_browse_media( ) async def test_async_join_players( hass: HomeAssistant, - snapshot: SnapshotAssertion, mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, mock_config_entry_2: MockConfigEntry, @@ -1404,11 +1364,11 @@ async def test_async_join_players( await hass.config_entries.async_setup(mock_config_entry_2.entry_id) # Set the source to a beolink expandable source - source_change_callback(TEST_SOURCE) + source_change_callback(BangOlufsenSource.TIDAL) await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_JOIN, + "media_player", + "join", { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_GROUP_MEMBERS: group_members, @@ -1419,14 +1379,6 
@@ async def test_async_join_players( assert mock_mozart_client.post_beolink_expand.call_count == expand_count assert mock_mozart_client.join_latest_beolink_experience.call_count == join_count - # Main entity - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - # Secondary entity - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID_2)) - assert states == snapshot(exclude=props("media_position_updated_at")) - @pytest.mark.parametrize( ("source", "group_members", "expected_result", "error_type"), @@ -1440,7 +1392,7 @@ async def test_async_join_players( ), # Invalid media_player entity ( - TEST_SOURCE, + BangOlufsenSource.TIDAL, [TEST_MEDIA_PLAYER_ENTITY_ID_3], pytest.raises(ServiceValidationError), "invalid_grouping_entity", @@ -1449,7 +1401,6 @@ async def test_async_join_players( ) async def test_async_join_players_invalid( hass: HomeAssistant, - snapshot: SnapshotAssertion, mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, mock_config_entry_2: MockConfigEntry, @@ -1474,8 +1425,8 @@ async def test_async_join_players_invalid( with expected_result as exc_info: await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_JOIN, + "media_player", + "join", { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_GROUP_MEMBERS: group_members, @@ -1490,18 +1441,9 @@ async def test_async_join_players_invalid( assert mock_mozart_client.post_beolink_expand.call_count == 0 assert mock_mozart_client.join_latest_beolink_experience.call_count == 0 - # Main entity - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - # Secondary entity - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID_2)) - assert states == snapshot(exclude=props("media_position_updated_at")) - async def test_async_unjoin_player( hass: HomeAssistant, - snapshot: SnapshotAssertion, mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: @@ -1511,181 +1453,14 @@ async def test_async_unjoin_player( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_UNJOIN, + "media_player", + "unjoin", {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, blocking=True, ) mock_mozart_client.post_beolink_leave.assert_called_once() - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - -async def test_async_beolink_join( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_beolink_join with defined JID.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.services.async_call( - DOMAIN, - "beolink_join", - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - "beolink_jid": TEST_JID_2, - }, - blocking=True, - ) - - mock_mozart_client.join_beolink_peer.assert_called_once_with(jid=TEST_JID_2) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - -@pytest.mark.parametrize( - ( - "parameter", - "parameter_value", - "expand_side_effect", - "log_messages", - "peers_call_count", - ), - [ - # All discovered - # Valid peers - ("all_discovered", True, None, [], 2), - # Invalid peers - ( - "all_discovered", - True, - 
NotFoundException(), - [f"Unable to expand to {TEST_JID_3}", f"Unable to expand to {TEST_JID_4}"], - 2, - ), - # Beolink JIDs - # Valid peer - ("beolink_jids", [TEST_JID_3, TEST_JID_4], None, [], 1), - # Invalid peer - ( - "beolink_jids", - [TEST_JID_3, TEST_JID_4], - NotFoundException(), - [ - f"Unable to expand to {TEST_JID_3}. Is the device available on the network?", - f"Unable to expand to {TEST_JID_4}. Is the device available on the network?", - ], - 1, - ), - ], -) -async def test_async_beolink_expand( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - snapshot: SnapshotAssertion, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, - parameter: str, - parameter_value: bool | list[str], - expand_side_effect: NotFoundException | None, - log_messages: list[str], - peers_call_count: int, -) -> None: - """Test async_beolink_expand.""" - mock_mozart_client.post_beolink_expand.side_effect = expand_side_effect - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - source_change_callback = ( - mock_mozart_client.get_source_change_notifications.call_args[0][0] - ) - - # Set the source to a beolink expandable source - source_change_callback(TEST_SOURCE) - - await hass.services.async_call( - DOMAIN, - "beolink_expand", - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - parameter: parameter_value, - }, - blocking=True, - ) - - # Check log messages - for log_message in log_messages: - assert log_message in caplog.text - - # Called once during _initialize and once during async_beolink_expand for all_discovered - assert mock_mozart_client.get_beolink_peers.call_count == peers_call_count - - assert mock_mozart_client.post_beolink_expand.call_count == len( - await mock_mozart_client.get_beolink_peers() - ) - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - -async def test_async_beolink_unexpand( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test test_async_beolink_unexpand.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.services.async_call( - DOMAIN, - "beolink_unexpand", - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - "beolink_jids": [TEST_JID_3, TEST_JID_4], - }, - blocking=True, - ) - - assert mock_mozart_client.post_beolink_unexpand.call_count == 2 - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == snapshot(exclude=props("media_position_updated_at")) - - -async def test_async_beolink_allstandby( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_mozart_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test async_beolink_allstandby.""" - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - - await hass.services.async_call( - DOMAIN, - "beolink_allstandby", - {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, - blocking=True, - ) - - mock_mozart_client.post_beolink_allstandby.assert_called_once() - - assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states == snapshot(exclude=props("media_position_updated_at")) - @pytest.mark.parametrize( ("repeat"), diff --git a/tests/components/bluesound/test_media_player.py b/tests/components/bluesound/test_media_player.py index 0bf615de3da..894528265e1 100644 --- 
a/tests/components/bluesound/test_media_player.py +++ b/tests/components/bluesound/test_media_player.py @@ -345,31 +345,3 @@ async def test_attr_bluesound_group( ).attributes.get("bluesound_group") assert attr_bluesound_group == ["player-name1111", "player-name2222"] - - -async def test_volume_up_from_6_to_7( - hass: HomeAssistant, - setup_config_entry: None, - player_mocks: PlayerMocks, -) -> None: - """Test the media player volume up from 6 to 7. - - This fails if if rounding is not done correctly. See https://github.com/home-assistant/core/issues/129956 for more details. - """ - player_mocks.player_data.status_long_polling_mock.set( - dataclasses.replace( - player_mocks.player_data.status_long_polling_mock.get(), volume=6 - ) - ) - - # give the long polling loop a chance to update the state; this could be any async call - await hass.async_block_till_done() - - await hass.services.async_call( - MEDIA_PLAYER_DOMAIN, - SERVICE_VOLUME_UP, - {ATTR_ENTITY_ID: "media_player.player_name1111"}, - blocking=True, - ) - - player_mocks.player_data.player.volume.assert_called_once_with(level=7) diff --git a/tests/components/bring/fixtures/items_invitation.json b/tests/components/bring/fixtures/items_invitation.json deleted file mode 100644 index 82ef623e439..00000000000 --- a/tests/components/bring/fixtures/items_invitation.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "uuid": "77a151f8-77c4-47a3-8295-c750a0e69d4f", - "status": "INVITATION", - "purchase": [ - { - "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", - "itemId": "Paprika", - "specification": "Rot", - "attributes": [ - { - "type": "PURCHASE_CONDITIONS", - "content": { - "urgent": true, - "convenient": true, - "discounted": true - } - } - ] - }, - { - "uuid": "72d370ab-d8ca-4e41-b956-91df94795b4e", - "itemId": "Pouletbrüstli", - "specification": "Bio", - "attributes": [ - { - "type": "PURCHASE_CONDITIONS", - "content": { - "urgent": true, - "convenient": true, - "discounted": true - } - } - ] - } - ], - "recently": [ - { - "uuid": "fc8db30a-647e-4e6c-9d71-3b85d6a2d954", - "itemId": "Ananas", - "specification": "", - "attributes": [] - } - ] -} diff --git a/tests/components/bring/fixtures/items_shared.json b/tests/components/bring/fixtures/items_shared.json deleted file mode 100644 index 9ac999729d3..00000000000 --- a/tests/components/bring/fixtures/items_shared.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "uuid": "77a151f8-77c4-47a3-8295-c750a0e69d4f", - "status": "SHARED", - "purchase": [ - { - "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", - "itemId": "Paprika", - "specification": "Rot", - "attributes": [ - { - "type": "PURCHASE_CONDITIONS", - "content": { - "urgent": true, - "convenient": true, - "discounted": true - } - } - ] - }, - { - "uuid": "72d370ab-d8ca-4e41-b956-91df94795b4e", - "itemId": "Pouletbrüstli", - "specification": "Bio", - "attributes": [ - { - "type": "PURCHASE_CONDITIONS", - "content": { - "urgent": true, - "convenient": true, - "discounted": true - } - } - ] - } - ], - "recently": [ - { - "uuid": "fc8db30a-647e-4e6c-9d71-3b85d6a2d954", - "itemId": "Ananas", - "specification": "", - "attributes": [] - } - ] -} diff --git a/tests/components/bring/snapshots/test_sensor.ambr b/tests/components/bring/snapshots/test_sensor.ambr index 97e1d1b4bd9..513b4e6469e 100644 --- a/tests/components/bring/snapshots/test_sensor.ambr +++ b/tests/components/bring/snapshots/test_sensor.ambr @@ -55,7 +55,6 @@ 'options': list([ 'registered', 'shared', - 'invitation', ]), }), 'config_entry_id': , @@ -93,7 +92,6 @@ 'options': list([ 'registered', 
'shared', - 'invitation', ]), }), 'context': , @@ -346,7 +344,6 @@ 'options': list([ 'registered', 'shared', - 'invitation', ]), }), 'config_entry_id': , @@ -384,7 +381,6 @@ 'options': list([ 'registered', 'shared', - 'invitation', ]), }), 'context': , diff --git a/tests/components/bring/test_sensor.py b/tests/components/bring/test_sensor.py index 974818ccedf..a36b0163165 100644 --- a/tests/components/bring/test_sensor.py +++ b/tests/components/bring/test_sensor.py @@ -1,18 +1,17 @@ """Test for sensor platform of the Bring! integration.""" from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.bring.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform +from tests.common import MockConfigEntry, snapshot_platform @pytest.fixture(autouse=True) @@ -43,34 +42,3 @@ async def test_setup( await snapshot_platform( hass, entity_registry, snapshot, bring_config_entry.entry_id ) - - -@pytest.mark.parametrize( - ("fixture", "entity_state"), - [ - ("items_invitation", "invitation"), - ("items_shared", "shared"), - ("items", "registered"), - ], -) -async def test_list_access_states( - hass: HomeAssistant, - bring_config_entry: MockConfigEntry, - mock_bring_client: AsyncMock, - fixture: str, - entity_state: str, -) -> None: - """Snapshot test states of list access sensor.""" - - mock_bring_client.get_list.return_value = load_json_object_fixture( - f"{fixture}.json", DOMAIN - ) - - bring_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(bring_config_entry.entry_id) - await hass.async_block_till_done() - - assert bring_config_entry.state is ConfigEntryState.LOADED - - assert (state := hass.states.get("sensor.einkauf_list_access")) - assert state.state == entity_state diff --git a/tests/components/camera/conftest.py b/tests/components/camera/conftest.py index f0c418711c7..d6343959d41 100644 --- a/tests/components/camera/conftest.py +++ b/tests/components/camera/conftest.py @@ -157,7 +157,7 @@ def mock_stream_source_fixture() -> Generator[AsyncMock]: @pytest.fixture async def mock_test_webrtc_cameras(hass: HomeAssistant) -> None: - """Initialize test WebRTC cameras with native RTC support.""" + """Initialize test WebRTC cameras.""" # Cannot use the fixture mock_camera_web_rtc as it's mocking Camera.async_handle_web_rtc_offer # and native support is checked by verifying the function "async_handle_web_rtc_offer" was diff --git a/tests/components/camera/test_init.py b/tests/components/camera/test_init.py index 32024694b7e..621ac8b7fb3 100644 --- a/tests/components/camera/test_init.py +++ b/tests/components/camera/test_init.py @@ -1005,52 +1005,3 @@ async def test_webrtc_provider_not_added_for_native_webrtc( assert camera_obj._webrtc_provider is None assert camera_obj._supports_native_sync_webrtc is not expect_native_async_webrtc assert camera_obj._supports_native_async_webrtc is expect_native_async_webrtc - - -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") -async def test_camera_capabilities_changing_non_native_support( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test WebRTC camera capabilities.""" - cam = get_camera_from_entity_id(hass,
"camera.demo_camera") - assert ( - cam.supported_features - == camera.CameraEntityFeature.ON_OFF | camera.CameraEntityFeature.STREAM - ) - - await _test_capabilities( - hass, - hass_ws_client, - cam.entity_id, - {StreamType.HLS}, - {StreamType.HLS, StreamType.WEB_RTC}, - ) - - cam._attr_supported_features = camera.CameraEntityFeature(0) - cam.async_write_ha_state() - await hass.async_block_till_done() - - await _test_capabilities(hass, hass_ws_client, cam.entity_id, set(), set()) - - -@pytest.mark.usefixtures("mock_test_webrtc_cameras") -@pytest.mark.parametrize(("entity_id"), ["camera.sync", "camera.async"]) -async def test_camera_capabilities_changing_native_support( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - entity_id: str, -) -> None: - """Test WebRTC camera capabilities.""" - cam = get_camera_from_entity_id(hass, entity_id) - assert cam.supported_features == camera.CameraEntityFeature.STREAM - - await _test_capabilities( - hass, hass_ws_client, cam.entity_id, {StreamType.WEB_RTC}, {StreamType.WEB_RTC} - ) - - cam._attr_supported_features = camera.CameraEntityFeature(0) - cam.async_write_ha_state() - await hass.async_block_till_done() - - await _test_capabilities(hass, hass_ws_client, cam.entity_id, set(), set()) diff --git a/tests/components/camera/test_webrtc.py b/tests/components/camera/test_webrtc.py index 29fb9d61c4e..7a1df556c20 100644 --- a/tests/components/camera/test_webrtc.py +++ b/tests/components/camera/test_webrtc.py @@ -139,46 +139,42 @@ async def init_test_integration( return test_camera -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +@pytest.mark.usefixtures("mock_camera", "mock_stream", "mock_stream_source") async def test_async_register_webrtc_provider( hass: HomeAssistant, ) -> None: """Test registering a WebRTC provider.""" + await async_setup_component(hass, "camera", {}) + camera = get_camera_from_entity_id(hass, "camera.demo_camera") - assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS} + assert camera.frontend_stream_type is StreamType.HLS provider = SomeTestProvider() unregister = async_register_webrtc_provider(hass, provider) await hass.async_block_till_done() - assert camera.camera_capabilities.frontend_stream_types == { - StreamType.HLS, - StreamType.WEB_RTC, - } + assert camera.frontend_stream_type is StreamType.WEB_RTC # Mark stream as unsupported provider._is_supported = False # Manually refresh the provider await camera.async_refresh_providers() - assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS} + assert camera.frontend_stream_type is StreamType.HLS # Mark stream as supported provider._is_supported = True # Manually refresh the provider await camera.async_refresh_providers() - assert camera.camera_capabilities.frontend_stream_types == { - StreamType.HLS, - StreamType.WEB_RTC, - } + assert camera.frontend_stream_type is StreamType.WEB_RTC unregister() await hass.async_block_till_done() - assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS} + assert camera.frontend_stream_type is StreamType.HLS -@pytest.mark.usefixtures("mock_camera", "mock_stream_source") +@pytest.mark.usefixtures("mock_camera", "mock_stream", "mock_stream_source") async def test_async_register_webrtc_provider_twice( hass: HomeAssistant, register_test_provider: SomeTestProvider, @@ -196,11 +192,13 @@ async def test_async_register_webrtc_provider_camera_not_loaded( async_register_webrtc_provider(hass, SomeTestProvider()) -@pytest.mark.usefixtures("mock_test_webrtc_cameras") 
+@pytest.mark.usefixtures("mock_camera", "mock_stream", "mock_stream_source") async def test_async_register_ice_server( hass: HomeAssistant, ) -> None: """Test registering an ICE server.""" + await async_setup_component(hass, "camera", {}) + # Clear any existing ICE servers hass.data[DATA_ICE_SERVERS].clear() @@ -218,7 +216,7 @@ async def test_async_register_ice_server( unregister = async_register_ice_servers(hass, get_ice_servers) assert not called - camera = get_camera_from_entity_id(hass, "camera.async") + camera = get_camera_from_entity_id(hass, "camera.demo_camera") config = camera.async_get_webrtc_client_configuration() assert config.configuration.ice_servers == [ @@ -279,7 +277,7 @@ async def test_async_register_ice_server( assert config.configuration.ice_servers == [] -@pytest.mark.usefixtures("mock_test_webrtc_cameras") +@pytest.mark.usefixtures("mock_camera_webrtc") async def test_ws_get_client_config( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -288,7 +286,7 @@ async def test_ws_get_client_config( client = await hass_ws_client(hass) await client.send_json_auto_id( - {"type": "camera/webrtc/get_client_config", "entity_id": "camera.async"} + {"type": "camera/webrtc/get_client_config", "entity_id": "camera.demo_camera"} ) msg = await client.receive_json() @@ -298,12 +296,8 @@ async def test_ws_get_client_config( assert msg["result"] == { "configuration": { "iceServers": [ - { - "urls": [ - "stun:stun.home-assistant.io:80", - "stun:stun.home-assistant.io:3478", - ] - }, + {"urls": "stun:stun.home-assistant.io:80"}, + {"urls": "stun:stun.home-assistant.io:3478"}, ], }, "getCandidatesUpfront": False, @@ -322,7 +316,7 @@ async def test_ws_get_client_config( async_register_ice_servers(hass, get_ice_server) await client.send_json_auto_id( - {"type": "camera/webrtc/get_client_config", "entity_id": "camera.async"} + {"type": "camera/webrtc/get_client_config", "entity_id": "camera.demo_camera"} ) msg = await client.receive_json() @@ -332,12 +326,8 @@ async def test_ws_get_client_config( assert msg["result"] == { "configuration": { "iceServers": [ - { - "urls": [ - "stun:stun.home-assistant.io:80", - "stun:stun.home-assistant.io:3478", - ] - }, + {"urls": "stun:stun.home-assistant.io:80"}, + {"urls": "stun:stun.home-assistant.io:3478"}, { "urls": ["stun:example2.com", "turn:example2.com"], "username": "user", @@ -372,7 +362,7 @@ async def test_ws_get_client_config_sync_offer( } -@pytest.mark.usefixtures("mock_test_webrtc_cameras") +@pytest.mark.usefixtures("mock_camera_webrtc") async def test_ws_get_client_config_custom_config( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -386,7 +376,7 @@ async def test_ws_get_client_config_custom_config( client = await hass_ws_client(hass) await client.send_json_auto_id( - {"type": "camera/webrtc/get_client_config", "entity_id": "camera.async"} + {"type": "camera/webrtc/get_client_config", "entity_id": "camera.demo_camera"} ) msg = await client.receive_json() @@ -437,7 +427,7 @@ def mock_rtsp_to_webrtc_fixture(hass: HomeAssistant) -> Generator[Mock]: unsub() -@pytest.mark.usefixtures("mock_test_webrtc_cameras") +@pytest.mark.usefixtures("mock_camera_webrtc") async def test_websocket_webrtc_offer( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -446,7 +436,7 @@ async def test_websocket_webrtc_offer( await client.send_json_auto_id( { "type": "camera/webrtc/offer", - "entity_id": "camera.async", + "entity_id": "camera.demo_camera", "offer": WEBRTC_OFFER, } ) @@ -557,11 +547,11 @@ async 
def test_websocket_webrtc_offer_webrtc_provider( mock_async_close_session.assert_called_once_with(session_id) +@pytest.mark.usefixtures("mock_camera_webrtc") async def test_websocket_webrtc_offer_invalid_entity( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test WebRTC with a camera entity that does not exist.""" - await async_setup_component(hass, "camera", {}) client = await hass_ws_client(hass) await client.send_json_auto_id( { @@ -580,7 +570,7 @@ async def test_websocket_webrtc_offer_invalid_entity( } -@pytest.mark.usefixtures("mock_test_webrtc_cameras") +@pytest.mark.usefixtures("mock_camera_webrtc") async def test_websocket_webrtc_offer_missing_offer( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -607,6 +597,7 @@ async def test_websocket_webrtc_offer_missing_offer( (TimeoutError(), "Timeout handling WebRTC offer"), ], ) +@pytest.mark.usefixtures("mock_camera_webrtc_frontendtype_only") async def test_websocket_webrtc_offer_failure( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -950,7 +941,7 @@ async def test_rtsp_to_webrtc_offer_not_accepted( unsub() -@pytest.mark.usefixtures("mock_test_webrtc_cameras") +@pytest.mark.usefixtures("mock_camera_webrtc") async def test_ws_webrtc_candidate( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -958,13 +949,13 @@ async def test_ws_webrtc_candidate( client = await hass_ws_client(hass) session_id = "session_id" candidate = "candidate" - with patch.object( - get_camera_from_entity_id(hass, "camera.async"), "async_on_webrtc_candidate" + with patch( + "homeassistant.components.camera.Camera.async_on_webrtc_candidate" ) as mock_on_webrtc_candidate: await client.send_json_auto_id( { "type": "camera/webrtc/candidate", - "entity_id": "camera.async", + "entity_id": "camera.demo_camera", "session_id": session_id, "candidate": candidate, } @@ -977,7 +968,7 @@ async def test_ws_webrtc_candidate( ) -@pytest.mark.usefixtures("mock_test_webrtc_cameras") +@pytest.mark.usefixtures("mock_camera_webrtc") async def test_ws_webrtc_candidate_not_supported( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -986,7 +977,7 @@ async def test_ws_webrtc_candidate_not_supported( await client.send_json_auto_id( { "type": "camera/webrtc/candidate", - "entity_id": "camera.sync", + "entity_id": "camera.demo_camera", "session_id": "session_id", "candidate": "candidate", } @@ -1029,11 +1020,11 @@ async def test_ws_webrtc_candidate_webrtc_provider( ) +@pytest.mark.usefixtures("mock_camera_webrtc") async def test_ws_webrtc_candidate_invalid_entity( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test ws WebRTC candidate command with a camera entity that does not exist.""" - await async_setup_component(hass, "camera", {}) client = await hass_ws_client(hass) await client.send_json_auto_id( { @@ -1053,7 +1044,7 @@ async def test_ws_webrtc_candidate_invalid_entity( } -@pytest.mark.usefixtures("mock_test_webrtc_cameras") +@pytest.mark.usefixtures("mock_camera_webrtc") async def test_ws_webrtc_canidate_missing_candidate( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -1062,7 +1053,7 @@ async def test_ws_webrtc_canidate_missing_candidate( await client.send_json_auto_id( { "type": "camera/webrtc/candidate", - "entity_id": "camera.async", + "entity_id": "camera.demo_camera", "session_id": "session_id", } ) diff --git a/tests/components/co2signal/test_config_flow.py b/tests/components/co2signal/test_config_flow.py index f8f94d44126..92d9450b670 
100644 --- a/tests/components/co2signal/test_config_flow.py +++ b/tests/components/co2signal/test_config_flow.py @@ -44,7 +44,7 @@ async def test_form_home(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Electricity Maps" + assert result2["title"] == "CO2 Signal" assert result2["data"] == { "api_key": "api_key", } @@ -185,7 +185,7 @@ async def test_form_error_handling( await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Electricity Maps" + assert result["title"] == "CO2 Signal" assert result["data"] == { "api_key": "api_key", } diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 363d39a2e63..ba5d12afd01 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -5,17 +5,10 @@ from __future__ import annotations from collections.abc import Callable, Generator from importlib.util import find_spec from pathlib import Path -import string from typing import TYPE_CHECKING, Any from unittest.mock import AsyncMock, MagicMock, patch -from aiohasupervisor.models import ( - Discovery, - Repository, - ResolutionInfo, - StoreAddon, - StoreInfo, -) +from aiohasupervisor.models import Discovery, Repository, StoreAddon, StoreInfo import pytest from homeassistant.config_entries import ( @@ -26,12 +19,7 @@ from homeassistant.config_entries import ( ) from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import ( - FlowContext, - FlowHandler, - FlowManager, - FlowResultType, -) +from homeassistant.data_entry_flow import FlowHandler, FlowManager, FlowResultType from homeassistant.helpers.translation import async_get_translations if TYPE_CHECKING: @@ -485,26 +473,6 @@ def supervisor_is_connected_fixture(supervisor_client: AsyncMock) -> AsyncMock: return supervisor_client.supervisor.ping -@pytest.fixture(name="resolution_info") -def resolution_info_fixture(supervisor_client: AsyncMock) -> AsyncMock: - """Mock resolution info from supervisor.""" - supervisor_client.resolution.info.return_value = ResolutionInfo( - suggestions=[], - unsupported=[], - unhealthy=[], - issues=[], - checks=[], - ) - return supervisor_client.resolution.info - - -@pytest.fixture(name="resolution_suggestions_for_issue") -def resolution_suggestions_for_issue_fixture(supervisor_client: AsyncMock) -> AsyncMock: - """Mock suggestions by issue from supervisor resolution.""" - supervisor_client.resolution.suggestions_for_issue.return_value = [] - return supervisor_client.resolution.suggestions_for_issue - - @pytest.fixture(name="supervisor_client") def supervisor_client() -> Generator[AsyncMock]: """Mock the supervisor client.""" @@ -513,7 +481,6 @@ def supervisor_client() -> Generator[AsyncMock]: supervisor_client.discovery = AsyncMock() supervisor_client.homeassistant = AsyncMock() supervisor_client.os = AsyncMock() - supervisor_client.resolution = AsyncMock() supervisor_client.supervisor = AsyncMock() with ( patch( @@ -537,63 +504,31 @@ def supervisor_client() -> Generator[AsyncMock]: return_value=supervisor_client, ), patch( - "homeassistant.components.hassio.issues.get_supervisor_client", - return_value=supervisor_client, - ), - patch( - "homeassistant.components.hassio.repairs.get_supervisor_client", + "homeassistant.components.hassio.get_supervisor_client", return_value=supervisor_client, ), ): yield supervisor_client -def 
_validate_translation_placeholders( - full_key: str, - translation: str, - description_placeholders: dict[str, str] | None, -) -> str | None: - """Raise if translation exists with missing placeholders.""" - tuples = list(string.Formatter().parse(translation)) - for _, placeholder, _, _ in tuples: - if placeholder is None: - continue - if ( - description_placeholders is None - or placeholder not in description_placeholders - ): - ignore_translations[full_key] = ( - f"Description not found for placeholder `{placeholder}` in {full_key}" - ) - - -async def _validate_translation( +async def _ensure_translation_exists( hass: HomeAssistant, ignore_translations: dict[str, StoreInfo], category: str, component: str, key: str, - description_placeholders: dict[str, str] | None, - *, - translation_required: bool = True, ) -> None: """Raise if translation doesn't exist.""" full_key = f"component.{component}.{category}.{key}" translations = await async_get_translations(hass, "en", category, [component]) - if (translation := translations.get(full_key)) is not None: - _validate_translation_placeholders( - full_key, translation, description_placeholders - ) - return - - if not translation_required: + if full_key in translations: return if full_key in ignore_translations: ignore_translations[full_key] = "used" return - ignore_translations[full_key] = ( + pytest.fail( f"Translation not found for {component}: `{category}.{key}`. " f"Please add to homeassistant/components/{component}/strings.json" ) @@ -609,106 +544,68 @@ def ignore_translations() -> str | list[str]: return [] -async def _check_config_flow_result_translations( - manager: FlowManager, - flow: FlowHandler, - result: FlowResult[FlowContext, str], - ignore_translations: dict[str, str], -) -> None: - if isinstance(manager, ConfigEntriesFlowManager): - category = "config" - integration = flow.handler - elif isinstance(manager, OptionsFlowManager): - category = "options" - integration = flow.hass.config_entries.async_get_entry(flow.handler).domain - else: - return - - # Check if this flow has been seen before - # Gets set to False on first run, and to True on subsequent runs - setattr(flow, "__flow_seen_before", hasattr(flow, "__flow_seen_before")) - - if result["type"] is FlowResultType.FORM: - if step_id := result.get("step_id"): - # neither title nor description are required - # - title defaults to integration name - # - description is optional - for header in ("title", "description"): - await _validate_translation( - flow.hass, - ignore_translations, - category, - integration, - f"step.{step_id}.{header}", - result["description_placeholders"], - translation_required=False, - ) - if errors := result.get("errors"): - for error in errors.values(): - await _validate_translation( - flow.hass, - ignore_translations, - category, - integration, - f"error.{error}", - result["description_placeholders"], - ) - return - - if result["type"] is FlowResultType.ABORT: - # We don't need translations for a discovery flow which immediately - # aborts, since such flows won't be seen by users - if not flow.__flow_seen_before and flow.source in DISCOVERY_SOURCES: - return - await _validate_translation( - flow.hass, - ignore_translations, - category, - integration, - f"abort.{result["reason"]}", - result["description_placeholders"], - ) - - @pytest.fixture(autouse=True) -def check_translations(ignore_translations: str | list[str]) -> Generator[None]: - """Check that translation requirements are met. 
- - Current checks: - - data entry flow results (ConfigFlow/OptionsFlow) - """ +def check_config_translations(ignore_translations: str | list[str]) -> Generator[None]: + """Ensure config_flow translations are available.""" if not isinstance(ignore_translations, list): ignore_translations = [ignore_translations] _ignore_translations = {k: "unused" for k in ignore_translations} + _original = FlowManager._async_handle_step - # Keep reference to original functions - _original_flow_manager_async_handle_step = FlowManager._async_handle_step - - # Prepare override functions - async def _flow_manager_async_handle_step( + async def _async_handle_step( self: FlowManager, flow: FlowHandler, *args ) -> FlowResult: - result = await _original_flow_manager_async_handle_step(self, flow, *args) - await _check_config_flow_result_translations( - self, flow, result, _ignore_translations - ) + result = await _original(self, flow, *args) + if isinstance(self, ConfigEntriesFlowManager): + category = "config" + component = flow.handler + elif isinstance(self, OptionsFlowManager): + category = "options" + component = flow.hass.config_entries.async_get_entry(flow.handler).domain + else: + return result + + # Check if this flow has been seen before + # Gets set to False on first run, and to True on subsequent runs + setattr(flow, "__flow_seen_before", hasattr(flow, "__flow_seen_before")) + + if result["type"] is FlowResultType.FORM: + if errors := result.get("errors"): + for error in errors.values(): + await _ensure_translation_exists( + flow.hass, + _ignore_translations, + category, + component, + f"error.{error}", + ) + return result + + if result["type"] is FlowResultType.ABORT: + # We don't need translations for a discovery flow which immediately + # aborts, since such flows won't be seen by users + if not flow.__flow_seen_before and flow.source in DISCOVERY_SOURCES: + return result + await _ensure_translation_exists( + flow.hass, + _ignore_translations, + category, + component, + f"abort.{result["reason"]}", + ) + return result - # Use override functions with patch( "homeassistant.data_entry_flow.FlowManager._async_handle_step", - _flow_manager_async_handle_step, + _async_handle_step, ): yield - # Run final checks unused_ignore = [k for k, v in _ignore_translations.items() if v == "unused"] if unused_ignore: pytest.fail( f"Unused ignore translations: {', '.join(unused_ignore)}. " "Please remove them from the ignore_translations fixture." 
) - for description in _ignore_translations.values(): - if description not in {"used", "unused"}: - pytest.fail(description) diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr index d9d859113f8..08aca43aba5 100644 --- a/tests/components/conversation/snapshots/test_http.ambr +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -639,7 +639,7 @@ 'details': dict({ 'brightness': dict({ 'name': 'brightness', - 'text': '100', + 'text': '100%', 'value': 100, }), 'name': dict({ @@ -654,7 +654,7 @@ 'match': True, 'sentence_template': '[] brightness [to] ', 'slots': dict({ - 'brightness': '100', + 'brightness': '100%', 'name': 'test light', }), 'source': 'builtin', diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 3c6b463670a..14a9b0ca88c 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -418,44 +418,6 @@ async def test_trigger_sentences(hass: HomeAssistant) -> None: assert len(callback.mock_calls) == 0 -@pytest.mark.parametrize( - ("language", "expected"), - [("en", "English done"), ("de", "German done"), ("not_translated", "Done")], -) -@pytest.mark.usefixtures("init_components") -async def test_trigger_sentence_response_translation( - hass: HomeAssistant, language: str, expected: str -) -> None: - """Test translation of default response 'done'.""" - hass.config.language = language - - agent = hass.data[DATA_DEFAULT_ENTITY] - assert isinstance(agent, default_agent.DefaultAgent) - - translations = { - "en": {"component.conversation.conversation.agent.done": "English done"}, - "de": {"component.conversation.conversation.agent.done": "German done"}, - "not_translated": {}, - } - - with patch( - "homeassistant.components.conversation.default_agent.translation.async_get_translations", - return_value=translations.get(language), - ): - unregister = agent.register_trigger( - ["test sentence"], AsyncMock(return_value=None) - ) - result = await conversation.async_converse( - hass, "test sentence", None, Context() - ) - assert result.response.response_type == intent.IntentResponseType.ACTION_DONE - assert result.response.speech == { - "plain": {"speech": expected, "extra_data": None} - } - - unregister() - - @pytest.mark.usefixtures("init_components", "sl_setup") async def test_shopping_list_add_item(hass: HomeAssistant) -> None: """Test adding an item to the shopping list through the default agent.""" @@ -770,8 +732,8 @@ async def test_error_no_device_on_floor_exposed( ) with patch( - "homeassistant.components.conversation.default_agent.recognize_best", - return_value=recognize_result, + "homeassistant.components.conversation.default_agent.recognize_all", + return_value=[recognize_result], ): result = await conversation.async_converse( hass, "turn on test light on the ground floor", None, Context(), None @@ -838,8 +800,8 @@ async def test_error_no_domain(hass: HomeAssistant) -> None: ) with patch( - "homeassistant.components.conversation.default_agent.recognize_best", - return_value=recognize_result, + "homeassistant.components.conversation.default_agent.recognize_all", + return_value=[recognize_result], ): result = await conversation.async_converse( hass, "turn on the fans", None, Context(), None @@ -873,8 +835,8 @@ async def test_error_no_domain_exposed(hass: HomeAssistant) -> None: ) with patch( - "homeassistant.components.conversation.default_agent.recognize_best", - 
return_value=recognize_result, + "homeassistant.components.conversation.default_agent.recognize_all", + return_value=[recognize_result], ): result = await conversation.async_converse( hass, "turn on the fans", None, Context(), None @@ -1047,8 +1009,8 @@ async def test_error_no_device_class(hass: HomeAssistant) -> None: ) with patch( - "homeassistant.components.conversation.default_agent.recognize_best", - return_value=recognize_result, + "homeassistant.components.conversation.default_agent.recognize_all", + return_value=[recognize_result], ): result = await conversation.async_converse( hass, "open the windows", None, Context(), None @@ -1096,8 +1058,8 @@ async def test_error_no_device_class_exposed(hass: HomeAssistant) -> None: ) with patch( - "homeassistant.components.conversation.default_agent.recognize_best", - return_value=recognize_result, + "homeassistant.components.conversation.default_agent.recognize_all", + return_value=[recognize_result], ): result = await conversation.async_converse( hass, "open all the windows", None, Context(), None @@ -1207,8 +1169,8 @@ async def test_error_no_device_class_on_floor_exposed( ) with patch( - "homeassistant.components.conversation.default_agent.recognize_best", - return_value=recognize_result, + "homeassistant.components.conversation.default_agent.recognize_all", + return_value=[recognize_result], ): result = await conversation.async_converse( hass, "open ground floor windows", None, Context(), None @@ -1229,8 +1191,8 @@ async def test_error_no_device_class_on_floor_exposed( async def test_error_no_intent(hass: HomeAssistant) -> None: """Test response with an intent match failure.""" with patch( - "homeassistant.components.conversation.default_agent.recognize_best", - return_value=None, + "homeassistant.components.conversation.default_agent.recognize_all", + return_value=[], ): result = await conversation.async_converse( hass, "do something", None, Context(), None diff --git a/tests/components/conversation/test_trace.py b/tests/components/conversation/test_trace.py index 7c00b9a80b2..59cd10d2510 100644 --- a/tests/components/conversation/test_trace.py +++ b/tests/components/conversation/test_trace.py @@ -56,7 +56,7 @@ async def test_converation_trace( "intent_name": "HassListAddItem", "slots": { "name": "Shopping List", - "item": "apples", + "item": "apples ", }, } diff --git a/tests/components/dhcp/conftest.py b/tests/components/dhcp/conftest.py new file mode 100644 index 00000000000..b0fa3f573c5 --- /dev/null +++ b/tests/components/dhcp/conftest.py @@ -0,0 +1,21 @@ +"""Tests for the dhcp integration.""" + +import os +import pathlib + + +def pytest_sessionstart(session): + """Try to avoid flaky FileExistsError in CI. + + Called after the Session object has been created and + before performing collection and entering the run test loop. + + This is needed due to a race condition in scapy v2.6.0 + See https://github.com/secdev/scapy/pull/4558 + + Can be removed when scapy 2.6.1 is released. 
+ """ + for sub_dir in (".cache", ".config"): + path = pathlib.Path(os.path.join(os.path.expanduser("~"), sub_dir)) + if not path.exists(): + path.mkdir(mode=0o700, exist_ok=True) diff --git a/tests/components/ecobee/fixtures/ecobee-data.json b/tests/components/ecobee/fixtures/ecobee-data.json index e0e82d68863..1573484795f 100644 --- a/tests/components/ecobee/fixtures/ecobee-data.json +++ b/tests/components/ecobee/fixtures/ecobee-data.json @@ -160,7 +160,6 @@ "hasHumidifier": true, "humidifierMode": "manual", "hasHeatPump": true, - "compressorProtectionMinTemp": 100, "humidity": "30" }, "equipmentStatus": "fan", diff --git a/tests/components/ecobee/test_number.py b/tests/components/ecobee/test_number.py index be65b6dbb30..5b01fe8c5ba 100644 --- a/tests/components/ecobee/test_number.py +++ b/tests/components/ecobee/test_number.py @@ -12,8 +12,8 @@ from homeassistant.core import HomeAssistant from .common import setup_platform -VENTILATOR_MIN_HOME_ID = "number.ecobee_ventilator_minimum_time_home" -VENTILATOR_MIN_AWAY_ID = "number.ecobee_ventilator_minimum_time_away" +VENTILATOR_MIN_HOME_ID = "number.ecobee_ventilator_min_time_home" +VENTILATOR_MIN_AWAY_ID = "number.ecobee_ventilator_min_time_away" THERMOSTAT_ID = 0 @@ -26,9 +26,7 @@ async def test_ventilator_min_on_home_attributes(hass: HomeAssistant) -> None: assert state.attributes.get("min") == 0 assert state.attributes.get("max") == 60 assert state.attributes.get("step") == 5 - assert ( - state.attributes.get("friendly_name") == "ecobee Ventilator minimum time home" - ) + assert state.attributes.get("friendly_name") == "ecobee Ventilator min time home" assert state.attributes.get("unit_of_measurement") == UnitOfTime.MINUTES @@ -41,9 +39,7 @@ async def test_ventilator_min_on_away_attributes(hass: HomeAssistant) -> None: assert state.attributes.get("min") == 0 assert state.attributes.get("max") == 60 assert state.attributes.get("step") == 5 - assert ( - state.attributes.get("friendly_name") == "ecobee Ventilator minimum time away" - ) + assert state.attributes.get("friendly_name") == "ecobee Ventilator min time away" assert state.attributes.get("unit_of_measurement") == UnitOfTime.MINUTES @@ -81,42 +77,3 @@ async def test_set_min_time_away(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() mock_set_min_away_time.assert_called_once_with(THERMOSTAT_ID, target_value) - - -COMPRESSOR_MIN_TEMP_ID = "number.ecobee2_compressor_minimum_temperature" - - -async def test_compressor_protection_min_temp_attributes(hass: HomeAssistant) -> None: - """Test the compressor min temp value is correct. - - Ecobee runs in Fahrenheit; the test rig runs in Celsius. Conversions are necessary. - """ - await setup_platform(hass, NUMBER_DOMAIN) - - state = hass.states.get(COMPRESSOR_MIN_TEMP_ID) - assert state.state == "-12.2" - assert ( - state.attributes.get("friendly_name") - == "ecobee2 Compressor minimum temperature" - ) - - -async def test_set_compressor_protection_min_temp(hass: HomeAssistant) -> None: - """Test the number can set minimum compressor operating temp. - - Ecobee runs in Fahrenheit; the test rig runs in Celsius. 
Conversions are necessary - """ - target_value = 0 - with patch( - "homeassistant.components.ecobee.Ecobee.set_aux_cutover_threshold" - ) as mock_set_compressor_min_temp: - await setup_platform(hass, NUMBER_DOMAIN) - - await hass.services.async_call( - NUMBER_DOMAIN, - SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: COMPRESSOR_MIN_TEMP_ID, ATTR_VALUE: target_value}, - blocking=True, - ) - await hass.async_block_till_done() - mock_set_compressor_min_temp.assert_called_once_with(1, 32) diff --git a/tests/components/ecobee/test_switch.py b/tests/components/ecobee/test_switch.py index b3c4c4f8296..31c8ce8f72d 100644 --- a/tests/components/ecobee/test_switch.py +++ b/tests/components/ecobee/test_switch.py @@ -118,7 +118,7 @@ async def test_turn_off_20min_ventilator(hass: HomeAssistant) -> None: mock_set_20min_ventilator.assert_called_once_with(THERMOSTAT_ID, False) -DEVICE_ID = "switch.ecobee2_auxiliary_heat_only" +DEVICE_ID = "switch.ecobee2_aux_heat_only" async def test_aux_heat_only_turn_on(hass: HomeAssistant) -> None: diff --git a/tests/components/emoncms/conftest.py b/tests/components/emoncms/conftest.py index 4bd1d68217a..29e86f3c59d 100644 --- a/tests/components/emoncms/conftest.py +++ b/tests/components/emoncms/conftest.py @@ -91,21 +91,6 @@ def config_entry() -> MockConfigEntry: ) -FLOW_RESULT_SECOND_URL = copy.deepcopy(FLOW_RESULT) -FLOW_RESULT_SECOND_URL[CONF_URL] = "http://1.1.1.2" - - -@pytest.fixture -def config_entry_unique_id() -> MockConfigEntry: - """Mock emoncms config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title=SENSOR_NAME, - data=FLOW_RESULT_SECOND_URL, - unique_id="123-53535292", - ) - - FLOW_RESULT_NO_FEED = copy.deepcopy(FLOW_RESULT) FLOW_RESULT_NO_FEED[CONF_ONLY_INCLUDE_FEEDID] = None @@ -158,5 +143,4 @@ async def emoncms_client() -> AsyncGenerator[AsyncMock]: ): client = mock_client.return_value client.async_request.return_value = {"success": True, "message": FEEDS} - client.async_get_uuid.return_value = "123-53535292" yield client diff --git a/tests/components/emoncms/snapshots/test_sensor.ambr b/tests/components/emoncms/snapshots/test_sensor.ambr index f6a2745fb1a..5e718c1d8e8 100644 --- a/tests/components/emoncms/snapshots/test_sensor.ambr +++ b/tests/components/emoncms/snapshots/test_sensor.ambr @@ -30,7 +30,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '123-53535292-1', + 'unique_id': 'XXXXXXXX-1', 'unit_of_measurement': , }) # --- diff --git a/tests/components/emoncms/test_config_flow.py b/tests/components/emoncms/test_config_flow.py index 1914f23fb0b..b3afc714c59 100644 --- a/tests/components/emoncms/test_config_flow.py +++ b/tests/components/emoncms/test_config_flow.py @@ -106,6 +106,7 @@ CONFIG_ENTRY = { async def test_options_flow( hass: HomeAssistant, + mock_setup_entry: AsyncMock, emoncms_client: AsyncMock, config_entry: MockConfigEntry, ) -> None: @@ -141,21 +142,3 @@ async def test_options_flow_failure( assert result["description_placeholders"]["details"] == "failure" assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" - - -async def test_unique_id_exists( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - emoncms_client: AsyncMock, - config_entry_unique_id: MockConfigEntry, -) -> None: - """Test when entry with same unique id already exists.""" - config_entry_unique_id.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], 
USER_INPUT - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/emoncms/test_init.py b/tests/components/emoncms/test_init.py index abe1a020034..b89b6e65a66 100644 --- a/tests/components/emoncms/test_init.py +++ b/tests/components/emoncms/test_init.py @@ -4,14 +4,11 @@ from __future__ import annotations from unittest.mock import AsyncMock -from homeassistant.components.emoncms.const import DOMAIN, FEED_ID, FEED_NAME from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er, issue_registry as ir from . import setup_integration -from .conftest import EMONCMS_FAILURE, FEEDS +from .conftest import EMONCMS_FAILURE from tests.common import MockConfigEntry @@ -41,49 +38,3 @@ async def test_failure( emoncms_client.async_request.return_value = EMONCMS_FAILURE config_entry.add_to_hass(hass) assert not await hass.config_entries.async_setup(config_entry.entry_id) - - -async def test_migrate_uuid( - hass: HomeAssistant, - config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - emoncms_client: AsyncMock, -) -> None: - """Test migration from home assistant uuid to emoncms uuid.""" - config_entry.add_to_hass(hass) - assert config_entry.unique_id is None - for _, feed in enumerate(FEEDS): - entity_registry.async_get_or_create( - Platform.SENSOR, - DOMAIN, - f"{config_entry.entry_id}-{feed[FEED_ID]}", - config_entry=config_entry, - suggested_object_id=f"{DOMAIN}_{feed[FEED_NAME]}", - ) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - emoncms_uuid = emoncms_client.async_get_uuid.return_value - assert config_entry.unique_id == emoncms_uuid - entity_entries = er.async_entries_for_config_entry( - entity_registry, config_entry.entry_id - ) - - for nb, feed in enumerate(FEEDS): - assert entity_entries[nb].unique_id == f"{emoncms_uuid}-{feed[FEED_ID]}" - assert ( - entity_entries[nb].previous_unique_id - == f"{config_entry.entry_id}-{feed[FEED_ID]}" - ) - - -async def test_no_uuid( - hass: HomeAssistant, - config_entry: MockConfigEntry, - issue_registry: ir.IssueRegistry, - emoncms_client: AsyncMock, -) -> None: - """Test an issue is created when the emoncms server does not ship an uuid.""" - emoncms_client.async_get_uuid.return_value = None - await setup_integration(hass, config_entry) - - assert issue_registry.async_get_issue(domain=DOMAIN, issue_id="migrate database") diff --git a/tests/components/esphome/test_config_flow.py b/tests/components/esphome/test_config_flow.py index 0a389969c78..3051547bd43 100644 --- a/tests/components/esphome/test_config_flow.py +++ b/tests/components/esphome/test_config_flow.py @@ -1400,14 +1400,6 @@ async def test_discovery_mqtt_no_mac( await mqtt_discovery_test_abort(hass, "{}", "mqtt_missing_mac") -@pytest.mark.usefixtures("mock_zeroconf") -async def test_discovery_mqtt_empty_payload( - hass: HomeAssistant, mock_client, mock_setup_entry: None -) -> None: - """Test discovery aborted if MQTT payload is empty.""" - await mqtt_discovery_test_abort(hass, "", "mqtt_missing_payload") - - @pytest.mark.usefixtures("mock_zeroconf") async def test_discovery_mqtt_no_api( hass: HomeAssistant, mock_client, mock_setup_entry: None diff --git a/tests/components/file/test_notify.py b/tests/components/file/test_notify.py index e7cb85a9cfc..33e4739a488 100644 --- a/tests/components/file/test_notify.py +++ 
b/tests/components/file/test_notify.py @@ -12,46 +12,222 @@ from homeassistant.components.file import DOMAIN from homeassistant.components.notify import ATTR_TITLE_DEFAULT from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.typing import ConfigType +from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, assert_setup_component + + +async def test_bad_config(hass: HomeAssistant) -> None: + """Test set up the platform with bad/missing config.""" + config = {notify.DOMAIN: {"name": "test", "platform": "file"}} + with assert_setup_component(0, domain="notify") as handle_config: + assert await async_setup_component(hass, notify.DOMAIN, config) + await hass.async_block_till_done() + assert not handle_config[notify.DOMAIN] @pytest.mark.parametrize( ("domain", "service", "params"), [ + (notify.DOMAIN, "test", {"message": "one, two, testing, testing"}), ( notify.DOMAIN, "send_message", {"entity_id": "notify.test", "message": "one, two, testing, testing"}, ), ], + ids=["legacy", "entity"], +) +@pytest.mark.parametrize( + ("timestamp", "config"), + [ + ( + False, + { + "notify": [ + { + "name": "test", + "platform": "file", + "filename": "mock_file", + "timestamp": False, + } + ] + }, + ), + ( + True, + { + "notify": [ + { + "name": "test", + "platform": "file", + "filename": "mock_file", + "timestamp": True, + } + ] + }, + ), + ], + ids=["no_timestamp", "timestamp"], ) -@pytest.mark.parametrize("timestamp", [False, True], ids=["no_timestamp", "timestamp"]) async def test_notify_file( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - mock_is_allowed_path: MagicMock, timestamp: bool, + mock_is_allowed_path: MagicMock, + config: ConfigType, domain: str, service: str, params: dict[str, str], ) -> None: """Test the notify file output.""" filename = "mock_file" - full_filename = os.path.join(hass.config.path(), filename) + message = params["message"] + assert await async_setup_component(hass, notify.DOMAIN, config) + await hass.async_block_till_done() + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + freezer.move_to(dt_util.utcnow()) + + m_open = mock_open() + with ( + patch("homeassistant.components.file.notify.open", m_open, create=True), + patch("homeassistant.components.file.notify.os.stat") as mock_st, + ): + mock_st.return_value.st_size = 0 + title = ( + f"{ATTR_TITLE_DEFAULT} notifications " + f"(Log started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n" + ) + + await hass.services.async_call(domain, service, params, blocking=True) + + full_filename = os.path.join(hass.config.path(), filename) + assert m_open.call_count == 1 + assert m_open.call_args == call(full_filename, "a", encoding="utf8") + + assert m_open.return_value.write.call_count == 2 + if not timestamp: + assert m_open.return_value.write.call_args_list == [ + call(title), + call(f"{message}\n"), + ] + else: + assert m_open.return_value.write.call_args_list == [ + call(title), + call(f"{dt_util.utcnow().isoformat()} {message}\n"), + ] + + +@pytest.mark.parametrize( + ("domain", "service", "params"), + [(notify.DOMAIN, "test", {"message": "one, two, testing, testing"})], + ids=["legacy"], +) +@pytest.mark.parametrize( + ("is_allowed", "config"), + [ + ( + True, + { + "notify": [ + { + "name": "test", + "platform": "file", + "filename": "mock_file", + } + ] + 
}, + ), + ], + ids=["allowed_but_access_failed"], +) +async def test_legacy_notify_file_exception( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_is_allowed_path: MagicMock, + config: ConfigType, + domain: str, + service: str, + params: dict[str, str], +) -> None: + """Test legacy notify file output has exception.""" + assert await async_setup_component(hass, notify.DOMAIN, config) + await hass.async_block_till_done() + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done(wait_background_tasks=True) + + freezer.move_to(dt_util.utcnow()) + + m_open = mock_open() + with ( + patch("homeassistant.components.file.notify.open", m_open, create=True), + patch("homeassistant.components.file.notify.os.stat") as mock_st, + ): + mock_st.side_effect = OSError("Access Failed") + with pytest.raises(ServiceValidationError) as exc: + await hass.services.async_call(domain, service, params, blocking=True) + assert f"{exc.value!r}" == "ServiceValidationError('write_access_failed')" + + +@pytest.mark.parametrize( + ("timestamp", "data", "options"), + [ + ( + False, + { + "name": "test", + "platform": "notify", + "file_path": "mock_file", + }, + { + "timestamp": False, + }, + ), + ( + True, + { + "name": "test", + "platform": "notify", + "file_path": "mock_file", + }, + { + "timestamp": True, + }, + ), + ], + ids=["no_timestamp", "timestamp"], +) +async def test_legacy_notify_file_entry_only_setup( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + timestamp: bool, + mock_is_allowed_path: MagicMock, + data: dict[str, Any], + options: dict[str, Any], +) -> None: + """Test the legacy notify file output in entry only setup.""" + filename = "mock_file" + + domain = notify.DOMAIN + service = "test" + params = {"message": "one, two, testing, testing"} message = params["message"] entry = MockConfigEntry( domain=DOMAIN, - data={"name": "test", "platform": "notify", "file_path": full_filename}, - options={"timestamp": timestamp}, + data=data, version=2, - title=f"test [{filename}]", + options=options, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(entry.entry_id) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) freezer.move_to(dt_util.utcnow()) @@ -69,7 +245,7 @@ async def test_notify_file( await hass.services.async_call(domain, service, params, blocking=True) assert m_open.call_count == 1 - assert m_open.call_args == call(full_filename, "a", encoding="utf8") + assert m_open.call_args == call(filename, "a", encoding="utf8") assert m_open.return_value.write.call_count == 2 if not timestamp: @@ -101,14 +277,14 @@ async def test_notify_file( ], ids=["not_allowed"], ) -async def test_notify_file_not_allowed( +async def test_legacy_notify_file_not_allowed( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_is_allowed_path: MagicMock, config: dict[str, Any], options: dict[str, Any], ) -> None: - """Test notify file output not allowed.""" + """Test legacy notify file output not allowed.""" entry = MockConfigEntry( domain=DOMAIN, data=config, @@ -125,10 +301,11 @@ async def test_notify_file_not_allowed( @pytest.mark.parametrize( ("service", "params"), [ + ("test", {"message": "one, two, testing, testing"}), ( "send_message", {"entity_id": "notify.test", "message": "one, two, testing, testing"}, - ) + ), ], ) @pytest.mark.parametrize( diff --git a/tests/components/file/test_sensor.py 
b/tests/components/file/test_sensor.py index 9e6a16e3e27..634ae9d626c 100644 --- a/tests/components/file/test_sensor.py +++ b/tests/components/file/test_sensor.py @@ -7,10 +7,33 @@ import pytest from homeassistant.components.file import DOMAIN from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, get_fixture_path +@patch("os.path.isfile", Mock(return_value=True)) +@patch("os.access", Mock(return_value=True)) +async def test_file_value_yaml_setup( + hass: HomeAssistant, mock_is_allowed_path: MagicMock +) -> None: + """Test the File sensor from YAML setup.""" + config = { + "sensor": { + "platform": "file", + "scan_interval": 30, + "name": "file1", + "file_path": get_fixture_path("file_value.txt", "file"), + } + } + + assert await async_setup_component(hass, "sensor", config) + await hass.async_block_till_done() + + state = hass.states.get("sensor.file1") + assert state.state == "21" + + @patch("os.path.isfile", Mock(return_value=True)) @patch("os.access", Mock(return_value=True)) async def test_file_value_entry_setup( diff --git a/tests/components/fritz/test_config_flow.py b/tests/components/fritz/test_config_flow.py index 84f1b240b88..e3fae8c083e 100644 --- a/tests/components/fritz/test_config_flow.py +++ b/tests/components/fritz/test_config_flow.py @@ -10,7 +10,6 @@ from fritzconnection.core.exceptions import ( ) import pytest -from homeassistant.components import ssdp from homeassistant.components.device_tracker import ( CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME, @@ -23,6 +22,7 @@ from homeassistant.components.fritz.const import ( ERROR_UNKNOWN, FRITZ_AUTH_EXCEPTIONS, ) +from homeassistant.components.ssdp import ATTR_UPNP_UDN from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import ( CONF_HOST, @@ -644,7 +644,7 @@ async def test_ssdp_already_in_progress_host( MOCK_NO_UNIQUE_ID = dataclasses.replace(MOCK_SSDP_DATA) MOCK_NO_UNIQUE_ID.upnp = MOCK_NO_UNIQUE_ID.upnp.copy() - del MOCK_NO_UNIQUE_ID.upnp[ssdp.ATTR_UPNP_UDN] + del MOCK_NO_UNIQUE_ID.upnp[ATTR_UPNP_UDN] result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_NO_UNIQUE_ID ) @@ -737,23 +737,3 @@ async def test_options_flow(hass: HomeAssistant) -> None: CONF_OLD_DISCOVERY: False, CONF_CONSIDER_HOME: 37, } - - -async def test_ssdp_ipv6_link_local(hass: HomeAssistant) -> None: - """Test ignoring ipv6-link-local while ssdp discovery.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_SSDP}, - data=ssdp.SsdpServiceInfo( - ssdp_usn="mock_usn", - ssdp_st="mock_st", - ssdp_location="https://[fe80::1ff:fe23:4567:890a]:12345/test", - upnp={ - ssdp.ATTR_UPNP_FRIENDLY_NAME: "fake_name", - ssdp.ATTR_UPNP_UDN: "uuid:only-a-test", - }, - ), - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "ignore_ip6_link_local" diff --git a/tests/components/generic/test_camera.py b/tests/components/generic/test_camera.py index d3ef0a39241..59ff513ccc9 100644 --- a/tests/components/generic/test_camera.py +++ b/tests/components/generic/test_camera.py @@ -275,9 +275,7 @@ async def test_limit_refetch( with ( pytest.raises(aiohttp.ServerTimeoutError), - patch.object( - client.session._connector, "connect", side_effect=asyncio.TimeoutError - ), + patch("asyncio.timeout", side_effect=TimeoutError()), ): resp = await client.get("/api/camera_proxy/camera.config_test") diff --git 
a/tests/components/geniushub/test_config_flow.py b/tests/components/geniushub/test_config_flow.py index 7d1d33a2245..9234e03e35a 100644 --- a/tests/components/geniushub/test_config_flow.py +++ b/tests/components/geniushub/test_config_flow.py @@ -2,14 +2,21 @@ from http import HTTPStatus import socket +from typing import Any from unittest.mock import AsyncMock from aiohttp import ClientConnectionError, ClientResponseError import pytest from homeassistant.components.geniushub import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.const import ( + CONF_HOST, + CONF_MAC, + CONF_PASSWORD, + CONF_TOKEN, + CONF_USERNAME, +) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -302,3 +309,174 @@ async def test_cloud_duplicate( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("data"), + [ + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_MAC: "aa:bb:cc:dd:ee:ff", + }, + ], +) +async def test_import_local_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_geniushub_client: AsyncMock, + data: dict[str, Any], +) -> None: + """Test full local import flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=data, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "10.0.0.130" + assert result["data"] == data + assert result["result"].unique_id == "aa:bb:cc:dd:ee:ff" + + +@pytest.mark.parametrize( + ("data"), + [ + { + CONF_TOKEN: "abcdef", + }, + { + CONF_TOKEN: "abcdef", + CONF_MAC: "aa:bb:cc:dd:ee:ff", + }, + ], +) +async def test_import_cloud_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_geniushub_client: AsyncMock, + data: dict[str, Any], +) -> None: + """Test full cloud import flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=data, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Genius hub" + assert result["data"] == data + + +@pytest.mark.parametrize( + ("data"), + [ + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_MAC: "aa:bb:cc:dd:ee:ff", + }, + { + CONF_TOKEN: "abcdef", + }, + { + CONF_TOKEN: "abcdef", + CONF_MAC: "aa:bb:cc:dd:ee:ff", + }, + ], +) +@pytest.mark.parametrize( + ("exception", "reason"), + [ + (socket.gaierror, "invalid_host"), + ( + ClientResponseError(AsyncMock(), (), status=HTTPStatus.UNAUTHORIZED), + "invalid_auth", + ), + ( + ClientResponseError(AsyncMock(), (), status=HTTPStatus.NOT_FOUND), + "invalid_host", + ), + (TimeoutError, "cannot_connect"), + (ClientConnectionError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_import_flow_exceptions( + hass: HomeAssistant, + mock_geniushub_client: AsyncMock, + data: dict[str, Any], + exception: Exception, + reason: str, +) -> None: + """Test import flow exceptions.""" + mock_geniushub_client.request.side_effect = exception + result = await 
hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=data, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == reason + + +@pytest.mark.parametrize( + ("data"), + [ + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + { + CONF_HOST: "10.0.0.131", + CONF_USERNAME: "test-username1", + CONF_PASSWORD: "test-password", + }, + ], +) +async def test_import_flow_local_duplicate( + hass: HomeAssistant, + mock_geniushub_client: AsyncMock, + mock_local_config_entry: MockConfigEntry, + data: dict[str, Any], +) -> None: + """Test import flow aborts on local duplicate data.""" + mock_local_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=data, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_import_flow_cloud_duplicate( + hass: HomeAssistant, + mock_geniushub_client: AsyncMock, + mock_cloud_config_entry: MockConfigEntry, +) -> None: + """Test import flow aborts on cloud duplicate data.""" + mock_cloud_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_TOKEN: "abcdef", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/go2rtc/conftest.py b/tests/components/go2rtc/conftest.py index abb139b89bf..42b363b2324 100644 --- a/tests/components/go2rtc/conftest.py +++ b/tests/components/go2rtc/conftest.py @@ -3,11 +3,9 @@ from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch -from awesomeversion import AwesomeVersion from go2rtc_client.rest import _StreamClient, _WebRTCClient import pytest -from homeassistant.components.go2rtc.const import RECOMMENDED_VERSION from homeassistant.components.go2rtc.server import Server GO2RTC_PATH = "homeassistant.components.go2rtc" @@ -25,9 +23,7 @@ def rest_client() -> Generator[AsyncMock]: client = mock_client.return_value client.streams = streams = Mock(spec_set=_StreamClient) streams.list.return_value = {} - client.validate_server_version = AsyncMock( - return_value=AwesomeVersion(RECOMMENDED_VERSION) - ) + client.validate_server_version = AsyncMock() client.webrtc = Mock(spec_set=_WebRTCClient) yield client diff --git a/tests/components/go2rtc/test_init.py b/tests/components/go2rtc/test_init.py index 0f1cac6942d..61b0ca97406 100644 --- a/tests/components/go2rtc/test_init.py +++ b/tests/components/go2rtc/test_init.py @@ -6,7 +6,6 @@ from typing import NamedTuple from unittest.mock import AsyncMock, Mock, patch from aiohttp.client_exceptions import ClientConnectionError, ServerConnectionError -from awesomeversion import AwesomeVersion from go2rtc_client import Stream from go2rtc_client.exceptions import Go2RtcClientError, Go2RtcVersionError from go2rtc_client.models import Producer @@ -37,12 +36,10 @@ from homeassistant.components.go2rtc.const import ( CONF_DEBUG_UI, DEBUG_UI_URL_MESSAGE, DOMAIN, - RECOMMENDED_VERSION, ) from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow from homeassistant.const import CONF_URL from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component @@ -202,7 +199,6 @@ async def 
init_test_integration( async def _test_setup_and_signaling( hass: HomeAssistant, - issue_registry: ir.IssueRegistry, rest_client: AsyncMock, ws_client: Mock, config: ConfigType, @@ -215,7 +211,6 @@ async def _test_setup_and_signaling( assert await async_setup_component(hass, DOMAIN, config) await hass.async_block_till_done(wait_background_tasks=True) - assert issue_registry.async_get_issue(DOMAIN, "recommended_version") is None config_entries = hass.config_entries.async_entries(DOMAIN) assert len(config_entries) == 1 assert config_entries[0].state == ConfigEntryState.LOADED @@ -243,17 +238,13 @@ async def _test_setup_and_signaling( await test() rest_client.streams.add.assert_called_once_with( - entity_id, - [ - "rtsp://stream", - f"ffmpeg:{camera.entity_id}#audio=opus#query=log_level=debug", - ], + entity_id, ["rtsp://stream", f"ffmpeg:{camera.entity_id}#audio=opus"] ) # Stream exists but the source is different rest_client.streams.add.reset_mock() rest_client.streams.list.return_value = { - entity_id: Stream([Producer("rtsp://different")]) + entity_id: Stream([Producer("rtsp://different", [])]) } receive_message_callback.reset_mock() @@ -261,17 +252,13 @@ async def _test_setup_and_signaling( await test() rest_client.streams.add.assert_called_once_with( - entity_id, - [ - "rtsp://stream", - f"ffmpeg:{camera.entity_id}#audio=opus#query=log_level=debug", - ], + entity_id, ["rtsp://stream", f"ffmpeg:{camera.entity_id}#audio=opus"] ) # If the stream is already added, the stream should not be added again. rest_client.streams.add.reset_mock() rest_client.streams.list.return_value = { - entity_id: Stream([Producer("rtsp://stream")]) + entity_id: Stream([Producer("rtsp://stream", [])]) } receive_message_callback.reset_mock() @@ -311,7 +298,6 @@ async def _test_setup_and_signaling( @pytest.mark.parametrize("has_go2rtc_entry", [True, False]) async def test_setup_go_binary( hass: HomeAssistant, - issue_registry: ir.IssueRegistry, rest_client: AsyncMock, ws_client: Mock, server: AsyncMock, @@ -330,13 +316,7 @@ async def test_setup_go_binary( server_start.assert_called_once() await _test_setup_and_signaling( - hass, - issue_registry, - rest_client, - ws_client, - config, - after_setup, - init_test_integration, + hass, rest_client, ws_client, config, after_setup, init_test_integration ) await hass.async_stop() @@ -352,9 +332,8 @@ async def test_setup_go_binary( ], ) @pytest.mark.parametrize("has_go2rtc_entry", [True, False]) -async def test_setup( +async def test_setup_go( hass: HomeAssistant, - issue_registry: ir.IssueRegistry, rest_client: AsyncMock, ws_client: Mock, server: Mock, @@ -372,13 +351,7 @@ async def test_setup( server.assert_not_called() await _test_setup_and_signaling( - hass, - issue_registry, - rest_client, - ws_client, - config, - after_setup, - init_test_integration, + hass, rest_client, ws_client, config, after_setup, init_test_integration ) mock_get_binary.assert_not_called() @@ -730,30 +703,3 @@ async def test_config_entry_remove(hass: HomeAssistant) -> None: assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert not await hass.config_entries.async_setup(config_entry.entry_id) assert len(hass.config_entries.async_entries(DOMAIN)) == 0 - - -@pytest.mark.parametrize("config", [{DOMAIN: {CONF_URL: "http://localhost:1984"}}]) -@pytest.mark.usefixtures("server") -async def test_setup_with_recommended_version_repair( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - rest_client: AsyncMock, - config: ConfigType, -) -> None: - """Test setup integration entry fails.""" 
- rest_client.validate_server_version.return_value = AwesomeVersion("1.9.5") - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done(wait_background_tasks=True) - - # Verify the issue is created - issue = issue_registry.async_get_issue(DOMAIN, "recommended_version") - assert issue - assert issue.is_fixable is False - assert issue.is_persistent is False - assert issue.severity == ir.IssueSeverity.WARNING - assert issue.issue_id == "recommended_version" - assert issue.translation_key == "recommended_version" - assert issue.translation_placeholders == { - "recommended_version": RECOMMENDED_VERSION, - "current_version": "1.9.5", - } diff --git a/tests/components/go2rtc/test_server.py b/tests/components/go2rtc/test_server.py index e4fe3993f3c..d810dbd88eb 100644 --- a/tests/components/go2rtc/test_server.py +++ b/tests/components/go2rtc/test_server.py @@ -105,13 +105,12 @@ async def test_server_run_success( # Verify that the config file was written mock_tempfile.write.assert_called_once_with( - f"""# This file is managed by Home Assistant -# Do not edit it manually - + f""" api: listen: "{api_ip}:11984" rtsp: + # ffmpeg needs rtsp for opus audio transcoding listen: "127.0.0.1:18554" webrtc: diff --git a/tests/components/google_assistant/test_trait.py b/tests/components/google_assistant/test_trait.py index 1e42edf8e7b..f5dedc357c1 100644 --- a/tests/components/google_assistant/test_trait.py +++ b/tests/components/google_assistant/test_trait.py @@ -4069,90 +4069,3 @@ async def test_sensorstate( ) is False ) - - -@pytest.mark.parametrize( - ("state", "identifier"), - [ - (STATE_ON, 0), - (STATE_OFF, 1), - (STATE_UNKNOWN, 2), - ], -) -@pytest.mark.parametrize( - ("device_class", "name", "states"), - [ - ( - binary_sensor.BinarySensorDeviceClass.CO, - "CarbonMonoxideLevel", - ["carbon monoxide detected", "no carbon monoxide detected", "unknown"], - ), - ( - binary_sensor.BinarySensorDeviceClass.SMOKE, - "SmokeLevel", - ["smoke detected", "no smoke detected", "unknown"], - ), - ( - binary_sensor.BinarySensorDeviceClass.MOISTURE, - "WaterLeak", - ["leak", "no leak", "unknown"], - ), - ], -) -async def test_binary_sensorstate( - hass: HomeAssistant, - state: str, - identifier: int, - device_class: binary_sensor.BinarySensorDeviceClass, - name: str, - states: list[str], -) -> None: - """Test SensorState trait support for binary sensor domain.""" - - assert helpers.get_google_type(binary_sensor.DOMAIN, None) is not None - assert trait.SensorStateTrait.supported( - binary_sensor.DOMAIN, None, device_class, None - ) - - trt = trait.SensorStateTrait( - hass, - State( - "binary_sensor.test", - state, - { - "device_class": device_class, - }, - ), - BASIC_CONFIG, - ) - - assert trt.sync_attributes() == { - "sensorStatesSupported": [ - { - "name": name, - "descriptiveCapabilities": { - "availableStates": states, - }, - } - ] - } - assert trt.query_attributes() == { - "currentSensorStateData": [ - { - "name": name, - "currentSensorState": states[identifier], - "rawValue": None, - }, - ] - } - - assert helpers.get_google_type(binary_sensor.DOMAIN, None) is not None - assert ( - trait.SensorStateTrait.supported( - binary_sensor.DOMAIN, - None, - binary_sensor.BinarySensorDeviceClass.TAMPER, - None, - ) - is False - ) diff --git a/tests/components/habitica/conftest.py b/tests/components/habitica/conftest.py index 8d729f4358f..b5ceadd2762 100644 --- a/tests/components/habitica/conftest.py +++ b/tests/components/habitica/conftest.py @@ -34,7 +34,7 @@ def mock_called_with( ( call 
for call in mock_client.mock_calls - if call[0].upper() == method.upper() and call[1] == URL(url) + if call[0] == method.upper() and call[1] == URL(url) ), None, ) @@ -56,11 +56,6 @@ def mock_habitica(aioclient_mock: AiohttpClientMocker) -> AiohttpClientMocker: f"{DEFAULT_URL}/api/v3/tasks/user", json=load_json_object_fixture("tasks.json", DOMAIN), ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/content", - params={"language": "en"}, - json=load_json_object_fixture("content.json", DOMAIN), - ) return aioclient_mock diff --git a/tests/components/habitica/fixtures/common_buttons_unavailable.json b/tests/components/habitica/fixtures/common_buttons_unavailable.json index efee5364e02..08039ae1762 100644 --- a/tests/components/habitica/fixtures/common_buttons_unavailable.json +++ b/tests/components/habitica/fixtures/common_buttons_unavailable.json @@ -29,26 +29,11 @@ "preferences": { "sleep": false, "automaticAllocation": false, - "disableClasses": false, - "language": "en" + "disableClasses": false }, "flags": { "classSelected": true }, - "needsCron": false, - "items": { - "gear": { - "equipped": { - "weapon": "weapon_warrior_5", - "armor": "armor_warrior_5", - "head": "head_warrior_5", - "shield": "shield_warrior_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } + "needsCron": false } } diff --git a/tests/components/habitica/fixtures/content.json b/tests/components/habitica/fixtures/content.json deleted file mode 100644 index e8e14dead73..00000000000 --- a/tests/components/habitica/fixtures/content.json +++ /dev/null @@ -1,287 +0,0 @@ -{ - "success": true, - "data": { - "gear": { - "flat": { - "weapon_warrior_5": { - "text": "Ruby Sword", - "notes": "Weapon whose forge-glow never fades. Increases Strength by 15. ", - "str": 15, - "value": 90, - "type": "weapon", - "key": "weapon_warrior_5", - "set": "warrior-5", - "klass": "warrior", - "index": "5", - "int": 0, - "per": 0, - "con": 0 - }, - "armor_warrior_5": { - "text": "Golden Armor", - "notes": "Looks ceremonial, but no known blade can pierce it. Increases Constitution by 11.", - "con": 11, - "value": 120, - "last": true, - "type": "armor", - "key": "armor_warrior_5", - "set": "warrior-5", - "klass": "warrior", - "index": "5", - "str": 0, - "int": 0, - "per": 0 - }, - "head_warrior_5": { - "text": "Golden Helm", - "notes": "Regal crown bound to shining armor. Increases Strength by 12.", - "str": 12, - "value": 80, - "last": true, - "type": "head", - "key": "head_warrior_5", - "set": "warrior-5", - "klass": "warrior", - "index": "5", - "int": 0, - "per": 0, - "con": 0 - }, - "shield_warrior_5": { - "text": "Golden Shield", - "notes": "Shining badge of the vanguard. Increases Constitution by 9.", - "con": 9, - "value": 90, - "last": true, - "type": "shield", - "key": "shield_warrior_5", - "set": "warrior-5", - "klass": "warrior", - "index": "5", - "str": 0, - "int": 0, - "per": 0 - }, - "weapon_wizard_5": { - "twoHanded": true, - "text": "Archmage Staff", - "notes": "Assists in weaving the most complex of spells. Increases Intelligence by 15 and Perception by 7. Two-handed item.", - "int": 15, - "per": 7, - "value": 160, - "type": "weapon", - "key": "weapon_wizard_5", - "set": "wizard-5", - "klass": "wizard", - "index": "5", - "str": 0, - "con": 0 - }, - "armor_wizard_5": { - "text": "Royal Magus Robe", - "notes": "Symbol of the power behind the throne. 
Increases Intelligence by 12.", - "int": 12, - "value": 120, - "last": true, - "type": "armor", - "key": "armor_wizard_5", - "set": "wizard-5", - "klass": "wizard", - "index": "5", - "str": 0, - "per": 0, - "con": 0 - }, - "head_wizard_5": { - "text": "Royal Magus Hat", - "notes": "Shows authority over fortune, weather, and lesser mages. Increases Perception by 10.", - "per": 10, - "value": 80, - "last": true, - "type": "head", - "key": "head_wizard_5", - "set": "wizard-5", - "klass": "wizard", - "index": "5", - "str": 0, - "int": 0, - "con": 0 - }, - "weapon_healer_5": { - "text": "Royal Scepter", - "notes": "Fit to grace the hand of a monarch, or of one who stands at a monarch's right hand. Increases Intelligence by 9. ", - "int": 9, - "value": 90, - "type": "weapon", - "key": "weapon_healer_5", - "set": "healer-5", - "klass": "healer", - "index": "5", - "str": 0, - "per": 0, - "con": 0 - }, - "armor_healer_5": { - "text": "Royal Mantle", - "notes": "Attire of those who have saved the lives of kings. Increases Constitution by 18.", - "con": 18, - "value": 120, - "last": true, - "type": "armor", - "key": "armor_healer_5", - "set": "healer-5", - "klass": "healer", - "index": "5", - "str": 0, - "int": 0, - "per": 0 - }, - "head_healer_5": { - "text": "Royal Diadem", - "notes": "For king, queen, or miracle-worker. Increases Intelligence by 9.", - "int": 9, - "value": 80, - "last": true, - "type": "head", - "key": "head_healer_5", - "set": "healer-5", - "klass": "healer", - "index": "5", - "str": 0, - "per": 0, - "con": 0 - }, - "shield_healer_5": { - "text": "Royal Shield", - "notes": "Bestowed upon those most dedicated to the kingdom's defense. Increases Constitution by 12.", - "con": 12, - "value": 90, - "last": true, - "type": "shield", - "key": "shield_healer_5", - "set": "healer-5", - "klass": "healer", - "index": "5", - "str": 0, - "int": 0, - "per": 0 - }, - "weapon_rogue_5": { - "text": "Ninja-to", - "notes": "Sleek and deadly as the ninja themselves. Increases Strength by 8. ", - "str": 8, - "value": 90, - "type": "weapon", - "key": "weapon_rogue_5", - "set": "rogue-5", - "klass": "rogue", - "index": "5", - "int": 0, - "per": 0, - "con": 0 - }, - "armor_rogue_5": { - "text": "Umbral Armor", - "notes": "Allows stealth in the open in broad daylight. Increases Perception by 18.", - "per": 18, - "value": 120, - "last": true, - "type": "armor", - "key": "armor_rogue_5", - "set": "rogue-5", - "klass": "rogue", - "index": "5", - "str": 0, - "int": 0, - "con": 0 - }, - "head_rogue_5": { - "text": "Umbral Hood", - "notes": "Conceals even thoughts from those who would probe them. Increases Perception by 12.", - "per": 12, - "value": 80, - "last": true, - "type": "head", - "key": "head_rogue_5", - "set": "rogue-5", - "klass": "rogue", - "index": "5", - "str": 0, - "int": 0, - "con": 0 - }, - "shield_rogue_5": { - "text": "Ninja-to", - "notes": "Sleek and deadly as the ninja themselves. Increases Strength by 8. ", - "str": 8, - "value": 90, - "type": "shield", - "key": "shield_rogue_5", - "set": "rogue-5", - "klass": "rogue", - "index": "5", - "int": 0, - "per": 0, - "con": 0 - }, - "back_special_heroicAureole": { - "text": "Heroic Aureole", - "notes": "The gems on this aureole glimmer when you tell your tales of glory. 
Increases all stats by 7.", - "con": 7, - "str": 7, - "per": 7, - "int": 7, - "value": 175, - "type": "back", - "key": "back_special_heroicAureole", - "set": "special-heroicAureole", - "klass": "special", - "index": "heroicAureole" - }, - "headAccessory_armoire_gogglesOfBookbinding": { - "per": 8, - "set": "bookbinder", - "notes": "These goggles will help you zero in on any task, large or small! Increases Perception by 8. Enchanted Armoire: Bookbinder Set (Item 1 of 4).", - "text": "Goggles of Bookbinding", - "value": 100, - "type": "headAccessory", - "key": "headAccessory_armoire_gogglesOfBookbinding", - "klass": "armoire", - "index": "gogglesOfBookbinding", - "str": 0, - "int": 0, - "con": 0 - }, - "eyewear_armoire_plagueDoctorMask": { - "con": 5, - "int": 5, - "set": "plagueDoctor", - "notes": "An authentic mask worn by the doctors who battle the Plague of Procrastination. Increases Constitution and Intelligence by 5 each. Enchanted Armoire: Plague Doctor Set (Item 2 of 3).", - "text": "Plague Doctor Mask", - "value": 100, - "type": "eyewear", - "key": "eyewear_armoire_plagueDoctorMask", - "klass": "armoire", - "index": "plagueDoctorMask", - "str": 0, - "per": 0 - }, - "body_special_aetherAmulet": { - "text": "Aether Amulet", - "notes": "This amulet has a mysterious history. Increases Constitution and Strength by 10 each.", - "value": 175, - "str": 10, - "con": 10, - "type": "body", - "key": "body_special_aetherAmulet", - "set": "special-aetherAmulet", - "klass": "special", - "index": "aetherAmulet", - "int": 0, - "per": 0 - } - } - } - }, - "appVersion": "5.29.2" -} diff --git a/tests/components/habitica/fixtures/healer_fixture.json b/tests/components/habitica/fixtures/healer_fixture.json index 85f719f4ca7..04cbabcfa2d 100644 --- a/tests/components/habitica/fixtures/healer_fixture.json +++ b/tests/components/habitica/fixtures/healer_fixture.json @@ -4,10 +4,10 @@ "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, + "str": 0, + "int": 0, + "per": 0, + "con": 0, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,36 +24,17 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 5, - "str": 15, - "con": 15, - "int": 15, - "per": 15 + "points": 5 }, "preferences": { "sleep": false, "automaticAllocation": true, - "disableClasses": false, - "language": "en" + "disableClasses": false }, "flags": { "classSelected": true }, "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z", - "items": { - "gear": { - "equipped": { - "weapon": "weapon_healer_5", - "armor": "armor_healer_5", - "head": "head_healer_5", - "shield": "shield_healer_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } + "lastCron": "2024-09-21T22:01:55.586Z" } } diff --git a/tests/components/habitica/fixtures/healer_skills_unavailable.json b/tests/components/habitica/fixtures/healer_skills_unavailable.json index a6bff246b2a..305a5f8cda1 100644 --- a/tests/components/habitica/fixtures/healer_skills_unavailable.json +++ b/tests/components/habitica/fixtures/healer_skills_unavailable.json @@ -4,10 +4,10 @@ "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, + "str": 0, + "int": 0, + "per": 0, + "con": 0, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,35 +24,16 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 0, - "str": 15, - "con": 15, - "int": 15, - "per": 15 
+ "points": 0 }, "preferences": { "sleep": false, "automaticAllocation": false, - "disableClasses": false, - "language": "en" + "disableClasses": false }, "flags": { "classSelected": true }, - "needsCron": false, - "items": { - "gear": { - "equipped": { - "weapon": "weapon_healer_5", - "armor": "armor_healer_5", - "head": "head_healer_5", - "shield": "shield_healer_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } + "needsCron": false } } diff --git a/tests/components/habitica/fixtures/quest_invitation_off.json b/tests/components/habitica/fixtures/quest_invitation_off.json index b5eccd99e10..f862a85c7c4 100644 --- a/tests/components/habitica/fixtures/quest_invitation_off.json +++ b/tests/components/habitica/fixtures/quest_invitation_off.json @@ -29,8 +29,7 @@ "preferences": { "sleep": false, "automaticAllocation": true, - "disableClasses": false, - "language": "en" + "disableClasses": false }, "flags": { "classSelected": true diff --git a/tests/components/habitica/fixtures/rogue_fixture.json b/tests/components/habitica/fixtures/rogue_fixture.json index 1e5e996c034..f0ea42a7182 100644 --- a/tests/components/habitica/fixtures/rogue_fixture.json +++ b/tests/components/habitica/fixtures/rogue_fixture.json @@ -4,10 +4,10 @@ "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, + "str": 0, + "int": 0, + "per": 0, + "con": 0, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,36 +24,17 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 5, - "str": 15, - "con": 15, - "int": 15, - "per": 15 + "points": 5 }, "preferences": { "sleep": false, "automaticAllocation": true, - "disableClasses": false, - "language": "en" + "disableClasses": false }, "flags": { "classSelected": true }, "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z", - "items": { - "gear": { - "equipped": { - "weapon": "weapon_rogue_5", - "armor": "armor_rogue_5", - "head": "head_rogue_5", - "shield": "shield_rogue_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } + "lastCron": "2024-09-21T22:01:55.586Z" } } diff --git a/tests/components/habitica/fixtures/rogue_skills_unavailable.json b/tests/components/habitica/fixtures/rogue_skills_unavailable.json index c7c5ff32245..2709731ba55 100644 --- a/tests/components/habitica/fixtures/rogue_skills_unavailable.json +++ b/tests/components/habitica/fixtures/rogue_skills_unavailable.json @@ -4,10 +4,10 @@ "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, + "str": 0, + "int": 0, + "per": 0, + "con": 0, "stealth": 0, "streaks": true, "seafoam": false, @@ -24,35 +24,16 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 0, - "str": 15, - "con": 15, - "int": 15, - "per": 15 + "points": 0 }, "preferences": { "sleep": false, "automaticAllocation": false, - "disableClasses": false, - "language": "en" + "disableClasses": false }, "flags": { "classSelected": true }, - "needsCron": false, - "items": { - "gear": { - "equipped": { - "weapon": "weapon_rogue_5", - "armor": "armor_rogue_5", - "head": "head_rogue_5", - "shield": "shield_rogue_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } + "needsCron": false } } 
diff --git a/tests/components/habitica/fixtures/rogue_stealth_unavailable.json b/tests/components/habitica/fixtures/rogue_stealth_unavailable.json index 9fd7adcca42..a4e86abbb91 100644 --- a/tests/components/habitica/fixtures/rogue_stealth_unavailable.json +++ b/tests/components/habitica/fixtures/rogue_stealth_unavailable.json @@ -4,10 +4,10 @@ "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, + "str": 0, + "int": 0, + "per": 0, + "con": 0, "stealth": 4, "streaks": false, "seafoam": false, @@ -24,35 +24,16 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 0, - "str": 15, - "con": 15, - "int": 15, - "per": 15 + "points": 0 }, "preferences": { "sleep": false, "automaticAllocation": false, - "disableClasses": false, - "language": "en" + "disableClasses": false }, "flags": { "classSelected": true }, - "needsCron": false, - "items": { - "gear": { - "equipped": { - "weapon": "weapon_rogue_5", - "armor": "armor_rogue_5", - "head": "head_rogue_5", - "shield": "shield_rogue_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } + "needsCron": false } } diff --git a/tests/components/habitica/fixtures/tasks.json b/tests/components/habitica/fixtures/tasks.json index 2e8305283d0..0d6ffba0732 100644 --- a/tests/components/habitica/fixtures/tasks.json +++ b/tests/components/habitica/fixtures/tasks.json @@ -121,8 +121,7 @@ "createdAt": "2024-07-07T17:51:53.264Z", "updatedAt": "2024-07-12T09:58:45.438Z", "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "id": "e97659e0-2c42-4599-a7bb-00282adc410d", - "alias": "create_a_task" + "id": "e97659e0-2c42-4599-a7bb-00282adc410d" }, { "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", @@ -455,8 +454,7 @@ "createdAt": "2024-09-21T22:17:19.513Z", "updatedAt": "2024-09-21T22:19:35.576Z", "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "id": "2f6fcabc-f670-4ec3-ba65-817e8deea490", - "alias": "pay_bills" + "id": "2f6fcabc-f670-4ec3-ba65-817e8deea490" }, { "_id": "1aa3137e-ef72-4d1f-91ee-41933602f438", diff --git a/tests/components/habitica/fixtures/user.json b/tests/components/habitica/fixtures/user.json index 569c5b81a02..818f4ed4eda 100644 --- a/tests/components/habitica/fixtures/user.json +++ b/tests/components/habitica/fixtures/user.json @@ -4,10 +4,10 @@ "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, + "str": 0, + "int": 0, + "per": 0, + "con": 0, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,17 +24,12 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 5, - "str": 15, - "con": 15, - "int": 15, - "per": 15 + "points": 5 }, "preferences": { "sleep": false, "automaticAllocation": true, - "disableClasses": false, - "language": "en" + "disableClasses": false }, "flags": { "classSelected": true @@ -64,20 +59,6 @@ } }, "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z", - "items": { - "gear": { - "equipped": { - "weapon": "weapon_warrior_5", - "armor": "armor_warrior_5", - "head": "head_warrior_5", - "shield": "shield_warrior_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } + "lastCron": "2024-09-21T22:01:55.586Z" } } diff --git a/tests/components/habitica/fixtures/warrior_fixture.json b/tests/components/habitica/fixtures/warrior_fixture.json index 3517e8a908a..53d18206f9a 
100644 --- a/tests/components/habitica/fixtures/warrior_fixture.json +++ b/tests/components/habitica/fixtures/warrior_fixture.json @@ -4,10 +4,10 @@ "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, + "str": 0, + "int": 0, + "per": 0, + "con": 0, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,36 +24,17 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 5, - "str": 15, - "con": 15, - "int": 15, - "per": 15 + "points": 5 }, "preferences": { "sleep": false, "automaticAllocation": true, - "disableClasses": false, - "language": "en" + "disableClasses": false }, "flags": { "classSelected": true }, "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z", - "items": { - "gear": { - "equipped": { - "weapon": "weapon_warrior_5", - "armor": "armor_warrior_5", - "head": "head_warrior_5", - "shield": "shield_warrior_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } + "lastCron": "2024-09-21T22:01:55.586Z" } } diff --git a/tests/components/habitica/fixtures/warrior_skills_unavailable.json b/tests/components/habitica/fixtures/warrior_skills_unavailable.json index b3d33c85d5c..53160646569 100644 --- a/tests/components/habitica/fixtures/warrior_skills_unavailable.json +++ b/tests/components/habitica/fixtures/warrior_skills_unavailable.json @@ -4,10 +4,10 @@ "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, + "str": 0, + "int": 0, + "per": 0, + "con": 0, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,35 +24,16 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 0, - "str": 15, - "con": 15, - "int": 15, - "per": 15 + "points": 0 }, "preferences": { "sleep": false, "automaticAllocation": false, - "disableClasses": false, - "language": "en" + "disableClasses": false }, "flags": { "classSelected": true }, - "needsCron": false, - "items": { - "gear": { - "equipped": { - "weapon": "weapon_warrior_5", - "armor": "armor_warrior_5", - "head": "head_warrior_5", - "shield": "shield_warrior_5", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } + "needsCron": false } } diff --git a/tests/components/habitica/fixtures/wizard_fixture.json b/tests/components/habitica/fixtures/wizard_fixture.json index de596e231de..0f9f2a49639 100644 --- a/tests/components/habitica/fixtures/wizard_fixture.json +++ b/tests/components/habitica/fixtures/wizard_fixture.json @@ -4,10 +4,10 @@ "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, + "str": 0, + "int": 0, + "per": 0, + "con": 0, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,36 +24,17 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 5, - "str": 15, - "con": 15, - "int": 15, - "per": 15 + "points": 5 }, "preferences": { "sleep": false, "automaticAllocation": true, - "disableClasses": false, - "language": "en" + "disableClasses": false }, "flags": { "classSelected": true }, "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z", - "items": { - "gear": { - "equipped": { - "weapon": "weapon_wizard_5", - "armor": "armor_wizard_5", - "head": "head_wizard_5", - "shield": "shield_base_0", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", 
- "body": "aetherAmulet" - } - } - } + "lastCron": "2024-09-21T22:01:55.586Z" } } diff --git a/tests/components/habitica/fixtures/wizard_frost_unavailable.json b/tests/components/habitica/fixtures/wizard_frost_unavailable.json index 31d10fde4b9..ba57568e99e 100644 --- a/tests/components/habitica/fixtures/wizard_frost_unavailable.json +++ b/tests/components/habitica/fixtures/wizard_frost_unavailable.json @@ -4,10 +4,10 @@ "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, + "str": 0, + "int": 0, + "per": 0, + "con": 0, "stealth": 0, "streaks": true, "seafoam": false, @@ -24,35 +24,16 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 0, - "str": 15, - "con": 15, - "int": 15, - "per": 15 + "points": 0 }, "preferences": { "sleep": false, "automaticAllocation": false, - "disableClasses": false, - "language": "en" + "disableClasses": false }, "flags": { "classSelected": true }, - "needsCron": false, - "items": { - "gear": { - "equipped": { - "weapon": "weapon_wizard_5", - "armor": "armor_wizard_5", - "head": "head_wizard_5", - "shield": "shield_base_0", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } + "needsCron": false } } diff --git a/tests/components/habitica/fixtures/wizard_skills_unavailable.json b/tests/components/habitica/fixtures/wizard_skills_unavailable.json index f3bdee9dd74..11bf0a19193 100644 --- a/tests/components/habitica/fixtures/wizard_skills_unavailable.json +++ b/tests/components/habitica/fixtures/wizard_skills_unavailable.json @@ -4,10 +4,10 @@ "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 26, - "int": 26, - "per": 26, - "con": 26, + "str": 0, + "int": 0, + "per": 0, + "con": 0, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,35 +24,16 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 0, - "str": 15, - "con": 15, - "int": 15, - "per": 15 + "points": 0 }, "preferences": { "sleep": false, "automaticAllocation": false, - "disableClasses": false, - "language": "en" + "disableClasses": false }, "flags": { "classSelected": true }, - "needsCron": false, - "items": { - "gear": { - "equipped": { - "weapon": "weapon_wizard_5", - "armor": "armor_wizard_5", - "head": "head_wizard_5", - "shield": "shield_base_0", - "back": "heroicAureole", - "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", - "eyewear": "plagueDoctorMask", - "body": "aetherAmulet" - } - } - } + "needsCron": false } } diff --git a/tests/components/habitica/snapshots/test_sensor.ambr b/tests/components/habitica/snapshots/test_sensor.ambr index 3a43069bfc4..ee75b424a93 100644 --- a/tests/components/habitica/snapshots/test_sensor.ambr +++ b/tests/components/habitica/snapshots/test_sensor.ambr @@ -59,61 +59,6 @@ 'state': 'wizard', }) # --- -# name: test_sensors[sensor.test_user_constitution-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_constitution', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Constitution', - 'platform': 
'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_constitution', - 'unit_of_measurement': 'CON', - }) -# --- -# name: test_sensors[sensor.test_user_constitution-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'allocated': 15, - 'buffs': 26, - 'class': 0, - 'equipment': 20, - 'friendly_name': 'test-user Constitution', - 'level': 19, - 'unit_of_measurement': 'CON', - }), - 'context': , - 'entity_id': 'sensor.test_user_constitution', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '80', - }) -# --- # name: test_sensors[sensor.test_user_dailies-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -622,61 +567,6 @@ 'state': '0', }) # --- -# name: test_sensors[sensor.test_user_intelligence-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_intelligence', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Intelligence', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_intelligence', - 'unit_of_measurement': 'INT', - }) -# --- -# name: test_sensors[sensor.test_user_intelligence-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'allocated': 15, - 'buffs': 26, - 'class': 0, - 'equipment': 0, - 'friendly_name': 'test-user Intelligence', - 'level': 19, - 'unit_of_measurement': 'INT', - }), - 'context': , - 'entity_id': 'sensor.test_user_intelligence', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '60', - }) -# --- # name: test_sensors[sensor.test_user_level-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -964,61 +854,6 @@ 'state': '880', }) # --- -# name: test_sensors[sensor.test_user_perception-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_perception', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Perception', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_perception', - 'unit_of_measurement': 'PER', - }) -# --- -# name: test_sensors[sensor.test_user_perception-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'allocated': 15, - 'buffs': 26, - 'class': 0, - 'equipment': 8, - 'friendly_name': 'test-user Perception', - 'level': 19, - 'unit_of_measurement': 'PER', - }), - 'context': , - 'entity_id': 'sensor.test_user_perception', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '68', - }) -# --- # name: test_sensors[sensor.test_user_rewards-entry] EntityRegistryEntrySnapshot({ 
'aliases': set({ @@ -1080,61 +915,6 @@ 'state': '1', }) # --- -# name: test_sensors[sensor.test_user_strength-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_user_strength', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Strength', - 'platform': 'habitica', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': '00000000-0000-0000-0000-000000000000_strength', - 'unit_of_measurement': 'STR', - }) -# --- -# name: test_sensors[sensor.test_user_strength-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'allocated': 15, - 'buffs': 26, - 'class': 0, - 'equipment': 27, - 'friendly_name': 'test-user Strength', - 'level': 19, - 'unit_of_measurement': 'STR', - }), - 'context': , - 'entity_id': 'sensor.test_user_strength', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '87', - }) -# --- # name: test_sensors[sensor.test_user_to_do_s-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/habitica/test_binary_sensor.py b/tests/components/habitica/test_binary_sensor.py index 1710f8f217e..5b19cd008bf 100644 --- a/tests/components/habitica/test_binary_sensor.py +++ b/tests/components/habitica/test_binary_sensor.py @@ -66,11 +66,7 @@ async def test_pending_quest_states( json=load_json_object_fixture(f"{fixture}.json", DOMAIN), ) aioclient_mock.get(f"{DEFAULT_URL}/api/v3/tasks/user", json={"data": []}) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/content", - params={"language": "en"}, - json=load_json_object_fixture("content.json", DOMAIN), - ) + config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/habitica/test_button.py b/tests/components/habitica/test_button.py index 979cefef923..6bd62f3a58e 100644 --- a/tests/components/habitica/test_button.py +++ b/tests/components/habitica/test_button.py @@ -63,11 +63,6 @@ async def test_buttons( f"{DEFAULT_URL}/api/v3/tasks/user", json=load_json_object_fixture("tasks.json", DOMAIN), ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/content", - params={"language": "en"}, - json=load_json_object_fixture("content.json", DOMAIN), - ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -168,11 +163,6 @@ async def test_button_press( f"{DEFAULT_URL}/api/v3/tasks/user", json=load_json_object_fixture("tasks.json", DOMAIN), ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/content", - params={"language": "en"}, - json=load_json_object_fixture("content.json", DOMAIN), - ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/habitica/test_services.py b/tests/components/habitica/test_services.py deleted file mode 100644 index 403779bcbfb..00000000000 --- a/tests/components/habitica/test_services.py +++ /dev/null @@ -1,548 +0,0 @@ -"""Test Habitica actions.""" - -from collections.abc import Generator -from http import HTTPStatus -from typing import Any 
-from unittest.mock import patch - -import pytest - -from homeassistant.components.habitica.const import ( - ATTR_CONFIG_ENTRY, - ATTR_DIRECTION, - ATTR_SKILL, - ATTR_TASK, - DEFAULT_URL, - DOMAIN, - SERVICE_ABORT_QUEST, - SERVICE_ACCEPT_QUEST, - SERVICE_CANCEL_QUEST, - SERVICE_CAST_SKILL, - SERVICE_LEAVE_QUEST, - SERVICE_REJECT_QUEST, - SERVICE_SCORE_HABIT, - SERVICE_SCORE_REWARD, - SERVICE_START_QUEST, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError - -from .conftest import mock_called_with - -from tests.common import MockConfigEntry -from tests.test_util.aiohttp import AiohttpClientMocker - -REQUEST_EXCEPTION_MSG = "Unable to connect to Habitica, try again later" -RATE_LIMIT_EXCEPTION_MSG = "Rate limit exceeded, try again later" - - -@pytest.fixture(autouse=True) -def services_only() -> Generator[None]: - """Enable only services.""" - with patch( - "homeassistant.components.habitica.PLATFORMS", - [], - ): - yield - - -@pytest.fixture(autouse=True) -async def load_entry( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - services_only: Generator, -) -> None: - """Load config entry.""" - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - - -@pytest.mark.parametrize( - ("service_data", "item", "target_id"), - [ - ( - { - ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ATTR_SKILL: "pickpocket", - }, - "pickPocket", - "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ), - ( - { - ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ATTR_SKILL: "backstab", - }, - "backStab", - "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ), - ( - { - ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ATTR_SKILL: "fireball", - }, - "fireball", - "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ), - ( - { - ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ATTR_SKILL: "smash", - }, - "smash", - "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ), - ( - { - ATTR_TASK: "Rechnungen bezahlen", - ATTR_SKILL: "smash", - }, - "smash", - "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ), - ( - { - ATTR_TASK: "pay_bills", - ATTR_SKILL: "smash", - }, - "smash", - "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ), - ], - ids=[ - "cast pickpocket", - "cast backstab", - "cast fireball", - "cast smash", - "select task by name", - "select task_by_alias", - ], -) -async def test_cast_skill( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - service_data: dict[str, Any], - item: str, - target_id: str, -) -> None: - """Test Habitica cast skill action.""" - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/user/class/cast/{item}?targetId={target_id}", - json={"success": True, "data": {}}, - ) - - await hass.services.async_call( - DOMAIN, - SERVICE_CAST_SKILL, - service_data={ - ATTR_CONFIG_ENTRY: config_entry.entry_id, - **service_data, - }, - return_response=True, - blocking=True, - ) - - assert mock_called_with( - mock_habitica, - "post", - f"{DEFAULT_URL}/api/v3/user/class/cast/{item}?targetId={target_id}", - ) - - -@pytest.mark.parametrize( - ( - "service_data", - "http_status", - "expected_exception", - "expected_exception_msg", - ), - [ - ( - { - ATTR_TASK: "task-not-found", - ATTR_SKILL: "smash", - }, - HTTPStatus.OK, - ServiceValidationError, - "Unable to complete action, could not find 
the task 'task-not-found'", - ), - ( - { - ATTR_TASK: "Rechnungen bezahlen", - ATTR_SKILL: "smash", - }, - HTTPStatus.TOO_MANY_REQUESTS, - ServiceValidationError, - RATE_LIMIT_EXCEPTION_MSG, - ), - ( - { - ATTR_TASK: "Rechnungen bezahlen", - ATTR_SKILL: "smash", - }, - HTTPStatus.NOT_FOUND, - ServiceValidationError, - "Unable to cast skill, your character does not have the skill or spell smash", - ), - ( - { - ATTR_TASK: "Rechnungen bezahlen", - ATTR_SKILL: "smash", - }, - HTTPStatus.UNAUTHORIZED, - ServiceValidationError, - "Unable to cast skill, not enough mana. Your character has 50 MP, but the skill costs 10 MP", - ), - ( - { - ATTR_TASK: "Rechnungen bezahlen", - ATTR_SKILL: "smash", - }, - HTTPStatus.BAD_REQUEST, - HomeAssistantError, - REQUEST_EXCEPTION_MSG, - ), - ], -) -@pytest.mark.usefixtures("mock_habitica") -async def test_cast_skill_exceptions( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - service_data: dict[str, Any], - http_status: HTTPStatus, - expected_exception: Exception, - expected_exception_msg: str, -) -> None: - """Test Habitica cast skill action exceptions.""" - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/user/class/cast/smash?targetId=2f6fcabc-f670-4ec3-ba65-817e8deea490", - json={"success": True, "data": {}}, - status=http_status, - ) - - with pytest.raises(expected_exception, match=expected_exception_msg): - await hass.services.async_call( - DOMAIN, - SERVICE_CAST_SKILL, - service_data={ - ATTR_CONFIG_ENTRY: config_entry.entry_id, - **service_data, - }, - return_response=True, - blocking=True, - ) - - -@pytest.mark.usefixtures("mock_habitica") -async def test_get_config_entry( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, -) -> None: - """Test Habitica config entry exceptions.""" - - with pytest.raises( - ServiceValidationError, - match="The selected character is not configured in Home Assistant", - ): - await hass.services.async_call( - DOMAIN, - SERVICE_CAST_SKILL, - service_data={ - ATTR_CONFIG_ENTRY: "0000000000000000", - ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ATTR_SKILL: "smash", - }, - return_response=True, - blocking=True, - ) - - assert await hass.config_entries.async_unload(config_entry.entry_id) - - with pytest.raises( - ServiceValidationError, - match="The selected character is currently not loaded or disabled in Home Assistant", - ): - await hass.services.async_call( - DOMAIN, - SERVICE_CAST_SKILL, - service_data={ - ATTR_CONFIG_ENTRY: config_entry.entry_id, - ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", - ATTR_SKILL: "smash", - }, - return_response=True, - blocking=True, - ) - - -@pytest.mark.parametrize( - ("service", "command"), - [ - (SERVICE_ABORT_QUEST, "abort"), - (SERVICE_ACCEPT_QUEST, "accept"), - (SERVICE_CANCEL_QUEST, "cancel"), - (SERVICE_LEAVE_QUEST, "leave"), - (SERVICE_REJECT_QUEST, "reject"), - (SERVICE_START_QUEST, "force-start"), - ], - ids=[], -) -async def test_handle_quests( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - service: str, - command: str, -) -> None: - """Test Habitica actions for quest handling.""" - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/groups/party/quests/{command}", - json={"success": True, "data": {}}, - ) - - await hass.services.async_call( - DOMAIN, - service, - service_data={ATTR_CONFIG_ENTRY: config_entry.entry_id}, - return_response=True, - blocking=True, - ) - - assert mock_called_with( - mock_habitica, - "post", - 
f"{DEFAULT_URL}/api/v3/groups/party/quests/{command}", - ) - - -@pytest.mark.parametrize( - ( - "http_status", - "expected_exception", - "expected_exception_msg", - ), - [ - ( - HTTPStatus.TOO_MANY_REQUESTS, - ServiceValidationError, - RATE_LIMIT_EXCEPTION_MSG, - ), - ( - HTTPStatus.NOT_FOUND, - ServiceValidationError, - "Unable to complete action, quest or group not found", - ), - ( - HTTPStatus.UNAUTHORIZED, - ServiceValidationError, - "Action not allowed, only quest leader or group leader can perform this action", - ), - ( - HTTPStatus.BAD_REQUEST, - HomeAssistantError, - REQUEST_EXCEPTION_MSG, - ), - ], -) -@pytest.mark.usefixtures("mock_habitica") -async def test_handle_quests_exceptions( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - http_status: HTTPStatus, - expected_exception: Exception, - expected_exception_msg: str, -) -> None: - """Test Habitica handle quests action exceptions.""" - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/groups/party/quests/accept", - json={"success": True, "data": {}}, - status=http_status, - ) - - with pytest.raises(expected_exception, match=expected_exception_msg): - await hass.services.async_call( - DOMAIN, - SERVICE_ACCEPT_QUEST, - service_data={ATTR_CONFIG_ENTRY: config_entry.entry_id}, - return_response=True, - blocking=True, - ) - - -@pytest.mark.parametrize( - ("service", "service_data", "task_id"), - [ - ( - SERVICE_SCORE_HABIT, - { - ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", - ATTR_DIRECTION: "up", - }, - "e97659e0-2c42-4599-a7bb-00282adc410d", - ), - ( - SERVICE_SCORE_HABIT, - { - ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", - ATTR_DIRECTION: "down", - }, - "e97659e0-2c42-4599-a7bb-00282adc410d", - ), - ( - SERVICE_SCORE_REWARD, - { - ATTR_TASK: "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", - }, - "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", - ), - ( - SERVICE_SCORE_HABIT, - { - ATTR_TASK: "Füge eine Aufgabe zu Habitica hinzu", - ATTR_DIRECTION: "up", - }, - "e97659e0-2c42-4599-a7bb-00282adc410d", - ), - ( - SERVICE_SCORE_HABIT, - { - ATTR_TASK: "create_a_task", - ATTR_DIRECTION: "up", - }, - "e97659e0-2c42-4599-a7bb-00282adc410d", - ), - ], - ids=[ - "habit score up", - "habit score down", - "buy reward", - "match task by name", - "match task by alias", - ], -) -async def test_score_task( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - service: str, - service_data: dict[str, Any], - task_id: str, -) -> None: - """Test Habitica score task action.""" - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/tasks/{task_id}/score/{service_data.get(ATTR_DIRECTION, "up")}", - json={"success": True, "data": {}}, - ) - - await hass.services.async_call( - DOMAIN, - service, - service_data={ - ATTR_CONFIG_ENTRY: config_entry.entry_id, - **service_data, - }, - return_response=True, - blocking=True, - ) - - assert mock_called_with( - mock_habitica, - "post", - f"{DEFAULT_URL}/api/v3/tasks/{task_id}/score/{service_data.get(ATTR_DIRECTION, "up")}", - ) - - -@pytest.mark.parametrize( - ( - "service_data", - "http_status", - "expected_exception", - "expected_exception_msg", - ), - [ - ( - { - ATTR_TASK: "task does not exist", - ATTR_DIRECTION: "up", - }, - HTTPStatus.OK, - ServiceValidationError, - "Unable to complete action, could not find the task 'task does not exist'", - ), - ( - { - ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", - ATTR_DIRECTION: "up", - }, - HTTPStatus.TOO_MANY_REQUESTS, - ServiceValidationError, - RATE_LIMIT_EXCEPTION_MSG, - ), - ( - { - 
ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", - ATTR_DIRECTION: "up", - }, - HTTPStatus.BAD_REQUEST, - HomeAssistantError, - REQUEST_EXCEPTION_MSG, - ), - ( - { - ATTR_TASK: "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", - ATTR_DIRECTION: "up", - }, - HTTPStatus.UNAUTHORIZED, - HomeAssistantError, - "Unable to buy reward, not enough gold. Your character has 137.63 GP, but the reward costs 10 GP", - ), - ], -) -@pytest.mark.usefixtures("mock_habitica") -async def test_score_task_exceptions( - hass: HomeAssistant, - config_entry: MockConfigEntry, - mock_habitica: AiohttpClientMocker, - service_data: dict[str, Any], - http_status: HTTPStatus, - expected_exception: Exception, - expected_exception_msg: str, -) -> None: - """Test Habitica score task action exceptions.""" - - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/tasks/e97659e0-2c42-4599-a7bb-00282adc410d/score/up", - json={"success": True, "data": {}}, - status=http_status, - ) - mock_habitica.post( - f"{DEFAULT_URL}/api/v3/tasks/5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b/score/up", - json={"success": True, "data": {}}, - status=http_status, - ) - - with pytest.raises(expected_exception, match=expected_exception_msg): - await hass.services.async_call( - DOMAIN, - SERVICE_SCORE_HABIT, - service_data={ - ATTR_CONFIG_ENTRY: config_entry.entry_id, - **service_data, - }, - return_response=True, - blocking=True, - ) diff --git a/tests/components/habitica/test_todo.py b/tests/components/habitica/test_todo.py index c9a4b3dd37a..88947caba2d 100644 --- a/tests/components/habitica/test_todo.py +++ b/tests/components/habitica/test_todo.py @@ -672,11 +672,6 @@ async def test_next_due_date( f"{DEFAULT_URL}/api/v3/tasks/user", json=load_json_object_fixture(fixture, DOMAIN), ) - aioclient_mock.get( - f"{DEFAULT_URL}/api/v3/content", - params={"language": "en"}, - json=load_json_object_fixture("content.json", DOMAIN), - ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/hassio/test_binary_sensor.py b/tests/components/hassio/test_binary_sensor.py index 9878dd67a21..c97be736248 100644 --- a/tests/components/hassio/test_binary_sensor.py +++ b/tests/components/hassio/test_binary_sensor.py @@ -25,7 +25,6 @@ def mock_all( store_info: AsyncMock, addon_changelog: AsyncMock, addon_stats: AsyncMock, - resolution_info: AsyncMock, ) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) @@ -141,6 +140,19 @@ def mock_all( aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_diagnostics.py b/tests/components/hassio/test_diagnostics.py index c95cde67b8a..c238d9d2a15 100644 --- a/tests/components/hassio/test_diagnostics.py +++ b/tests/components/hassio/test_diagnostics.py @@ -24,7 +24,6 @@ def mock_all( store_info: AsyncMock, addon_stats: AsyncMock, addon_changelog: AsyncMock, - resolution_info: AsyncMock, ) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) @@ -144,6 +143,19 @@ def mock_all( aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) + aioclient_mock.get( 
+ "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_handler.py b/tests/components/hassio/test_handler.py index 56f0dcb706c..e125e09ae7e 100644 --- a/tests/components/hassio/test_handler.py +++ b/tests/components/hassio/test_handler.py @@ -208,7 +208,7 @@ async def test_api_ingress_panels( @pytest.mark.parametrize( ("api_call", "method", "payload"), [ - ("get_network_info", "GET", None), + ("get_resolution_info", "GET", None), ("update_diagnostics", "POST", True), ], ) diff --git a/tests/components/hassio/test_init.py b/tests/components/hassio/test_init.py index 5c11370ae74..23259543478 100644 --- a/tests/components/hassio/test_init.py +++ b/tests/components/hassio/test_init.py @@ -67,7 +67,6 @@ def mock_all( addon_info: AsyncMock, addon_stats: AsyncMock, addon_changelog: AsyncMock, - resolution_info: AsyncMock, ) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) @@ -205,6 +204,19 @@ def mock_all( aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_issues.py b/tests/components/hassio/test_issues.py index 7ce11a18fb5..1a3d3d83f95 100644 --- a/tests/components/hassio/test_issues.py +++ b/tests/components/hassio/test_issues.py @@ -4,28 +4,11 @@ from __future__ import annotations from collections.abc import Generator from datetime import timedelta +from http import HTTPStatus import os from typing import Any -from unittest.mock import ANY, AsyncMock, patch -from uuid import UUID, uuid4 +from unittest.mock import ANY, patch -from aiohasupervisor import ( - SupervisorBadRequestError, - SupervisorError, - SupervisorTimeoutError, -) -from aiohasupervisor.models import ( - Check, - CheckType, - ContextType, - Issue, - IssueType, - ResolutionInfo, - Suggestion, - SuggestionType, - UnhealthyReason, - UnsupportedReason, -) from freezegun.api import FrozenDateTimeFactory import pytest @@ -35,6 +18,7 @@ from homeassistant.setup import async_setup_component from .test_init import MOCK_ENVIRON +from tests.test_util.aiohttp import AiohttpClientMocker, AiohttpClientMockResponse from tests.typing import WebSocketGenerator @@ -52,41 +36,49 @@ def fixture_supervisor_environ() -> Generator[None]: def mock_resolution_info( - supervisor_client: AsyncMock, - unsupported: list[UnsupportedReason] | None = None, - unhealthy: list[UnhealthyReason] | None = None, - issues: list[Issue] | None = None, - suggestions_by_issue: dict[UUID, list[Suggestion]] | None = None, - suggestion_result: SupervisorError | None = None, + aioclient_mock: AiohttpClientMocker, + unsupported: list[str] | None = None, + unhealthy: list[str] | None = None, + issues: list[dict[str, str]] | None = None, + suggestion_result: str = "ok", ) -> None: """Mock resolution/info endpoint with unsupported/unhealthy reasons and/or issues.""" - supervisor_client.resolution.info.return_value = ResolutionInfo( - unsupported=unsupported or [], - unhealthy=unhealthy or [], - issues=issues or [], - 
suggestions=[ - suggestion - for issue_list in suggestions_by_issue.values() - for suggestion in issue_list - ] - if suggestions_by_issue - else [], - checks=[ - Check(enabled=True, slug=CheckType.SUPERVISOR_TRUST), - Check(enabled=True, slug=CheckType.FREE_SPACE), - ], + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": unsupported or [], + "unhealthy": unhealthy or [], + "suggestions": [], + "issues": [ + {k: v for k, v in issue.items() if k != "suggestions"} + for issue in issues + ] + if issues + else [], + "checks": [ + {"enabled": True, "slug": "supervisor_trust"}, + {"enabled": True, "slug": "free_space"}, + ], + }, + }, ) - if suggestions_by_issue: - - async def mock_suggestions_for_issue(uuid: UUID) -> list[Suggestion]: - """Mock of suggestions for issue api.""" - return suggestions_by_issue.get(uuid, []) - - supervisor_client.resolution.suggestions_for_issue.side_effect = ( - mock_suggestions_for_issue - ) - supervisor_client.resolution.apply_suggestion.side_effect = suggestion_result + if issues: + suggestions_by_issue = { + issue["uuid"]: issue.get("suggestions", []) for issue in issues + } + for issue_uuid, suggestions in suggestions_by_issue.items(): + aioclient_mock.get( + f"http://127.0.0.1/resolution/issue/{issue_uuid}/suggestions", + json={"result": "ok", "data": {"suggestions": suggestions}}, + ) + for suggestion in suggestions: + aioclient_mock.post( + f"http://127.0.0.1/resolution/suggestion/{suggestion['uuid']}", + json={"result": suggestion_result}, + ) def assert_repair_in_list( @@ -142,13 +134,11 @@ def assert_issue_repair_in_list( @pytest.mark.usefixtures("all_setup_requests") async def test_unhealthy_issues( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, ) -> None: """Test issues added for unhealthy systems.""" - mock_resolution_info( - supervisor_client, unhealthy=[UnhealthyReason.DOCKER, UnhealthyReason.SETUP] - ) + mock_resolution_info(aioclient_mock, unhealthy=["docker", "setup"]) result = await async_setup_component(hass, "hassio", {}) assert result @@ -166,14 +156,11 @@ async def test_unhealthy_issues( @pytest.mark.usefixtures("all_setup_requests") async def test_unsupported_issues( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, ) -> None: """Test issues added for unsupported systems.""" - mock_resolution_info( - supervisor_client, - unsupported=[UnsupportedReason.CONTENT_TRUST, UnsupportedReason.OS], - ) + mock_resolution_info(aioclient_mock, unsupported=["content_trust", "os"]) result = await async_setup_component(hass, "hassio", {}) assert result @@ -193,11 +180,11 @@ async def test_unsupported_issues( @pytest.mark.usefixtures("all_setup_requests") async def test_unhealthy_issues_add_remove( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, ) -> None: """Test unhealthy issues added and removed from dispatches.""" - mock_resolution_info(supervisor_client) + mock_resolution_info(aioclient_mock) result = await async_setup_component(hass, "hassio", {}) assert result @@ -250,11 +237,11 @@ async def test_unhealthy_issues_add_remove( @pytest.mark.usefixtures("all_setup_requests") async def test_unsupported_issues_add_remove( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, ) -> None: 
"""Test unsupported issues added and removed from dispatches.""" - mock_resolution_info(supervisor_client) + mock_resolution_info(aioclient_mock) result = await async_setup_component(hass, "hassio", {}) assert result @@ -307,21 +294,21 @@ async def test_unsupported_issues_add_remove( @pytest.mark.usefixtures("all_setup_requests") async def test_reset_issues_supervisor_restart( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, ) -> None: """All issues reset on supervisor restart.""" mock_resolution_info( - supervisor_client, - unsupported=[UnsupportedReason.OS], - unhealthy=[UnhealthyReason.DOCKER], + aioclient_mock, + unsupported=["os"], + unhealthy=["docker"], issues=[ - Issue( - type=IssueType.REBOOT_REQUIRED, - context=ContextType.SYSTEM, - reference=None, - uuid=(uuid := uuid4()), - ) + { + "uuid": "1234", + "type": "reboot_required", + "context": "system", + "reference": None, + } ], ) @@ -338,14 +325,15 @@ async def test_reset_issues_supervisor_restart( assert_repair_in_list(msg["result"]["issues"], unhealthy=False, reason="os") assert_issue_repair_in_list( msg["result"]["issues"], - uuid=uuid.hex, + uuid="1234", context="system", type_="reboot_required", fixable=False, reference=None, ) - mock_resolution_info(supervisor_client) + aioclient_mock.clear_requests() + mock_resolution_info(aioclient_mock) await client.send_json( { "id": 2, @@ -370,15 +358,11 @@ async def test_reset_issues_supervisor_restart( @pytest.mark.usefixtures("all_setup_requests") async def test_reasons_added_and_removed( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, ) -> None: """Test an unsupported/unhealthy reasons being added and removed at same time.""" - mock_resolution_info( - supervisor_client, - unsupported=[UnsupportedReason.OS], - unhealthy=[UnhealthyReason.DOCKER], - ) + mock_resolution_info(aioclient_mock, unsupported=["os"], unhealthy=["docker"]) result = await async_setup_component(hass, "hassio", {}) assert result @@ -392,10 +376,9 @@ async def test_reasons_added_and_removed( assert_repair_in_list(msg["result"]["issues"], unhealthy=True, reason="docker") assert_repair_in_list(msg["result"]["issues"], unhealthy=False, reason="os") + aioclient_mock.clear_requests() mock_resolution_info( - supervisor_client, - unsupported=[UnsupportedReason.CONTENT_TRUST], - unhealthy=[UnhealthyReason.SETUP], + aioclient_mock, unsupported=["content_trust"], unhealthy=["setup"] ) await client.send_json( { @@ -425,14 +408,12 @@ async def test_reasons_added_and_removed( @pytest.mark.usefixtures("all_setup_requests") async def test_ignored_unsupported_skipped( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, ) -> None: """Unsupported reasons which have an identical unhealthy reason are ignored.""" mock_resolution_info( - supervisor_client, - unsupported=[UnsupportedReason.PRIVILEGED], - unhealthy=[UnhealthyReason.PRIVILEGED], + aioclient_mock, unsupported=["privileged"], unhealthy=["privileged"] ) result = await async_setup_component(hass, "hassio", {}) @@ -450,14 +431,12 @@ async def test_ignored_unsupported_skipped( @pytest.mark.usefixtures("all_setup_requests") async def test_new_unsupported_unhealthy_reason( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, ) -> None: """New unsupported/unhealthy reasons 
result in a generic repair until next core update.""" mock_resolution_info( - supervisor_client, - unsupported=["fake_unsupported"], - unhealthy=["fake_unhealthy"], + aioclient_mock, unsupported=["fake_unsupported"], unhealthy=["fake_unhealthy"] ) result = await async_setup_component(hass, "hassio", {}) @@ -502,43 +481,40 @@ async def test_new_unsupported_unhealthy_reason( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, ) -> None: """Test repairs added for supervisor issue.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.REBOOT_REQUIRED, - context=ContextType.SYSTEM, - reference=None, - uuid=(uuid_issue1 := uuid4()), - ), - Issue( - type=IssueType.MULTIPLE_DATA_DISKS, - context=ContextType.SYSTEM, - reference="/dev/sda1", - uuid=(uuid_issue2 := uuid4()), - ), - Issue( - type="should_not_be_repair", - context=ContextType.OS, - reference=None, - uuid=uuid4(), - ), + { + "uuid": "1234", + "type": "reboot_required", + "context": "system", + "reference": None, + }, + { + "uuid": "1235", + "type": "multiple_data_disks", + "context": "system", + "reference": "/dev/sda1", + "suggestions": [ + { + "uuid": "1236", + "type": "rename_data_disk", + "context": "system", + "reference": "/dev/sda1", + } + ], + }, + { + "uuid": "1237", + "type": "should_not_be_repair", + "context": "os", + "reference": None, + }, ], - suggestions_by_issue={ - uuid_issue2: [ - Suggestion( - type=SuggestionType.RENAME_DATA_DISK, - context=ContextType.SYSTEM, - reference="/dev/sda1", - uuid=uuid4(), - auto=False, - ) - ] - }, ) result = await async_setup_component(hass, "hassio", {}) @@ -552,7 +528,7 @@ async def test_supervisor_issues( assert len(msg["result"]["issues"]) == 2 assert_issue_repair_in_list( msg["result"]["issues"], - uuid=uuid_issue1.hex, + uuid="1234", context="system", type_="reboot_required", fixable=False, @@ -560,7 +536,7 @@ async def test_supervisor_issues( ) assert_issue_repair_in_list( msg["result"]["issues"], - uuid=uuid_issue2.hex, + uuid="1235", context="system", type_="multiple_data_disks", fixable=True, @@ -571,33 +547,61 @@ async def test_supervisor_issues( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_initial_failure( hass: HomeAssistant, - resolution_info: AsyncMock, - resolution_suggestions_for_issue: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, ) -> None: """Test issues manager retries after initial update failure.""" - resolution_info.side_effect = [ - SupervisorBadRequestError("System is not ready with state: setup"), - ResolutionInfo( - unsupported=[], - unhealthy=[], - suggestions=[], - issues=[ - Issue( - type=IssueType.REBOOT_REQUIRED, - context=ContextType.SYSTEM, - reference=None, - uuid=uuid4(), - ) - ], - checks=[ - Check(enabled=True, slug=CheckType.SUPERVISOR_TRUST), - Check(enabled=True, slug=CheckType.FREE_SPACE), - ], + responses = [ + AiohttpClientMockResponse( + method="get", + url="http://127.0.0.1/resolution/info", + status=HTTPStatus.BAD_REQUEST, + json={ + "result": "error", + "message": "System is not ready with state: setup", + }, + ), + AiohttpClientMockResponse( + method="get", + url="http://127.0.0.1/resolution/info", + status=HTTPStatus.OK, + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [ + 
{ + "uuid": "1234", + "type": "reboot_required", + "context": "system", + "reference": None, + }, + ], + "checks": [ + {"enabled": True, "slug": "supervisor_trust"}, + {"enabled": True, "slug": "free_space"}, + ], + }, + }, ), ] + async def mock_responses(*args): + nonlocal responses + return responses.pop(0) + + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + side_effect=mock_responses, + ) + aioclient_mock.get( + "http://127.0.0.1/resolution/issue/1234/suggestions", + json={"result": "ok", "data": {"suggestions": []}}, + ) + with patch("homeassistant.components.hassio.issues.REQUEST_REFRESH_DELAY", new=0.1): result = await async_setup_component(hass, "hassio", {}) await hass.async_block_till_done() @@ -621,11 +625,11 @@ async def test_supervisor_issues_initial_failure( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_add_remove( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, ) -> None: """Test supervisor issues added and removed from dispatches.""" - mock_resolution_info(supervisor_client) + mock_resolution_info(aioclient_mock) result = await async_setup_component(hass, "hassio", {}) assert result @@ -639,7 +643,7 @@ async def test_supervisor_issues_add_remove( "data": { "event": "issue_changed", "data": { - "uuid": (issue_uuid := uuid4().hex), + "uuid": "1234", "type": "reboot_required", "context": "system", "reference": None, @@ -657,7 +661,7 @@ async def test_supervisor_issues_add_remove( assert len(msg["result"]["issues"]) == 1 assert_issue_repair_in_list( msg["result"]["issues"], - uuid=issue_uuid, + uuid="1234", context="system", type_="reboot_required", fixable=False, @@ -671,13 +675,13 @@ async def test_supervisor_issues_add_remove( "data": { "event": "issue_changed", "data": { - "uuid": issue_uuid, + "uuid": "1234", "type": "reboot_required", "context": "system", "reference": None, "suggestions": [ { - "uuid": uuid4().hex, + "uuid": "1235", "type": "execute_reboot", "context": "system", "reference": None, @@ -697,7 +701,7 @@ async def test_supervisor_issues_add_remove( assert len(msg["result"]["issues"]) == 1 assert_issue_repair_in_list( msg["result"]["issues"], - uuid=issue_uuid, + uuid="1234", context="system", type_="reboot_required", fixable=True, @@ -711,7 +715,7 @@ async def test_supervisor_issues_add_remove( "data": { "event": "issue_removed", "data": { - "uuid": issue_uuid, + "uuid": "1234", "type": "reboot_required", "context": "system", "reference": None, @@ -732,23 +736,37 @@ async def test_supervisor_issues_add_remove( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_suggestions_fail( hass: HomeAssistant, - supervisor_client: AsyncMock, - resolution_suggestions_for_issue: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, ) -> None: """Test failing to get suggestions for issue skips it.""" - mock_resolution_info( - supervisor_client, - issues=[ - Issue( - type=IssueType.REBOOT_REQUIRED, - context=ContextType.SYSTEM, - reference=None, - uuid=uuid4(), - ) - ], + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [ + { + "uuid": "1234", + "type": "reboot_required", + "context": "system", + "reference": None, + } + ], + "checks": [ + {"enabled": True, "slug": "supervisor_trust"}, + {"enabled": True, "slug": "free_space"}, + ], + }, + }, + ) + aioclient_mock.get( + 
"http://127.0.0.1/resolution/issue/1234/suggestions", + exc=TimeoutError(), ) - resolution_suggestions_for_issue.side_effect = SupervisorTimeoutError result = await async_setup_component(hass, "hassio", {}) assert result @@ -764,11 +782,11 @@ async def test_supervisor_issues_suggestions_fail( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_remove_missing_issue_without_error( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, ) -> None: """Test HA skips message to remove issue that it didn't know about (sync issue).""" - mock_resolution_info(supervisor_client) + mock_resolution_info(aioclient_mock) result = await async_setup_component(hass, "hassio", {}) assert result @@ -798,12 +816,16 @@ async def test_supervisor_remove_missing_issue_without_error( @pytest.mark.usefixtures("all_setup_requests") async def test_system_is_not_ready( hass: HomeAssistant, - resolution_info: AsyncMock, + aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, ) -> None: """Ensure hassio starts despite error.""" - resolution_info.side_effect = SupervisorBadRequestError( - "System is not ready with state: setup" + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "", + "message": "System is not ready with state: setup", + }, ) assert await async_setup_component(hass, "hassio", {}) @@ -816,11 +838,11 @@ async def test_system_is_not_ready( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_detached_addon_missing( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, ) -> None: """Test supervisor issue for detached addon due to missing repository.""" - mock_resolution_info(supervisor_client) + mock_resolution_info(aioclient_mock) result = await async_setup_component(hass, "hassio", {}) assert result @@ -834,7 +856,7 @@ async def test_supervisor_issues_detached_addon_missing( "data": { "event": "issue_changed", "data": { - "uuid": (issue_uuid := uuid4().hex), + "uuid": "1234", "type": "detached_addon_missing", "context": "addon", "reference": "test", @@ -852,7 +874,7 @@ async def test_supervisor_issues_detached_addon_missing( assert len(msg["result"]["issues"]) == 1 assert_issue_repair_in_list( msg["result"]["issues"], - uuid=issue_uuid, + uuid="1234", context="addon", type_="detached_addon_missing", fixable=False, diff --git a/tests/components/hassio/test_repairs.py b/tests/components/hassio/test_repairs.py index f8cac4e1a97..f3ccb5948f1 100644 --- a/tests/components/hassio/test_repairs.py +++ b/tests/components/hassio/test_repairs.py @@ -3,17 +3,8 @@ from collections.abc import Generator from http import HTTPStatus import os -from unittest.mock import AsyncMock, patch -from uuid import uuid4 +from unittest.mock import patch -from aiohasupervisor import SupervisorError -from aiohasupervisor.models import ( - ContextType, - Issue, - IssueType, - Suggestion, - SuggestionType, -) import pytest from homeassistant.core import HomeAssistant @@ -23,6 +14,7 @@ from homeassistant.setup import async_setup_component from .test_init import MOCK_ENVIRON from .test_issues import mock_resolution_info +from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator @@ -36,39 +28,34 @@ def fixture_supervisor_environ() -> Generator[None]: @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow( hass: HomeAssistant, 
- supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.MULTIPLE_DATA_DISKS, - context=ContextType.SYSTEM, - reference="/dev/sda1", - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "multiple_data_disks", + "context": "system", + "reference": "/dev/sda1", + "suggestions": [ + { + "uuid": "1235", + "type": "rename_data_disk", + "context": "system", + "reference": "/dev/sda1", + } + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.RENAME_DATA_DISK, - context=ContextType.SYSTEM, - reference="/dev/sda1", - uuid=(sugg_uuid := uuid4()), - auto=False, - ) - ] - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -108,53 +95,52 @@ async def test_supervisor_issue_repair_flow( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1235" + ) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_with_multiple_suggestions( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue with multiple suggestions.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.REBOOT_REQUIRED, - context=ContextType.SYSTEM, - reference="test", - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "reboot_required", + "context": "system", + "reference": "test", + "suggestions": [ + { + "uuid": "1235", + "type": "execute_reboot", + "context": "system", + "reference": "test", + }, + { + "uuid": "1236", + "type": "test_type", + "context": "system", + "reference": "test", + }, + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_REBOOT, - context=ContextType.SYSTEM, - reference="test", - uuid=uuid4(), - auto=False, - ), - Suggestion( - type="test_type", - context=ContextType.SYSTEM, - reference="test", - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - ] - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -203,53 +189,52 @@ async def test_supervisor_issue_repair_flow_with_multiple_suggestions( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert 
aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1236" + ) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_with_multiple_suggestions_and_confirmation( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue with multiple suggestions and choice requires confirmation.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.REBOOT_REQUIRED, - context=ContextType.SYSTEM, - reference=None, - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "reboot_required", + "context": "system", + "reference": None, + "suggestions": [ + { + "uuid": "1235", + "type": "execute_reboot", + "context": "system", + "reference": None, + }, + { + "uuid": "1236", + "type": "test_type", + "context": "system", + "reference": None, + }, + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_REBOOT, - context=ContextType.SYSTEM, - reference=None, - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - Suggestion( - type="test_type", - context=ContextType.SYSTEM, - reference=None, - uuid=uuid4(), - auto=False, - ), - ] - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -317,46 +302,46 @@ async def test_supervisor_issue_repair_flow_with_multiple_suggestions_and_confir "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1235" + ) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_skip_confirmation( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test confirmation skipped for fix flow for supervisor issue with one suggestion.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.REBOOT_REQUIRED, - context=ContextType.SYSTEM, - reference=None, - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "reboot_required", + "context": "system", + "reference": None, + "suggestions": [ + { + "uuid": "1235", + "type": "execute_reboot", + "context": "system", + "reference": None, + } + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_REBOOT, - context=ContextType.SYSTEM, - reference=None, - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - ] - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -396,54 +381,53 @@ async def 
test_supervisor_issue_repair_flow_skip_confirmation( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1235" + ) @pytest.mark.usefixtures("all_setup_requests") async def test_mount_failed_repair_flow_error( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test repair flow fails when repair fails to apply.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.MOUNT_FAILED, - context=ContextType.MOUNT, - reference="backup_share", - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "mount_failed", + "context": "mount", + "reference": "backup_share", + "suggestions": [ + { + "uuid": "1235", + "type": "execute_reload", + "context": "mount", + "reference": "backup_share", + }, + { + "uuid": "1236", + "type": "execute_remove", + "context": "mount", + "reference": "backup_share", + }, + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_RELOAD, - context=ContextType.MOUNT, - reference="backup_share", - uuid=uuid4(), - auto=False, - ), - Suggestion( - type=SuggestionType.EXECUTE_REMOVE, - context=ContextType.MOUNT, - reference="backup_share", - uuid=uuid4(), - auto=False, - ), - ] - }, - suggestion_result=SupervisorError("boom"), + suggestion_result=False, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -475,52 +459,46 @@ async def test_mount_failed_repair_flow_error( "description_placeholders": None, } - assert issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + assert issue_registry.async_get_issue(domain="hassio", issue_id="1234") @pytest.mark.usefixtures("all_setup_requests") async def test_mount_failed_repair_flow( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test repair flow for mount_failed issue.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.MOUNT_FAILED, - context=ContextType.MOUNT, - reference="backup_share", - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "mount_failed", + "context": "mount", + "reference": "backup_share", + "suggestions": [ + { + "uuid": "1235", + "type": "execute_reload", + "context": "mount", + "reference": "backup_share", + }, + { + "uuid": "1236", + "type": "execute_remove", + "context": "mount", + "reference": "backup_share", + }, + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_RELOAD, - context=ContextType.MOUNT, - reference="backup_share", - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - Suggestion( - type=SuggestionType.EXECUTE_REMOVE, - context=ContextType.MOUNT, - reference="backup_share", - uuid=uuid4(), - auto=False, - ), - ] - }, ) assert 
await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -573,8 +551,13 @@ async def test_mount_failed_repair_flow( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1235" + ) @pytest.mark.parametrize( @@ -583,69 +566,62 @@ async def test_mount_failed_repair_flow( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_docker_config_repair_flow( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.DOCKER_CONFIG, - context=ContextType.SYSTEM, - reference=None, - uuid=(issue1_uuid := uuid4()), - ), - Issue( - type=IssueType.DOCKER_CONFIG, - context=ContextType.CORE, - reference=None, - uuid=(issue2_uuid := uuid4()), - ), - Issue( - type=IssueType.DOCKER_CONFIG, - context=ContextType.ADDON, - reference="test", - uuid=(issue3_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "docker_config", + "context": "system", + "reference": None, + "suggestions": [ + { + "uuid": "1235", + "type": "execute_rebuild", + "context": "system", + "reference": None, + } + ], + }, + { + "uuid": "1236", + "type": "docker_config", + "context": "core", + "reference": None, + "suggestions": [ + { + "uuid": "1237", + "type": "execute_rebuild", + "context": "core", + "reference": None, + } + ], + }, + { + "uuid": "1238", + "type": "docker_config", + "context": "addon", + "reference": "test", + "suggestions": [ + { + "uuid": "1239", + "type": "execute_rebuild", + "context": "addon", + "reference": "test", + } + ], + }, ], - suggestions_by_issue={ - issue1_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_REBUILD, - context=ContextType.SYSTEM, - reference=None, - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - ], - issue2_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_REBUILD, - context=ContextType.CORE, - reference=None, - uuid=uuid4(), - auto=False, - ), - ], - issue3_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_REBUILD, - context=ContextType.ADDON, - reference="test", - uuid=uuid4(), - auto=False, - ), - ], - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue1_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -685,53 +661,52 @@ async def test_supervisor_issue_docker_config_repair_flow( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue1_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + 
str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1235" + ) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_multiple_data_disks( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for multiple data disks supervisor issue.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.MULTIPLE_DATA_DISKS, - context=ContextType.SYSTEM, - reference="/dev/sda1", - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "multiple_data_disks", + "context": "system", + "reference": "/dev/sda1", + "suggestions": [ + { + "uuid": "1235", + "type": "rename_data_disk", + "context": "system", + "reference": "/dev/sda1", + }, + { + "uuid": "1236", + "type": "adopt_data_disk", + "context": "system", + "reference": "/dev/sda1", + }, + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.RENAME_DATA_DISK, - context=ContextType.SYSTEM, - reference="/dev/sda1", - uuid=uuid4(), - auto=False, - ), - Suggestion( - type=SuggestionType.ADOPT_DATA_DISK, - context=ContextType.SYSTEM, - reference="/dev/sda1", - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - ] - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -799,8 +774,13 @@ async def test_supervisor_issue_repair_flow_multiple_data_disks( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1236" + ) @pytest.mark.parametrize( @@ -809,39 +789,34 @@ async def test_supervisor_issue_repair_flow_multiple_data_disks( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_detached_addon_removed( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type=IssueType.DETACHED_ADDON_REMOVED, - context=ContextType.ADDON, - reference="test", - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "detached_addon_removed", + "context": "addon", + "reference": "test", + "suggestions": [ + { + "uuid": "1235", + "type": "execute_remove", + "context": "addon", + "reference": "test", + } + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type=SuggestionType.EXECUTE_REMOVE, - context=ContextType.ADDON, - reference="test", - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - ] - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -886,8 +861,13 @@ async 
def test_supervisor_issue_detached_addon_removed( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1235" + ) @pytest.mark.parametrize( @@ -896,46 +876,40 @@ async def test_supervisor_issue_detached_addon_removed( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_addon_boot_fail( hass: HomeAssistant, - supervisor_client: AsyncMock, + aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - supervisor_client, + aioclient_mock, issues=[ - Issue( - type="boot_fail", - context=ContextType.ADDON, - reference="test", - uuid=(issue_uuid := uuid4()), - ), + { + "uuid": "1234", + "type": "boot_fail", + "context": "addon", + "reference": "test", + "suggestions": [ + { + "uuid": "1235", + "type": "execute_start", + "context": "addon", + "reference": "test", + }, + { + "uuid": "1236", + "type": "disable_boot", + "context": "addon", + "reference": "test", + }, + ], + }, ], - suggestions_by_issue={ - issue_uuid: [ - Suggestion( - type="execute_start", - context=ContextType.ADDON, - reference="test", - uuid=(sugg_uuid := uuid4()), - auto=False, - ), - Suggestion( - type="disable_boot", - context=ContextType.ADDON, - reference="test", - uuid=uuid4(), - auto=False, - ), - ] - }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue( - domain="hassio", issue_id=issue_uuid.hex - ) + repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") assert repair_issue client = await hass_client() @@ -988,5 +962,10 @@ async def test_supervisor_issue_addon_boot_fail( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) - supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) + assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") + + assert aioclient_mock.mock_calls[-1][0] == "post" + assert ( + str(aioclient_mock.mock_calls[-1][1]) + == "http://127.0.0.1/resolution/suggestion/1235" + ) diff --git a/tests/components/hassio/test_sensor.py b/tests/components/hassio/test_sensor.py index 7160a2cbf16..1b58534d52f 100644 --- a/tests/components/hassio/test_sensor.py +++ b/tests/components/hassio/test_sensor.py @@ -33,7 +33,6 @@ def mock_all( store_info: AsyncMock, addon_stats: AsyncMock, addon_changelog: AsyncMock, - resolution_info: AsyncMock, ) -> None: """Mock all setup requests.""" _install_default_mocks(aioclient_mock) @@ -147,6 +146,19 @@ def _install_default_mocks(aioclient_mock: AiohttpClientMocker): aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_update.py b/tests/components/hassio/test_update.py index c1775d6e0b4..0d15eac48c5 100644 --- 
a/tests/components/hassio/test_update.py +++ b/tests/components/hassio/test_update.py @@ -29,7 +29,6 @@ def mock_all( store_info: AsyncMock, addon_stats: AsyncMock, addon_changelog: AsyncMock, - resolution_info: AsyncMock, ) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) @@ -150,6 +149,19 @@ def mock_all( aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_websocket_api.py b/tests/components/hassio/test_websocket_api.py index 21e6b03678b..1023baa89df 100644 --- a/tests/components/hassio/test_websocket_api.py +++ b/tests/components/hassio/test_websocket_api.py @@ -26,9 +26,7 @@ from tests.typing import WebSocketGenerator @pytest.fixture(autouse=True) def mock_all( - aioclient_mock: AiohttpClientMocker, - supervisor_is_connected: AsyncMock, - resolution_info: AsyncMock, + aioclient_mock: AiohttpClientMocker, supervisor_is_connected: AsyncMock ) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) @@ -69,6 +67,19 @@ def mock_all( aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + }, + ) @pytest.mark.usefixtures("hassio_env") diff --git a/tests/components/home_connect/test_binary_sensor.py b/tests/components/home_connect/test_binary_sensor.py index b564b003af6..9b3e6e8bd02 100644 --- a/tests/components/home_connect/test_binary_sensor.py +++ b/tests/components/home_connect/test_binary_sensor.py @@ -152,7 +152,6 @@ async def test_create_issue( """Test we create an issue when an automation or script is using a deprecated entity.""" entity_id = "binary_sensor.washer_door" get_appliances.return_value = [appliance] - issue_id = f"deprecated_binary_common_door_sensor_{entity_id}" assert await async_setup_component( hass, @@ -197,11 +196,6 @@ async def test_create_issue( assert scripts_with_entity(hass, entity_id)[0] == "script.test" assert len(issue_registry.issues) == 1 - assert issue_registry.async_get_issue(DOMAIN, issue_id) - - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - # Assert the issue is no longer present - assert not issue_registry.async_get_issue(DOMAIN, issue_id) - assert len(issue_registry.issues) == 0 + assert issue_registry.async_get_issue( + DOMAIN, f"deprecated_binary_common_door_sensor_{entity_id}" + ) diff --git a/tests/components/homekit/test_type_security_systems.py b/tests/components/homekit/test_type_security_systems.py index 94b0e68e76d..8377d847a7a 100644 --- a/tests/components/homekit/test_type_security_systems.py +++ b/tests/components/homekit/test_type_security_systems.py @@ -10,12 +10,7 @@ from homeassistant.components.alarm_control_panel import ( ) from homeassistant.components.homekit.const import ATTR_VALUE from homeassistant.components.homekit.type_security_systems import SecuritySystem -from homeassistant.const import ( - ATTR_CODE, - ATTR_ENTITY_ID, - 
STATE_UNAVAILABLE, - STATE_UNKNOWN, -) +from homeassistant.const import ATTR_CODE, ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import Event, HomeAssistant from tests.common import async_mock_service @@ -312,33 +307,3 @@ async def test_supported_states(hass: HomeAssistant, hk_driver) -> None: for val in valid_target_values.values(): assert val in test_config.get("target_values") - - -@pytest.mark.parametrize( - ("state"), - [ - (None), - ("None"), - (STATE_UNKNOWN), - (STATE_UNAVAILABLE), - ], -) -async def test_handle_non_alarm_states( - hass: HomeAssistant, hk_driver, events: list[Event], state: str -) -> None: - """Test we can handle states that should not raise.""" - code = "1234" - config = {ATTR_CODE: code} - entity_id = "alarm_control_panel.test" - - hass.states.async_set(entity_id, state) - await hass.async_block_till_done() - acc = SecuritySystem(hass, hk_driver, "SecuritySystem", entity_id, 2, config) - acc.run() - await hass.async_block_till_done() - - assert acc.aid == 2 - assert acc.category == 11 # AlarmSystem - - assert acc.char_current_state.value == 3 - assert acc.char_target_state.value == 3 diff --git a/tests/components/http/test_ban.py b/tests/components/http/test_ban.py index 59011de0cfd..7ffd0263157 100644 --- a/tests/components/http/test_ban.py +++ b/tests/components/http/test_ban.py @@ -197,7 +197,6 @@ async def test_access_from_supervisor_ip( hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, hassio_env, - resolution_info: AsyncMock, ) -> None: """Test accessing to server from supervisor IP.""" app = web.Application() @@ -219,7 +218,17 @@ async def test_access_from_supervisor_ip( manager = app[KEY_BAN_MANAGER] - assert await async_setup_component(hass, "hassio", {"hassio": {}}) + with patch( + "homeassistant.components.hassio.HassIO.get_resolution_info", + return_value={ + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + ): + assert await async_setup_component(hass, "hassio", {"hassio": {}}) m_open = mock_open() diff --git a/tests/components/husqvarna_automower/conftest.py b/tests/components/husqvarna_automower/conftest.py index 0202cec05b9..2814e1558d1 100644 --- a/tests/components/husqvarna_automower/conftest.py +++ b/tests/components/husqvarna_automower/conftest.py @@ -1,6 +1,5 @@ """Test helpers for Husqvarna Automower.""" -import asyncio from collections.abc import Generator import time from unittest.mock import AsyncMock, patch @@ -102,17 +101,10 @@ async def setup_credentials(hass: HomeAssistant) -> None: def mock_automower_client(values) -> Generator[AsyncMock]: """Mock a Husqvarna Automower client.""" - async def listen() -> None: - """Mock listen.""" - listen_block = asyncio.Event() - await listen_block.wait() - pytest.fail("Listen was not cancelled!") - mock = AsyncMock(spec=AutomowerSession) mock.auth = AsyncMock(side_effect=ClientWebSocketResponse) mock.commands = AsyncMock(spec_set=_MowerCommands) mock.get_status.return_value = values - mock.start_listening = AsyncMock(side_effect=listen) with patch( "homeassistant.components.husqvarna_automower.AutomowerSession", diff --git a/tests/components/husqvarna_automower/test_init.py b/tests/components/husqvarna_automower/test_init.py index ae688571d2c..ca0c2a04af1 100644 --- a/tests/components/husqvarna_automower/test_init.py +++ b/tests/components/husqvarna_automower/test_init.py @@ -1,16 +1,14 @@ """Tests for init module.""" -from asyncio import Event -from datetime import datetime +from datetime import datetime, timedelta import http 
 import time
-from unittest.mock import AsyncMock, patch
+from unittest.mock import AsyncMock

 from aioautomower.exceptions import (
     ApiException,
     AuthException,
     HusqvarnaWSServerHandshakeError,
-    TimeoutException,
 )
 from aioautomower.model import MowerAttributes, WorkArea
 from freezegun.api import FrozenDateTimeFactory
@@ -129,77 +127,28 @@ async def test_update_failed(
     assert entry.state is entry_state


-@patch(
-    "homeassistant.components.husqvarna_automower.coordinator.DEFAULT_RECONNECT_TIME", 0
-)
-@pytest.mark.parametrize(
-    ("method_path", "exception", "error_msg"),
-    [
-        (
-            ["auth", "websocket_connect"],
-            HusqvarnaWSServerHandshakeError,
-            "Failed to connect to websocket.",
-        ),
-        (
-            ["start_listening"],
-            TimeoutException,
-            "Failed to listen to websocket.",
-        ),
-    ],
-)
 async def test_websocket_not_available(
     hass: HomeAssistant,
     mock_automower_client: AsyncMock,
     mock_config_entry: MockConfigEntry,
     caplog: pytest.LogCaptureFixture,
     freezer: FrozenDateTimeFactory,
-    method_path: list[str],
-    exception: type[Exception],
-    error_msg: str,
 ) -> None:
-    """Test trying to reload the websocket."""
-    calls = []
-    mock_called = Event()
-    mock_stall = Event()
-
-    async def mock_function():
-        mock_called.set()
-        await mock_stall.wait()
-        # Raise the first time the method is awaited
-        if not calls:
-            calls.append(None)
-            raise exception("Boom")
-        if mock_side_effect:
-            await mock_side_effect()
-
-    # Find the method to mock
-    mock = mock_automower_client
-    for itm in method_path:
-        mock = getattr(mock, itm)
-    mock_side_effect = mock.side_effect
-    mock.side_effect = mock_function
-
-    # Setup integration and verify log error message
+    """Test trying reload the websocket."""
+    mock_automower_client.start_listening.side_effect = HusqvarnaWSServerHandshakeError(
+        "Boom"
+    )
     await setup_integration(hass, mock_config_entry)
-    await mock_called.wait()
-    mock_called.clear()
-    # Allow the exception to be raised
-    mock_stall.set()
-    assert mock.call_count == 1
+    assert "Failed to connect to websocket. Trying to reconnect: Boom" in caplog.text
+    assert mock_automower_client.auth.websocket_connect.call_count == 1
+    assert mock_automower_client.start_listening.call_count == 1
+    assert mock_config_entry.state is ConfigEntryState.LOADED
+    freezer.tick(timedelta(seconds=2))
+    async_fire_time_changed(hass)
     await hass.async_block_till_done()
-    assert f"{error_msg} Trying to reconnect: Boom" in caplog.text
-
-    # Simulate a successful connection
-    caplog.clear()
-    await mock_called.wait()
-    mock_called.clear()
-    await hass.async_block_till_done()
-    assert mock.call_count == 2
-    assert "Trying to reconnect: Boom" not in caplog.text
-
-    # Simulate hass shutting down
-    await hass.async_stop()
-    assert mock.call_count == 2
+    assert mock_automower_client.auth.websocket_connect.call_count == 2
+    assert mock_automower_client.start_listening.call_count == 2
+    assert mock_config_entry.state is ConfigEntryState.LOADED


 async def test_device_info(
diff --git a/tests/components/jewish_calendar/test_config_flow.py b/tests/components/jewish_calendar/test_config_flow.py
index e00fe41749f..dbd4ecd802d 100644
--- a/tests/components/jewish_calendar/test_config_flow.py
+++ b/tests/components/jewish_calendar/test_config_flow.py
@@ -2,6 +2,8 @@

 from unittest.mock import AsyncMock

+import pytest
+
 from homeassistant import config_entries, setup
 from homeassistant.components.jewish_calendar.const import (
     CONF_CANDLE_LIGHT_MINUTES,
@@ -18,10 +20,12 @@ from homeassistant.const import (
     CONF_LANGUAGE,
     CONF_LATITUDE,
     CONF_LONGITUDE,
+    CONF_NAME,
     CONF_TIME_ZONE,
 )
 from homeassistant.core import HomeAssistant
 from homeassistant.data_entry_flow import FlowResultType
+from homeassistant.setup import async_setup_component

 from tests.common import MockConfigEntry

@@ -55,6 +59,51 @@ async def test_step_user(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> No
     assert entries[0].data[CONF_TIME_ZONE] == hass.config.time_zone


+@pytest.mark.parametrize("diaspora", [True, False])
+@pytest.mark.parametrize("language", ["hebrew", "english"])
+async def test_import_no_options(hass: HomeAssistant, language, diaspora) -> None:
+    """Test that the import step works."""
+    conf = {
+        DOMAIN: {CONF_NAME: "test", CONF_LANGUAGE: language, CONF_DIASPORA: diaspora}
+    }
+
+    assert await async_setup_component(hass, DOMAIN, conf.copy())
+    await hass.async_block_till_done()
+
+    entries = hass.config_entries.async_entries(DOMAIN)
+    assert len(entries) == 1
+    assert CONF_LANGUAGE in entries[0].data
+    assert CONF_DIASPORA in entries[0].data
+    for entry_key, entry_val in entries[0].data.items():
+        assert entry_val == conf[DOMAIN][entry_key]
+
+
+async def test_import_with_options(hass: HomeAssistant) -> None:
+    """Test that the import step works."""
+    conf = {
+        DOMAIN: {
+            CONF_NAME: "test",
+            CONF_DIASPORA: DEFAULT_DIASPORA,
+            CONF_LANGUAGE: DEFAULT_LANGUAGE,
+            CONF_CANDLE_LIGHT_MINUTES: 20,
+            CONF_HAVDALAH_OFFSET_MINUTES: 50,
+            CONF_LATITUDE: 31.76,
+            CONF_LONGITUDE: 35.235,
+        }
+    }
+
+    # Simulate HomeAssistant setting up the component
+    assert await async_setup_component(hass, DOMAIN, conf.copy())
+    await hass.async_block_till_done()
+
+    entries = hass.config_entries.async_entries(DOMAIN)
+    assert len(entries) == 1
+    for entry_key, entry_val in entries[0].data.items():
+        assert entry_val == conf[DOMAIN][entry_key]
+    for entry_key, entry_val in entries[0].options.items():
+        assert entry_val == conf[DOMAIN][entry_key]
+
+
 async def test_single_instance_allowed(
     hass: HomeAssistant,
     mock_config_entry: MockConfigEntry,
diff --git
a/tests/components/jewish_calendar/test_init.py b/tests/components/jewish_calendar/test_init.py index cb982afec0f..b8454b41a60 100644 --- a/tests/components/jewish_calendar/test_init.py +++ b/tests/components/jewish_calendar/test_init.py @@ -1 +1,76 @@ """Tests for the Jewish Calendar component's init.""" + +from hdate import Location + +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSORS +from homeassistant.components.jewish_calendar import get_unique_prefix +from homeassistant.components.jewish_calendar.const import ( + CONF_CANDLE_LIGHT_MINUTES, + CONF_DIASPORA, + CONF_HAVDALAH_OFFSET_MINUTES, + DEFAULT_DIASPORA, + DEFAULT_LANGUAGE, + DOMAIN, +) +from homeassistant.const import CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME +from homeassistant.core import HomeAssistant +import homeassistant.helpers.entity_registry as er +from homeassistant.setup import async_setup_component + + +async def test_import_unique_id_migration(hass: HomeAssistant) -> None: + """Test unique_id migration.""" + yaml_conf = { + DOMAIN: { + CONF_NAME: "test", + CONF_DIASPORA: DEFAULT_DIASPORA, + CONF_LANGUAGE: DEFAULT_LANGUAGE, + CONF_CANDLE_LIGHT_MINUTES: 20, + CONF_HAVDALAH_OFFSET_MINUTES: 50, + CONF_LATITUDE: 31.76, + CONF_LONGITUDE: 35.235, + } + } + + # Create an entry in the entity registry with the data from conf + ent_reg = er.async_get(hass) + location = Location( + latitude=yaml_conf[DOMAIN][CONF_LATITUDE], + longitude=yaml_conf[DOMAIN][CONF_LONGITUDE], + timezone=hass.config.time_zone, + diaspora=DEFAULT_DIASPORA, + ) + old_prefix = get_unique_prefix(location, DEFAULT_LANGUAGE, 20, 50) + sample_entity = ent_reg.async_get_or_create( + BINARY_SENSORS, + DOMAIN, + unique_id=f"{old_prefix}_erev_shabbat_hag", + suggested_object_id=f"{DOMAIN}_erev_shabbat_hag", + ) + # Save the existing unique_id, DEFAULT_LANGUAGE should be part of it + old_unique_id = sample_entity.unique_id + assert DEFAULT_LANGUAGE in old_unique_id + + # Simulate HomeAssistant setting up the component + assert await async_setup_component(hass, DOMAIN, yaml_conf.copy()) + await hass.async_block_till_done() + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + for entry_key, entry_val in entries[0].data.items(): + assert entry_val == yaml_conf[DOMAIN][entry_key] + for entry_key, entry_val in entries[0].options.items(): + assert entry_val == yaml_conf[DOMAIN][entry_key] + + # Assert that the unique_id was updated + new_unique_id = ent_reg.async_get(sample_entity.entity_id).unique_id + assert new_unique_id != old_unique_id + assert DEFAULT_LANGUAGE not in new_unique_id + + # Confirm that when the component is reloaded, the unique_id is not changed + assert ent_reg.async_get(sample_entity.entity_id).unique_id == new_unique_id + + # Confirm that all the unique_ids are prefixed correctly + await hass.config_entries.async_reload(entries[0].entry_id) + er_entries = er.async_entries_for_config_entry(ent_reg, entries[0].entry_id) + assert all(entry.unique_id.startswith(entries[0].entry_id) for entry in er_entries) diff --git a/tests/components/lamarzocco/__init__.py b/tests/components/lamarzocco/__init__.py index f6ca0fe40df..f88fa474f8b 100644 --- a/tests/components/lamarzocco/__init__.py +++ b/tests/components/lamarzocco/__init__.py @@ -1,6 +1,6 @@ """Mock inputs for tests.""" -from pylamarzocco.const import MachineModel +from lmcloud.const import MachineModel from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant diff --git 
a/tests/components/lamarzocco/conftest.py b/tests/components/lamarzocco/conftest.py index 210dd9406cc..d8047dfbabf 100644 --- a/tests/components/lamarzocco/conftest.py +++ b/tests/components/lamarzocco/conftest.py @@ -5,9 +5,9 @@ import json from unittest.mock import MagicMock, patch from bleak.backends.device import BLEDevice -from pylamarzocco.const import FirmwareType, MachineModel, SteamLevel -from pylamarzocco.lm_machine import LaMarzoccoMachine -from pylamarzocco.models import LaMarzoccoDeviceInfo +from lmcloud.const import FirmwareType, MachineModel, SteamLevel +from lmcloud.lm_machine import LaMarzoccoMachine +from lmcloud.models import LaMarzoccoDeviceInfo import pytest from homeassistant.components.lamarzocco.const import DOMAIN diff --git a/tests/components/lamarzocco/test_binary_sensor.py b/tests/components/lamarzocco/test_binary_sensor.py index 956bfe90dd4..120d825c804 100644 --- a/tests/components/lamarzocco/test_binary_sensor.py +++ b/tests/components/lamarzocco/test_binary_sensor.py @@ -4,7 +4,7 @@ from datetime import timedelta from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory -from pylamarzocco.exceptions import RequestNotSuccessful +from lmcloud.exceptions import RequestNotSuccessful from syrupy import SnapshotAssertion from homeassistant.const import STATE_UNAVAILABLE diff --git a/tests/components/lamarzocco/test_button.py b/tests/components/lamarzocco/test_button.py index 61b7ba77c22..b754688f369 100644 --- a/tests/components/lamarzocco/test_button.py +++ b/tests/components/lamarzocco/test_button.py @@ -1,8 +1,8 @@ """Tests for the La Marzocco Buttons.""" -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import MagicMock -from pylamarzocco.exceptions import RequestNotSuccessful +from lmcloud.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion @@ -33,18 +33,14 @@ async def test_start_backflush( assert entry assert entry == snapshot - with patch( - "homeassistant.components.lamarzocco.button.asyncio.sleep", - new_callable=AsyncMock, - ): - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - { - ATTR_ENTITY_ID: f"button.{serial_number}_start_backflush", - }, - blocking=True, - ) + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: f"button.{serial_number}_start_backflush", + }, + blocking=True, + ) assert len(mock_lamarzocco.start_backflush.mock_calls) == 1 mock_lamarzocco.start_backflush.assert_called_once() diff --git a/tests/components/lamarzocco/test_config_flow.py b/tests/components/lamarzocco/test_config_flow.py index be93779848f..13cf6a72b81 100644 --- a/tests/components/lamarzocco/test_config_flow.py +++ b/tests/components/lamarzocco/test_config_flow.py @@ -2,9 +2,9 @@ from unittest.mock import MagicMock, patch -from pylamarzocco.const import MachineModel -from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful -from pylamarzocco.models import LaMarzoccoDeviceInfo +from lmcloud.const import MachineModel +from lmcloud.exceptions import AuthFail, RequestNotSuccessful +from lmcloud.models import LaMarzoccoDeviceInfo import pytest from homeassistant.components.dhcp import DhcpServiceInfo diff --git a/tests/components/lamarzocco/test_init.py b/tests/components/lamarzocco/test_init.py index b99077a9059..2c812f79438 100644 --- a/tests/components/lamarzocco/test_init.py +++ b/tests/components/lamarzocco/test_init.py @@ -2,8 +2,8 @@ from unittest.mock import AsyncMock, MagicMock, patch -from pylamarzocco.const 
import FirmwareType -from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful +from lmcloud.const import FirmwareType +from lmcloud.exceptions import AuthFail, RequestNotSuccessful import pytest from homeassistant.components.lamarzocco.config_flow import CONF_MACHINE diff --git a/tests/components/lamarzocco/test_number.py b/tests/components/lamarzocco/test_number.py index 710a0220e06..352271f26cf 100644 --- a/tests/components/lamarzocco/test_number.py +++ b/tests/components/lamarzocco/test_number.py @@ -3,14 +3,14 @@ from typing import Any from unittest.mock import MagicMock -from pylamarzocco.const import ( +from lmcloud.const import ( KEYS_PER_MODEL, BoilerType, MachineModel, PhysicalKey, PrebrewMode, ) -from pylamarzocco.exceptions import RequestNotSuccessful +from lmcloud.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/lamarzocco/test_select.py b/tests/components/lamarzocco/test_select.py index 24b96f84f37..415954d30be 100644 --- a/tests/components/lamarzocco/test_select.py +++ b/tests/components/lamarzocco/test_select.py @@ -2,8 +2,8 @@ from unittest.mock import MagicMock -from pylamarzocco.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel -from pylamarzocco.exceptions import RequestNotSuccessful +from lmcloud.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel +from lmcloud.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/lamarzocco/test_sensor.py b/tests/components/lamarzocco/test_sensor.py index 6f14d52d1fc..760dcffd28f 100644 --- a/tests/components/lamarzocco/test_sensor.py +++ b/tests/components/lamarzocco/test_sensor.py @@ -2,7 +2,7 @@ from unittest.mock import MagicMock -from pylamarzocco.const import MachineModel +from lmcloud.const import MachineModel import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/lamarzocco/test_switch.py b/tests/components/lamarzocco/test_switch.py index 5c6d1cb1e42..802ab59148e 100644 --- a/tests/components/lamarzocco/test_switch.py +++ b/tests/components/lamarzocco/test_switch.py @@ -3,7 +3,7 @@ from typing import Any from unittest.mock import MagicMock -from pylamarzocco.exceptions import RequestNotSuccessful +from lmcloud.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/lamarzocco/test_update.py b/tests/components/lamarzocco/test_update.py index aef37d7c921..3dc2a86b574 100644 --- a/tests/components/lamarzocco/test_update.py +++ b/tests/components/lamarzocco/test_update.py @@ -2,8 +2,8 @@ from unittest.mock import MagicMock -from pylamarzocco.const import FirmwareType -from pylamarzocco.exceptions import RequestNotSuccessful +from lmcloud.const import FirmwareType +from lmcloud.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/lcn/test_config_flow.py b/tests/components/lcn/test_config_flow.py index b7967c247ec..4ef83aeaf8a 100644 --- a/tests/components/lcn/test_config_flow.py +++ b/tests/components/lcn/test_config_flow.py @@ -23,7 +23,9 @@ from homeassistant.const import ( CONF_PORT, CONF_USERNAME, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import issue_registry as ir from tests.common import MockConfigEntry @@ -46,6 +48,83 @@ 
IMPORT_DATA = { } +async def test_step_import( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test for import step.""" + + with ( + patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), + patch("homeassistant.components.lcn.async_setup", return_value=True), + patch("homeassistant.components.lcn.async_setup_entry", return_value=True), + ): + data = IMPORT_DATA.copy() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "pchk" + assert result["data"] == IMPORT_DATA + assert issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" + ) + + +async def test_step_import_existing_host( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test for update of config_entry if imported host already exists.""" + + # Create config entry and add it to hass + mock_data = IMPORT_DATA.copy() + mock_data.update({CONF_SK_NUM_TRIES: 3, CONF_DIM_MODE: 50}) + mock_entry = MockConfigEntry(domain=DOMAIN, data=mock_data) + mock_entry.add_to_hass(hass) + # Initialize a config flow with different data but same host address + with patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"): + imported_data = IMPORT_DATA.copy() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=imported_data + ) + + # Check if config entry was updated + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "existing_configuration_updated" + assert mock_entry.source == config_entries.SOURCE_IMPORT + assert mock_entry.data == IMPORT_DATA + assert issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" + ) + + +@pytest.mark.parametrize( + ("error", "reason"), + [ + (PchkAuthenticationError, "authentication_error"), + (PchkLicenseError, "license_error"), + (TimeoutError, "connection_refused"), + ], +) +async def test_step_import_error( + hass: HomeAssistant, issue_registry: ir.IssueRegistry, error, reason +) -> None: + """Test for error in import is handled correctly.""" + with patch( + "homeassistant.components.lcn.PchkConnectionManager.async_connect", + side_effect=error, + ): + data = IMPORT_DATA.copy() + data.update({CONF_HOST: "pchk"}) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == reason + assert issue_registry.async_get_issue(DOMAIN, reason) + + async def test_show_form(hass: HomeAssistant) -> None: """Test that the form is served with no input.""" flow = LcnFlowHandler() @@ -61,6 +140,7 @@ async def test_step_user(hass: HomeAssistant) -> None: """Test for user step.""" with ( patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), + patch("homeassistant.components.lcn.async_setup", return_value=True), patch("homeassistant.components.lcn.async_setup_entry", return_value=True), ): data = CONNECTION_DATA.copy() @@ -130,6 +210,7 @@ async def test_step_reconfigure(hass: HomeAssistant, entry: MockConfigEntry) -> with ( patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), + patch("homeassistant.components.lcn.async_setup", return_value=True), patch("homeassistant.components.lcn.async_setup_entry", return_value=True), ): result = await 
hass.config_entries.flow.async_configure( diff --git a/tests/components/lcn/test_init.py b/tests/components/lcn/test_init.py index 2327635e356..1bd225c5d47 100644 --- a/tests/components/lcn/test_init.py +++ b/tests/components/lcn/test_init.py @@ -16,6 +16,7 @@ from .conftest import ( MockPchkConnectionManager, create_config_entry, init_integration, + setup_component, ) @@ -82,6 +83,18 @@ async def test_async_setup_entry_update( assert dummy_entity in entity_registry.entities.values() assert dummy_device in device_registry.devices.values() + # setup new entry with same data via import step (should cleanup dummy device) + with patch( + "homeassistant.components.lcn.config_flow.validate_connection", + return_value=None, + ): + await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=entry.data + ) + + assert dummy_device not in device_registry.devices.values() + assert dummy_entity not in entity_registry.entities.values() + @pytest.mark.parametrize( "exception", [PchkAuthenticationError, PchkLicenseError, TimeoutError] @@ -101,6 +114,20 @@ async def test_async_setup_entry_raises_authentication_error( assert entry.state is ConfigEntryState.SETUP_ERROR +async def test_async_setup_from_configuration_yaml(hass: HomeAssistant) -> None: + """Test a successful setup using data from configuration.yaml.""" + with ( + patch( + "homeassistant.components.lcn.config_flow.validate_connection", + return_value=None, + ), + patch("homeassistant.components.lcn.async_setup_entry") as async_setup_entry, + ): + await setup_component(hass) + + assert async_setup_entry.await_count == 2 + + @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_migrate_1_1(hass: HomeAssistant, entry) -> None: """Test migration config entry.""" diff --git a/tests/components/lektrico/fixtures/get_info.json b/tests/components/lektrico/fixtures/get_info.json index 2b099a666e5..bcd84a9a9df 100644 --- a/tests/components/lektrico/fixtures/get_info.json +++ b/tests/components/lektrico/fixtures/get_info.json @@ -14,15 +14,5 @@ "dynamic_current": 32, "user_current": 32, "lb_mode": 0, - "require_auth": true, - "state_e_activated": false, - "undervoltage_error": true, - "rcd_error": false, - "meter_fault": false, - "overcurrent": false, - "overtemp": false, - "overvoltage_error": false, - "contactor_failure": false, - "cp_diode_failure": false, - "critical_temp": false + "require_auth": true } diff --git a/tests/components/lektrico/snapshots/test_binary_sensor.ambr b/tests/components/lektrico/snapshots/test_binary_sensor.ambr deleted file mode 100644 index 6a28e7c60de..00000000000 --- a/tests/components/lektrico/snapshots/test_binary_sensor.ambr +++ /dev/null @@ -1,471 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[binary_sensor.1p7k_500006_ev_diode_short-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_ev_diode_short', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Ev diode short', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cp_diode_failure', - 'unique_id': 
'500006_cp_diode_failure', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_ev_diode_short-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Ev diode short', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_ev_diode_short', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_ev_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_ev_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Ev error', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'state_e_activated', - 'unique_id': '500006_state_e_activated', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_ev_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Ev error', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_ev_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_metering_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_metering_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Metering error', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'meter_fault', - 'unique_id': '500006_meter_fault', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_metering_error-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Metering error', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_metering_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_overcurrent-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_overcurrent', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Overcurrent', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'overcurrent', - 'unique_id': '500006_overcurrent', - 'unit_of_measurement': None, - 
}) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_overcurrent-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Overcurrent', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_overcurrent', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_overheating-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_overheating', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Overheating', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'critical_temp', - 'unique_id': '500006_critical_temp', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_overheating-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Overheating', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_overheating', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_overvoltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_overvoltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Overvoltage', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'overvoltage', - 'unique_id': '500006_overvoltage', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_overvoltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Overvoltage', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_overvoltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_rcd_error-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_rcd_error', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Rcd error', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'rcd_error', - 'unique_id': '500006_rcd_error', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_rcd_error-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Rcd error', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_rcd_error', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_relay_contacts_welded-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_relay_contacts_welded', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Relay contacts welded', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'contactor_failure', - 'unique_id': '500006_contactor_failure', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_relay_contacts_welded-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Relay contacts welded', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_relay_contacts_welded', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_thermal_throttling-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_thermal_throttling', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Thermal throttling', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'overtemp', - 'unique_id': '500006_overtemp', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_thermal_throttling-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Thermal throttling', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_thermal_throttling', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.1p7k_500006_undervoltage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.1p7k_500006_undervoltage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Undervoltage', - 'platform': 'lektrico', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'undervoltage', - 'unique_id': '500006_undervoltage', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_all_entities[binary_sensor.1p7k_500006_undervoltage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Undervoltage', - }), - 'context': , - 'entity_id': 'binary_sensor.1p7k_500006_undervoltage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/lektrico/test_binary_sensor.py b/tests/components/lektrico/test_binary_sensor.py deleted file mode 100644 index d49eac6cc23..00000000000 --- a/tests/components/lektrico/test_binary_sensor.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Tests for the Lektrico binary sensor platform.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion - -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_device: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - - with patch.multiple( - "homeassistant.components.lektrico", - CHARGERS_PLATFORMS=[Platform.BINARY_SENSOR], - LB_DEVICES_PLATFORMS=[Platform.BINARY_SENSOR], - ): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/linkplay/__init__.py b/tests/components/linkplay/__init__.py index f825826f196..5962f7fdaba 100644 --- a/tests/components/linkplay/__init__.py +++ b/tests/components/linkplay/__init__.py @@ -1,16 +1 @@ """Tests for the LinkPlay integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration( - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/linkplay/conftest.py b/tests/components/linkplay/conftest.py index 81ae993f6c3..be83dd2412d 100644 --- a/tests/components/linkplay/conftest.py +++ b/tests/components/linkplay/conftest.py @@ -1,22 +1,12 @@ """Test configuration and mocks for LinkPlay component.""" -from collections.abc import Generator, Iterator -from contextlib import contextmanager -from typing import Any -from unittest import mock +from collections.abc import Generator from unittest.mock import AsyncMock, patch from aiohttp import ClientSession from linkplay.bridge import LinkPlayBridge, LinkPlayDevice import pytest -from homeassistant.components.linkplay.const import DOMAIN -from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_CLOSE -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry, load_fixture -from tests.conftest import AiohttpClientMocker - HOST = "10.0.0.150" HOST_REENTRY = "10.0.0.66" UUID = "FF31F09E-5001-FBDE-0546-2DBFFF31F09E" @@ -34,15 +24,15 @@ def mock_linkplay_factory_bridge() -> Generator[AsyncMock]: ), patch( "homeassistant.components.linkplay.config_flow.linkplay_factory_httpapi_bridge", - ) as conf_factory, + ) as factory, ): bridge = AsyncMock(spec=LinkPlayBridge) bridge.endpoint = HOST bridge.device = AsyncMock(spec=LinkPlayDevice) bridge.device.uuid = UUID bridge.device.name = NAME - 
conf_factory.return_value = bridge - yield conf_factory + factory.return_value = bridge + yield factory @pytest.fixture @@ -53,55 +43,3 @@ def mock_setup_entry() -> Generator[AsyncMock]: return_value=True, ) as mock_setup_entry: yield mock_setup_entry - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry( - domain=DOMAIN, - title=NAME, - data={CONF_HOST: HOST}, - unique_id=UUID, - ) - - -@pytest.fixture -def mock_player_ex( - mock_player_ex: AsyncMock, -) -> AsyncMock: - """Mock a update_status of the LinkPlayPlayer.""" - mock_player_ex.return_value = load_fixture("getPlayerEx.json", DOMAIN) - return mock_player_ex - - -@pytest.fixture -def mock_status_ex( - mock_status_ex: AsyncMock, -) -> AsyncMock: - """Mock a update_status of the LinkPlayDevice.""" - mock_status_ex.return_value = load_fixture("getStatusEx.json", DOMAIN) - return mock_status_ex - - -@contextmanager -def mock_lp_aiohttp_client() -> Iterator[AiohttpClientMocker]: - """Context manager to mock aiohttp client.""" - mocker = AiohttpClientMocker() - - def create_session(hass: HomeAssistant, *args: Any, **kwargs: Any) -> ClientSession: - session = mocker.create_session(hass.loop) - - async def close_session(event): - """Close session.""" - await session.close() - - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, close_session) - - return session - - with mock.patch( - "homeassistant.components.linkplay.async_get_client_session", - side_effect=create_session, - ): - yield mocker diff --git a/tests/components/linkplay/fixtures/getPlayerEx.json b/tests/components/linkplay/fixtures/getPlayerEx.json deleted file mode 100644 index 79d09f942df..00000000000 --- a/tests/components/linkplay/fixtures/getPlayerEx.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "type": "0", - "ch": "0", - "mode": "0", - "loop": "0", - "eq": "0", - "status": "stop", - "curpos": "0", - "offset_pts": "0", - "totlen": "0", - "Title": "", - "Artist": "", - "Album": "", - "alarmflag": "0", - "plicount": "0", - "plicurr": "0", - "vol": "80", - "mute": "0" -} diff --git a/tests/components/linkplay/fixtures/getStatusEx.json b/tests/components/linkplay/fixtures/getStatusEx.json deleted file mode 100644 index 17eda4aeee8..00000000000 --- a/tests/components/linkplay/fixtures/getStatusEx.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "uuid": "FF31F09E5001FBDE05462DBFFF31F09E", - "DeviceName": "Smart Zone 1_54B9", - "GroupName": "Smart Zone 1_54B9", - "ssid": "Smart Zone 1_54B9", - "language": "en_us", - "firmware": "4.6.415145", - "hardware": "A31", - "build": "release", - "project": "SMART_ZONE4_AMP", - "priv_prj": "SMART_ZONE4_AMP", - "project_build_name": "a31rakoit", - "Release": "20220427", - "temp_uuid": "97296CE38DE8CC3D", - "hideSSID": "1", - "SSIDStrategy": "2", - "branch": "A31_stable_4.6", - "group": "0", - "wmrm_version": "4.2", - "internet": "1", - "MAC": "00:22:6C:21:7F:1D", - "STA_MAC": "00:00:00:00:00:00", - "CountryCode": "CN", - "CountryRegion": "1", - "netstat": "0", - "essid": "", - "apcli0": "", - "eth2": "192.168.168.197", - "ra0": "10.10.10.254", - "eth_dhcp": "1", - "VersionUpdate": "0", - "NewVer": "0", - "set_dns_enable": "1", - "mcu_ver": "37", - "mcu_ver_new": "0", - "dsp_ver": "0", - "dsp_ver_new": "0", - "date": "2024:10:29", - "time": "17:13:22", - "tz": "1.0000", - "dst_enable": "1", - "region": "unknown", - "prompt_status": "1", - "iot_ver": "1.0.0", - "upnp_version": "1005", - "cap1": "0x305200", - "capability": "0x28e90b80", - "languages": "0x6", - "streams_all": "0x7bff7ffe", 
- "streams": "0x7b9831fe", - "external": "0x0", - "plm_support": "0x40152", - "preset_key": "10", - "spotify_active": "0", - "lbc_support": "0", - "privacy_mode": "0", - "WifiChannel": "11", - "RSSI": "0", - "BSSID": "", - "battery": "0", - "battery_percent": "0", - "securemode": "1", - "auth": "WPAPSKWPA2PSK", - "encry": "AES", - "upnp_uuid": "uuid:FF31F09E-5001-FBDE-0546-2DBFFF31F09E", - "uart_pass_port": "8899", - "communication_port": "8819", - "web_firmware_update_hide": "0", - "ignore_talkstart": "0", - "web_login_result": "-1", - "silenceOTATime": "", - "ignore_silenceOTATime": "1", - "new_tunein_preset_and_alarm": "1", - "iheartradio_new": "1", - "new_iheart_podcast": "1", - "tidal_version": "2.0", - "service_version": "1.0", - "ETH_MAC": "00:22:6C:21:7F:20", - "security": "https/2.0", - "security_version": "2.0" -} diff --git a/tests/components/linkplay/snapshots/test_diagnostics.ambr b/tests/components/linkplay/snapshots/test_diagnostics.ambr deleted file mode 100644 index d8c52a25649..00000000000 --- a/tests/components/linkplay/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,115 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics - dict({ - 'device_info': dict({ - 'device': dict({ - 'properties': dict({ - 'BSSID': '', - 'CountryCode': 'CN', - 'CountryRegion': '1', - 'DeviceName': 'Smart Zone 1_54B9', - 'ETH_MAC': '00:22:6C:21:7F:20', - 'GroupName': 'Smart Zone 1_54B9', - 'MAC': '00:22:6C:21:7F:1D', - 'NewVer': '0', - 'RSSI': '0', - 'Release': '20220427', - 'SSIDStrategy': '2', - 'STA_MAC': '00:00:00:00:00:00', - 'VersionUpdate': '0', - 'WifiChannel': '11', - 'apcli0': '', - 'auth': 'WPAPSKWPA2PSK', - 'battery': '0', - 'battery_percent': '0', - 'branch': 'A31_stable_4.6', - 'build': 'release', - 'cap1': '0x305200', - 'capability': '0x28e90b80', - 'communication_port': '8819', - 'date': '2024:10:29', - 'dsp_ver': '0', - 'dsp_ver_new': '0', - 'dst_enable': '1', - 'encry': 'AES', - 'essid': '', - 'eth2': '192.168.168.197', - 'eth_dhcp': '1', - 'external': '0x0', - 'firmware': '4.6.415145', - 'group': '0', - 'hardware': 'A31', - 'hideSSID': '1', - 'ignore_silenceOTATime': '1', - 'ignore_talkstart': '0', - 'iheartradio_new': '1', - 'internet': '1', - 'iot_ver': '1.0.0', - 'language': 'en_us', - 'languages': '0x6', - 'lbc_support': '0', - 'mcu_ver': '37', - 'mcu_ver_new': '0', - 'netstat': '0', - 'new_iheart_podcast': '1', - 'new_tunein_preset_and_alarm': '1', - 'plm_support': '0x40152', - 'preset_key': '10', - 'priv_prj': 'SMART_ZONE4_AMP', - 'privacy_mode': '0', - 'project': 'SMART_ZONE4_AMP', - 'project_build_name': 'a31rakoit', - 'prompt_status': '1', - 'ra0': '10.10.10.254', - 'region': 'unknown', - 'securemode': '1', - 'security': 'https/2.0', - 'security_version': '2.0', - 'service_version': '1.0', - 'set_dns_enable': '1', - 'silenceOTATime': '', - 'spotify_active': '0', - 'ssid': 'Smart Zone 1_54B9', - 'streams': '0x7b9831fe', - 'streams_all': '0x7bff7ffe', - 'temp_uuid': '97296CE38DE8CC3D', - 'tidal_version': '2.0', - 'time': '17:13:22', - 'tz': '1.0000', - 'uart_pass_port': '8899', - 'upnp_uuid': 'uuid:FF31F09E-5001-FBDE-0546-2DBFFF31F09E', - 'upnp_version': '1005', - 'uuid': 'FF31F09E5001FBDE05462DBFFF31F09E', - 'web_firmware_update_hide': '0', - 'web_login_result': '-1', - 'wmrm_version': '4.2', - }), - }), - 'endpoint': dict({ - 'endpoint': 'https://10.0.0.150', - }), - 'multiroom': None, - 'player': dict({ - 'properties': dict({ - 'Album': '', - 'Artist': '', - 'Title': '', - 'alarmflag': '0', - 'ch': '0', - 'curpos': '0', - 'eq': '0', - 'loop': '0', - 'mode': 
'0', - 'mute': '0', - 'offset_pts': '0', - 'plicount': '0', - 'plicurr': '0', - 'status': 'stop', - 'totlen': '0', - 'type': '0', - 'vol': '80', - }), - }), - }), - }) -# --- diff --git a/tests/components/linkplay/test_diagnostics.py b/tests/components/linkplay/test_diagnostics.py deleted file mode 100644 index 369142978a3..00000000000 --- a/tests/components/linkplay/test_diagnostics.py +++ /dev/null @@ -1,53 +0,0 @@ -"""Tests for the LinkPlay diagnostics.""" - -from unittest.mock import patch - -from linkplay.bridge import LinkPlayMultiroom -from linkplay.consts import API_ENDPOINT -from linkplay.endpoint import LinkPlayApiEndpoint -from syrupy import SnapshotAssertion - -from homeassistant.components.linkplay.const import DOMAIN -from homeassistant.core import HomeAssistant - -from . import setup_integration -from .conftest import HOST, mock_lp_aiohttp_client - -from tests.common import MockConfigEntry, load_fixture -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - -async def test_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test diagnostics.""" - - with ( - mock_lp_aiohttp_client() as mock_session, - patch.object(LinkPlayMultiroom, "update_status", return_value=None), - ): - endpoints = [ - LinkPlayApiEndpoint(protocol="https", endpoint=HOST, session=None), - LinkPlayApiEndpoint(protocol="http", endpoint=HOST, session=None), - ] - for endpoint in endpoints: - mock_session.get( - API_ENDPOINT.format(str(endpoint), "getPlayerStatusEx"), - text=load_fixture("getPlayerEx.json", DOMAIN), - ) - - mock_session.get( - API_ENDPOINT.format(str(endpoint), "getStatusEx"), - text=load_fixture("getStatusEx.json", DOMAIN), - ) - - await setup_integration(hass, mock_config_entry) - - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) - == snapshot - ) diff --git a/tests/components/modbus/conftest.py b/tests/components/modbus/conftest.py index cdea046ceea..5c612f9f8ad 100644 --- a/tests/components/modbus/conftest.py +++ b/tests/components/modbus/conftest.py @@ -57,7 +57,7 @@ def check_config_loaded_fixture(): @pytest.fixture(name="register_words") def register_words_fixture(): """Set default for register_words.""" - return [0x00] + return [0x00, 0x00] @pytest.fixture(name="config_addon") diff --git a/tests/components/nasweb/__init__.py b/tests/components/nasweb/__init__.py deleted file mode 100644 index d4906d710d5..00000000000 --- a/tests/components/nasweb/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the NASweb integration.""" diff --git a/tests/components/nasweb/conftest.py b/tests/components/nasweb/conftest.py deleted file mode 100644 index 7757f40ee44..00000000000 --- a/tests/components/nasweb/conftest.py +++ /dev/null @@ -1,61 +0,0 @@ -"""Common fixtures for the NASweb tests.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.nasweb.async_setup_entry", return_value=True - ) as mock_setup_entry: - yield mock_setup_entry - - -BASE_CONFIG_FLOW = "homeassistant.components.nasweb.config_flow." -BASE_NASWEB_DATA = "homeassistant.components.nasweb.nasweb_data." -BASE_COORDINATOR = "homeassistant.components.nasweb.coordinator." 
-TEST_SERIAL_NUMBER = "0011223344556677" - - -@pytest.fixture -def validate_input_all_ok() -> Generator[dict[str, AsyncMock | MagicMock]]: - """Yield dictionary of mocked functions required for successful test_form execution.""" - with ( - patch( - BASE_CONFIG_FLOW + "WebioAPI.check_connection", - return_value=True, - ) as check_connection, - patch( - BASE_CONFIG_FLOW + "WebioAPI.refresh_device_info", - return_value=True, - ) as refresh_device_info, - patch( - BASE_NASWEB_DATA + "NASwebData.get_webhook_url", - return_value="http://127.0.0.1:8123/api/webhook/de705e77291402afa0dd961426e9f19bb53631a9f2a106c52cfd2d2266913c04", - ) as get_webhook_url, - patch( - BASE_CONFIG_FLOW + "WebioAPI.get_serial_number", - return_value=TEST_SERIAL_NUMBER, - ) as get_serial, - patch( - BASE_CONFIG_FLOW + "WebioAPI.status_subscription", - return_value=True, - ) as status_subscription, - patch( - BASE_NASWEB_DATA + "NotificationCoordinator.check_connection", - return_value=True, - ) as check_status_confirmation, - ): - yield { - BASE_CONFIG_FLOW + "WebioAPI.check_connection": check_connection, - BASE_CONFIG_FLOW + "WebioAPI.refresh_device_info": refresh_device_info, - BASE_NASWEB_DATA + "NASwebData.get_webhook_url": get_webhook_url, - BASE_CONFIG_FLOW + "WebioAPI.get_serial_number": get_serial, - BASE_CONFIG_FLOW + "WebioAPI.status_subscription": status_subscription, - BASE_NASWEB_DATA - + "NotificationCoordinator.check_connection": check_status_confirmation, - } diff --git a/tests/components/nasweb/test_config_flow.py b/tests/components/nasweb/test_config_flow.py deleted file mode 100644 index a5f2dca680d..00000000000 --- a/tests/components/nasweb/test_config_flow.py +++ /dev/null @@ -1,208 +0,0 @@ -"""Test the NASweb config flow.""" - -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest -from webio_api.api_client import AuthError - -from homeassistant import config_entries -from homeassistant.components.nasweb.const import DOMAIN -from homeassistant.config_entries import ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers.network import NoURLAvailableError - -from .conftest import ( - BASE_CONFIG_FLOW, - BASE_COORDINATOR, - BASE_NASWEB_DATA, - TEST_SERIAL_NUMBER, -) - -pytestmark = pytest.mark.usefixtures("mock_setup_entry") - - -TEST_USER_INPUT = { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", -} - - -async def _add_test_config_entry(hass: HomeAssistant) -> ConfigFlowResult: - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result.get("type") == FlowResultType.FORM - assert not result.get("errors") - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - await hass.async_block_till_done() - return result2 - - -async def test_form( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test the form.""" - result = await _add_test_config_entry(hass) - - assert result.get("type") == FlowResultType.CREATE_ENTRY - assert result.get("title") == "1.1.1.1" - assert result.get("data") == TEST_USER_INPUT - - config_entry = result.get("result") - assert config_entry is not None - assert config_entry.unique_id == TEST_SERIAL_NUMBER - assert len(mock_setup_entry.mock_calls) == 1 - - -async def 
test_form_cannot_connect( - hass: HomeAssistant, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test cannot connect error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch(BASE_CONFIG_FLOW + "WebioAPI.check_connection", return_value=False): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - - assert result2.get("type") == FlowResultType.FORM - assert result2.get("errors") == {"base": "cannot_connect"} - - -async def test_form_invalid_auth( - hass: HomeAssistant, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test invalid auth.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch( - BASE_CONFIG_FLOW + "WebioAPI.refresh_device_info", - side_effect=AuthError, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - - assert result2.get("type") == FlowResultType.FORM - assert result2.get("errors") == {"base": "invalid_auth"} - - -async def test_form_missing_internal_url( - hass: HomeAssistant, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test missing internal url.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch( - BASE_NASWEB_DATA + "NASwebData.get_webhook_url", side_effect=NoURLAvailableError - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - assert result2.get("type") == FlowResultType.FORM - assert result2.get("errors") == {"base": "missing_internal_url"} - - -async def test_form_missing_nasweb_data( - hass: HomeAssistant, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test invalid auth.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch( - BASE_CONFIG_FLOW + "WebioAPI.get_serial_number", - return_value=None, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - assert result2.get("type") == FlowResultType.FORM - assert result2.get("errors") == {"base": "missing_nasweb_data"} - with patch(BASE_CONFIG_FLOW + "WebioAPI.status_subscription", return_value=False): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - assert result2.get("type") == FlowResultType.FORM - assert result2.get("errors") == {"base": "missing_nasweb_data"} - - -async def test_missing_status( - hass: HomeAssistant, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test missing status update.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch( - BASE_COORDINATOR + "NotificationCoordinator.check_connection", - return_value=False, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - assert result2.get("type") == FlowResultType.FORM - assert result2.get("errors") == {"base": "missing_status"} - - -async def test_form_exception( - hass: HomeAssistant, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test other exceptions.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - 
with patch( - "homeassistant.components.nasweb.config_flow.validate_input", - side_effect=Exception, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], TEST_USER_INPUT - ) - assert result2.get("type") == FlowResultType.FORM - assert result2.get("errors") == {"base": "unknown"} - - -async def test_form_already_configured( - hass: HomeAssistant, - validate_input_all_ok: dict[str, AsyncMock | MagicMock], -) -> None: - """Test already configured device.""" - result = await _add_test_config_entry(hass) - config_entry = result.get("result") - assert config_entry is not None - assert config_entry.unique_id == TEST_SERIAL_NUMBER - - result2_1 = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - result2_2 = await hass.config_entries.flow.async_configure( - result2_1["flow_id"], TEST_USER_INPUT - ) - await hass.async_block_till_done() - - assert result2_2.get("type") == FlowResultType.ABORT - assert result2_2.get("reason") == "already_configured" diff --git a/tests/components/nest/common.py b/tests/components/nest/common.py index f34c40e09f9..9c8de0224f0 100644 --- a/tests/components/nest/common.py +++ b/tests/components/nest/common.py @@ -30,7 +30,6 @@ CLIENT_ID = "some-client-id" CLIENT_SECRET = "some-client-secret" CLOUD_PROJECT_ID = "cloud-id-9876" SUBSCRIBER_ID = "projects/cloud-id-9876/subscriptions/subscriber-id-9876" -SUBSCRIPTION_NAME = "projects/cloud-id-9876/subscriptions/subscriber-id-9876" @dataclass @@ -87,17 +86,6 @@ TEST_CONFIG_ENTRY_LEGACY = NestTestConfig( }, ) -TEST_CONFIG_NEW_SUBSCRIPTION = NestTestConfig( - config_entry_data={ - "sdm": {}, - "project_id": PROJECT_ID, - "cloud_project_id": CLOUD_PROJECT_ID, - "subscription_name": SUBSCRIPTION_NAME, - "auth_implementation": "imported-cred", - }, - credential=ClientCredential(CLIENT_ID, CLIENT_SECRET), -) - class FakeSubscriber(GoogleNestSubscriber): """Fake subscriber that supplies a FakeDeviceManager.""" @@ -107,7 +95,6 @@ class FakeSubscriber(GoogleNestSubscriber): def __init__(self) -> None: # pylint: disable=super-init-not-called """Initialize Fake Subscriber.""" self._device_manager = DeviceManager() - self._subscriber_name = "fake-name" def set_update_callback(self, target: Callable[[EventMessage], Awaitable[None]]): """Capture the callback set by Home Assistant.""" diff --git a/tests/components/nest/conftest.py b/tests/components/nest/conftest.py index b070d025612..85c64aff379 100644 --- a/tests/components/nest/conftest.py +++ b/tests/components/nest/conftest.py @@ -22,7 +22,6 @@ from homeassistant.components.application_credentials import ( ) from homeassistant.components.nest import DOMAIN from homeassistant.components.nest.const import CONF_SUBSCRIBER_ID, SDM_SCOPES -from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -288,8 +287,6 @@ async def setup_base_platform( await hass.async_block_till_done() yield _setup_func - if config_entry and config_entry.state == ConfigEntryState.LOADED: - await hass.config_entries.async_unload(config_entry.entry_id) @pytest.fixture diff --git a/tests/components/nest/test_camera.py b/tests/components/nest/test_camera.py index 029879f1413..500dbc0f46f 100644 --- a/tests/components/nest/test_camera.py +++ b/tests/components/nest/test_camera.py @@ -483,50 +483,6 @@ async def test_stream_response_already_expired( assert stream_source == "rtsp://some/url?auth=g.2.streamingToken" -async def 
test_extending_stream_already_expired( - hass: HomeAssistant, - auth: FakeAuth, - setup_platform: PlatformSetup, - camera_device: None, -) -> None: - """Test a API response when extending the stream returns an expired stream url.""" - now = utcnow() - stream_1_expiration = now + datetime.timedelta(seconds=180) - stream_2_expiration = now + datetime.timedelta(seconds=30) # Will be in the past - stream_3_expiration = now + datetime.timedelta(seconds=600) - auth.responses = [ - make_stream_url_response(stream_1_expiration, token_num=1), - make_stream_url_response(stream_2_expiration, token_num=2), - make_stream_url_response(stream_3_expiration, token_num=3), - ] - await setup_platform() - - assert len(hass.states.async_all()) == 1 - cam = hass.states.get("camera.my_camera") - assert cam is not None - assert cam.state == CameraState.STREAMING - - # The stream is expired, but we return it anyway - stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") - assert stream_source == "rtsp://some/url?auth=g.1.streamingToken" - - # Jump to when the stream will be refreshed - await fire_alarm(hass, now + datetime.timedelta(seconds=160)) - stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") - assert stream_source == "rtsp://some/url?auth=g.2.streamingToken" - - # The stream will have expired in the past, but 1 minute min refresh interval is applied. - # The stream token is not updated. - await fire_alarm(hass, now + datetime.timedelta(seconds=170)) - stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") - assert stream_source == "rtsp://some/url?auth=g.2.streamingToken" - - # Now go past the min update interval and the stream is refreshed - await fire_alarm(hass, now + datetime.timedelta(seconds=225)) - stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") - assert stream_source == "rtsp://some/url?auth=g.3.streamingToken" - - async def test_camera_removed( hass: HomeAssistant, auth: FakeAuth, diff --git a/tests/components/nest/test_init.py b/tests/components/nest/test_init.py index a17803a6cde..4c238683130 100644 --- a/tests/components/nest/test_init.py +++ b/tests/components/nest/test_init.py @@ -31,7 +31,6 @@ from .common import ( SUBSCRIBER_ID, TEST_CONFIG_ENTRY_LEGACY, TEST_CONFIG_LEGACY, - TEST_CONFIG_NEW_SUBSCRIPTION, TEST_CONFIGFLOW_APP_CREDS, FakeSubscriber, PlatformSetup, @@ -98,19 +97,6 @@ async def test_setup_success( assert entries[0].state is ConfigEntryState.LOADED -@pytest.mark.parametrize("nest_test_config", [(TEST_CONFIG_NEW_SUBSCRIPTION)]) -async def test_setup_success_new_subscription_format( - hass: HomeAssistant, error_caplog: pytest.LogCaptureFixture, setup_platform -) -> None: - """Test successful setup.""" - await setup_platform() - assert not error_caplog.records - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - assert entries[0].state is ConfigEntryState.LOADED - - @pytest.mark.parametrize("subscriber_id", [("invalid-subscriber-format")]) async def test_setup_configuration_failure( hass: HomeAssistant, diff --git a/tests/components/nina/test_config_flow.py b/tests/components/nina/test_config_flow.py index 309c8860c20..cd0904b181d 100644 --- a/tests/components/nina/test_config_flow.py +++ b/tests/components/nina/test_config_flow.py @@ -8,6 +8,7 @@ from typing import Any from unittest.mock import patch from pynina import ApiError +import pytest from homeassistant.components.nina.const import ( CONF_AREA_FILTER, @@ -278,6 +279,10 @@ async def 
     assert result["errors"] == {"base": "cannot_connect"}


+@pytest.mark.parametrize(  # Remove when translations fixed
+    "ignore_translations",
+    ["component.nina.options.error.unknown"],
+)
 async def test_options_flow_unexpected_exception(hass: HomeAssistant) -> None:
     """Test config flow options but with an unexpected exception."""
     config_entry = MockConfigEntry(
diff --git a/tests/components/nordpool/__init__.py b/tests/components/nordpool/__init__.py
deleted file mode 100644
index 20d74d38486..00000000000
--- a/tests/components/nordpool/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-"""Tests for the Nord Pool integration."""
-
-from homeassistant.components.nordpool.const import CONF_AREAS
-from homeassistant.const import CONF_CURRENCY
-
-ENTRY_CONFIG = {
-    CONF_AREAS: ["SE3", "SE4"],
-    CONF_CURRENCY: "SEK",
-}
diff --git a/tests/components/nordpool/conftest.py b/tests/components/nordpool/conftest.py
deleted file mode 100644
index d1c1972c568..00000000000
--- a/tests/components/nordpool/conftest.py
+++ /dev/null
@@ -1,75 +0,0 @@
-"""Fixtures for the Nord Pool integration."""
-
-from __future__ import annotations
-
-from datetime import datetime
-import json
-from typing import Any
-from unittest.mock import patch
-
-from pynordpool import NordPoolClient
-from pynordpool.const import Currency
-from pynordpool.model import DeliveryPeriodData
-import pytest
-
-from homeassistant.components.nordpool.const import DOMAIN
-from homeassistant.config_entries import SOURCE_USER
-from homeassistant.core import HomeAssistant
-from homeassistant.util import dt as dt_util
-
-from . import ENTRY_CONFIG
-
-from tests.common import MockConfigEntry, load_fixture
-from tests.test_util.aiohttp import AiohttpClientMocker
-
-
-@pytest.fixture
-async def load_int(
-    hass: HomeAssistant, get_data: DeliveryPeriodData
-) -> MockConfigEntry:
-    """Set up the Nord Pool integration in Home Assistant."""
-    config_entry = MockConfigEntry(
-        domain=DOMAIN,
-        source=SOURCE_USER,
-        data=ENTRY_CONFIG,
-    )
-
-    config_entry.add_to_hass(hass)
-
-    with (
-        patch(
-            "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period",
-            return_value=get_data,
-        ),
-    ):
-        await hass.config_entries.async_setup(config_entry.entry_id)
-        await hass.async_block_till_done()
-
-    return config_entry
-
-
-@pytest.fixture(name="get_data")
-async def get_data_from_library(
-    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, load_json: dict[str, Any]
-) -> DeliveryPeriodData:
-    """Retrieve data from Nord Pool library."""
-
-    client = NordPoolClient(aioclient_mock.create_session(hass.loop))
-    with patch("pynordpool.NordPoolClient._get", return_value=load_json):
-        output = await client.async_get_delivery_period(
-            datetime(2024, 11, 5, 13, tzinfo=dt_util.UTC), Currency.SEK, ["SE3", "SE4"]
-        )
-    await client._session.close()
-    return output
-
-
-@pytest.fixture(name="load_json")
-def load_json_from_fixture(load_data: str) -> dict[str, Any]:
-    """Load fixture with json data and return."""
-    return json.loads(load_data)
-
-
-@pytest.fixture(name="load_data", scope="package")
-def load_data_from_fixture() -> str:
-    """Load fixture with fixture data and return."""
-    return load_fixture("delivery_period.json", DOMAIN)
diff --git a/tests/components/nordpool/fixtures/delivery_period.json b/tests/components/nordpool/fixtures/delivery_period.json
deleted file mode 100644
index 77d51dc9433..00000000000
--- a/tests/components/nordpool/fixtures/delivery_period.json
+++ 
/dev/null @@ -1,272 +0,0 @@ -{ - "deliveryDateCET": "2024-11-05", - "version": 3, - "updatedAt": "2024-11-04T12:15:03.9456464Z", - "deliveryAreas": ["SE3", "SE4"], - "market": "DayAhead", - "multiAreaEntries": [ - { - "deliveryStart": "2024-11-04T23:00:00Z", - "deliveryEnd": "2024-11-05T00:00:00Z", - "entryPerArea": { - "SE3": 250.73, - "SE4": 283.79 - } - }, - { - "deliveryStart": "2024-11-05T00:00:00Z", - "deliveryEnd": "2024-11-05T01:00:00Z", - "entryPerArea": { - "SE3": 76.36, - "SE4": 81.36 - } - }, - { - "deliveryStart": "2024-11-05T01:00:00Z", - "deliveryEnd": "2024-11-05T02:00:00Z", - "entryPerArea": { - "SE3": 73.92, - "SE4": 79.15 - } - }, - { - "deliveryStart": "2024-11-05T02:00:00Z", - "deliveryEnd": "2024-11-05T03:00:00Z", - "entryPerArea": { - "SE3": 61.69, - "SE4": 65.19 - } - }, - { - "deliveryStart": "2024-11-05T03:00:00Z", - "deliveryEnd": "2024-11-05T04:00:00Z", - "entryPerArea": { - "SE3": 64.6, - "SE4": 68.44 - } - }, - { - "deliveryStart": "2024-11-05T04:00:00Z", - "deliveryEnd": "2024-11-05T05:00:00Z", - "entryPerArea": { - "SE3": 453.27, - "SE4": 516.71 - } - }, - { - "deliveryStart": "2024-11-05T05:00:00Z", - "deliveryEnd": "2024-11-05T06:00:00Z", - "entryPerArea": { - "SE3": 996.28, - "SE4": 1240.85 - } - }, - { - "deliveryStart": "2024-11-05T06:00:00Z", - "deliveryEnd": "2024-11-05T07:00:00Z", - "entryPerArea": { - "SE3": 1406.14, - "SE4": 1648.25 - } - }, - { - "deliveryStart": "2024-11-05T07:00:00Z", - "deliveryEnd": "2024-11-05T08:00:00Z", - "entryPerArea": { - "SE3": 1346.54, - "SE4": 1570.5 - } - }, - { - "deliveryStart": "2024-11-05T08:00:00Z", - "deliveryEnd": "2024-11-05T09:00:00Z", - "entryPerArea": { - "SE3": 1150.28, - "SE4": 1345.37 - } - }, - { - "deliveryStart": "2024-11-05T09:00:00Z", - "deliveryEnd": "2024-11-05T10:00:00Z", - "entryPerArea": { - "SE3": 1031.32, - "SE4": 1206.51 - } - }, - { - "deliveryStart": "2024-11-05T10:00:00Z", - "deliveryEnd": "2024-11-05T11:00:00Z", - "entryPerArea": { - "SE3": 927.37, - "SE4": 1085.8 - } - }, - { - "deliveryStart": "2024-11-05T11:00:00Z", - "deliveryEnd": "2024-11-05T12:00:00Z", - "entryPerArea": { - "SE3": 925.05, - "SE4": 1081.72 - } - }, - { - "deliveryStart": "2024-11-05T12:00:00Z", - "deliveryEnd": "2024-11-05T13:00:00Z", - "entryPerArea": { - "SE3": 949.49, - "SE4": 1130.38 - } - }, - { - "deliveryStart": "2024-11-05T13:00:00Z", - "deliveryEnd": "2024-11-05T14:00:00Z", - "entryPerArea": { - "SE3": 1042.03, - "SE4": 1256.91 - } - }, - { - "deliveryStart": "2024-11-05T14:00:00Z", - "deliveryEnd": "2024-11-05T15:00:00Z", - "entryPerArea": { - "SE3": 1258.89, - "SE4": 1765.82 - } - }, - { - "deliveryStart": "2024-11-05T15:00:00Z", - "deliveryEnd": "2024-11-05T16:00:00Z", - "entryPerArea": { - "SE3": 1816.45, - "SE4": 2522.55 - } - }, - { - "deliveryStart": "2024-11-05T16:00:00Z", - "deliveryEnd": "2024-11-05T17:00:00Z", - "entryPerArea": { - "SE3": 2512.65, - "SE4": 3533.03 - } - }, - { - "deliveryStart": "2024-11-05T17:00:00Z", - "deliveryEnd": "2024-11-05T18:00:00Z", - "entryPerArea": { - "SE3": 1819.83, - "SE4": 2524.06 - } - }, - { - "deliveryStart": "2024-11-05T18:00:00Z", - "deliveryEnd": "2024-11-05T19:00:00Z", - "entryPerArea": { - "SE3": 1011.77, - "SE4": 1804.46 - } - }, - { - "deliveryStart": "2024-11-05T19:00:00Z", - "deliveryEnd": "2024-11-05T20:00:00Z", - "entryPerArea": { - "SE3": 835.53, - "SE4": 1112.57 - } - }, - { - "deliveryStart": "2024-11-05T20:00:00Z", - "deliveryEnd": "2024-11-05T21:00:00Z", - "entryPerArea": { - "SE3": 796.19, - "SE4": 1051.69 - } - }, - { - "deliveryStart": 
"2024-11-05T21:00:00Z", - "deliveryEnd": "2024-11-05T22:00:00Z", - "entryPerArea": { - "SE3": 522.3, - "SE4": 662.44 - } - }, - { - "deliveryStart": "2024-11-05T22:00:00Z", - "deliveryEnd": "2024-11-05T23:00:00Z", - "entryPerArea": { - "SE3": 289.14, - "SE4": 349.21 - } - } - ], - "blockPriceAggregates": [ - { - "blockName": "Off-peak 1", - "deliveryStart": "2024-11-04T23:00:00Z", - "deliveryEnd": "2024-11-05T07:00:00Z", - "averagePricePerArea": { - "SE3": { - "average": 422.87, - "min": 61.69, - "max": 1406.14 - }, - "SE4": { - "average": 497.97, - "min": 65.19, - "max": 1648.25 - } - } - }, - { - "blockName": "Peak", - "deliveryStart": "2024-11-05T07:00:00Z", - "deliveryEnd": "2024-11-05T19:00:00Z", - "averagePricePerArea": { - "SE3": { - "average": 1315.97, - "min": 925.05, - "max": 2512.65 - }, - "SE4": { - "average": 1735.59, - "min": 1081.72, - "max": 3533.03 - } - } - }, - { - "blockName": "Off-peak 2", - "deliveryStart": "2024-11-05T19:00:00Z", - "deliveryEnd": "2024-11-05T23:00:00Z", - "averagePricePerArea": { - "SE3": { - "average": 610.79, - "min": 289.14, - "max": 835.53 - }, - "SE4": { - "average": 793.98, - "min": 349.21, - "max": 1112.57 - } - } - } - ], - "currency": "SEK", - "exchangeRate": 11.6402, - "areaStates": [ - { - "state": "Final", - "areas": ["SE3", "SE4"] - } - ], - "areaAverages": [ - { - "areaCode": "SE3", - "price": 900.74 - }, - { - "areaCode": "SE4", - "price": 1166.12 - } - ] -} diff --git a/tests/components/nordpool/snapshots/test_diagnostics.ambr b/tests/components/nordpool/snapshots/test_diagnostics.ambr deleted file mode 100644 index dde2eca0022..00000000000 --- a/tests/components/nordpool/snapshots/test_diagnostics.ambr +++ /dev/null @@ -1,283 +0,0 @@ -# serializer version: 1 -# name: test_diagnostics - dict({ - 'raw': dict({ - 'areaAverages': list([ - dict({ - 'areaCode': 'SE3', - 'price': 900.74, - }), - dict({ - 'areaCode': 'SE4', - 'price': 1166.12, - }), - ]), - 'areaStates': list([ - dict({ - 'areas': list([ - 'SE3', - 'SE4', - ]), - 'state': 'Final', - }), - ]), - 'blockPriceAggregates': list([ - dict({ - 'averagePricePerArea': dict({ - 'SE3': dict({ - 'average': 422.87, - 'max': 1406.14, - 'min': 61.69, - }), - 'SE4': dict({ - 'average': 497.97, - 'max': 1648.25, - 'min': 65.19, - }), - }), - 'blockName': 'Off-peak 1', - 'deliveryEnd': '2024-11-05T07:00:00Z', - 'deliveryStart': '2024-11-04T23:00:00Z', - }), - dict({ - 'averagePricePerArea': dict({ - 'SE3': dict({ - 'average': 1315.97, - 'max': 2512.65, - 'min': 925.05, - }), - 'SE4': dict({ - 'average': 1735.59, - 'max': 3533.03, - 'min': 1081.72, - }), - }), - 'blockName': 'Peak', - 'deliveryEnd': '2024-11-05T19:00:00Z', - 'deliveryStart': '2024-11-05T07:00:00Z', - }), - dict({ - 'averagePricePerArea': dict({ - 'SE3': dict({ - 'average': 610.79, - 'max': 835.53, - 'min': 289.14, - }), - 'SE4': dict({ - 'average': 793.98, - 'max': 1112.57, - 'min': 349.21, - }), - }), - 'blockName': 'Off-peak 2', - 'deliveryEnd': '2024-11-05T23:00:00Z', - 'deliveryStart': '2024-11-05T19:00:00Z', - }), - ]), - 'currency': 'SEK', - 'deliveryAreas': list([ - 'SE3', - 'SE4', - ]), - 'deliveryDateCET': '2024-11-05', - 'exchangeRate': 11.6402, - 'market': 'DayAhead', - 'multiAreaEntries': list([ - dict({ - 'deliveryEnd': '2024-11-05T00:00:00Z', - 'deliveryStart': '2024-11-04T23:00:00Z', - 'entryPerArea': dict({ - 'SE3': 250.73, - 'SE4': 283.79, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T01:00:00Z', - 'deliveryStart': '2024-11-05T00:00:00Z', - 'entryPerArea': dict({ - 'SE3': 76.36, - 'SE4': 81.36, - }), - }), 
- dict({ - 'deliveryEnd': '2024-11-05T02:00:00Z', - 'deliveryStart': '2024-11-05T01:00:00Z', - 'entryPerArea': dict({ - 'SE3': 73.92, - 'SE4': 79.15, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T03:00:00Z', - 'deliveryStart': '2024-11-05T02:00:00Z', - 'entryPerArea': dict({ - 'SE3': 61.69, - 'SE4': 65.19, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T04:00:00Z', - 'deliveryStart': '2024-11-05T03:00:00Z', - 'entryPerArea': dict({ - 'SE3': 64.6, - 'SE4': 68.44, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T05:00:00Z', - 'deliveryStart': '2024-11-05T04:00:00Z', - 'entryPerArea': dict({ - 'SE3': 453.27, - 'SE4': 516.71, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T06:00:00Z', - 'deliveryStart': '2024-11-05T05:00:00Z', - 'entryPerArea': dict({ - 'SE3': 996.28, - 'SE4': 1240.85, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T07:00:00Z', - 'deliveryStart': '2024-11-05T06:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1406.14, - 'SE4': 1648.25, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T08:00:00Z', - 'deliveryStart': '2024-11-05T07:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1346.54, - 'SE4': 1570.5, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T09:00:00Z', - 'deliveryStart': '2024-11-05T08:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1150.28, - 'SE4': 1345.37, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T10:00:00Z', - 'deliveryStart': '2024-11-05T09:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1031.32, - 'SE4': 1206.51, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T11:00:00Z', - 'deliveryStart': '2024-11-05T10:00:00Z', - 'entryPerArea': dict({ - 'SE3': 927.37, - 'SE4': 1085.8, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T12:00:00Z', - 'deliveryStart': '2024-11-05T11:00:00Z', - 'entryPerArea': dict({ - 'SE3': 925.05, - 'SE4': 1081.72, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T13:00:00Z', - 'deliveryStart': '2024-11-05T12:00:00Z', - 'entryPerArea': dict({ - 'SE3': 949.49, - 'SE4': 1130.38, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T14:00:00Z', - 'deliveryStart': '2024-11-05T13:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1042.03, - 'SE4': 1256.91, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T15:00:00Z', - 'deliveryStart': '2024-11-05T14:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1258.89, - 'SE4': 1765.82, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T16:00:00Z', - 'deliveryStart': '2024-11-05T15:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1816.45, - 'SE4': 2522.55, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T17:00:00Z', - 'deliveryStart': '2024-11-05T16:00:00Z', - 'entryPerArea': dict({ - 'SE3': 2512.65, - 'SE4': 3533.03, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T18:00:00Z', - 'deliveryStart': '2024-11-05T17:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1819.83, - 'SE4': 2524.06, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T19:00:00Z', - 'deliveryStart': '2024-11-05T18:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1011.77, - 'SE4': 1804.46, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T20:00:00Z', - 'deliveryStart': '2024-11-05T19:00:00Z', - 'entryPerArea': dict({ - 'SE3': 835.53, - 'SE4': 1112.57, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T21:00:00Z', - 'deliveryStart': '2024-11-05T20:00:00Z', - 'entryPerArea': dict({ - 'SE3': 796.19, - 'SE4': 1051.69, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T22:00:00Z', - 'deliveryStart': '2024-11-05T21:00:00Z', - 'entryPerArea': dict({ - 'SE3': 522.3, - 'SE4': 662.44, - }), - }), - dict({ - 'deliveryEnd': '2024-11-05T23:00:00Z', - 'deliveryStart': '2024-11-05T22:00:00Z', - 
'entryPerArea': dict({ - 'SE3': 289.14, - 'SE4': 349.21, - }), - }), - ]), - 'updatedAt': '2024-11-04T12:15:03.9456464Z', - 'version': 3, - }), - }) -# --- diff --git a/tests/components/nordpool/snapshots/test_sensor.ambr b/tests/components/nordpool/snapshots/test_sensor.ambr deleted file mode 100644 index 01600352861..00000000000 --- a/tests/components/nordpool/snapshots/test_sensor.ambr +++ /dev/null @@ -1,2215 +0,0 @@ -# serializer version: 1 -# name: test_sensor[sensor.nord_pool_se3_currency-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.nord_pool_se3_currency', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Currency', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'currency', - 'unique_id': 'SE3-currency', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_currency-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Currency', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_currency', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'SEK', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_current_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_current_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Current price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_price', - 'unique_id': 'SE3-current_price', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_current_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Current price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_current_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.01177', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_daily_average-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_daily_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Daily average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'daily_average', - 'unique_id': 'SE3-daily_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_daily_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Daily average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_daily_average', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.90074', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_exchange_rate-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.nord_pool_se3_exchange_rate', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Exchange rate', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'exchange_rate', - 'unique_id': 'SE3-exchange_rate', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_exchange_rate-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Exchange rate', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_exchange_rate', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.6402', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_last_updated-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.nord_pool_se3_last_updated', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last updated', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'updated_at', - 'unique_id': 'SE3-updated_at', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_last_updated-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE3 Last updated', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_last_updated', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-04T12:15:03+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_next_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_next_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Next price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'next_price', - 'unique_id': 'SE3-next_price', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_next_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Next price', - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_next_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.83553', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_average-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 1 average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_average', - 'unique_id': 'off_peak_1-SE3-block_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Off-peak 1 average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_average', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.42287', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_highest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_highest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 1 highest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_max', - 'unique_id': 'off_peak_1-SE3-block_max', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_highest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Off-peak 1 highest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_highest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.40614', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_lowest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_lowest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 
'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 1 lowest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_min', - 'unique_id': 'off_peak_1-SE3-block_min', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_lowest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Off-peak 1 lowest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_lowest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.06169', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_from-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_from', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 1 time from', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_start_time', - 'unique_id': 'off_peak_1-SE3-block_start_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_from-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE3 Off-peak 1 time from', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_from', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-04T23:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_until-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_until', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 1 time until', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_end_time', - 'unique_id': 'off_peak_1-SE3-block_end_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_until-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE3 Off-peak 1 time until', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_until', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T07:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_average-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 
'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 2 average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_average', - 'unique_id': 'off_peak_2-SE3-block_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Off-peak 2 average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_average', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.61079', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_highest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_highest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 2 highest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_max', - 'unique_id': 'off_peak_2-SE3-block_max', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_highest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Off-peak 2 highest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_highest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.83553', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_lowest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_lowest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 2 lowest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_min', - 'unique_id': 'off_peak_2-SE3-block_min', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_lowest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Off-peak 2 lowest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 
'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_lowest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.28914', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_from-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_from', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 2 time from', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_start_time', - 'unique_id': 'off_peak_2-SE3-block_start_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_from-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE3 Off-peak 2 time from', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_from', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T19:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_until-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_until', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 2 time until', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_end_time', - 'unique_id': 'off_peak_2-SE3-block_end_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_until-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE3 Off-peak 2 time until', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_until', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T23:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_average-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_peak_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Peak average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_average', - 'unique_id': 'peak-SE3-block_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: 
test_sensor[sensor.nord_pool_se3_peak_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Peak average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_peak_average', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.31597', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_highest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_peak_highest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Peak highest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_max', - 'unique_id': 'peak-SE3-block_max', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_highest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Peak highest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_peak_highest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.51265', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_lowest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_peak_lowest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Peak lowest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_min', - 'unique_id': 'peak-SE3-block_min', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_lowest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Peak lowest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_peak_lowest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.92505', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_time_from-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_peak_time_from', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Peak time from', - 'platform': 
'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_start_time', - 'unique_id': 'peak-SE3-block_start_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_time_from-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE3 Peak time from', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_peak_time_from', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T07:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_time_until-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_peak_time_until', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Peak time until', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_end_time', - 'unique_id': 'peak-SE3-block_end_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_peak_time_until-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE3 Peak time until', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_peak_time_until', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T19:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_previous_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se3_previous_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Previous price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_price', - 'unique_id': 'SE3-last_price', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se3_previous_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE3 Previous price', - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se3_previous_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.81983', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_currency-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.nord_pool_se4_currency', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Currency', - 
'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'currency', - 'unique_id': 'SE4-currency', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_currency-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Currency', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_currency', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'SEK', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_current_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_current_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Current price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_price', - 'unique_id': 'SE4-current_price', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_current_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Current price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_current_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.80446', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_daily_average-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_daily_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Daily average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_average', - 'unique_id': 'SE4-daily_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_daily_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Daily average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_daily_average', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.16612', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_exchange_rate-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.nord_pool_se4_exchange_rate', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': None, - 'original_icon': None, - 'original_name': 'Exchange rate', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'exchange_rate', - 'unique_id': 'SE4-exchange_rate', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_exchange_rate-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Exchange rate', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_exchange_rate', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '11.6402', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_last_updated-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.nord_pool_se4_last_updated', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last updated', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'updated_at', - 'unique_id': 'SE4-updated_at', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_last_updated-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE4 Last updated', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_last_updated', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-04T12:15:03+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_next_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_next_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Next price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'next_price', - 'unique_id': 'SE4-next_price', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_next_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Next price', - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_next_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.11257', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_average-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 
'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 1 average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_average', - 'unique_id': 'off_peak_1-SE4-block_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Off-peak 1 average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_average', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.49797', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_highest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_highest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 1 highest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_max', - 'unique_id': 'off_peak_1-SE4-block_max', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_highest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Off-peak 1 highest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_highest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.64825', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_lowest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_lowest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 1 lowest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_min', - 'unique_id': 'off_peak_1-SE4-block_min', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_lowest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Off-peak 1 lowest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_lowest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.06519', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_from-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 
'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_from', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 1 time from', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_start_time', - 'unique_id': 'off_peak_1-SE4-block_start_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_from-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE4 Off-peak 1 time from', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_from', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-04T23:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_until-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_until', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 1 time until', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_end_time', - 'unique_id': 'off_peak_1-SE4-block_end_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_until-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE4 Off-peak 1 time until', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_until', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T07:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_average-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 2 average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_average', - 'unique_id': 'off_peak_2-SE4-block_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Off-peak 2 average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_average', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': '0.79398', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_highest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_highest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 2 highest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_max', - 'unique_id': 'off_peak_2-SE4-block_max', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_highest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Off-peak 2 highest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_highest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.11257', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_lowest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_lowest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Off-peak 2 lowest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_min', - 'unique_id': 'off_peak_2-SE4-block_min', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_lowest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Off-peak 2 lowest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_lowest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.34921', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_from-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_from', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 2 time from', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_start_time', - 'unique_id': 'off_peak_2-SE4-block_start_time', - 'unit_of_measurement': None, - }) -# --- 
-# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_from-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE4 Off-peak 2 time from', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_from', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T19:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_until-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_until', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Off-peak 2 time until', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_end_time', - 'unique_id': 'off_peak_2-SE4-block_end_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_until-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE4 Off-peak 2 time until', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_until', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T23:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_average-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_peak_average', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Peak average', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_average', - 'unique_id': 'peak-SE4-block_average', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_average-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Peak average', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_peak_average', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.73559', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_highest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_peak_highest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 
'Peak highest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_max', - 'unique_id': 'peak-SE4-block_max', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_highest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Peak highest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_peak_highest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3.53303', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_lowest_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_peak_lowest_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Peak lowest price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_min', - 'unique_id': 'peak-SE4-block_min', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_lowest_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Peak lowest price', - 'state_class': , - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_peak_lowest_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.08172', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_time_from-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_peak_time_from', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Peak time from', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_start_time', - 'unique_id': 'peak-SE4-block_start_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_time_from-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE4 Peak time from', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_peak_time_from', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T07:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_time_until-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_peak_time_until', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': 
None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Peak time until', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_end_time', - 'unique_id': 'peak-SE4-block_end_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_peak_time_until-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'Nord Pool SE4 Peak time until', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_peak_time_until', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-11-05T19:00:00+00:00', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_previous_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.nord_pool_se4_previous_price', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Previous price', - 'platform': 'nordpool', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_price', - 'unique_id': 'SE4-last_price', - 'unit_of_measurement': 'SEK/kWh', - }) -# --- -# name: test_sensor[sensor.nord_pool_se4_previous_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Nord Pool SE4 Previous price', - 'unit_of_measurement': 'SEK/kWh', - }), - 'context': , - 'entity_id': 'sensor.nord_pool_se4_previous_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.52406', - }) -# --- diff --git a/tests/components/nordpool/test_config_flow.py b/tests/components/nordpool/test_config_flow.py deleted file mode 100644 index cfdfc63aca7..00000000000 --- a/tests/components/nordpool/test_config_flow.py +++ /dev/null @@ -1,206 +0,0 @@ -"""Test the Nord Pool config flow.""" - -from __future__ import annotations - -from unittest.mock import patch - -from pynordpool import ( - DeliveryPeriodData, - NordPoolConnectionError, - NordPoolEmptyResponseError, - NordPoolError, - NordPoolResponseError, -) -import pytest - -from homeassistant import config_entries -from homeassistant.components.nordpool.const import CONF_AREAS, DOMAIN -from homeassistant.const import CONF_CURRENCY -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from . 
import ENTRY_CONFIG - -from tests.common import MockConfigEntry - - -@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") -async def test_form(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: - """Test we get the form.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - ENTRY_CONFIG, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["version"] == 1 - assert result["title"] == "Nord Pool" - assert result["data"] == {"areas": ["SE3", "SE4"], "currency": "SEK"} - - -@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") -async def test_single_config_entry( - hass: HomeAssistant, load_int: None, get_data: DeliveryPeriodData -) -> None: - """Test abort for single config entry.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" - - -@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") -@pytest.mark.parametrize( - ("error_message", "p_error"), - [ - (NordPoolConnectionError, "cannot_connect"), - (NordPoolEmptyResponseError, "no_data"), - (NordPoolError, "cannot_connect"), - (NordPoolResponseError, "cannot_connect"), - ], -) -async def test_cannot_connect( - hass: HomeAssistant, - get_data: DeliveryPeriodData, - error_message: Exception, - p_error: str, -) -> None: - """Test cannot connect error.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == config_entries.SOURCE_USER - - with patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - side_effect=error_message, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=ENTRY_CONFIG, - ) - - assert result["errors"] == {"base": p_error} - - with patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=ENTRY_CONFIG, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Nord Pool" - assert result["data"] == {"areas": ["SE3", "SE4"], "currency": "SEK"} - - -@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") -async def test_reconfigure( - hass: HomeAssistant, - load_int: MockConfigEntry, - get_data: DeliveryPeriodData, -) -> None: - """Test reconfiguration.""" - - result = await load_int.start_reconfigure_flow(hass) - - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_AREAS: ["SE3"], - CONF_CURRENCY: "EUR", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - assert load_int.data == { - "areas": [ - "SE3", - ], - "currency": "EUR", - } - - 
-@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") -@pytest.mark.parametrize( - ("error_message", "p_error"), - [ - (NordPoolConnectionError, "cannot_connect"), - (NordPoolEmptyResponseError, "no_data"), - (NordPoolError, "cannot_connect"), - (NordPoolResponseError, "cannot_connect"), - ], -) -async def test_reconfigure_cannot_connect( - hass: HomeAssistant, - load_int: MockConfigEntry, - get_data: DeliveryPeriodData, - error_message: Exception, - p_error: str, -) -> None: - """Test cannot connect error in a reconfigure flow.""" - - result = await load_int.start_reconfigure_flow(hass) - - with patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - side_effect=error_message, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_AREAS: ["SE3"], - CONF_CURRENCY: "EUR", - }, - ) - - assert result["errors"] == {"base": p_error} - - with patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_AREAS: ["SE3"], - CONF_CURRENCY: "EUR", - }, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - assert load_int.data == { - "areas": [ - "SE3", - ], - "currency": "EUR", - } diff --git a/tests/components/nordpool/test_coordinator.py b/tests/components/nordpool/test_coordinator.py deleted file mode 100644 index d2d912b1b99..00000000000 --- a/tests/components/nordpool/test_coordinator.py +++ /dev/null @@ -1,106 +0,0 @@ -"""The test for the Nord Pool coordinator.""" - -from __future__ import annotations - -from datetime import timedelta -from unittest.mock import patch - -from freezegun.api import FrozenDateTimeFactory -from pynordpool import ( - DeliveryPeriodData, - NordPoolAuthenticationError, - NordPoolEmptyResponseError, - NordPoolError, - NordPoolResponseError, -) -import pytest - -from homeassistant.components.nordpool.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import STATE_UNAVAILABLE -from homeassistant.core import HomeAssistant - -from . 
import ENTRY_CONFIG - -from tests.common import MockConfigEntry, async_fire_time_changed - - -@pytest.mark.freeze_time("2024-11-05T10:00:00+00:00") -async def test_coordinator( - hass: HomeAssistant, - get_data: DeliveryPeriodData, - freezer: FrozenDateTimeFactory, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the Nord Pool coordinator with errors.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - source=SOURCE_USER, - data=ENTRY_CONFIG, - ) - - config_entry.add_to_hass(hass) - - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - ) as mock_data, - ): - mock_data.return_value = get_data - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == "0.92737" - mock_data.reset_mock() - - mock_data.side_effect = NordPoolError("error") - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == STATE_UNAVAILABLE - mock_data.reset_mock() - - assert "Authentication error" not in caplog.text - mock_data.side_effect = NordPoolAuthenticationError("Authentication error") - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == STATE_UNAVAILABLE - assert "Authentication error" in caplog.text - mock_data.reset_mock() - - assert "Empty response" not in caplog.text - mock_data.side_effect = NordPoolEmptyResponseError("Empty response") - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == STATE_UNAVAILABLE - assert "Empty response" in caplog.text - mock_data.reset_mock() - - assert "Response error" not in caplog.text - mock_data.side_effect = NordPoolResponseError("Response error") - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == STATE_UNAVAILABLE - assert "Response error" in caplog.text - mock_data.reset_mock() - - mock_data.return_value = get_data - mock_data.side_effect = None - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == "1.81645" diff --git a/tests/components/nordpool/test_diagnostics.py b/tests/components/nordpool/test_diagnostics.py deleted file mode 100644 index 4639186ecf1..00000000000 --- a/tests/components/nordpool/test_diagnostics.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Test Nord Pool diagnostics.""" - -from __future__ import annotations - -from syrupy.assertion import SnapshotAssertion - -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant - -from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.typing import ClientSessionGenerator - - 
-async def test_diagnostics( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - load_int: ConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test generating diagnostics for a config entry.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, load_int) == snapshot - ) diff --git a/tests/components/nordpool/test_init.py b/tests/components/nordpool/test_init.py deleted file mode 100644 index 5ec1c4b3a0b..00000000000 --- a/tests/components/nordpool/test_init.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Test for Nord Pool component Init.""" - -from __future__ import annotations - -from unittest.mock import patch - -from pynordpool import DeliveryPeriodData - -from homeassistant.components.nordpool.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER, ConfigEntryState -from homeassistant.core import HomeAssistant - -from . import ENTRY_CONFIG - -from tests.common import MockConfigEntry - - -async def test_unload_entry(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: - """Test load and unload an entry.""" - entry = MockConfigEntry( - domain=DOMAIN, - source=SOURCE_USER, - data=ENTRY_CONFIG, - ) - entry.add_to_hass(hass) - - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - - assert entry.state is ConfigEntryState.LOADED - assert await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/nordpool/test_sensor.py b/tests/components/nordpool/test_sensor.py deleted file mode 100644 index c7a305c8a40..00000000000 --- a/tests/components/nordpool/test_sensor.py +++ /dev/null @@ -1,25 +0,0 @@ -"""The test for the Nord Pool sensor platform.""" - -from __future__ import annotations - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from tests.common import snapshot_platform - - -@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_sensor( - hass: HomeAssistant, - load_int: ConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the Nord Pool sensor.""" - - await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) diff --git a/tests/components/ollama/test_config_flow.py b/tests/components/ollama/test_config_flow.py index 7755f2208b4..82c954a1737 100644 --- a/tests/components/ollama/test_config_flow.py +++ b/tests/components/ollama/test_config_flow.py @@ -204,6 +204,10 @@ async def test_form_errors(hass: HomeAssistant, side_effect, error) -> None: assert result2["errors"] == {"base": error} +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.ollama.config.abort.download_failed"], +) async def test_download_error(hass: HomeAssistant) -> None: """Test we handle errors while downloading a model.""" result = await hass.config_entries.flow.async_init( diff --git a/tests/components/onboarding/test_views.py b/tests/components/onboarding/test_views.py index 35f6b7d739c..6df3951249b 100644 --- a/tests/components/onboarding/test_views.py +++ 
b/tests/components/onboarding/test_views.py @@ -72,11 +72,23 @@ async def mock_supervisor_fixture( aioclient_mock: AiohttpClientMocker, store_info: AsyncMock, supervisor_is_connected: AsyncMock, - resolution_info: AsyncMock, ) -> AsyncGenerator[None]: """Mock supervisor.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) + aioclient_mock.get( + "http://127.0.0.1/resolution/info", + json={ + "result": "ok", + "data": { + "unsupported": [], + "unhealthy": [], + "suggestions": [], + "issues": [], + "checks": [], + }, + }, + ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/p1_monitor/test_config_flow.py b/tests/components/p1_monitor/test_config_flow.py index cbd89320074..ea1d12055a0 100644 --- a/tests/components/p1_monitor/test_config_flow.py +++ b/tests/components/p1_monitor/test_config_flow.py @@ -36,7 +36,6 @@ async def test_full_user_flow(hass: HomeAssistant) -> None: assert result2.get("type") is FlowResultType.CREATE_ENTRY assert result2.get("title") == "P1 Monitor" assert result2.get("data") == {CONF_HOST: "example.com", CONF_PORT: 80} - assert isinstance(result2["data"][CONF_PORT], int) assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_p1monitor.mock_calls) == 1 diff --git a/tests/components/palazzetti/test_config_flow.py b/tests/components/palazzetti/test_config_flow.py index 03c56c33d0c..960ad7a1184 100644 --- a/tests/components/palazzetti/test_config_flow.py +++ b/tests/components/palazzetti/test_config_flow.py @@ -4,9 +4,8 @@ from unittest.mock import AsyncMock from pypalazzetti.exceptions import CommunicationError -from homeassistant.components import dhcp from homeassistant.components.palazzetti.const import DOMAIN -from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -93,48 +92,3 @@ async def test_duplicate( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - - -async def test_dhcp_flow( - hass: HomeAssistant, mock_palazzetti_client: AsyncMock, mock_setup_entry: AsyncMock -) -> None: - """Test the DHCP flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - data=dhcp.DhcpServiceInfo( - hostname="connbox1234", ip="192.168.1.1", macaddress="11:22:33:44:55:66" - ), - context={"source": SOURCE_DHCP}, - ) - - await hass.async_block_till_done() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "discovery_confirm" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {}, - ) - - await hass.async_block_till_done() - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Stove" - assert result["result"].unique_id == "11:22:33:44:55:66" - - -async def test_dhcp_flow_error( - hass: HomeAssistant, mock_palazzetti_client: AsyncMock, mock_setup_entry: AsyncMock -) -> None: - """Test the DHCP flow.""" - mock_palazzetti_client.connect.side_effect = CommunicationError() - - result = await hass.config_entries.flow.async_init( - DOMAIN, - data=dhcp.DhcpServiceInfo( - hostname="connbox1234", ip="192.168.1.1", macaddress="11:22:33:44:55:66" - ), - context={"source": SOURCE_DHCP}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == 
"cannot_connect" diff --git a/tests/components/powerwall/test_config_flow.py b/tests/components/powerwall/test_config_flow.py index 1ff1470f81c..5074a289d19 100644 --- a/tests/components/powerwall/test_config_flow.py +++ b/tests/components/powerwall/test_config_flow.py @@ -339,11 +339,6 @@ async def test_form_reauth(hass: HomeAssistant) -> None: result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - flow = hass.config_entries.flow.async_get(result["flow_id"]) - assert flow["context"]["title_placeholders"] == { - "ip_address": VALID_CONFIG[CONF_IP_ADDRESS], - "name": entry.title, - } mock_powerwall = await _mock_powerwall_site_name(hass, "My site") diff --git a/tests/components/profiler/test_init.py b/tests/components/profiler/test_init.py index 37940df437b..3f0e0b92056 100644 --- a/tests/components/profiler/test_init.py +++ b/tests/components/profiler/test_init.py @@ -5,7 +5,6 @@ from functools import lru_cache import logging import os from pathlib import Path -import sys from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory @@ -71,9 +70,6 @@ async def test_basic_usage(hass: HomeAssistant, tmp_path: Path) -> None: await hass.async_block_till_done() -@pytest.mark.skipif( - sys.version_info >= (3, 13), reason="not yet available on Python 3.13" -) async def test_memory_usage(hass: HomeAssistant, tmp_path: Path) -> None: """Test we can setup and the service is registered.""" test_dir = tmp_path / "profiles" @@ -105,24 +101,6 @@ async def test_memory_usage(hass: HomeAssistant, tmp_path: Path) -> None: await hass.async_block_till_done() -@pytest.mark.skipif(sys.version_info < (3, 13), reason="still works on python 3.12") -async def test_memory_usage_py313(hass: HomeAssistant, tmp_path: Path) -> None: - """Test raise an error on python3.13.""" - entry = MockConfigEntry(domain=DOMAIN) - entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert hass.services.has_service(DOMAIN, SERVICE_MEMORY) - with pytest.raises( - HomeAssistantError, - match="Memory profiling is not supported on Python 3.13. 
Please use Python 3.12.", - ): - await hass.services.async_call( - DOMAIN, SERVICE_MEMORY, {CONF_SECONDS: 0.000001}, blocking=True - ) - - async def test_object_growth_logging( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, diff --git a/tests/components/roborock/snapshots/test_diagnostics.ambr b/tests/components/roborock/snapshots/test_diagnostics.ambr index 26ecb729312..805a498041a 100644 --- a/tests/components/roborock/snapshots/test_diagnostics.ambr +++ b/tests/components/roborock/snapshots/test_diagnostics.ambr @@ -102,7 +102,6 @@ 'id': '120', 'mode': 'ro', 'name': '错误代码', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -110,7 +109,6 @@ 'id': '121', 'mode': 'ro', 'name': '设备状态', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -118,7 +116,6 @@ 'id': '122', 'mode': 'ro', 'name': '设备电量', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -126,7 +123,6 @@ 'id': '123', 'mode': 'rw', 'name': '清扫模式', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -134,7 +130,6 @@ 'id': '124', 'mode': 'rw', 'name': '拖地模式', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -142,7 +137,6 @@ 'id': '125', 'mode': 'rw', 'name': '主刷寿命', - 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -150,7 +144,6 @@ 'id': '126', 'mode': 'rw', 'name': '边刷寿命', - 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -158,7 +151,6 @@ 'id': '127', 'mode': 'rw', 'name': '滤网寿命', - 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -389,7 +381,6 @@ 'id': '120', 'mode': 'ro', 'name': '错误代码', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -397,7 +388,6 @@ 'id': '121', 'mode': 'ro', 'name': '设备状态', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -405,7 +395,6 @@ 'id': '122', 'mode': 'ro', 'name': '设备电量', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -413,7 +402,6 @@ 'id': '123', 'mode': 'rw', 'name': '清扫模式', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -421,7 +409,6 @@ 'id': '124', 'mode': 'rw', 'name': '拖地模式', - 'property': '{"range": []}', 'type': 'ENUM', }), dict({ @@ -429,7 +416,6 @@ 'id': '125', 'mode': 'rw', 'name': '主刷寿命', - 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -437,7 +423,6 @@ 'id': '126', 'mode': 'rw', 'name': '边刷寿命', - 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ @@ -445,7 +430,6 @@ 'id': '127', 'mode': 'rw', 'name': '滤网寿命', - 'property': '{"max": 100, "min": 0, "step": 1, "unit": null, "scale": 1}', 'type': 'VALUE', }), dict({ diff --git a/tests/components/russound_rio/__init__.py b/tests/components/russound_rio/__init__.py index d0e6d77f1ee..96171071907 100644 --- a/tests/components/russound_rio/__init__.py +++ b/tests/components/russound_rio/__init__.py @@ -1,13 +1 @@ """Tests for the Russound RIO integration.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py index 09cccd7d83f..91d009f13f4 100644 --- 
a/tests/components/russound_rio/conftest.py +++ b/tests/components/russound_rio/conftest.py @@ -1,19 +1,16 @@ """Test fixtures for Russound RIO integration.""" from collections.abc import Generator -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, patch -from aiorussound import Controller, RussoundTcpConnectionHandler, Source -from aiorussound.rio import ZoneControlSurface -from aiorussound.util import controller_device_str, zone_device_str import pytest from homeassistant.components.russound_rio.const import DOMAIN from homeassistant.core import HomeAssistant -from .const import HARDWARE_MAC, HOST, MOCK_CONFIG, MODEL, PORT +from .const import HARDWARE_MAC, MOCK_CONFIG, MOCK_CONTROLLERS, MODEL -from tests.common import MockConfigEntry, load_json_object_fixture +from tests.common import MockConfigEntry @pytest.fixture @@ -28,13 +25,15 @@ def mock_setup_entry(): @pytest.fixture def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Mock a Russound RIO config entry.""" - return MockConfigEntry( + entry = MockConfigEntry( domain=DOMAIN, data=MOCK_CONFIG, unique_id=HARDWARE_MAC, title=MODEL ) + entry.add_to_hass(hass) + return entry @pytest.fixture -def mock_russound_client() -> Generator[AsyncMock]: +def mock_russound() -> Generator[AsyncMock]: """Mock the Russound RIO client.""" with ( patch( @@ -42,32 +41,8 @@ def mock_russound_client() -> Generator[AsyncMock]: ) as mock_client, patch( "homeassistant.components.russound_rio.config_flow.RussoundClient", - new=mock_client, + return_value=mock_client, ), ): - client = mock_client.return_value - zones = { - int(k): ZoneControlSurface.from_dict(v) - for k, v in load_json_object_fixture("get_zones.json", DOMAIN).items() - } - client.sources = { - int(k): Source.from_dict(v) - for k, v in load_json_object_fixture("get_sources.json", DOMAIN).items() - } - for k, v in zones.items(): - v.device_str = zone_device_str(1, k) - v.fetch_current_source = Mock( - side_effect=lambda current_source=v.current_source: client.sources.get( - int(current_source) - ) - ) - - client.controllers = { - 1: Controller( - 1, "MCA-C5", client, controller_device_str(1), HARDWARE_MAC, None, zones - ) - } - client.connection_handler = RussoundTcpConnectionHandler(HOST, PORT) - client.is_connected = Mock(return_value=True) - client.unregister_state_update_callbacks.return_value = True - yield client + mock_client.controllers = MOCK_CONTROLLERS + yield mock_client diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py index 3d2924693d2..527f4fe3377 100644 --- a/tests/components/russound_rio/const.py +++ b/tests/components/russound_rio/const.py @@ -2,8 +2,6 @@ from collections import namedtuple -from homeassistant.components.media_player import DOMAIN as MP_DOMAIN - HOST = "127.0.0.1" PORT = 9621 MODEL = "MCA-C5" @@ -16,7 +14,3 @@ MOCK_CONFIG = { _CONTROLLER = namedtuple("Controller", ["mac_address", "controller_type"]) # noqa: PYI024 MOCK_CONTROLLERS = {1: _CONTROLLER(mac_address=HARDWARE_MAC, controller_type=MODEL)} - -DEVICE_NAME = "mca_c5" -NAME_ZONE_1 = "backyard" -ENTITY_ID_ZONE_1 = f"{MP_DOMAIN}.{DEVICE_NAME}_{NAME_ZONE_1}" diff --git a/tests/components/russound_rio/fixtures/get_sources.json b/tests/components/russound_rio/fixtures/get_sources.json deleted file mode 100644 index e39d702b8a1..00000000000 --- a/tests/components/russound_rio/fixtures/get_sources.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "1": { - "name": "Aux", - "type": "Miscellaneous Audio" - }, - "2": { - "name": 
"Spotify", - "type": "Russound Media Streamer" - } -} diff --git a/tests/components/russound_rio/fixtures/get_zones.json b/tests/components/russound_rio/fixtures/get_zones.json deleted file mode 100644 index 396310339b3..00000000000 --- a/tests/components/russound_rio/fixtures/get_zones.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "1": { - "name": "Backyard", - "volume": "10", - "status": "ON", - "enabled": "True", - "current_source": "1" - }, - "2": { - "name": "Kitchen", - "volume": "50", - "status": "OFF", - "enabled": "True", - "current_source": "2" - }, - "3": { - "name": "Bedroom", - "volume": "10", - "status": "OFF", - "enabled": "False" - } -} diff --git a/tests/components/russound_rio/snapshots/test_init.ambr b/tests/components/russound_rio/snapshots/test_init.ambr deleted file mode 100644 index fcd59dd06f7..00000000000 --- a/tests/components/russound_rio/snapshots/test_init.ambr +++ /dev/null @@ -1,37 +0,0 @@ -# serializer version: 1 -# name: test_device_info - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://127.0.0.1', - 'connections': set({ - tuple( - 'mac', - '00:11:22:33:44:55', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'russound_rio', - '00:11:22:33:44:55', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Russound', - 'model': 'MCA-C5', - 'model_id': None, - 'name': 'MCA-C5', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- diff --git a/tests/components/russound_rio/test_config_flow.py b/tests/components/russound_rio/test_config_flow.py index cf754852731..9461fe1d5be 100644 --- a/tests/components/russound_rio/test_config_flow.py +++ b/tests/components/russound_rio/test_config_flow.py @@ -11,7 +11,7 @@ from .const import MOCK_CONFIG, MODEL async def test_form( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -32,13 +32,13 @@ async def test_form( async def test_form_cannot_connect( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock ) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - mock_russound_client.connect.side_effect = TimeoutError + mock_russound.connect.side_effect = TimeoutError result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_CONFIG, @@ -48,7 +48,7 @@ async def test_form_cannot_connect( assert result["errors"] == {"base": "cannot_connect"} # Recover with correct information - mock_russound_client.connect.side_effect = None + mock_russound.connect.side_effect = None result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_CONFIG, @@ -61,7 +61,7 @@ async def test_form_cannot_connect( async def test_import( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock ) -> None: """Test we import a config entry.""" result = await hass.config_entries.flow.async_init( @@ -77,10 +77,10 @@ async def test_import( async def test_import_cannot_connect( - 
hass: HomeAssistant, mock_russound_client: AsyncMock + hass: HomeAssistant, mock_russound: AsyncMock ) -> None: """Test we handle import cannot connect error.""" - mock_russound_client.connect.side_effect = TimeoutError + mock_russound.connect.side_effect = TimeoutError result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG diff --git a/tests/components/russound_rio/test_init.py b/tests/components/russound_rio/test_init.py deleted file mode 100644 index 6787ee37c79..00000000000 --- a/tests/components/russound_rio/test_init.py +++ /dev/null @@ -1,44 +0,0 @@ -"""Tests for the Russound RIO integration.""" - -from unittest.mock import AsyncMock - -from syrupy import SnapshotAssertion - -from homeassistant.components.russound_rio.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from . import setup_integration - -from tests.common import MockConfigEntry - - -async def test_config_entry_not_ready( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_russound_client: AsyncMock, -) -> None: - """Test the Russound RIO configuration entry not ready.""" - mock_russound_client.connect.side_effect = TimeoutError - await setup_integration(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - - mock_russound_client.connect = AsyncMock(return_value=True) - - -async def test_device_info( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_russound_client: AsyncMock, - mock_config_entry: MockConfigEntry, - device_registry: dr.DeviceRegistry, -) -> None: - """Test device registry integration.""" - await setup_integration(hass, mock_config_entry) - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} - ) - assert device_entry is not None - assert device_entry == snapshot diff --git a/tests/components/russound_rio/test_media_player.py b/tests/components/russound_rio/test_media_player.py deleted file mode 100644 index e720e2c7f65..00000000000 --- a/tests/components/russound_rio/test_media_player.py +++ /dev/null @@ -1,58 +0,0 @@ -"""Tests for the Russound RIO media player.""" - -from unittest.mock import AsyncMock - -from aiorussound.models import CallbackType, PlayStatus -import pytest - -from homeassistant.const import ( - STATE_BUFFERING, - STATE_IDLE, - STATE_OFF, - STATE_ON, - STATE_PAUSED, - STATE_PLAYING, -) -from homeassistant.core import HomeAssistant - -from . 
import setup_integration -from .const import ENTITY_ID_ZONE_1 - -from tests.common import MockConfigEntry - - -async def mock_state_update(client: AsyncMock) -> None: - """Trigger a callback in the media player.""" - for callback in client.register_state_update_callbacks.call_args_list: - await callback[0][0](client, CallbackType.STATE) - - -@pytest.mark.parametrize( - ("zone_status", "source_play_status", "media_player_state"), - [ - (True, None, STATE_ON), - (True, PlayStatus.PLAYING, STATE_PLAYING), - (True, PlayStatus.PAUSED, STATE_PAUSED), - (True, PlayStatus.TRANSITIONING, STATE_BUFFERING), - (True, PlayStatus.STOPPED, STATE_IDLE), - (False, None, STATE_OFF), - (False, PlayStatus.STOPPED, STATE_OFF), - ], -) -async def test_entity_state( - hass: HomeAssistant, - mock_russound_client: AsyncMock, - mock_config_entry: MockConfigEntry, - zone_status: bool, - source_play_status: PlayStatus | None, - media_player_state: str, -) -> None: - """Test media player state.""" - await setup_integration(hass, mock_config_entry) - mock_russound_client.controllers[1].zones[1].status = zone_status - mock_russound_client.sources[1].play_status = source_play_status - await mock_state_update(mock_russound_client) - await hass.async_block_till_done() - - state = hass.states.get(ENTITY_ID_ZONE_1) - assert state.state == media_player_state diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 0e8c2a5e188..37f080d2de2 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -4233,8 +4233,8 @@ async def async_record_states( @pytest.mark.parametrize( ("units", "attributes", "unit", "unit2", "supported_unit"), [ - (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), - (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), + (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), ( US_CUSTOMARY_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, @@ -4445,8 +4445,8 @@ async def test_validate_statistics_unit_ignore_device_class( @pytest.mark.parametrize( ("units", "attributes", "unit", "unit2", "supported_unit"), [ - (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), - (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), + (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), ( US_CUSTOMARY_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, diff --git a/tests/components/seventeentrack/snapshots/test_services.ambr b/tests/components/seventeentrack/snapshots/test_services.ambr index e172a2de594..568acea33a5 100644 --- a/tests/components/seventeentrack/snapshots/test_services.ambr +++ b/tests/components/seventeentrack/snapshots/test_services.ambr @@ -71,32 +71,3 @@ ]), }) # --- -# name: test_packages_with_none_timestamp - dict({ - 'packages': list([ - dict({ - 'destination_country': 'Belgium', - 'friendly_name': 'friendly name 1', - 'info_text': 'info text 1', - 'location': 'location 1', - 'origin_country': 'Belgium', - 'package_type': 'Registered Parcel', - 'status': 'In Transit', - 'tracking_info_language': 'Unknown', - 'tracking_number': '456', - }), - dict({ - 'destination_country': 'Belgium', - 'friendly_name': 'friendly name 2', - 'info_text': 'info text 1', - 'location': 'location 1', - 'origin_country': 'Belgium', - 'package_type': 'Registered Parcel', - 'status': 'Delivered', - 'timestamp': 
'2020-08-10T10:32:00+00:00', - 'tracking_info_language': 'Unknown', - 'tracking_number': '789', - }), - ]), - }) -# --- diff --git a/tests/components/seventeentrack/test_services.py b/tests/components/seventeentrack/test_services.py index bbd5644ad63..54c9349c121 100644 --- a/tests/components/seventeentrack/test_services.py +++ b/tests/components/seventeentrack/test_services.py @@ -150,28 +150,6 @@ async def test_archive_package( ) -async def test_packages_with_none_timestamp( - hass: HomeAssistant, - mock_seventeentrack: AsyncMock, - mock_config_entry: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Ensure service returns all packages when none provided.""" - await _mock_invalid_packages(mock_seventeentrack) - await init_integration(hass, mock_config_entry) - service_response = await hass.services.async_call( - DOMAIN, - SERVICE_GET_PACKAGES, - { - CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, - }, - blocking=True, - return_response=True, - ) - - assert service_response == snapshot - - async def _mock_packages(mock_seventeentrack): package1 = get_package(status=10) package2 = get_package( @@ -189,19 +167,3 @@ async def _mock_packages(mock_seventeentrack): package2, package3, ] - - -async def _mock_invalid_packages(mock_seventeentrack): - package1 = get_package( - status=10, - timestamp=None, - ) - package2 = get_package( - tracking_number="789", - friendly_name="friendly name 2", - status=40, - ) - mock_seventeentrack.return_value.profile.packages.return_value = [ - package1, - package2, - ] diff --git a/tests/components/sky_remote/__init__.py b/tests/components/sky_remote/__init__.py deleted file mode 100644 index 83d68330d5b..00000000000 --- a/tests/components/sky_remote/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Tests for the Sky Remote component.""" - -from homeassistant.core import HomeAssistant - -from tests.common import MockConfigEntry - - -async def setup_mock_entry(hass: HomeAssistant, entry: MockConfigEntry): - """Initialize a mock config entry.""" - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - - await hass.async_block_till_done() diff --git a/tests/components/sky_remote/conftest.py b/tests/components/sky_remote/conftest.py deleted file mode 100644 index d6c453d81f7..00000000000 --- a/tests/components/sky_remote/conftest.py +++ /dev/null @@ -1,47 +0,0 @@ -"""Test mocks and fixtures.""" - -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest - -from homeassistant.components.sky_remote.const import DEFAULT_PORT, DOMAIN -from homeassistant.const import CONF_HOST, CONF_PORT - -from tests.common import MockConfigEntry - -SAMPLE_CONFIG = {CONF_HOST: "example.com", CONF_PORT: DEFAULT_PORT} - - -@pytest.fixture -def mock_config_entry() -> MockConfigEntry: - """Mock a config entry.""" - return MockConfigEntry(domain=DOMAIN, data=SAMPLE_CONFIG) - - -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Stub out setup function.""" - with patch( - "homeassistant.components.sky_remote.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - -@pytest.fixture -def mock_remote_control(request: pytest.FixtureRequest) -> Generator[MagicMock]: - """Mock skyboxremote library.""" - with ( - patch( - "homeassistant.components.sky_remote.RemoteControl" - ) as mock_remote_control, - patch( - "homeassistant.components.sky_remote.config_flow.RemoteControl", - mock_remote_control, - ), - ): - mock_remote_control._instance_mock = 
MagicMock(host="example.com") - mock_remote_control._instance_mock.check_connectable = AsyncMock(True) - mock_remote_control.return_value = mock_remote_control._instance_mock - yield mock_remote_control diff --git a/tests/components/sky_remote/test_config_flow.py b/tests/components/sky_remote/test_config_flow.py deleted file mode 100644 index aaeda20788c..00000000000 --- a/tests/components/sky_remote/test_config_flow.py +++ /dev/null @@ -1,125 +0,0 @@ -"""Test the Sky Remote config flow.""" - -from __future__ import annotations - -from unittest.mock import AsyncMock - -import pytest -from skyboxremote import LEGACY_PORT, SkyBoxConnectionError - -from homeassistant.components.sky_remote.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_PORT -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from .conftest import SAMPLE_CONFIG - - -async def test_user_flow( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_remote_control -) -> None: - """Test we can setup an entry.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == SAMPLE_CONFIG - - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_device_exists_abort( - hass: HomeAssistant, mock_config_entry, mock_remote_control -) -> None: - """Test we abort flow if device already configured.""" - mock_config_entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={CONF_HOST: mock_config_entry.data[CONF_HOST]}, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -@pytest.mark.parametrize("mock_remote_control", [LEGACY_PORT], indirect=True) -async def test_user_flow_legacy_device( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_remote_control, -) -> None: - """Test we can setup an entry with a legacy port.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - async def mock_check_connectable(): - if mock_remote_control.call_args[0][1] == LEGACY_PORT: - return True - raise SkyBoxConnectionError("Wrong port") - - mock_remote_control._instance_mock.check_connectable = mock_check_connectable - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == {**SAMPLE_CONFIG, CONF_PORT: LEGACY_PORT} - - assert len(mock_setup_entry.mock_calls) == 1 - - -@pytest.mark.parametrize("mock_remote_control", [6], indirect=True) -async def test_user_flow_unconnectable( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_remote_control, -) -> None: - """Test we can setup an entry.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["step_id"] == "user" - assert result["type"] is FlowResultType.FORM - - mock_remote_control._instance_mock.check_connectable = AsyncMock( - 
side_effect=SkyBoxConnectionError("Example") - ) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - assert len(mock_setup_entry.mock_calls) == 0 - - mock_remote_control._instance_mock.check_connectable = AsyncMock(True) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == SAMPLE_CONFIG - - assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/sky_remote/test_init.py b/tests/components/sky_remote/test_init.py deleted file mode 100644 index fe316baa6bf..00000000000 --- a/tests/components/sky_remote/test_init.py +++ /dev/null @@ -1,59 +0,0 @@ -"""Tests for the Sky Remote component.""" - -from unittest.mock import AsyncMock - -from skyboxremote import SkyBoxConnectionError - -from homeassistant.components.sky_remote.const import DEFAULT_PORT, DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr - -from . import setup_mock_entry - -from tests.common import MockConfigEntry - - -async def test_setup_entry( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_remote_control, - device_registry: dr.DeviceRegistry, -) -> None: - """Test successful setup of entry.""" - await setup_mock_entry(hass, mock_config_entry) - - assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - mock_remote_control.assert_called_once_with("example.com", DEFAULT_PORT) - device_entry = device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.entry_id)} - ) - assert device_entry is not None - assert device_entry.name == "example.com" - - -async def test_setup_unconnectable_entry( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_remote_control, -) -> None: - """Test unsuccessful setup of entry.""" - mock_remote_control._instance_mock.check_connectable = AsyncMock( - side_effect=SkyBoxConnectionError() - ) - - await setup_mock_entry(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_unload_entry( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_remote_control -) -> None: - """Test unload an entry.""" - await setup_mock_entry(hass, mock_config_entry) - - assert mock_config_entry.state is ConfigEntryState.LOADED - assert await hass.config_entries.async_unload(mock_config_entry.entry_id) - await hass.async_block_till_done() - assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/sky_remote/test_remote.py b/tests/components/sky_remote/test_remote.py deleted file mode 100644 index 301375bc039..00000000000 --- a/tests/components/sky_remote/test_remote.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Test sky_remote remote.""" - -import pytest - -from homeassistant.components.remote import ( - ATTR_COMMAND, - DOMAIN as REMOTE_DOMAIN, - SERVICE_SEND_COMMAND, -) -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError - -from . 
import setup_mock_entry - -ENTITY_ID = "remote.example_com" - - -async def test_send_command( - hass: HomeAssistant, mock_config_entry, mock_remote_control -) -> None: - """Test "send_command" method.""" - await setup_mock_entry(hass, mock_config_entry) - await hass.services.async_call( - REMOTE_DOMAIN, - SERVICE_SEND_COMMAND, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_COMMAND: ["sky"]}, - blocking=True, - ) - mock_remote_control._instance_mock.send_keys.assert_called_once_with(["sky"]) - - -async def test_send_invalid_command( - hass: HomeAssistant, mock_config_entry, mock_remote_control -) -> None: - """Test "send_command" method.""" - await setup_mock_entry(hass, mock_config_entry) - - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - REMOTE_DOMAIN, - SERVICE_SEND_COMMAND, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_COMMAND: ["apple"]}, - blocking=True, - ) - mock_remote_control._instance_mock.send_keys.assert_not_called() diff --git a/tests/components/smarty/conftest.py b/tests/components/smarty/conftest.py index a9b518d88f4..c61ec4b1022 100644 --- a/tests/components/smarty/conftest.py +++ b/tests/components/smarty/conftest.py @@ -50,7 +50,6 @@ def mock_smarty() -> Generator[AsyncMock]: client.filter_timer = 31 client.get_configuration_version.return_value = 111 client.get_software_version.return_value = 127 - client.reset_filters_timer.return_value = True yield client diff --git a/tests/components/smarty/snapshots/test_button.ambr b/tests/components/smarty/snapshots/test_button.ambr deleted file mode 100644 index 38849bd2b2e..00000000000 --- a/tests/components/smarty/snapshots/test_button.ambr +++ /dev/null @@ -1,47 +0,0 @@ -# serializer version: 1 -# name: test_all_entities[button.mock_title_reset_filters_timer-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': None, - 'entity_id': 'button.mock_title_reset_filters_timer', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Reset filters timer', - 'platform': 'smarty', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reset_filters_timer', - 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_reset_filters_timer', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[button.mock_title_reset_filters_timer-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mock Title Reset filters timer', - }), - 'context': , - 'entity_id': 'button.mock_title_reset_filters_timer', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- diff --git a/tests/components/smarty/test_button.py b/tests/components/smarty/test_button.py deleted file mode 100644 index 0a7b67f2be6..00000000000 --- a/tests/components/smarty/test_button.py +++ /dev/null @@ -1,45 +0,0 @@ -"""Tests for the Smarty button platform.""" - -from unittest.mock import AsyncMock, patch - -from syrupy import SnapshotAssertion - -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.const import ATTR_ENTITY_ID, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . 
import setup_integration - -from tests.common import MockConfigEntry, snapshot_platform - - -async def test_all_entities( - hass: HomeAssistant, - snapshot: SnapshotAssertion, - mock_smarty: AsyncMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, -) -> None: - """Test all entities.""" - with patch("homeassistant.components.smarty.PLATFORMS", [Platform.BUTTON]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -async def test_setting_value( - hass: HomeAssistant, - mock_smarty: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test setting value.""" - await setup_integration(hass, mock_config_entry) - - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - target={ATTR_ENTITY_ID: "button.mock_title_reset_filters_timer"}, - blocking=True, - ) - mock_smarty.reset_filters_timer.assert_called_once_with() diff --git a/tests/components/sonarr/__init__.py b/tests/components/sonarr/__init__.py index 660102ed082..b6050808a34 100644 --- a/tests/components/sonarr/__init__.py +++ b/tests/components/sonarr/__init__.py @@ -5,6 +5,6 @@ from homeassistant.const import CONF_API_KEY, CONF_URL MOCK_REAUTH_INPUT = {CONF_API_KEY: "test-api-key-reauth"} MOCK_USER_INPUT = { - CONF_URL: "http://192.168.1.189:8989/", + CONF_URL: "http://192.168.1.189:8989", CONF_API_KEY: "MOCK_API_KEY", } diff --git a/tests/components/sonarr/test_config_flow.py b/tests/components/sonarr/test_config_flow.py index efbfbd749b3..118d5020cba 100644 --- a/tests/components/sonarr/test_config_flow.py +++ b/tests/components/sonarr/test_config_flow.py @@ -50,34 +50,6 @@ async def test_cannot_connect( assert result["errors"] == {"base": "cannot_connect"} -async def test_url_rewrite( - hass: HomeAssistant, - mock_sonarr_config_flow: MagicMock, - mock_setup_entry: None, -) -> None: - """Test the full manual user flow from start to finish.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={CONF_SOURCE: SOURCE_USER}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - user_input = MOCK_USER_INPUT.copy() - user_input[CONF_URL] = "https://192.168.1.189" - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=user_input, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "192.168.1.189" - - assert result["data"] - assert result["data"][CONF_URL] == "https://192.168.1.189:443/" - - async def test_invalid_auth( hass: HomeAssistant, mock_sonarr_config_flow: MagicMock ) -> None: @@ -173,7 +145,7 @@ async def test_full_user_flow_implementation( assert result["title"] == "192.168.1.189" assert result["data"] - assert result["data"][CONF_URL] == "http://192.168.1.189:8989/" + assert result["data"][CONF_URL] == "http://192.168.1.189:8989" async def test_full_user_flow_advanced_options( @@ -203,7 +175,7 @@ async def test_full_user_flow_advanced_options( assert result["title"] == "192.168.1.189" assert result["data"] - assert result["data"][CONF_URL] == "http://192.168.1.189:8989/" + assert result["data"][CONF_URL] == "http://192.168.1.189:8989" assert result["data"][CONF_VERIFY_SSL] diff --git a/tests/components/srp_energy/test_config_flow.py b/tests/components/srp_energy/test_config_flow.py index e3abb3c98df..149e08014ac 100644 --- a/tests/components/srp_energy/test_config_flow.py +++ b/tests/components/srp_energy/test_config_flow.py @@ -100,6 +100,10 @@ async 
def test_form_invalid_auth( assert result["errors"] == {"base": "invalid_auth"} +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.srp_energy.config.abort.unknown"], +) async def test_form_unknown_error( hass: HomeAssistant, mock_srp_energy_config_flow: MagicMock, diff --git a/tests/components/subaru/test_config_flow.py b/tests/components/subaru/test_config_flow.py index 6abc544c92a..d930aafbdfb 100644 --- a/tests/components/subaru/test_config_flow.py +++ b/tests/components/subaru/test_config_flow.py @@ -192,6 +192,10 @@ async def test_two_factor_request_success( assert len(mock_two_factor_request.mock_calls) == 1 +@pytest.mark.parametrize( # Remove when translations fixed + "ignore_translations", + ["component.subaru.config.abort.two_factor_request_failed"], +) async def test_two_factor_request_fail( hass: HomeAssistant, two_factor_start_form ) -> None: diff --git a/tests/components/suez_water/conftest.py b/tests/components/suez_water/conftest.py index f634a053c65..bcb817a5025 100644 --- a/tests/components/suez_water/conftest.py +++ b/tests/components/suez_water/conftest.py @@ -1,10 +1,8 @@ """Common fixtures for the Suez Water tests.""" from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch -from pysuez import AggregatedData, PriceResult -from pysuez.const import ATTRIBUTION import pytest from homeassistant.components.suez_water.const import DOMAIN @@ -39,7 +37,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture(name="suez_client") -def mock_suez_client() -> Generator[AsyncMock]: +def mock_suez_client() -> Generator[MagicMock]: """Create mock for suez_water external api.""" with ( patch( @@ -50,31 +48,28 @@ def mock_suez_client() -> Generator[AsyncMock]: new=mock_client, ), ): - suez_client = mock_client.return_value - suez_client.check_credentials.return_value = True - - result = AggregatedData( - value=160, - current_month={ + client = mock_client.return_value + client.check_credentials.return_value = True + client.update.return_value = None + client.state = 160 + client.attributes = { + "thisMonthConsumption": { "2024-01-01": 130, "2024-01-02": 145, }, - previous_month={ + "previousMonthConsumption": { "2024-12-01": 154, "2024-12-02": 166, }, - current_year=1500, - previous_year=1000, - attribution=ATTRIBUTION, - highest_monthly_consumption=2558, - history={ + "highestMonthlyConsumption": 2558, + "lastYearOverAll": 1000, + "thisYearOverAll": 1500, + "history": { "2024-01-01": 130, "2024-01-02": 145, "2024-12-01": 154, "2024-12-02": 166, }, - ) - - suez_client.fetch_aggregated_data.return_value = result - suez_client.get_price.return_value = PriceResult("4.74") - yield suez_client + "attribution": "suez water mock test", + } + yield client diff --git a/tests/components/suez_water/snapshots/test_sensor.ambr b/tests/components/suez_water/snapshots/test_sensor.ambr index da0ed3df7dd..acc3042f93b 100644 --- a/tests/components/suez_water/snapshots/test_sensor.ambr +++ b/tests/components/suez_water/snapshots/test_sensor.ambr @@ -1,53 +1,4 @@ # serializer version: 1 -# name: test_sensors_valid_state[sensor.suez_mock_device_water_price-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.suez_mock_device_water_price', - 'has_entity_name': 
True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Water price', - 'platform': 'suez_water', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'water_price', - 'unique_id': 'test-counter_water_price', - 'unit_of_measurement': '€', - }) -# --- -# name: test_sensors_valid_state[sensor.suez_mock_device_water_price-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by toutsurmoneau.fr', - 'device_class': 'monetary', - 'friendly_name': 'Suez mock device Water price', - 'unit_of_measurement': '€', - }), - 'context': , - 'entity_id': 'sensor.suez_mock_device_water_price', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4.74', - }) -# --- # name: test_sensors_valid_state[sensor.suez_mock_device_water_usage_yesterday-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -84,7 +35,7 @@ # name: test_sensors_valid_state[sensor.suez_mock_device_water_usage_yesterday-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by toutsurmoneau.fr', + 'attribution': 'suez water mock test', 'device_class': 'water', 'friendly_name': 'Suez mock device Water usage yesterday', 'highest_monthly_consumption': 2558, diff --git a/tests/components/suez_water/test_config_flow.py b/tests/components/suez_water/test_config_flow.py index 6779b4c7d02..ddf7bcd3d80 100644 --- a/tests/components/suez_water/test_config_flow.py +++ b/tests/components/suez_water/test_config_flow.py @@ -1,12 +1,12 @@ """Test the Suez Water config flow.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch -from pysuez.exception import PySuezError +from pysuez.client import PySuezError import pytest from homeassistant import config_entries -from homeassistant.components.suez_water.const import CONF_COUNTER_ID, DOMAIN +from homeassistant.components.suez_water.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -15,9 +15,7 @@ from .conftest import MOCK_DATA from tests.common import MockConfigEntry -async def test_form( - hass: HomeAssistant, mock_setup_entry: AsyncMock, suez_client: AsyncMock -) -> None: +async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -25,11 +23,12 @@ async def test_form( assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) - await hass.async_block_till_done() + with patch("homeassistant.components.suez_water.config_flow.SuezClient"): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" @@ -39,28 +38,37 @@ async def test_form( async def test_form_invalid_auth( - hass: HomeAssistant, mock_setup_entry: AsyncMock, suez_client: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock ) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - suez_client.check_credentials.return_value = False - result = await 
hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) + with ( + patch( + "homeassistant.components.suez_water.config_flow.SuezClient.__init__", + return_value=None, + ), + patch( + "homeassistant.components.suez_water.config_flow.SuezClient.check_credentials", + return_value=False, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_auth"} - suez_client.check_credentials.return_value = True - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) - await hass.async_block_till_done() + with patch("homeassistant.components.suez_water.config_flow.SuezClient"): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" @@ -96,71 +104,34 @@ async def test_form_already_configured(hass: HomeAssistant) -> None: ("exception", "error"), [(PySuezError, "cannot_connect"), (Exception, "unknown")] ) async def test_form_error( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - exception: Exception, - suez_client: AsyncMock, - error: str, + hass: HomeAssistant, mock_setup_entry: AsyncMock, exception: Exception, error: str ) -> None: """Test we handle errors.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - suez_client.check_credentials.side_effect = exception - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) + with patch( + "homeassistant.components.suez_water.config_flow.SuezClient", + side_effect=exception, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": error} - suez_client.check_credentials.return_value = True - suez_client.check_credentials.side_effect = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) + with patch( + "homeassistant.components.suez_water.config_flow.SuezClient", + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" assert result["data"] == MOCK_DATA assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_form_auto_counter( - hass: HomeAssistant, mock_setup_entry: AsyncMock, suez_client: AsyncMock -) -> None: - """Test form set counter if not set by user.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - partial_form = {**MOCK_DATA} - partial_form.pop(CONF_COUNTER_ID) - suez_client.find_counter.side_effect = PySuezError("test counter not found") - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - partial_form, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "counter_not_found"} - - suez_client.find_counter.side_effect = None - suez_client.find_counter.return_value = MOCK_DATA[CONF_COUNTER_ID] - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - partial_form, - ) - await hass.async_block_till_done() - - assert 
result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "test-username" - assert result["result"].unique_id == "test-username" - assert result["data"] == MOCK_DATA - assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/suez_water/test_init.py b/tests/components/suez_water/test_init.py index 78d086af38f..b9a8875a8a1 100644 --- a/tests/components/suez_water/test_init.py +++ b/tests/components/suez_water/test_init.py @@ -1,7 +1,5 @@ """Test Suez_water integration initialization.""" -from unittest.mock import AsyncMock - from homeassistant.components.suez_water.coordinator import PySuezError from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -13,7 +11,7 @@ from tests.common import MockConfigEntry async def test_initialization_invalid_credentials( hass: HomeAssistant, - suez_client: AsyncMock, + suez_client, mock_config_entry: MockConfigEntry, ) -> None: """Test that suez_water can't be loaded with invalid credentials.""" @@ -26,7 +24,7 @@ async def test_initialization_invalid_credentials( async def test_initialization_setup_api_error( hass: HomeAssistant, - suez_client: AsyncMock, + suez_client, mock_config_entry: MockConfigEntry, ) -> None: """Test that suez_water needs to retry loading if api failed to connect.""" diff --git a/tests/components/suez_water/test_sensor.py b/tests/components/suez_water/test_sensor.py index cb578432f62..d3da159ee28 100644 --- a/tests/components/suez_water/test_sensor.py +++ b/tests/components/suez_water/test_sensor.py @@ -1,9 +1,8 @@ """Test Suez_water sensor platform.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import MagicMock, patch from freezegun.api import FrozenDateTimeFactory -import pytest from syrupy import SnapshotAssertion from homeassistant.components.suez_water.const import DATA_REFRESH_INTERVAL @@ -21,7 +20,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_plat async def test_sensors_valid_state( hass: HomeAssistant, snapshot: SnapshotAssertion, - suez_client: AsyncMock, + suez_client: MagicMock, mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, ) -> None: @@ -33,13 +32,11 @@ async def test_sensors_valid_state( await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) -@pytest.mark.parametrize("method", [("fetch_aggregated_data"), ("get_price")]) async def test_sensors_failed_update( hass: HomeAssistant, - suez_client: AsyncMock, + suez_client, mock_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, - method: str, ) -> None: """Test that suez_water sensor reflect failure when api fails.""" @@ -48,20 +45,18 @@ async def test_sensors_failed_update( assert mock_config_entry.state is ConfigEntryState.LOADED entity_ids = await hass.async_add_executor_job(hass.states.entity_ids) - assert len(entity_ids) == 2 + assert len(entity_ids) == 1 - for entity in entity_ids: - state = hass.states.get(entity) - assert entity - assert state.state != STATE_UNAVAILABLE + state = hass.states.get(entity_ids[0]) + assert entity_ids[0] + assert state.state != STATE_UNAVAILABLE - getattr(suez_client, method).side_effect = PySuezError("Should fail to update") + suez_client.update.side_effect = PySuezError("Should fail to update") freezer.tick(DATA_REFRESH_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(True) - for entity in entity_ids: - state = hass.states.get(entity) - assert entity - assert state.state == STATE_UNAVAILABLE + state = 
hass.states.get(entity_ids[0]) + assert state + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/switchbot_cloud/test_init.py b/tests/components/switchbot_cloud/test_init.py index 43431ae04c0..25ea370efe5 100644 --- a/tests/components/switchbot_cloud/test_init.py +++ b/tests/components/switchbot_cloud/test_init.py @@ -50,18 +50,6 @@ async def test_setup_entry_success( remoteType="DIY Plug", hubDeviceId="test-hub-id", ), - Remote( - deviceId="meter-pro-1", - deviceName="meter-pro-name-1", - deviceType="MeterPro(CO2)", - hubDeviceId="test-hub-id", - ), - Remote( - deviceId="hub2-1", - deviceName="hub2-name-1", - deviceType="Hub 2", - hubDeviceId="test-hub-id", - ), ] mock_get_status.return_value = {"power": PowerState.ON.value} entry = configure_integration(hass) diff --git a/tests/components/switcher_kis/consts.py b/tests/components/switcher_kis/consts.py index fe77ee0236b..ab0bef4e335 100644 --- a/tests/components/switcher_kis/consts.py +++ b/tests/components/switcher_kis/consts.py @@ -5,7 +5,6 @@ from aioswitcher.device import ( DeviceType, ShutterDirection, SwitcherDualShutterSingleLight, - SwitcherLight, SwitcherPowerPlug, SwitcherShutter, SwitcherSingleShutterDualLight, @@ -24,27 +23,18 @@ DUMMY_DEVICE_ID3 = "bada77" DUMMY_DEVICE_ID4 = "bbd164" DUMMY_DEVICE_ID5 = "bcdb64" DUMMY_DEVICE_ID6 = "bcdc64" -DUMMY_DEVICE_ID7 = "bcdd64" -DUMMY_DEVICE_ID8 = "bcde64" -DUMMY_DEVICE_ID9 = "bcdf64" DUMMY_DEVICE_KEY1 = "18" DUMMY_DEVICE_KEY2 = "01" DUMMY_DEVICE_KEY3 = "12" DUMMY_DEVICE_KEY4 = "07" DUMMY_DEVICE_KEY5 = "15" DUMMY_DEVICE_KEY6 = "16" -DUMMY_DEVICE_KEY7 = "17" -DUMMY_DEVICE_KEY8 = "18" -DUMMY_DEVICE_KEY9 = "19" DUMMY_DEVICE_NAME1 = "Plug 23BC" DUMMY_DEVICE_NAME2 = "Heater FE12" DUMMY_DEVICE_NAME3 = "Breeze AB39" DUMMY_DEVICE_NAME4 = "Runner DD77" DUMMY_DEVICE_NAME5 = "RunnerS11 6CF5" DUMMY_DEVICE_NAME6 = "RunnerS12 A9BE" -DUMMY_DEVICE_NAME7 = "Light 36BB" -DUMMY_DEVICE_NAME8 = "Light 36CB" -DUMMY_DEVICE_NAME9 = "Light 36DB" DUMMY_DEVICE_PASSWORD = "12345678" DUMMY_ELECTRIC_CURRENT1 = 0.5 DUMMY_ELECTRIC_CURRENT2 = 12.8 @@ -54,27 +44,18 @@ DUMMY_IP_ADDRESS3 = "192.168.100.159" DUMMY_IP_ADDRESS4 = "192.168.100.160" DUMMY_IP_ADDRESS5 = "192.168.100.161" DUMMY_IP_ADDRESS6 = "192.168.100.162" -DUMMY_IP_ADDRESS7 = "192.168.100.163" -DUMMY_IP_ADDRESS8 = "192.168.100.164" -DUMMY_IP_ADDRESS9 = "192.168.100.165" DUMMY_MAC_ADDRESS1 = "A1:B2:C3:45:67:D8" DUMMY_MAC_ADDRESS2 = "A1:B2:C3:45:67:D9" DUMMY_MAC_ADDRESS3 = "A1:B2:C3:45:67:DA" DUMMY_MAC_ADDRESS4 = "A1:B2:C3:45:67:DB" DUMMY_MAC_ADDRESS5 = "A1:B2:C3:45:67:DC" DUMMY_MAC_ADDRESS6 = "A1:B2:C3:45:67:DD" -DUMMY_MAC_ADDRESS7 = "A1:B2:C3:45:67:DE" -DUMMY_MAC_ADDRESS8 = "A1:B2:C3:45:67:DF" -DUMMY_MAC_ADDRESS9 = "A1:B2:C3:45:67:DG" DUMMY_TOKEN_NEEDED1 = False DUMMY_TOKEN_NEEDED2 = False DUMMY_TOKEN_NEEDED3 = False DUMMY_TOKEN_NEEDED4 = False DUMMY_TOKEN_NEEDED5 = True DUMMY_TOKEN_NEEDED6 = True -DUMMY_TOKEN_NEEDED7 = True -DUMMY_TOKEN_NEEDED8 = True -DUMMY_TOKEN_NEEDED9 = True DUMMY_PHONE_ID = "1234" DUMMY_POWER_CONSUMPTION1 = 100 DUMMY_POWER_CONSUMPTION2 = 2780 @@ -94,7 +75,6 @@ DUMMY_USERNAME = "email" DUMMY_TOKEN = "zvVvd7JxtN7CgvkD1Psujw==" DUMMY_LIGHT = [DeviceState.ON] DUMMY_LIGHT_2 = [DeviceState.ON, DeviceState.ON] -DUMMY_LIGHT_3 = [DeviceState.ON, DeviceState.ON, DeviceState.ON] DUMMY_PLUG_DEVICE = SwitcherPowerPlug( DeviceType.POWER_PLUG, @@ -182,40 +162,4 @@ DUMMY_THERMOSTAT_DEVICE = SwitcherThermostat( DUMMY_REMOTE_ID, ) -DUMMY_LIGHT_DEVICE = SwitcherLight( - DeviceType.LIGHT_SL01, - DeviceState.ON, - DUMMY_DEVICE_ID7, - 
DUMMY_DEVICE_KEY7, - DUMMY_IP_ADDRESS7, - DUMMY_MAC_ADDRESS7, - DUMMY_DEVICE_NAME7, - DUMMY_TOKEN_NEEDED7, - DUMMY_LIGHT, -) - -DUMMY_DUAL_LIGHT_DEVICE = SwitcherLight( - DeviceType.LIGHT_SL02, - DeviceState.ON, - DUMMY_DEVICE_ID8, - DUMMY_DEVICE_KEY8, - DUMMY_IP_ADDRESS8, - DUMMY_MAC_ADDRESS8, - DUMMY_DEVICE_NAME8, - DUMMY_TOKEN_NEEDED8, - DUMMY_LIGHT_2, -) - -DUMMY_TRIPLE_LIGHT_DEVICE = SwitcherLight( - DeviceType.LIGHT_SL03, - DeviceState.ON, - DUMMY_DEVICE_ID9, - DUMMY_DEVICE_KEY9, - DUMMY_IP_ADDRESS9, - DUMMY_MAC_ADDRESS9, - DUMMY_DEVICE_NAME9, - DUMMY_TOKEN_NEEDED9, - DUMMY_LIGHT_3, -) - DUMMY_SWITCHER_DEVICES = [DUMMY_PLUG_DEVICE, DUMMY_WATER_HEATER_DEVICE] diff --git a/tests/components/switcher_kis/test_light.py b/tests/components/switcher_kis/test_light.py index 60c851bf6a9..d360cb11291 100644 --- a/tests/components/switcher_kis/test_light.py +++ b/tests/components/switcher_kis/test_light.py @@ -21,43 +21,26 @@ from homeassistant.util import slugify from . import init_integration from .consts import ( - DUMMY_DUAL_LIGHT_DEVICE as DEVICE4, DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE as DEVICE2, - DUMMY_LIGHT_DEVICE as DEVICE3, DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE as DEVICE, DUMMY_TOKEN as TOKEN, - DUMMY_TRIPLE_LIGHT_DEVICE as DEVICE5, DUMMY_USERNAME as USERNAME, ) ENTITY_ID = f"{LIGHT_DOMAIN}.{slugify(DEVICE.name)}_light_1" -ENTITY_ID_2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE.name)}_light_2" -ENTITY_ID2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE2.name)}" -ENTITY_ID3 = f"{LIGHT_DOMAIN}.{slugify(DEVICE3.name)}" -ENTITY_ID4 = f"{LIGHT_DOMAIN}.{slugify(DEVICE4.name)}_light_1" -ENTITY_ID4_2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE4.name)}_light_2" -ENTITY_ID5 = f"{LIGHT_DOMAIN}.{slugify(DEVICE5.name)}_light_1" -ENTITY_ID5_2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE5.name)}_light_2" -ENTITY_ID5_3 = f"{LIGHT_DOMAIN}.{slugify(DEVICE5.name)}_light_3" +ENTITY_ID2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE.name)}_light_2" +ENTITY_ID3 = f"{LIGHT_DOMAIN}.{slugify(DEVICE2.name)}" @pytest.mark.parametrize( ("device", "entity_id", "light_id", "device_state"), [ (DEVICE, ENTITY_ID, 0, [DeviceState.OFF, DeviceState.ON]), - (DEVICE, ENTITY_ID_2, 1, [DeviceState.ON, DeviceState.OFF]), - (DEVICE2, ENTITY_ID2, 0, [DeviceState.OFF]), - (DEVICE3, ENTITY_ID3, 0, [DeviceState.OFF]), - (DEVICE4, ENTITY_ID4, 0, [DeviceState.OFF, DeviceState.ON]), - (DEVICE4, ENTITY_ID4_2, 1, [DeviceState.ON, DeviceState.OFF]), - (DEVICE5, ENTITY_ID5, 0, [DeviceState.OFF, DeviceState.ON, DeviceState.ON]), - (DEVICE5, ENTITY_ID5_2, 1, [DeviceState.ON, DeviceState.OFF, DeviceState.ON]), - (DEVICE5, ENTITY_ID5_3, 2, [DeviceState.ON, DeviceState.ON, DeviceState.OFF]), + (DEVICE, ENTITY_ID2, 1, [DeviceState.ON, DeviceState.OFF]), + (DEVICE2, ENTITY_ID3, 0, [DeviceState.OFF]), ], ) -@pytest.mark.parametrize( - "mock_bridge", [[DEVICE, DEVICE2, DEVICE3, DEVICE4, DEVICE5]], indirect=True -) +@pytest.mark.parametrize("mock_bridge", [[DEVICE, DEVICE2]], indirect=True) async def test_light( hass: HomeAssistant, mock_bridge, @@ -115,19 +98,11 @@ async def test_light( ("device", "entity_id", "light_id", "device_state"), [ (DEVICE, ENTITY_ID, 0, [DeviceState.OFF, DeviceState.ON]), - (DEVICE, ENTITY_ID_2, 1, [DeviceState.ON, DeviceState.OFF]), - (DEVICE2, ENTITY_ID2, 0, [DeviceState.OFF]), - (DEVICE3, ENTITY_ID3, 0, [DeviceState.OFF]), - (DEVICE4, ENTITY_ID4, 0, [DeviceState.OFF, DeviceState.ON]), - (DEVICE4, ENTITY_ID4_2, 1, [DeviceState.ON, DeviceState.OFF]), - (DEVICE5, ENTITY_ID5, 0, [DeviceState.OFF, DeviceState.ON, DeviceState.ON]), - (DEVICE5, ENTITY_ID5_2, 1, 
[DeviceState.ON, DeviceState.OFF, DeviceState.ON]), - (DEVICE5, ENTITY_ID5_3, 2, [DeviceState.ON, DeviceState.ON, DeviceState.OFF]), + (DEVICE, ENTITY_ID2, 1, [DeviceState.ON, DeviceState.OFF]), + (DEVICE2, ENTITY_ID3, 0, [DeviceState.OFF]), ], ) -@pytest.mark.parametrize( - "mock_bridge", [[DEVICE, DEVICE2, DEVICE3, DEVICE4, DEVICE5]], indirect=True -) +@pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_light_control_fail( hass: HomeAssistant, mock_bridge, diff --git a/tests/components/tedee/conftest.py b/tests/components/tedee/conftest.py index 8e028cb5300..68444de640c 100644 --- a/tests/components/tedee/conftest.py +++ b/tests/components/tedee/conftest.py @@ -6,8 +6,8 @@ from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch -from aiotedee.bridge import TedeeBridge -from aiotedee.lock import TedeeLock +from pytedee_async.bridge import TedeeBridge +from pytedee_async.lock import TedeeLock import pytest from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN diff --git a/tests/components/tedee/test_binary_sensor.py b/tests/components/tedee/test_binary_sensor.py index dfe70e7a2ea..788d31c84d2 100644 --- a/tests/components/tedee/test_binary_sensor.py +++ b/tests/components/tedee/test_binary_sensor.py @@ -3,8 +3,8 @@ from datetime import timedelta from unittest.mock import MagicMock -from aiotedee import TedeeLock from freezegun.api import FrozenDateTimeFactory +from pytedee_async import TedeeLock import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/tedee/test_config_flow.py b/tests/components/tedee/test_config_flow.py index 825e01aca70..d3654783bd6 100644 --- a/tests/components/tedee/test_config_flow.py +++ b/tests/components/tedee/test_config_flow.py @@ -2,16 +2,15 @@ from unittest.mock import MagicMock, patch -from aiotedee import ( +from pytedee_async import ( TedeeClientException, TedeeDataUpdateException, TedeeLocalAuthException, ) -from aiotedee.bridge import TedeeBridge import pytest from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN -from homeassistant.config_entries import SOURCE_USER, ConfigFlowResult +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -135,10 +134,11 @@ async def test_reauth_flow( assert result["reason"] == "reauth_successful" -async def __do_reconfigure_flow( - hass: HomeAssistant, mock_config_entry: MockConfigEntry -) -> ConfigFlowResult: - """Initialize a reconfigure flow.""" +async def test_reconfigure_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_tedee: MagicMock +) -> None: + """Test that the reconfigure flow works.""" + mock_config_entry.add_to_hass(hass) reconfigure_result = await mock_config_entry.start_reconfigure_flow(hass) @@ -146,19 +146,11 @@ async def __do_reconfigure_flow( assert reconfigure_result["type"] is FlowResultType.FORM assert reconfigure_result["step_id"] == "reconfigure" - return await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( reconfigure_result["flow_id"], {CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, CONF_HOST: "192.168.1.43"}, ) - -async def test_reconfigure_flow( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_tedee: MagicMock -) -> None: - """Test that the reconfigure flow works.""" - - result = await 
__do_reconfigure_flow(hass, mock_config_entry) - assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" @@ -170,18 +162,3 @@ async def test_reconfigure_flow( CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, CONF_WEBHOOK_ID: WEBHOOK_ID, } - - -async def test_reconfigure_unique_id_mismatch( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_tedee: MagicMock -) -> None: - """Ensure reconfigure flow aborts when the bride changes.""" - - mock_tedee.get_local_bridge.return_value = TedeeBridge( - 0, "1111-1111", "Bridge-R2D2" - ) - - result = await __do_reconfigure_flow(hass, mock_config_entry) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "unique_id_mismatch" diff --git a/tests/components/tedee/test_init.py b/tests/components/tedee/test_init.py index 63701bb1788..d4ac1c9d290 100644 --- a/tests/components/tedee/test_init.py +++ b/tests/components/tedee/test_init.py @@ -5,7 +5,7 @@ from typing import Any from unittest.mock import MagicMock, patch from urllib.parse import urlparse -from aiotedee.exception import ( +from pytedee_async.exception import ( TedeeAuthException, TedeeClientException, TedeeWebhookException, diff --git a/tests/components/tedee/test_lock.py b/tests/components/tedee/test_lock.py index 45eae6e22d9..3f6b97e2c70 100644 --- a/tests/components/tedee/test_lock.py +++ b/tests/components/tedee/test_lock.py @@ -4,13 +4,13 @@ from datetime import timedelta from unittest.mock import MagicMock from urllib.parse import urlparse -from aiotedee import TedeeLock, TedeeLockState -from aiotedee.exception import ( +from freezegun.api import FrozenDateTimeFactory +from pytedee_async import TedeeLock, TedeeLockState +from pytedee_async.exception import ( TedeeClientException, TedeeDataUpdateException, TedeeLocalAuthException, ) -from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion diff --git a/tests/components/tedee/test_sensor.py b/tests/components/tedee/test_sensor.py index ddbcd5086af..72fbd9cbe8d 100644 --- a/tests/components/tedee/test_sensor.py +++ b/tests/components/tedee/test_sensor.py @@ -3,8 +3,8 @@ from datetime import timedelta from unittest.mock import MagicMock -from aiotedee import TedeeLock from freezegun.api import FrozenDateTimeFactory +from pytedee_async import TedeeLock import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index a3e53aab9e1..72c453d48dc 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -794,7 +794,7 @@ EARLY_END_ERROR = "invalid template (TemplateSyntaxError: unexpected 'end of tem ), "unit_of_measurement": ( "'None' is not a valid unit for device class 'energy'; " - "expected one of 'cal', 'Gcal', 'GJ', 'GWh', 'J', 'kcal', 'kJ', 'kWh', 'Mcal', 'MJ', 'MWh', 'TWh', 'Wh'" + "expected one of 'cal', 'Gcal', 'GJ', 'J', 'kcal', 'kJ', 'kWh', 'Mcal', 'MJ', 'MWh', 'Wh'" ), }, ), diff --git a/tests/helpers/test_trigger_template_entity.py b/tests/components/template/test_manual_trigger_entity.py similarity index 100% rename from tests/helpers/test_trigger_template_entity.py rename to tests/components/template/test_manual_trigger_entity.py diff --git a/tests/components/tesla_fleet/test_button.py b/tests/components/tesla_fleet/test_button.py index ef1cfd90357..07fdc962be9 100644 --- a/tests/components/tesla_fleet/test_button.py +++ 
b/tests/components/tesla_fleet/test_button.py @@ -77,13 +77,9 @@ async def test_press_signing_error( new_product["response"][0]["command_signing"] = "required" mock_products.return_value = new_product - with ( - patch("homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key"), - ): - await setup_platform(hass, normal_config_entry, [Platform.BUTTON]) + await setup_platform(hass, normal_config_entry, [Platform.BUTTON]) with ( - patch("homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key"), patch( "homeassistant.components.tesla_fleet.VehicleSigned.flash_lights", side_effect=NotOnWhitelistFault, diff --git a/tests/components/thethingsnetwork/test_init.py b/tests/components/thethingsnetwork/test_init.py index e39c764d5f9..1e0b64c933d 100644 --- a/tests/components/thethingsnetwork/test_init.py +++ b/tests/components/thethingsnetwork/test_init.py @@ -4,6 +4,22 @@ import pytest from ttn_client import TTNAuthError from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir +from homeassistant.setup import async_setup_component + +from .conftest import DOMAIN + + +async def test_error_configuration( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: + """Test issue is logged when deprecated configuration is used.""" + await async_setup_component( + hass, DOMAIN, {DOMAIN: {"app_id": "123", "access_key": "42"}} + ) + await hass.async_block_till_done() + assert issue_registry.async_get_issue(DOMAIN, "manual_migration") @pytest.mark.parametrize(("exception_class"), [TTNAuthError, Exception]) diff --git a/tests/components/threshold/test_binary_sensor.py b/tests/components/threshold/test_binary_sensor.py index 259009c6319..e0973c7a580 100644 --- a/tests/components/threshold/test_binary_sensor.py +++ b/tests/components/threshold/test_binary_sensor.py @@ -538,7 +538,7 @@ async def test_sensor_no_lower_upper( await async_setup_component(hass, Platform.BINARY_SENSOR, config) await hass.async_block_till_done() - assert "Lower or Upper thresholds are not provided" in caplog.text + assert "Lower or Upper thresholds not provided" in caplog.text async def test_device_id( diff --git a/tests/components/tibber/test_diagnostics.py b/tests/components/tibber/test_diagnostics.py index 16c735596d0..34ecb63dfec 100644 --- a/tests/components/tibber/test_diagnostics.py +++ b/tests/components/tibber/test_diagnostics.py @@ -19,9 +19,12 @@ async def test_entry_diagnostics( config_entry, ) -> None: """Test config entry diagnostics.""" - with patch( - "tibber.Tibber.update_info", - return_value=None, + with ( + patch( + "tibber.Tibber.update_info", + return_value=None, + ), + patch("homeassistant.components.tibber.discovery.async_load_platform"), ): assert await async_setup_component(hass, "tibber", {}) diff --git a/tests/components/tibber/test_notify.py b/tests/components/tibber/test_notify.py index 9b731e78bf6..69af92c4d5d 100644 --- a/tests/components/tibber/test_notify.py +++ b/tests/components/tibber/test_notify.py @@ -6,6 +6,7 @@ from unittest.mock import MagicMock import pytest from homeassistant.components.recorder import Recorder +from homeassistant.components.tibber import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -18,8 +19,18 @@ async def test_notification_services( notify_state = hass.states.get("notify.tibber") assert notify_state is not None + # Assert legacy notify service hass been added + assert hass.services.has_service("notify", DOMAIN) + + # Test legacy 
notify service + service = "tibber" + service_data = {"message": "The message", "title": "A title"} + await hass.services.async_call("notify", service, service_data, blocking=True) calls: MagicMock = mock_tibber_setup.send_notification + calls.assert_called_once_with(message="The message", title="A title") + calls.reset_mock() + # Test notify entity service service = "send_message" service_data = { @@ -33,6 +44,15 @@ async def test_notification_services( calls.side_effect = TimeoutError + with pytest.raises(HomeAssistantError): + # Test legacy notify service + await hass.services.async_call( + "notify", + service="tibber", + service_data={"message": "The message", "title": "A title"}, + blocking=True, + ) + with pytest.raises(HomeAssistantError): # Test notify entity service await hass.services.async_call( diff --git a/tests/components/tibber/test_repairs.py b/tests/components/tibber/test_repairs.py new file mode 100644 index 00000000000..5e5fde4569e --- /dev/null +++ b/tests/components/tibber/test_repairs.py @@ -0,0 +1,56 @@ +"""Test loading of the Tibber config entry.""" + +from unittest.mock import MagicMock + +from homeassistant.components.recorder import Recorder +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir + +from tests.components.repairs import process_repair_fix_flow, start_repair_fix_flow +from tests.typing import ClientSessionGenerator + + +async def test_repair_flow( + recorder_mock: Recorder, + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + mock_tibber_setup: MagicMock, + hass_client: ClientSessionGenerator, +) -> None: + """Test unloading the entry.""" + + # Test legacy notify service + service = "tibber" + service_data = {"message": "The message", "title": "A title"} + await hass.services.async_call("notify", service, service_data, blocking=True) + calls: MagicMock = mock_tibber_setup.send_notification + + calls.assert_called_once_with(message="The message", title="A title") + calls.reset_mock() + + http_client = await hass_client() + # Assert the issue is present + assert issue_registry.async_get_issue( + domain="notify", + issue_id=f"migrate_notify_tibber_{service}", + ) + assert len(issue_registry.issues) == 1 + + data = await start_repair_fix_flow( + http_client, "notify", f"migrate_notify_tibber_{service}" + ) + + flow_id = data["flow_id"] + assert data["step_id"] == "confirm" + + # Simulate the users confirmed the repair flow + data = await process_repair_fix_flow(http_client, flow_id) + assert data["type"] == "create_entry" + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue( + domain="notify", + issue_id=f"migrate_notify_tibber_{service}", + ) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/tplink/conftest.py b/tests/components/tplink/conftest.py index 25a4bd20270..78cc9304bf7 100644 --- a/tests/components/tplink/conftest.py +++ b/tests/components/tplink/conftest.py @@ -37,7 +37,7 @@ def mock_discovery(): device = _mocked_device( device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), credentials_hash=CREDENTIALS_HASH_KLAP, - alias="My Bulb", + alias=None, ) devices = { "127.0.0.1": _mocked_device( diff --git a/tests/components/tplink/test_config_flow.py b/tests/components/tplink/test_config_flow.py index 2697696c667..12a5741058c 100644 --- a/tests/components/tplink/test_config_flow.py +++ b/tests/components/tplink/test_config_flow.py @@ -2,7 +2,7 @@ from contextlib import contextmanager import 
logging -from unittest.mock import ANY, AsyncMock, patch +from unittest.mock import AsyncMock, patch from kasa import TimeoutError import pytest @@ -30,7 +30,6 @@ from homeassistant.const import ( CONF_HOST, CONF_MAC, CONF_PASSWORD, - CONF_PORT, CONF_USERNAME, ) from homeassistant.core import HomeAssistant @@ -666,93 +665,6 @@ async def test_manual_auth_errors( await hass.async_block_till_done() -@pytest.mark.parametrize( - ("host_str", "host", "port"), - [ - (f"{IP_ADDRESS}:1234", IP_ADDRESS, 1234), - ("[2001:db8:0::1]:4321", "2001:db8:0::1", 4321), - ], -) -async def test_manual_port_override( - hass: HomeAssistant, - mock_connect: AsyncMock, - mock_discovery: AsyncMock, - host_str, - host, - port, -) -> None: - """Test manually setup.""" - mock_discovery["mock_device"].config.port_override = port - mock_discovery["mock_device"].host = host - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert not result["errors"] - - # side_effects to cause auth confirm as the port override usually only - # works with direct connections. - mock_discovery["discover_single"].side_effect = TimeoutError - mock_connect["connect"].side_effect = AuthenticationError - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: host_str} - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "user_auth_confirm" - assert not result2["errors"] - - creds = Credentials("fake_username", "fake_password") - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) - await hass.async_block_till_done() - mock_discovery["try_connect_all"].assert_called_once_with( - host, credentials=creds, port=port, http_client=ANY - ) - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == DEFAULT_ENTRY_TITLE - assert result3["data"] == { - **CREATE_ENTRY_DATA_KLAP, - CONF_PORT: port, - CONF_HOST: host, - } - assert result3["context"]["unique_id"] == MAC_ADDRESS - - -async def test_manual_port_override_invalid( - hass: HomeAssistant, mock_connect: AsyncMock, mock_discovery: AsyncMock -) -> None: - """Test manually setup.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert not result["errors"] - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: f"{IP_ADDRESS}:foo"} - ) - await hass.async_block_till_done() - - mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=None, port=None - ) - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == DEFAULT_ENTRY_TITLE - assert result2["data"] == CREATE_ENTRY_DATA_KLAP - assert result2["context"]["unique_id"] == MAC_ADDRESS - - async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: """Test we get the form with discovery and abort for dhcp source when we get both.""" @@ -1160,7 +1072,7 @@ async def test_reauth( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + "127.0.0.1", credentials=credentials ) 
mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT @@ -1195,7 +1107,7 @@ async def test_reauth_try_connect_all( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + "127.0.0.1", credentials=credentials ) mock_discovery["try_connect_all"].assert_called_once() assert result2["type"] is FlowResultType.ABORT @@ -1233,7 +1145,7 @@ async def test_reauth_try_connect_all_fail( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + "127.0.0.1", credentials=credentials ) mock_discovery["try_connect_all"].assert_called_once() assert result2["errors"] == {"base": "cannot_connect"} @@ -1302,7 +1214,7 @@ async def test_reauth_update_with_encryption_change( assert "Connection type changed for 127.0.0.2" in caplog.text credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.2", credentials=credentials, port=None + "127.0.0.2", credentials=credentials ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT @@ -1504,7 +1416,7 @@ async def test_reauth_errors( credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + "127.0.0.1", credentials=credentials ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.FORM @@ -1522,7 +1434,7 @@ async def test_reauth_errors( ) mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + "127.0.0.1", credentials=credentials ) mock_discovery["mock_device"].update.assert_called_once_with() @@ -1731,7 +1643,7 @@ async def test_reauth_update_other_flows( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + "127.0.0.1", credentials=credentials ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT diff --git a/tests/components/unifiprotect/test_media_source.py b/tests/components/unifiprotect/test_media_source.py index 18944460ca5..60cd3150884 100644 --- a/tests/components/unifiprotect/test_media_source.py +++ b/tests/components/unifiprotect/test_media_source.py @@ -669,7 +669,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.RING, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=[], @@ -683,7 +683,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.MOTION, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=[], @@ -697,7 +697,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["person"], @@ -706,7 +706,7 @@ async def test_browse_media_recent_truncated( metadata={ "detected_thumbnails": [ { - "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), + 
"clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), "type": "person", "cropped_id": "event_id", } @@ -720,7 +720,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "person"], @@ -734,7 +734,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -748,7 +748,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -758,7 +758,7 @@ async def test_browse_media_recent_truncated( "license_plate": {"name": "ABC1234", "confidence_level": 95}, "detected_thumbnails": [ { - "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", } @@ -772,7 +772,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -782,7 +782,7 @@ async def test_browse_media_recent_truncated( "license_plate": {"name": "ABC1234", "confidence_level": 95}, "detected_thumbnails": [ { - "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", "attributes": { @@ -802,7 +802,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -812,7 +812,7 @@ async def test_browse_media_recent_truncated( "license_plate": {"name": "ABC1234", "confidence_level": 95}, "detected_thumbnails": [ { - "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", "attributes": { @@ -823,7 +823,7 @@ async def test_browse_media_recent_truncated( }, }, { - "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), "type": "person", "cropped_id": "event_id", }, @@ -837,7 +837,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle"], @@ -846,7 +846,7 @@ async def test_browse_media_recent_truncated( metadata={ "detected_thumbnails": [ { - "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", "attributes": { @@ -870,7 +870,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_AUDIO_DETECT, - start=datetime(2000, 1, 1, 0, 0, 0), + start=datetime(1000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["alrmSpeak"], diff --git 
a/tests/components/utility_meter/snapshots/test_diagnostics.ambr b/tests/components/utility_meter/snapshots/test_diagnostics.ambr index 6cdf121d7e3..c69164264da 100644 --- a/tests/components/utility_meter/snapshots/test_diagnostics.ambr +++ b/tests/components/utility_meter/snapshots/test_diagnostics.ambr @@ -41,17 +41,7 @@ 'status': 'collecting', 'tariff': 'tariff0', }), - 'last_sensor_data': dict({ - 'last_period': '0', - 'last_reset': '2024-04-05T00:00:00+00:00', - 'last_valid_state': 3, - 'native_unit_of_measurement': 'kWh', - 'native_value': dict({ - '__type': "", - 'decimal_str': '3', - }), - 'status': 'collecting', - }), + 'last_sensor_data': None, 'name': 'Energy Bill tariff0', 'period': 'monthly', 'source': 'sensor.input1', @@ -67,17 +57,7 @@ 'status': 'paused', 'tariff': 'tariff1', }), - 'last_sensor_data': dict({ - 'last_period': '0', - 'last_reset': '2024-04-05T00:00:00+00:00', - 'last_valid_state': 7, - 'native_unit_of_measurement': 'kWh', - 'native_value': dict({ - '__type': "", - 'decimal_str': '7', - }), - 'status': 'paused', - }), + 'last_sensor_data': None, 'name': 'Energy Bill tariff1', 'period': 'monthly', 'source': 'sensor.input1', diff --git a/tests/components/utility_meter/test_diagnostics.py b/tests/components/utility_meter/test_diagnostics.py index 8be5f949940..9ecabe813b1 100644 --- a/tests/components/utility_meter/test_diagnostics.py +++ b/tests/components/utility_meter/test_diagnostics.py @@ -91,17 +91,7 @@ async def test_diagnostics( ATTR_LAST_RESET: last_reset, }, ), - { - "native_value": { - "__type": "", - "decimal_str": "3", - }, - "native_unit_of_measurement": "kWh", - "last_reset": last_reset, - "last_period": "0", - "last_valid_state": 3, - "status": "collecting", - }, + {}, ), ( State( @@ -111,17 +101,7 @@ async def test_diagnostics( ATTR_LAST_RESET: last_reset, }, ), - { - "native_value": { - "__type": "", - "decimal_str": "7", - }, - "native_unit_of_measurement": "kWh", - "last_reset": last_reset, - "last_period": "0", - "last_valid_state": 7, - "status": "paused", - }, + {}, ), ], ) diff --git a/tests/components/utility_meter/test_sensor.py b/tests/components/utility_meter/test_sensor.py index 0ab78739f7f..745bf0ce012 100644 --- a/tests/components/utility_meter/test_sensor.py +++ b/tests/components/utility_meter/test_sensor.py @@ -26,6 +26,7 @@ from homeassistant.components.utility_meter.const import ( ) from homeassistant.components.utility_meter.sensor import ( ATTR_LAST_RESET, + ATTR_LAST_VALID_STATE, ATTR_STATUS, COLLECTING, PAUSED, @@ -759,6 +760,64 @@ async def test_restore_state( "status": "paused", }, ), + # sensor.energy_bill_tariff2 has missing keys and falls back to + # saved state + ( + State( + "sensor.energy_bill_tariff2", + "2.1", + attributes={ + ATTR_STATUS: PAUSED, + ATTR_LAST_RESET: last_reset_1, + ATTR_LAST_VALID_STATE: None, + ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, + }, + ), + { + "native_value": { + "__type": "", + "decimal_str": "2.2", + }, + "native_unit_of_measurement": "kWh", + "last_valid_state": "None", + }, + ), + # sensor.energy_bill_tariff3 has invalid data and falls back to + # saved state + ( + State( + "sensor.energy_bill_tariff3", + "3.1", + attributes={ + ATTR_STATUS: COLLECTING, + ATTR_LAST_RESET: last_reset_1, + ATTR_LAST_VALID_STATE: None, + ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, + }, + ), + { + "native_value": { + "__type": "", + "decimal_str": "3f", # Invalid + }, + "native_unit_of_measurement": "kWh", + "last_valid_state": "None", + }, + ), + # No extra saved data, fall back to 
saved state + ( + State( + "sensor.energy_bill_tariff4", + "error", + attributes={ + ATTR_STATUS: COLLECTING, + ATTR_LAST_RESET: last_reset_1, + ATTR_LAST_VALID_STATE: None, + ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, + }, + ), + {}, + ), ], ) @@ -793,6 +852,25 @@ async def test_restore_state( assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY + state = hass.states.get("sensor.energy_bill_tariff2") + assert state.state == "2.1" + assert state.attributes.get("status") == PAUSED + assert state.attributes.get("last_reset") == last_reset_1 + assert state.attributes.get("last_valid_state") == "None" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY + + state = hass.states.get("sensor.energy_bill_tariff3") + assert state.state == "3.1" + assert state.attributes.get("status") == COLLECTING + assert state.attributes.get("last_reset") == last_reset_1 + assert state.attributes.get("last_valid_state") == "None" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY + + state = hass.states.get("sensor.energy_bill_tariff4") + assert state.state == STATE_UNKNOWN + # utility_meter is loaded, now set sensors according to utility_meter: hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -804,7 +882,12 @@ async def test_restore_state( state = hass.states.get("sensor.energy_bill_tariff0") assert state.attributes.get("status") == COLLECTING - for entity_id in ("sensor.energy_bill_tariff1",): + for entity_id in ( + "sensor.energy_bill_tariff1", + "sensor.energy_bill_tariff2", + "sensor.energy_bill_tariff3", + "sensor.energy_bill_tariff4", + ): state = hass.states.get(entity_id) assert state.attributes.get("status") == PAUSED @@ -856,18 +939,7 @@ async def test_service_reset_no_tariffs( ATTR_LAST_RESET: last_reset, }, ), - { - "native_value": { - "__type": "", - "decimal_str": "3", - }, - "native_unit_of_measurement": "kWh", - "last_reset": last_reset, - "last_period": "0", - "last_valid_state": None, - "status": "collecting", - "input_device_class": "energy", - }, + {}, ), ], ) @@ -973,33 +1045,21 @@ async def test_service_reset_no_tariffs_correct_with_multi( State( "sensor.energy_bill", "3", - ), - { - "native_value": { - "__type": "", - "decimal_str": "3", + attributes={ + ATTR_LAST_RESET: last_reset, }, - "native_unit_of_measurement": "kWh", - "last_reset": last_reset, - "last_period": "0", - "status": "collecting", - }, + ), + {}, ), ( State( "sensor.water_bill", "6", - ), - { - "native_value": { - "__type": "", - "decimal_str": "6", + attributes={ + ATTR_LAST_RESET: last_reset, }, - "native_unit_of_measurement": "kWh", - "last_reset": last_reset, - "last_period": "0", - "status": "collecting", - }, + ), + {}, ), ], ) @@ -1744,26 +1804,6 @@ async def test_self_reset_hourly_dst(hass: HomeAssistant) -> None: ) -async def test_self_reset_hourly_dst2(hass: HomeAssistant) -> None: - """Test weekly reset of meter in DST change conditions.""" - - hass.config.time_zone = "Europe/Berlin" - dt_util.set_default_time_zone(dt_util.get_time_zone(hass.config.time_zone)) - await _test_self_reset( - hass, gen_config("daily"), "2024-10-26T23:59:00.000000+02:00" - ) - - state = hass.states.get("sensor.energy_bill") - last_reset = 
dt_util.parse_datetime("2024-10-27T00:00:00.000000+02:00") - assert ( - dt_util.as_local(dt_util.parse_datetime(state.attributes.get("last_reset"))) - == last_reset - ) - - next_reset = dt_util.parse_datetime("2024-10-28T00:00:00.000000+01:00").isoformat() - assert state.attributes.get("next_reset") == next_reset - - async def test_self_reset_daily(hass: HomeAssistant) -> None: """Test daily reset of meter.""" await _test_self_reset( diff --git a/tests/components/vesync/snapshots/test_fan.ambr b/tests/components/vesync/snapshots/test_fan.ambr index 60af4ae3d5b..21985afd7bf 100644 --- a/tests/components/vesync/snapshots/test_fan.ambr +++ b/tests/components/vesync/snapshots/test_fan.ambr @@ -67,7 +67,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': 'vesync', + 'translation_key': None, 'unique_id': 'air-purifier', 'unit_of_measurement': None, }), @@ -158,7 +158,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': 'vesync', + 'translation_key': None, 'unique_id': 'asd_sdfKIHG7IJHGwJGJ7GJ_ag5h3G55', 'unit_of_measurement': None, }), @@ -256,7 +256,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': 'vesync', + 'translation_key': None, 'unique_id': '400s-purifier', 'unit_of_measurement': None, }), @@ -355,7 +355,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': 'vesync', + 'translation_key': None, 'unique_id': '600s-purifier', 'unit_of_measurement': None, }), diff --git a/tests/components/websocket_api/test_auth.py b/tests/components/websocket_api/test_auth.py index d55d2f97017..20a728cf3cd 100644 --- a/tests/components/websocket_api/test_auth.py +++ b/tests/components/websocket_api/test_auth.py @@ -293,6 +293,6 @@ async def test_auth_sending_unknown_type_disconnects( auth_msg = await ws.receive_json() assert auth_msg["type"] == TYPE_AUTH_REQUIRED - await ws._writer.send_frame(b"1" * 130, 0x30) + await ws._writer._send_frame(b"1" * 130, 0x30) auth_msg = await ws.receive() assert auth_msg.type == WSMsgType.close diff --git a/tests/components/websocket_api/test_http.py b/tests/components/websocket_api/test_http.py index 03e30c11ee9..2530d885942 100644 --- a/tests/components/websocket_api/test_http.py +++ b/tests/components/websocket_api/test_http.py @@ -5,7 +5,7 @@ from datetime import timedelta from typing import Any, cast from unittest.mock import patch -from aiohttp import ServerDisconnectedError, WSMsgType, web +from aiohttp import WSMsgType, WSServerHandshakeError, web import pytest from homeassistant.components.websocket_api import ( @@ -374,7 +374,7 @@ async def test_prepare_fail_timeout( "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", side_effect=(TimeoutError, web.WebSocketResponse.prepare), ), - pytest.raises(ServerDisconnectedError), + pytest.raises(WSServerHandshakeError), ): await hass_ws_client(hass) @@ -392,7 +392,7 @@ async def test_prepare_fail_connection_reset( "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", side_effect=(ConnectionResetError, web.WebSocketResponse.prepare), ), - pytest.raises(ServerDisconnectedError), + pytest.raises(WSServerHandshakeError), ): await hass_ws_client(hass) diff --git a/tests/components/zha/test_config_flow.py b/tests/components/zha/test_config_flow.py index 87ba46a4ced..1382c5c2569 100644 --- a/tests/components/zha/test_config_flow.py +++ b/tests/components/zha/test_config_flow.py @@ -21,7 +21,7 @@ 
import zigpy.types from homeassistant import config_entries from homeassistant.components import ssdp, usb, zeroconf -from homeassistant.components.hassio import AddonError, AddonState +from homeassistant.components.hassio import AddonState from homeassistant.components.ssdp import ATTR_UPNP_MANUFACTURER_URL, ATTR_UPNP_SERIAL from homeassistant.components.zha import config_flow, radio_manager from homeassistant.components.zha.const import ( @@ -1878,23 +1878,10 @@ async def test_config_flow_port_yellow_port_name(hass: HomeAssistant) -> None: ) -async def test_config_flow_ports_no_hassio(hass: HomeAssistant) -> None: - """Test config flow serial port name when this is not a hassio install.""" - - with ( - patch("homeassistant.components.zha.config_flow.is_hassio", return_value=False), - patch("serial.tools.list_ports.comports", MagicMock(return_value=[])), - ): - ports = await config_flow.list_serial_ports(hass) - - assert ports == [] - - async def test_config_flow_port_multiprotocol_port_name(hass: HomeAssistant) -> None: """Test config flow serial port name for multiprotocol add-on.""" with ( - patch("homeassistant.components.zha.config_flow.is_hassio", return_value=True), patch( "homeassistant.components.hassio.addon_manager.AddonManager.async_get_addon_info" ) as async_get_addon_info, @@ -1902,28 +1889,16 @@ async def test_config_flow_port_multiprotocol_port_name(hass: HomeAssistant) -> ): async_get_addon_info.return_value.state = AddonState.RUNNING async_get_addon_info.return_value.hostname = "core-silabs-multiprotocol" - ports = await config_flow.list_serial_ports(hass) - assert len(ports) == 1 - assert ports[0].description == "Multiprotocol add-on" - assert ports[0].manufacturer == "Nabu Casa" - assert ports[0].device == "socket://core-silabs-multiprotocol:9999" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={CONF_SOURCE: SOURCE_USER}, + ) - -async def test_config_flow_port_no_multiprotocol(hass: HomeAssistant) -> None: - """Test config flow serial port listing when addon info fails to load.""" - - with ( - patch("homeassistant.components.zha.config_flow.is_hassio", return_value=True), - patch( - "homeassistant.components.hassio.addon_manager.AddonManager.async_get_addon_info", - side_effect=AddonError, - ), - patch("serial.tools.list_ports.comports", MagicMock(return_value=[])), - ): - ports = await config_flow.list_serial_ports(hass) - - assert ports == [] + assert ( + result["data_schema"].schema["path"].container[0] + == "socket://core-silabs-multiprotocol:9999 - Multiprotocol add-on - Nabu Casa" + ) @patch("serial.tools.list_ports.comports", MagicMock(return_value=[com_port()])) diff --git a/tests/components/zwave_js/test_api.py b/tests/components/zwave_js/test_api.py index df1adbc98e5..8251d7d280f 100644 --- a/tests/components/zwave_js/test_api.py +++ b/tests/components/zwave_js/test_api.py @@ -78,8 +78,6 @@ from homeassistant.components.zwave_js.api import ( TYPE, UUID, VALUE, - VALUE_FORMAT, - VALUE_SIZE, VERSION, ) from homeassistant.components.zwave_js.const import ( @@ -3139,180 +3137,6 @@ async def test_get_config_parameters( assert msg["error"]["code"] == ERR_NOT_LOADED -async def test_set_raw_config_parameter( - hass: HomeAssistant, - client, - multisensor_6, - integration, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test that the set_raw_config_parameter WS API call works.""" - entry = integration - ws_client = await hass_ws_client(hass) - device = get_device(hass, multisensor_6) - - # Change from async_send_command to 
async_send_command_no_wait - client.async_send_command_no_wait.return_value = None - - # Test setting a raw config parameter value - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/set_raw_config_parameter", - DEVICE_ID: device.id, - PROPERTY: 102, - VALUE: 1, - VALUE_SIZE: 2, - VALUE_FORMAT: 1, - } - ) - - msg = await ws_client.receive_json() - assert msg["success"] - assert msg["result"]["status"] == "queued" - - assert len(client.async_send_command_no_wait.call_args_list) == 1 - args = client.async_send_command_no_wait.call_args[0][0] - assert args["command"] == "endpoint.set_raw_config_parameter_value" - assert args["nodeId"] == multisensor_6.node_id - assert args["options"]["parameter"] == 102 - assert args["options"]["value"] == 1 - assert args["options"]["valueSize"] == 2 - assert args["options"]["valueFormat"] == 1 - - # Reset the mock for async_send_command_no_wait instead - client.async_send_command_no_wait.reset_mock() - - # Test getting non-existent node fails - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/set_raw_config_parameter", - DEVICE_ID: "fake_device", - PROPERTY: 102, - VALUE: 1, - VALUE_SIZE: 2, - VALUE_FORMAT: 1, - } - ) - msg = await ws_client.receive_json() - assert not msg["success"] - assert msg["error"]["code"] == ERR_NOT_FOUND - - # Test sending command with not loaded entry fails - await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/set_raw_config_parameter", - DEVICE_ID: device.id, - PROPERTY: 102, - VALUE: 1, - VALUE_SIZE: 2, - VALUE_FORMAT: 1, - } - ) - msg = await ws_client.receive_json() - - assert not msg["success"] - assert msg["error"]["code"] == ERR_NOT_LOADED - - -async def test_get_raw_config_parameter( - hass: HomeAssistant, - multisensor_6, - integration, - client, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test the get_raw_config_parameter websocket command.""" - entry = integration - ws_client = await hass_ws_client(hass) - device = get_device(hass, multisensor_6) - - client.async_send_command.return_value = {"value": 1} - - # Test getting a raw config parameter value - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/get_raw_config_parameter", - DEVICE_ID: device.id, - PROPERTY: 102, - } - ) - - msg = await ws_client.receive_json() - assert msg["success"] - assert msg["result"]["value"] == 1 - - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args[0][0] - assert args["command"] == "endpoint.get_raw_config_parameter_value" - assert args["nodeId"] == multisensor_6.node_id - assert args["options"]["parameter"] == 102 - - client.async_send_command.reset_mock() - - # Test FailedZWaveCommand is caught - with patch( - "zwave_js_server.model.node.Node.async_get_raw_config_parameter_value", - side_effect=FailedZWaveCommand("failed_command", 1, "error message"), - ): - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/get_raw_config_parameter", - DEVICE_ID: device.id, - PROPERTY: 102, - } - ) - msg = await ws_client.receive_json() - - assert not msg["success"] - assert msg["error"]["code"] == "zwave_error" - assert msg["error"]["message"] == "zwave_error: Z-Wave error 1 - error message" - - # Test getting non-existent node fails - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/get_raw_config_parameter", - DEVICE_ID: "fake_device", - PROPERTY: 102, - } - ) - msg = await ws_client.receive_json() - assert not msg["success"] - assert 
msg["error"]["code"] == ERR_NOT_FOUND - - # Test FailedCommand exception - client.async_send_command.side_effect = FailedCommand("test", "test") - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/get_raw_config_parameter", - DEVICE_ID: device.id, - PROPERTY: 102, - } - ) - msg = await ws_client.receive_json() - assert not msg["success"] - assert msg["error"]["code"] == "test" - assert msg["error"]["message"] == "Command failed: test" - - # Test sending command with not loaded entry fails - await hass.config_entries.async_unload(entry.entry_id) - await hass.async_block_till_done() - - await ws_client.send_json_auto_id( - { - TYPE: "zwave_js/get_raw_config_parameter", - DEVICE_ID: device.id, - PROPERTY: 102, - } - ) - msg = await ws_client.receive_json() - - assert not msg["success"] - assert msg["error"]["code"] == ERR_NOT_LOADED - - @pytest.mark.parametrize( ("firmware_data", "expected_data"), [({"target": "1"}, {"firmware_target": 1}), ({}, {})], diff --git a/tests/components/zwave_js/test_services.py b/tests/components/zwave_js/test_services.py index 41477f18b97..ec13d0262f8 100644 --- a/tests/components/zwave_js/test_services.py +++ b/tests/components/zwave_js/test_services.py @@ -497,12 +497,13 @@ async def test_set_config_parameter( caplog.clear() + config_value = aeotec_zw164_siren.values["2-112-0-32"] cmd_result = SetConfigParameterResult("accepted", {"status": 255}) # Test accepted return with patch( "homeassistant.components.zwave_js.services.Endpoint.async_set_raw_config_parameter_value", - return_value=cmd_result, + return_value=(config_value, cmd_result), ) as mock_set_raw_config_parameter_value: await hass.services.async_call( DOMAIN, @@ -533,7 +534,7 @@ async def test_set_config_parameter( cmd_result.status = "queued" with patch( "homeassistant.components.zwave_js.services.Endpoint.async_set_raw_config_parameter_value", - return_value=cmd_result, + return_value=(config_value, cmd_result), ) as mock_set_raw_config_parameter_value: await hass.services.async_call( DOMAIN, diff --git a/tests/conftest.py b/tests/conftest.py index 35b65c5653c..c60018413e7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1772,30 +1772,10 @@ def mock_bleak_scanner_start() -> Generator[MagicMock]: @pytest.fixture -def integration_frame_path() -> str: - """Return the path to the integration frame. - - Can be parametrized with - `@pytest.mark.parametrize("integration_frame_path", ["path_to_frame"])` - - - "custom_components/XYZ" for a custom integration - - "homeassistant/components/XYZ" for a core integration - - "homeassistant/XYZ" for core (no integration) - - Defaults to core component `hue` - """ - return "homeassistant/components/hue" - - -@pytest.fixture -def mock_integration_frame(integration_frame_path: str) -> Generator[Mock]: - """Mock where we are calling code from. - - Defaults to calling from `hue` core integration, and can be parametrized - with `integration_frame_path`. 
- """ +def mock_integration_frame() -> Generator[Mock]: + """Mock as if we're calling code from inside an integration.""" correct_frame = Mock( - filename=f"/home/paulus/{integration_frame_path}/light.py", + filename="/home/paulus/homeassistant/components/hue/light.py", lineno="23", line="self.light.is_on", ) diff --git a/tests/helpers/test_frame.py b/tests/helpers/test_frame.py index a2a4890810b..b3fbb0faaf4 100644 --- a/tests/helpers/test_frame.py +++ b/tests/helpers/test_frame.py @@ -1,6 +1,5 @@ """Test the frame helper.""" -from typing import Any from unittest.mock import ANY, Mock, patch import pytest @@ -157,97 +156,6 @@ async def test_get_integration_logger_no_integration( assert logger.name == __name__ -@pytest.mark.parametrize( - ("integration_frame_path", "keywords", "expected_error", "expected_log"), - [ - pytest.param( - "homeassistant/test_core", - {}, - True, - 0, - id="core default", - ), - pytest.param( - "homeassistant/components/test_core_integration", - {}, - False, - 1, - id="core integration default", - ), - pytest.param( - "custom_components/test_custom_integration", - {}, - False, - 1, - id="custom integration default", - ), - pytest.param( - "custom_components/test_custom_integration", - {"custom_integration_behavior": frame.ReportBehavior.IGNORE}, - False, - 0, - id="custom integration ignore", - ), - pytest.param( - "custom_components/test_custom_integration", - {"custom_integration_behavior": frame.ReportBehavior.ERROR}, - True, - 1, - id="custom integration error", - ), - pytest.param( - "homeassistant/components/test_integration_frame", - {"core_integration_behavior": frame.ReportBehavior.IGNORE}, - False, - 0, - id="core_integration_behavior ignore", - ), - pytest.param( - "homeassistant/components/test_integration_frame", - {"core_integration_behavior": frame.ReportBehavior.ERROR}, - True, - 1, - id="core_integration_behavior error", - ), - pytest.param( - "homeassistant/test_integration_frame", - {"core_behavior": frame.ReportBehavior.IGNORE}, - False, - 0, - id="core_behavior ignore", - ), - pytest.param( - "homeassistant/test_integration_frame", - {"core_behavior": frame.ReportBehavior.LOG}, - False, - 1, - id="core_behavior log", - ), - ], -) -@pytest.mark.usefixtures("mock_integration_frame") -async def test_report_usage( - caplog: pytest.LogCaptureFixture, - keywords: dict[str, Any], - expected_error: bool, - expected_log: int, -) -> None: - """Test report.""" - - what = "test_report_string" - - errored = False - try: - with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): - frame.report_usage(what, **keywords) - except RuntimeError: - errored = True - - assert errored == expected_error - - assert caplog.text.count(what) == expected_log - - @patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_prevent_flooding( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock @@ -339,87 +247,3 @@ async def test_report_error_if_integration( ), ): frame.report("did a bad thing", error_if_integration=True) - - -@pytest.mark.parametrize( - ("integration_frame_path", "keywords", "expected_error", "expected_log"), - [ - pytest.param( - "homeassistant/test_core", - {}, - True, - 0, - id="core default", - ), - pytest.param( - "homeassistant/components/test_core_integration", - {}, - False, - 1, - id="core integration default", - ), - pytest.param( - "custom_components/test_custom_integration", - {}, - False, - 1, - id="custom integration default", - ), - pytest.param( - "custom_components/test_integration_frame", - 
{"log_custom_component_only": True}, - False, - 1, - id="log_custom_component_only with custom integration", - ), - pytest.param( - "homeassistant/components/test_integration_frame", - {"log_custom_component_only": True}, - False, - 0, - id="log_custom_component_only with core integration", - ), - pytest.param( - "homeassistant/test_integration_frame", - {"error_if_core": False}, - False, - 1, - id="disable error_if_core", - ), - pytest.param( - "custom_components/test_integration_frame", - {"error_if_integration": True}, - True, - 1, - id="error_if_integration with custom integration", - ), - pytest.param( - "homeassistant/components/test_integration_frame", - {"error_if_integration": True}, - True, - 1, - id="error_if_integration with core integration", - ), - ], -) -@pytest.mark.usefixtures("mock_integration_frame") -async def test_report( - caplog: pytest.LogCaptureFixture, - keywords: dict[str, Any], - expected_error: bool, - expected_log: int, -) -> None: - """Test report.""" - - what = "test_report_string" - - errored = False - try: - with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): - frame.report(what, **keywords) - except RuntimeError: - errored = True - - assert errored == expected_error - - assert caplog.text.count(what) == expected_log diff --git a/tests/script/test_gen_requirements_all.py b/tests/script/test_gen_requirements_all.py index 519a5c21855..793b3de63c5 100644 --- a/tests/script/test_gen_requirements_all.py +++ b/tests/script/test_gen_requirements_all.py @@ -1,7 +1,5 @@ """Tests for the gen_requirements_all script.""" -from unittest.mock import patch - from script import gen_requirements_all @@ -25,27 +23,3 @@ def test_include_overrides_subsets() -> None: for overrides in gen_requirements_all.OVERRIDDEN_REQUIREMENTS_ACTIONS.values(): for req in overrides["include"]: assert req in gen_requirements_all.EXCLUDED_REQUIREMENTS_ALL - - -def test_requirement_override_markers() -> None: - """Test override markers are applied to the correct requirements.""" - data = { - "pytest": { - "exclude": set(), - "include": set(), - "markers": {"env-canada": "python_version<'3.13'"}, - } - } - with patch.dict( - gen_requirements_all.OVERRIDDEN_REQUIREMENTS_ACTIONS, data, clear=True - ): - assert ( - gen_requirements_all.process_action_requirement( - "env-canada==0.7.2", "pytest" - ) - == "env-canada==0.7.2;python_version<'3.13'" - ) - assert ( - gen_requirements_all.process_action_requirement("other==1.0", "pytest") - == "other==1.0" - ) diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index 41af8af3f21..700840eb90e 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -5040,24 +5040,6 @@ async def test_async_wait_component_startup(hass: HomeAssistant) -> None: assert "test" in hass.config.components -@pytest.mark.parametrize( - "integration_frame_path", - ["homeassistant/components/my_integration", "homeassistant.core"], -) -@pytest.mark.usefixtures("mock_integration_frame") -async def test_options_flow_with_config_entry_core() -> None: - """Test that OptionsFlowWithConfigEntry cannot be used in core.""" - entry = MockConfigEntry( - domain="hue", - data={"first": True}, - options={"sub_dict": {"1": "one"}, "sub_list": ["one"]}, - ) - - with pytest.raises(RuntimeError, match="inherits from OptionsFlowWithConfigEntry"): - _ = config_entries.OptionsFlowWithConfigEntry(entry) - - -@pytest.mark.parametrize("integration_frame_path", ["custom_components/my_integration"]) @pytest.mark.usefixtures("mock_integration_frame") 
@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_options_flow_with_config_entry(caplog: pytest.LogCaptureFixture) -> None: @@ -5069,17 +5051,40 @@ async def test_options_flow_with_config_entry(caplog: pytest.LogCaptureFixture) ) options_flow = config_entries.OptionsFlowWithConfigEntry(entry) - assert caplog.text == "" # No deprecation warning for custom components + assert ( + "Detected that integration 'hue' inherits from OptionsFlowWithConfigEntry," + " which is deprecated and will stop working in 2025.12" in caplog.text + ) - # Ensure available at startup - assert options_flow.config_entry is entry - assert options_flow.options == entry.options + options_flow._options["sub_dict"]["2"] = "two" + options_flow._options["sub_list"].append("two") + + assert options_flow._options == { + "sub_dict": {"1": "one", "2": "two"}, + "sub_list": ["one", "two"], + } + assert entry.options == {"sub_dict": {"1": "one"}, "sub_list": ["one"]} + + +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_options_flow_options_not_mutated(hass: HomeAssistant) -> None: + """Test that OptionsFlow doesn't mutate entry options.""" + entry = MockConfigEntry( + domain="test", + data={"first": True}, + options={"sub_dict": {"1": "one"}, "sub_list": ["one"]}, + ) + entry.add_to_hass(hass) + + options_flow = config_entries.OptionsFlow() + options_flow.handler = entry.entry_id + options_flow.hass = hass options_flow.options["sub_dict"]["2"] = "two" - options_flow.options["sub_list"].append("two") + options_flow._options["sub_list"].append("two") - # Ensure it does not mutate the entry options - assert options_flow.options == { + assert options_flow._options == { "sub_dict": {"1": "one", "2": "two"}, "sub_list": ["one", "two"], } @@ -7215,41 +7220,6 @@ async def test_async_update_entry_unique_id_collision( assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) -@pytest.mark.parametrize("domain", ["flipr"]) -async def test_async_update_entry_unique_id_collision_allowed_domain( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, - caplog: pytest.LogCaptureFixture, - issue_registry: ir.IssueRegistry, - domain: str, -) -> None: - """Test we warn when async_update_entry creates a unique_id collision. - - This tests we don't warn and don't create issues for domains which have - their own migration path. 
- """ - assert len(issue_registry.issues) == 0 - - entry1 = MockConfigEntry(domain=domain, unique_id=None) - entry2 = MockConfigEntry(domain=domain, unique_id="not none") - entry3 = MockConfigEntry(domain=domain, unique_id="very unique") - entry4 = MockConfigEntry(domain=domain, unique_id="also very unique") - entry1.add_to_manager(manager) - entry2.add_to_manager(manager) - entry3.add_to_manager(manager) - entry4.add_to_manager(manager) - - manager.async_update_entry(entry2, unique_id=None) - assert len(issue_registry.issues) == 0 - assert len(caplog.record_tuples) == 0 - - manager.async_update_entry(entry4, unique_id="very unique") - assert len(issue_registry.issues) == 0 - assert len(caplog.record_tuples) == 0 - - assert ("already in use") not in caplog.text - - async def test_unique_id_collision_issues( hass: HomeAssistant, manager: config_entries.ConfigEntries, @@ -7279,12 +7249,6 @@ async def test_unique_id_collision_issues( for _ in range(6): test3.append(MockConfigEntry(domain="test3", unique_id="not_unique")) await manager.async_add(test3[-1]) - # Add an ignored config entry - await manager.async_add( - MockConfigEntry( - domain="test2", unique_id="group_1", source=config_entries.SOURCE_IGNORE - ) - ) # Check we get one issue for domain test2 and one issue for domain test3 assert len(issue_registry.issues) == 2 @@ -7331,7 +7295,7 @@ async def test_unique_id_collision_issues( (HOMEASSISTANT_DOMAIN, "config_entry_unique_id_collision_test2_group_2"), } - # Remove the last test2 group2 duplicate, the issue is cleared + # Remove the last test2 group2 duplicate, a new issue is created await manager.async_remove(test2_group_2[1].entry_id) assert not issue_registry.issues @@ -7501,24 +7465,20 @@ async def test_options_flow_config_entry( assert result["reason"] == "abort" -@pytest.mark.parametrize("integration_frame_path", ["custom_components/my_integration"]) @pytest.mark.usefixtures("mock_integration_frame") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_options_flow_deprecated_config_entry_setter( hass: HomeAssistant, manager: config_entries.ConfigEntries, caplog: pytest.LogCaptureFixture, ) -> None: """Test that setting config_entry explicitly still works.""" - original_entry = MockConfigEntry(domain="my_integration", data={}) + original_entry = MockConfigEntry(domain="hue", data={}) original_entry.add_to_hass(hass) mock_setup_entry = AsyncMock(return_value=True) - mock_integration( - hass, MockModule("my_integration", async_setup_entry=mock_setup_entry) - ) - mock_platform(hass, "my_integration.config_flow", None) + mock_integration(hass, MockModule("hue", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "hue.config_flow", None) class TestFlow(config_entries.ConfigFlow): """Test flow.""" @@ -7533,7 +7493,10 @@ async def test_options_flow_deprecated_config_entry_setter( def __init__(self, entry) -> None: """Test initialisation.""" - self.config_entry = entry + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): + self.config_entry = entry + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): + self.options = entry.options async def async_step_init(self, user_input=None): """Test user step.""" @@ -7552,16 +7515,19 @@ async def test_options_flow_deprecated_config_entry_setter( return _OptionsFlow(config_entry) - with mock_config_flow("my_integration", TestFlow): + with mock_config_flow("hue", TestFlow): result = await hass.config_entries.options.async_init(original_entry.entry_id) options_flow = 
hass.config_entries.options._progress.get(result["flow_id"]) assert options_flow.config_entry is original_entry assert ( - "Detected that custom integration 'my_integration' sets option flow " - "config_entry explicitly, which is deprecated and will stop working " - "in 2025.12" in caplog.text + "Detected that integration 'hue' sets option flow config_entry explicitly, " + "which is deprecated and will stop working in 2025.12" in caplog.text + ) + assert ( + "Detected that integration 'hue' sets option flow options explicitly, " + "which is deprecated and will stop working in 2025.12" in caplog.text ) diff --git a/tests/test_const.py b/tests/test_const.py index 87a14ecfe9c..c572c4a08d7 100644 --- a/tests/test_const.py +++ b/tests/test_const.py @@ -66,14 +66,7 @@ def test_all() -> None: "DEVICE_CLASS_", ) + _create_tuples(const.UnitOfApparentPower, "POWER_") - + _create_tuples( - [ - const.UnitOfPower.WATT, - const.UnitOfPower.KILO_WATT, - const.UnitOfPower.BTU_PER_HOUR, - ], - "POWER_", - ) + + _create_tuples(const.UnitOfPower, "POWER_") + _create_tuples( [ const.UnitOfEnergy.KILO_WATT_HOUR, diff --git a/tests/test_loader.py b/tests/test_loader.py index 57d3d6fa832..c4bcbed0107 100644 --- a/tests/test_loader.py +++ b/tests/test_loader.py @@ -6,7 +6,7 @@ import pathlib import sys import threading from typing import Any -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock, Mock, patch from awesomeversion import AwesomeVersion import pytest @@ -1295,29 +1295,26 @@ async def test_config_folder_not_in_path() -> None: import tests.testing_config.check_config_not_in_path # noqa: F401 -@pytest.mark.parametrize( - ("integration_frame_path", "expected"), - [ - pytest.param( - "custom_components/test_integration_frame", True, id="custom integration" - ), - pytest.param( - "homeassistant/components/test_integration_frame", - False, - id="core integration", - ), - pytest.param("homeassistant/test_integration_frame", False, id="core"), - ], -) -@pytest.mark.usefixtures("mock_integration_frame") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_hass_components_use_reported( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - expected: bool, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock ) -> None: - """Test whether use of hass.components is reported.""" + """Test that use of hass.components is reported.""" + mock_integration_frame.filename = ( + "/home/paulus/homeassistant/custom_components/demo/light.py" + ) + integration_frame = frame.IntegrationFrame( + custom_integration=True, + frame=mock_integration_frame, + integration="test_integration_frame", + module="custom_components.test_integration_frame", + relative_filename="custom_components/test_integration_frame/__init__.py", + ) + with ( + patch( + "homeassistant.helpers.frame.get_integration_frame", + return_value=integration_frame, + ), patch( "homeassistant.components.http.start_http_server_and_save_config", return_value=None, @@ -1325,11 +1322,10 @@ async def test_hass_components_use_reported( ): await hass.components.http.start_http_server_and_save_config(hass, [], None) - reported = ( + assert ( "Detected that custom integration 'test_integration_frame'" " accesses hass.components.http. 
This is deprecated" ) in caplog.text - assert reported == expected async def test_async_get_component_preloads_config_and_config_flow( @@ -1991,29 +1987,24 @@ async def test_has_services(hass: HomeAssistant) -> None: assert integration.has_services is True -@pytest.mark.parametrize( - ("integration_frame_path", "expected"), - [ - pytest.param( - "custom_components/test_integration_frame", True, id="custom integration" - ), - pytest.param( - "homeassistant/components/test_integration_frame", - False, - id="core integration", - ), - pytest.param("homeassistant/test_integration_frame", False, id="core"), - ], -) -@pytest.mark.usefixtures("mock_integration_frame") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_hass_helpers_use_reported( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - expected: bool, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock ) -> None: - """Test whether use of hass.helpers is reported.""" + """Test that use of hass.components is reported.""" + integration_frame = frame.IntegrationFrame( + custom_integration=True, + frame=mock_integration_frame, + integration="test_integration_frame", + module="custom_components.test_integration_frame", + relative_filename="custom_components/test_integration_frame/__init__.py", + ) + with ( + patch.object(frame, "_REPORTED_INTEGRATIONS", new=set()), + patch( + "homeassistant.helpers.frame.get_integration_frame", + return_value=integration_frame, + ), patch( "homeassistant.helpers.aiohttp_client.async_get_clientsession", return_value=None, @@ -2021,11 +2012,10 @@ async def test_hass_helpers_use_reported( ): hass.helpers.aiohttp_client.async_get_clientsession() - reported = ( + assert ( "Detected that custom integration 'test_integration_frame' " "accesses hass.helpers.aiohttp_client. This is deprecated" ) in caplog.text - assert reported == expected async def test_manifest_json_fragment_round_trip(hass: HomeAssistant) -> None: diff --git a/tests/util/test_unit_conversion.py b/tests/util/test_unit_conversion.py index 609809a96e8..3b8fd3bc466 100644 --- a/tests/util/test_unit_conversion.py +++ b/tests/util/test_unit_conversion.py @@ -11,7 +11,6 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_BILLION, CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, - UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -33,7 +32,6 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.util import unit_conversion from homeassistant.util.unit_conversion import ( BaseUnitConverter, - BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -61,7 +59,6 @@ INVALID_SYMBOL = "bob" _ALL_CONVERTERS: dict[type[BaseUnitConverter], list[str | None]] = { converter: sorted(converter.VALID_UNITS, key=lambda x: (x is None, x)) for converter in ( - BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -83,11 +80,6 @@ _ALL_CONVERTERS: dict[type[BaseUnitConverter], list[str | None]] = { # Dict containing all converters with a corresponding unit ratio. 
_GET_UNIT_RATIO: dict[type[BaseUnitConverter], tuple[str | None, str | None, float]] = { - BloodGlucoseConcentrationConverter: ( - UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER, - UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER, - 18, - ), ConductivityConverter: ( UnitOfConductivity.MICROSIEMENS_PER_CM, UnitOfConductivity.MILLISIEMENS_PER_CM, @@ -138,20 +130,6 @@ _GET_UNIT_RATIO: dict[type[BaseUnitConverter], tuple[str | None, str | None, flo _CONVERTED_VALUE: dict[ type[BaseUnitConverter], list[tuple[float, str | None, float, str | None]] ] = { - BloodGlucoseConcentrationConverter: [ - ( - 90, - UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER, - 5, - UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER, - ), - ( - 1, - UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER, - 18, - UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER, - ), - ], ConductivityConverter: [ # Deprecated to deprecated (5, UnitOfConductivity.SIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS), @@ -379,16 +357,10 @@ _CONVERTED_VALUE: dict[ EnergyConverter: [ (10, UnitOfEnergy.WATT_HOUR, 0.01, UnitOfEnergy.KILO_WATT_HOUR), (10, UnitOfEnergy.WATT_HOUR, 0.00001, UnitOfEnergy.MEGA_WATT_HOUR), - (10, UnitOfEnergy.WATT_HOUR, 0.00000001, UnitOfEnergy.GIGA_WATT_HOUR), - (10, UnitOfEnergy.WATT_HOUR, 0.00000000001, UnitOfEnergy.TERA_WATT_HOUR), (10, UnitOfEnergy.KILO_WATT_HOUR, 10000, UnitOfEnergy.WATT_HOUR), (10, UnitOfEnergy.KILO_WATT_HOUR, 0.01, UnitOfEnergy.MEGA_WATT_HOUR), (10, UnitOfEnergy.MEGA_WATT_HOUR, 10000000, UnitOfEnergy.WATT_HOUR), (10, UnitOfEnergy.MEGA_WATT_HOUR, 10000, UnitOfEnergy.KILO_WATT_HOUR), - (10, UnitOfEnergy.GIGA_WATT_HOUR, 10e6, UnitOfEnergy.KILO_WATT_HOUR), - (10, UnitOfEnergy.GIGA_WATT_HOUR, 10e9, UnitOfEnergy.WATT_HOUR), - (10, UnitOfEnergy.TERA_WATT_HOUR, 10e9, UnitOfEnergy.KILO_WATT_HOUR), - (10, UnitOfEnergy.TERA_WATT_HOUR, 10e12, UnitOfEnergy.WATT_HOUR), (10, UnitOfEnergy.GIGA_JOULE, 2777.78, UnitOfEnergy.KILO_WATT_HOUR), (10, UnitOfEnergy.GIGA_JOULE, 2.77778, UnitOfEnergy.MEGA_WATT_HOUR), (10, UnitOfEnergy.MEGA_JOULE, 2.77778, UnitOfEnergy.KILO_WATT_HOUR), @@ -467,9 +439,6 @@ _CONVERTED_VALUE: dict[ ], PowerConverter: [ (10, UnitOfPower.KILO_WATT, 10000, UnitOfPower.WATT), - (10, UnitOfPower.MEGA_WATT, 10e6, UnitOfPower.WATT), - (10, UnitOfPower.GIGA_WATT, 10e9, UnitOfPower.WATT), - (10, UnitOfPower.TERA_WATT, 10e12, UnitOfPower.WATT), (10, UnitOfPower.WATT, 0.01, UnitOfPower.KILO_WATT), ], PressureConverter: [ diff --git a/tests/util/yaml/test_init.py b/tests/util/yaml/test_init.py index 12a7eca5f9d..8db3f49ab8e 100644 --- a/tests/util/yaml/test_init.py +++ b/tests/util/yaml/test_init.py @@ -494,6 +494,31 @@ def mock_integration_frame() -> Generator[Mock]: yield correct_frame +@pytest.mark.parametrize( + ("loader_class", "message"), + [ + (yaml.loader.SafeLoader, "'SafeLoader' instead of 'FastSafeLoader'"), + ( + yaml.loader.SafeLineLoader, + "'SafeLineLoader' instead of 'PythonSafeLoader'", + ), + ], +) +@pytest.mark.usefixtures("mock_integration_frame") +async def test_deprecated_loaders( + caplog: pytest.LogCaptureFixture, + loader_class: type, + message: str, +) -> None: + """Test instantiating the deprecated yaml loaders logs a warning.""" + with ( + pytest.raises(TypeError), + patch("homeassistant.helpers.frame._REPORTED_INTEGRATIONS", set()), + ): + loader_class() + assert (f"Detected that integration 'hue' uses deprecated {message}") in caplog.text + + @pytest.mark.usefixtures("try_both_loaders") def test_string_annotated() -> None: """Test strings are 
annotated with file + line."""
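
As a spot-check of the EnergyConverter and PowerConverter rows retained in the _CONVERTED_VALUE table above, the converters can be called directly as classmethods. The following is a minimal illustrative sketch, assuming a Home Assistant development environment with pytest installed; the test name test_energy_and_power_spot_check is hypothetical and only mirrors values already listed in the hunk above.

import pytest

from homeassistant.const import UnitOfEnergy, UnitOfPower
from homeassistant.util.unit_conversion import EnergyConverter, PowerConverter


def test_energy_and_power_spot_check() -> None:
    """Spot-check a few conversion table rows kept by the patch above (hypothetical test)."""
    # 10 Wh -> 0.01 kWh and 10 Wh -> 0.00001 MWh, matching the EnergyConverter rows.
    assert EnergyConverter.convert(
        10, UnitOfEnergy.WATT_HOUR, UnitOfEnergy.KILO_WATT_HOUR
    ) == pytest.approx(0.01)
    assert EnergyConverter.convert(
        10, UnitOfEnergy.WATT_HOUR, UnitOfEnergy.MEGA_WATT_HOUR
    ) == pytest.approx(0.00001)
    # 10 kW -> 10000 W and 10 W -> 0.01 kW, matching the PowerConverter rows.
    assert PowerConverter.convert(
        10, UnitOfPower.KILO_WATT, UnitOfPower.WATT
    ) == pytest.approx(10000)
    assert PowerConverter.convert(
        10, UnitOfPower.WATT, UnitOfPower.KILO_WATT
    ) == pytest.approx(0.01)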